Update to libjingle 0.6.7.

* Release new PeerConnection implementation to app/webrtc.

Based on cl26389030.
Review URL: http://webrtc-codereview.appspot.com/331012

git-svn-id: http://libjingle.googlecode.com/svn/trunk@102 dd674b97-3498-5ee5-1854-bdd07cd0ff33
diff --git a/CHANGELOG b/CHANGELOG
index ab264a1..15ac417 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,5 +1,9 @@
 Libjingle
 
+0.6.7 - Dec 21, 2011
+  - Release new PeerConnection implementation to app/webrtc.
+  - Bug fixes.
+
 0.6.6 - Dec 14, 2011
   - Fix support for rtcp multiplexing (aka rtcp-mux).
   - Add more support for FreeBSD and OpenBSD.
diff --git a/talk/app/webrtc/audiotrackimpl.cc b/talk/app/webrtc/audiotrackimpl.cc
new file mode 100644
index 0000000..6fcef4a
--- /dev/null
+++ b/talk/app/webrtc/audiotrackimpl.cc
@@ -0,0 +1,71 @@
+/*
+ * libjingle
+ * Copyright 2004--2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+#include "talk/app/webrtc/audiotrackimpl.h"
+
+#include <string>
+
+namespace webrtc {
+
+static const char kAudioTrackKind[] = "audio";
+
+AudioTrack::AudioTrack(const std::string& label)
+    : MediaStreamTrack<LocalAudioTrackInterface>(label),
+      audio_device_(NULL) {
+}
+
+AudioTrack::AudioTrack(const std::string& label,
+                       AudioDeviceModule* audio_device)
+    : MediaStreamTrack<LocalAudioTrackInterface>(label),
+      audio_device_(audio_device) {
+}
+
+// Get the AudioDeviceModule associated with this track.
+AudioDeviceModule* AudioTrack::GetAudioDevice() {
+  return audio_device_.get();
+}
+
+// Implement MediaStreamTrack
+std::string AudioTrack::kind() const {
+  return kAudioTrackKind;
+}
+
+talk_base::scoped_refptr<AudioTrack> AudioTrack::CreateRemote(
+    const std::string& label) {
+  talk_base::RefCountedObject<AudioTrack>* track =
+      new talk_base::RefCountedObject<AudioTrack>(label);
+  return track;
+}
+
+talk_base::scoped_refptr<AudioTrack> AudioTrack::CreateLocal(
+    const std::string& label,
+    AudioDeviceModule* audio_device) {
+  talk_base::RefCountedObject<AudioTrack>* track =
+      new talk_base::RefCountedObject<AudioTrack>(label, audio_device);
+  return track;
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/audiotrackimpl.h b/talk/app/webrtc/audiotrackimpl.h
new file mode 100644
index 0000000..df3b3cb
--- /dev/null
+++ b/talk/app/webrtc/audiotrackimpl.h
@@ -0,0 +1,70 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_AUDIOTRACKIMPL_H_
+#define TALK_APP_WEBRTC_AUDIOTRACKIMPL_H_
+
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/mediatrackimpl.h"
+#include "talk/app/webrtc/notifierimpl.h"
+#include "talk/base/scoped_ref_ptr.h"
+
+#ifdef WEBRTC_RELATIVE_PATH
+#include "modules/audio_device/main/interface/audio_device.h"
+#else
+#include "third_party/webrtc/files/include/audio_device.h"
+#endif
+
+namespace webrtc {
+
+class AudioTrack : public MediaStreamTrack<LocalAudioTrackInterface> {
+ public:
+  // Creates a remote audio track.
+  static talk_base::scoped_refptr<AudioTrack> CreateRemote(
+      const std::string& label);
+  // Creates a local audio track.
+  static talk_base::scoped_refptr<AudioTrack> CreateLocal(
+      const std::string& label,
+      AudioDeviceModule* audio_device);
+
+  // Get the AudioDeviceModule associated with this track.
+  virtual AudioDeviceModule* GetAudioDevice();
+
+  // Implement MediaStreamTrack
+  virtual std::string kind() const;
+
+ protected:
+  explicit AudioTrack(const std::string& label);
+  AudioTrack(const std::string& label, AudioDeviceModule* audio_device);
+
+ private:
+  talk_base::scoped_refptr<AudioDeviceModule> audio_device_;
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_AUDIOTRACKIMPL_H_
diff --git a/talk/app/webrtc/fakeportallocatorfactory.h b/talk/app/webrtc/fakeportallocatorfactory.h
new file mode 100644
index 0000000..85885d4
--- /dev/null
+++ b/talk/app/webrtc/fakeportallocatorfactory.h
@@ -0,0 +1,60 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file defines a fake port allocator factory used for testing.
+// This implementation creates instances of cricket::FakePortAllocator.
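+//
+// Illustrative usage sketch (hypothetical test code; it assumes the factory is
+// reference counted, as the use of talk_base::RefCountedObject suggests):
+//
+//   talk_base::scoped_refptr<PortAllocatorFactoryInterface> factory(
+//       FakePortAllocatorFactory::Create());
+//   // The stun/turn configurations passed to CreatePortAllocator are ignored;
+//   // a cricket::FakePortAllocator is always returned.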
+
+#ifndef TALK_APP_WEBRTC_FAKEPORTALLOCATORFACTORY_H_
+#define TALK_APP_WEBRTC_FAKEPORTALLOCATORFACTORY_H_
+
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/p2p/client/fakeportallocator.h"
+
+namespace webrtc {
+
+class FakePortAllocatorFactory : public PortAllocatorFactoryInterface {
+ public:
+  static PortAllocatorFactoryInterface* Create() {
+    talk_base::RefCountedObject<FakePortAllocatorFactory>* allocator =
+          new talk_base::RefCountedObject<FakePortAllocatorFactory>();
+    return allocator;
+  }
+
+  virtual cricket::PortAllocator* CreatePortAllocator(
+      const std::vector<StunConfiguration>& stun_configurations,
+      const std::vector<TurnConfiguration>& turn_configurations) {
+    return new cricket::FakePortAllocator(talk_base::Thread::Current(), NULL);
+  }
+
+ protected:
+  FakePortAllocatorFactory() {}
+  ~FakePortAllocatorFactory() {}
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_FAKEPORTALLOCATORFACTORY_H_
diff --git a/talk/app/webrtc/mediastream.h b/talk/app/webrtc/mediastream.h
new file mode 100644
index 0000000..4bb0b6c
--- /dev/null
+++ b/talk/app/webrtc/mediastream.h
@@ -0,0 +1,190 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains interfaces for MediaStream and MediaTrack. These
+// interfaces are used for implementing MediaStream and MediaTrack as defined
+// in http://dev.w3.org/2011/webrtc/editor/webrtc.html#stream-api. These
+// interfaces must be used only with PeerConnection. The PeerConnectionManager
+// interface provides the factory methods to create MediaStreams and MediaTracks.
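+//
+// Illustrative sketch of how the implementation classes added in this change
+// assemble a local stream (see mediastreamimpl.h and videotrackimpl.h;
+// applications are expected to use the PeerConnectionManager factory methods
+// instead of instantiating the implementations directly):
+//
+//   talk_base::scoped_refptr<LocalMediaStreamInterface> stream(
+//       MediaStream::Create("stream_label"));
+//   talk_base::scoped_refptr<LocalVideoTrackInterface> video_track(
+//       VideoTrack::CreateLocal("video_device_name", NULL));
+//   stream->AddTrack(video_track);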
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAM_H_
+#define TALK_APP_WEBRTC_MEDIASTREAM_H_
+
+#include <string>
+
+#include "talk/base/basictypes.h"
+#include "talk/base/refcount.h"
+#include "talk/base/scoped_ref_ptr.h"
+
+namespace cricket {
+
+class VideoCapturer;
+class VideoRenderer;
+class MediaEngine;
+
+}  // namespace cricket
+
+namespace webrtc {
+
+class AudioDeviceModule;
+class VideoCaptureModule;
+
+// Generic observer interface.
+class ObserverInterface {
+ public:
+  virtual void OnChanged() = 0;
+
+ protected:
+  virtual ~ObserverInterface() {}
+};
+
+class NotifierInterface {
+ public:
+  virtual void RegisterObserver(ObserverInterface* observer) = 0;
+  virtual void UnregisterObserver(ObserverInterface* observer) = 0;
+
+  virtual ~NotifierInterface() {}
+};
+
+// Information about a track.
+class MediaStreamTrackInterface : public talk_base::RefCountInterface,
+                                  public NotifierInterface {
+ public:
+  enum TrackState {
+    kInitializing,  // Track is being negotiated.
+    kLive = 1,  // Track is alive.
+    kEnded = 2,  // Track has ended.
+    kFailed = 3,  // Track negotiation failed.
+  };
+
+  virtual std::string kind() const = 0;
+  virtual std::string label() const = 0;
+  virtual bool enabled() const = 0;
+  virtual TrackState state() const = 0;
+  virtual bool set_enabled(bool enable) = 0;
+  // This method should be called by the implementation only.
+  virtual bool set_state(TrackState new_state) = 0;
+};
+
+// Reference counted wrapper for a VideoRenderer.
+class VideoRendererWrapperInterface : public talk_base::RefCountInterface {
+ public:
+  virtual cricket::VideoRenderer* renderer() = 0;
+
+ protected:
+  virtual ~VideoRendererWrapperInterface() {}
+};
+
+// Creates a reference counted wrapper for a cricket::VideoRenderer.
+// The webrtc::VideoRendererWrapperInterface takes ownership of the
+// cricket::VideoRenderer.
+talk_base::scoped_refptr<VideoRendererWrapperInterface> CreateVideoRenderer(
+    cricket::VideoRenderer* renderer);
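+//
+// Illustrative sketch (hypothetical names: "cricket_renderer" is an existing
+// cricket::VideoRenderer* whose ownership may be handed over, and
+// "video_track" is a VideoTrackInterface*):
+//
+//   talk_base::scoped_refptr<VideoRendererWrapperInterface> wrapper(
+//       CreateVideoRenderer(cricket_renderer));
+//   video_track->SetRenderer(wrapper.get());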
+
+class VideoTrackInterface : public MediaStreamTrackInterface {
+ public:
+  // Set the video renderer for a local or remote stream.
+  // This call will start decoding the received video stream and render it.
+  // The VideoRendererWrapperInterface is stored as a scoped_refptr. This means
+  // that the caller is not allowed to delete the renderer after this API has
+  // been called.
+  virtual void SetRenderer(VideoRendererWrapperInterface* renderer) = 0;
+
+  // Get the VideoRenderer associated with this track.
+  virtual VideoRendererWrapperInterface* GetRenderer() = 0;
+
+ protected:
+  virtual ~VideoTrackInterface() {}
+};
+
+class LocalVideoTrackInterface : public VideoTrackInterface {
+ public:
+  // Get the VideoCapturer associated with the track.
+  virtual cricket::VideoCapturer* GetVideoCapture() = 0;
+
+ protected:
+  virtual ~LocalVideoTrackInterface() {}
+};
+
+class AudioTrackInterface : public MediaStreamTrackInterface {
+ public:
+ protected:
+  virtual ~AudioTrackInterface() {}
+};
+
+class LocalAudioTrackInterface : public AudioTrackInterface {
+ public:
+  // Get the AudioDeviceModule associated with this track.
+  virtual AudioDeviceModule* GetAudioDevice() = 0;
+
+ protected:
+  virtual ~LocalAudioTrackInterface() {}
+};
+
+// List of tracks.
+template <class TrackType>
+class MediaStreamTrackListInterface : public talk_base::RefCountInterface {
+ public:
+  virtual size_t count() = 0;
+  virtual TrackType* at(size_t index) = 0;
+
+ protected:
+  virtual ~MediaStreamTrackListInterface() {}
+};
+
+typedef MediaStreamTrackListInterface<AudioTrackInterface> AudioTracks;
+typedef MediaStreamTrackListInterface<VideoTrackInterface> VideoTracks;
+
+class MediaStreamInterface : public talk_base::RefCountInterface,
+                             public NotifierInterface {
+ public:
+  virtual std::string label() const = 0;
+  virtual AudioTracks* audio_tracks() = 0;
+  virtual VideoTracks* video_tracks() = 0;
+
+  enum ReadyState {
+    kInitializing,
+    kLive = 1,  // Stream is alive.
+    kEnded = 2,  // Stream has ended.
+  };
+
+  virtual ReadyState ready_state() = 0;
+
+  // This method should be called by the implementation only.
+  virtual void set_ready_state(ReadyState state) = 0;
+
+ protected:
+  virtual ~MediaStreamInterface() {}
+};
+
+class LocalMediaStreamInterface : public MediaStreamInterface {
+ public:
+  virtual bool AddTrack(AudioTrackInterface* track) = 0;
+  virtual bool AddTrack(VideoTrackInterface* track) = 0;
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_MEDIASTREAM_H_
diff --git a/talk/app/webrtc/mediastream_unittest.cc b/talk/app/webrtc/mediastream_unittest.cc
new file mode 100644
index 0000000..97d0c64
--- /dev/null
+++ b/talk/app/webrtc/mediastream_unittest.cc
@@ -0,0 +1,389 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/mediastreamproxy.h"
+#include "talk/app/webrtc/mediastreamtrackproxy.h"
+#include "talk/base/refcount.h"
+#include "talk/base/scoped_ptr.h"
+#include "talk/base/thread.h"
+#include "talk/base/gunit.h"
+#include "testing/base/public/gmock.h"
+
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kVideoTrackLabel[] = "dummy_video_cam_1";
+static const char kAudioTrackLabel[] = "dummy_microphone_1";
+
+using talk_base::scoped_refptr;
+using ::testing::Exactly;
+
+namespace {
+
+class ReadyStateMessageData : public talk_base::MessageData {
+ public:
+  ReadyStateMessageData(
+      webrtc::MediaStreamInterface* stream,
+      webrtc::MediaStreamInterface::ReadyState new_state)
+      : stream_(stream),
+        ready_state_(new_state) {
+  }
+
+  scoped_refptr<webrtc::MediaStreamInterface> stream_;
+  webrtc::MediaStreamInterface::ReadyState ready_state_;
+};
+
+class TrackStateMessageData : public talk_base::MessageData {
+ public:
+  TrackStateMessageData(
+      webrtc::MediaStreamTrackInterface* track,
+      webrtc::MediaStreamTrackInterface::TrackState state)
+      : track_(track),
+        state_(state) {
+  }
+
+  scoped_refptr<webrtc::MediaStreamTrackInterface> track_;
+  webrtc::MediaStreamTrackInterface::TrackState state_;
+};
+
+}  // namespace anonymous
+
+namespace webrtc {
+
+// Helper class to test Observer.
+class MockObserver : public ObserverInterface {
+ public:
+  explicit MockObserver(talk_base::Thread* signaling_thread)
+      : signaling_thread_(signaling_thread) {
+  }
+
+  MOCK_METHOD0(DoOnChanged, void());
+  virtual void OnChanged() {
+    ASSERT_TRUE(talk_base::Thread::Current() == signaling_thread_);
+    DoOnChanged();
+  }
+ private:
+  talk_base::Thread* signaling_thread_;
+};
+
+class MockMediaStream: public LocalMediaStreamInterface {
+ public:
+  MockMediaStream(const std::string& label, talk_base::Thread* signaling_thread)
+      : stream_impl_(MediaStream::Create(label)),
+        signaling_thread_(signaling_thread) {
+  }
+  virtual void RegisterObserver(webrtc::ObserverInterface* observer) {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    stream_impl_->RegisterObserver(observer);
+  }
+  virtual void UnregisterObserver(webrtc::ObserverInterface* observer) {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    stream_impl_->UnregisterObserver(observer);
+  }
+  virtual std::string label() const {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return stream_impl_->label();
+  }
+  virtual AudioTracks* audio_tracks() {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return stream_impl_->audio_tracks();
+  }
+  virtual VideoTracks* video_tracks() {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return stream_impl_->video_tracks();
+  }
+  virtual ReadyState ready_state() {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return stream_impl_->ready_state();
+  }
+  virtual void set_ready_state(ReadyState state) {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return stream_impl_->set_ready_state(state);
+  }
+  virtual bool AddTrack(AudioTrackInterface* audio_track) {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return stream_impl_->AddTrack(audio_track);
+  }
+  virtual bool AddTrack(VideoTrackInterface* video_track) {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return stream_impl_->AddTrack(video_track);
+  }
+
+ private:
+  scoped_refptr<MediaStream> stream_impl_;
+  talk_base::Thread* signaling_thread_;
+};
+
+template <class T>
+class MockMediaStreamTrack: public T {
+ public:
+  MockMediaStreamTrack(T* implementation,
+                       talk_base::Thread* signaling_thread)
+      : track_impl_(implementation),
+        signaling_thread_(signaling_thread) {
+  }
+  virtual void RegisterObserver(webrtc::ObserverInterface* observer) {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    track_impl_->RegisterObserver(observer);
+  }
+  virtual void UnregisterObserver(webrtc::ObserverInterface* observer) {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    track_impl_->UnregisterObserver(observer);
+  }
+  virtual std::string kind() const {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return track_impl_->kind();
+  }
+  virtual std::string label() const {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return track_impl_->label();
+  }
+  virtual bool enabled() const {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return track_impl_->enabled();
+  }
+  virtual MediaStreamTrackInterface::TrackState state() const {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return track_impl_->state();
+  }
+  virtual bool set_enabled(bool enabled) {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return track_impl_->set_enabled(enabled);
+  }
+  virtual bool set_state(webrtc::MediaStreamTrackInterface::TrackState state) {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return track_impl_->set_state(state);
+  }
+
+ protected:
+  scoped_refptr<T> track_impl_;
+  talk_base::Thread* signaling_thread_;
+};
+
+class MockLocalVideoTrack
+    : public MockMediaStreamTrack<LocalVideoTrackInterface> {
+ public:
+  MockLocalVideoTrack(LocalVideoTrackInterface* implementation,
+                      talk_base::Thread* signaling_thread)
+      : MockMediaStreamTrack<LocalVideoTrackInterface>(implementation,
+                                                       signaling_thread) {
+  }
+  virtual void SetRenderer(webrtc::VideoRendererWrapperInterface* renderer) {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    track_impl_->SetRenderer(renderer);
+  }
+  virtual VideoRendererWrapperInterface* GetRenderer() {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return track_impl_->GetRenderer();
+  }
+  virtual cricket::VideoCapturer* GetVideoCapture() {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return track_impl_->GetVideoCapture();
+  }
+};
+
+class MockLocalAudioTrack
+    : public MockMediaStreamTrack<LocalAudioTrackInterface> {
+ public:
+  MockLocalAudioTrack(LocalAudioTrackInterface* implementation,
+                      talk_base::Thread* signaling_thread)
+    : MockMediaStreamTrack<LocalAudioTrackInterface>(implementation,
+                                                     signaling_thread) {
+  }
+
+  virtual AudioDeviceModule* GetAudioDevice() {
+    EXPECT_EQ(talk_base::Thread::Current(), signaling_thread_);
+    return track_impl_->GetAudioDevice();
+  }
+};
+
+class MediaStreamTest: public testing::Test,
+                       public talk_base::MessageHandler {
+ protected:
+  virtual void SetUp() {
+    signaling_thread_.reset(new talk_base::Thread());
+    ASSERT_TRUE(signaling_thread_->Start());
+
+    std::string label(kStreamLabel1);
+    // Create a stream proxy object that uses our mocked
+    // version of a LocalMediaStream.
+    scoped_refptr<MockMediaStream> mock_stream(
+        new talk_base::RefCountedObject<MockMediaStream>(label,
+                                                 signaling_thread_.get()));
+    stream_ = MediaStreamProxy::Create(label, signaling_thread_.get(),
+                                       mock_stream);
+    ASSERT_TRUE(stream_.get() != NULL);
+    EXPECT_EQ(label, stream_->label());
+    EXPECT_EQ(MediaStreamInterface::kInitializing, stream_->ready_state());
+
+    // Create a video track proxy object that uses our mocked
+    // version of a LocalVideoTrack
+    scoped_refptr<VideoTrack> video_track_impl(
+        VideoTrack::CreateLocal(kVideoTrackLabel, NULL));
+    scoped_refptr<MockLocalVideoTrack> mock_videotrack(
+        new talk_base::RefCountedObject<MockLocalVideoTrack>(video_track_impl,
+                                                     signaling_thread_.get()));
+    video_track_ = VideoTrackProxy::CreateLocal(mock_videotrack,
+                                                signaling_thread_.get());
+
+    ASSERT_TRUE(video_track_.get() != NULL);
+    EXPECT_EQ(MediaStreamTrackInterface::kInitializing, video_track_->state());
+
+    // Create an audio track proxy object that uses our mocked
+    // version of a LocalAudioTrack
+    scoped_refptr<AudioTrack> audio_track_impl(
+        AudioTrack::CreateLocal(kAudioTrackLabel, NULL));
+    scoped_refptr<MockLocalAudioTrack> mock_audiotrack(
+        new talk_base::RefCountedObject<MockLocalAudioTrack>(audio_track_impl,
+                                                     signaling_thread_.get()));
+    audio_track_ = AudioTrackProxy::CreateLocal(mock_audiotrack,
+                                                signaling_thread_.get());
+
+    ASSERT_TRUE(audio_track_.get() != NULL);
+    EXPECT_EQ(MediaStreamTrackInterface::kInitializing, audio_track_->state());
+  }
+
+  enum {
+    MSG_SET_READYSTATE,
+    MSG_SET_TRACKSTATE,
+  };
+
+  // Set the ready state on the signaling thread.
+  // State can only be changed on the signaling thread.
+  void SetReadyState(MediaStreamInterface* stream,
+                     MediaStreamInterface::ReadyState new_state) {
+    ReadyStateMessageData state(stream, new_state);
+    signaling_thread_->Send(this, MSG_SET_READYSTATE, &state);
+  }
+
+  // Set the track state on the signaling thread.
+  // State can only be changed on the signaling thread.
+  void SetTrackState(MediaStreamTrackInterface* track,
+                     MediaStreamTrackInterface::TrackState new_state) {
+    TrackStateMessageData state(track, new_state);
+    signaling_thread_->Send(this, MSG_SET_TRACKSTATE, &state);
+  }
+
+  talk_base::scoped_ptr<talk_base::Thread> signaling_thread_;
+  scoped_refptr<LocalMediaStreamInterface> stream_;
+  scoped_refptr<LocalVideoTrackInterface> video_track_;
+  scoped_refptr<LocalAudioTrackInterface> audio_track_;
+
+ private:
+  // Implements talk_base::MessageHandler.
+  virtual void OnMessage(talk_base::Message* msg) {
+    switch (msg->message_id) {
+      case MSG_SET_READYSTATE: {
+        ReadyStateMessageData* state =
+            static_cast<ReadyStateMessageData*>(msg->pdata);
+        state->stream_->set_ready_state(state->ready_state_);
+        break;
+      }
+      case MSG_SET_TRACKSTATE: {
+        TrackStateMessageData* state =
+            static_cast<TrackStateMessageData*>(msg->pdata);
+        state->track_->set_state(state->state_);
+        break;
+      }
+      default:
+        break;
+    }
+  }
+};
+
+TEST_F(MediaStreamTest, CreateLocalStream) {
+  EXPECT_TRUE(stream_->AddTrack(video_track_));
+  EXPECT_TRUE(stream_->AddTrack(audio_track_));
+
+  ASSERT_EQ(1u, stream_->video_tracks()->count());
+  ASSERT_EQ(1u, stream_->audio_tracks()->count());
+
+  // Verify the video track.
+  scoped_refptr<webrtc::MediaStreamTrackInterface> track(
+      stream_->video_tracks()->at(0));
+  EXPECT_EQ(0, track->label().compare(kVideoTrackLabel));
+  EXPECT_TRUE(track->enabled());
+
+  // Verify the audio track.
+  track = stream_->audio_tracks()->at(0);
+  EXPECT_EQ(0, track->label().compare(kAudioTrackLabel));
+  EXPECT_TRUE(track->enabled());
+}
+
+TEST_F(MediaStreamTest, ChangeStreamState) {
+  MockObserver observer(signaling_thread_.get());
+  stream_->RegisterObserver(&observer);
+
+  EXPECT_CALL(observer, DoOnChanged())
+      .Times(Exactly(1));
+  SetReadyState(stream_, MediaStreamInterface::kLive);
+
+  EXPECT_EQ(MediaStreamInterface::kLive, stream_->ready_state());
+  // It should not be possible to add
+  // streams when the state has changed to live.
+  EXPECT_FALSE(stream_->AddTrack(audio_track_));
+  EXPECT_EQ(0u, stream_->audio_tracks()->count());
+}
+
+TEST_F(MediaStreamTest, ChangeVideoTrack) {
+  MockObserver observer(signaling_thread_.get());
+  video_track_->RegisterObserver(&observer);
+
+  EXPECT_CALL(observer, DoOnChanged())
+      .Times(Exactly(1));
+  video_track_->set_enabled(false);
+  EXPECT_FALSE(video_track_->enabled());
+
+  EXPECT_CALL(observer, DoOnChanged())
+      .Times(Exactly(1));
+  SetTrackState(video_track_, MediaStreamTrackInterface::kLive);
+  EXPECT_EQ(MediaStreamTrackInterface::kLive, video_track_->state());
+
+  EXPECT_CALL(observer, DoOnChanged())
+      .Times(Exactly(1));
+  scoped_refptr<VideoRendererWrapperInterface> renderer(
+      CreateVideoRenderer(NULL));
+  video_track_->SetRenderer(renderer.get());
+  EXPECT_TRUE(renderer.get() == video_track_->GetRenderer());
+}
+
+TEST_F(MediaStreamTest, ChangeAudioTrack) {
+  MockObserver observer(signaling_thread_.get());
+  audio_track_->RegisterObserver(&observer);
+
+  EXPECT_CALL(observer, DoOnChanged())
+      .Times(Exactly(1));
+  audio_track_->set_enabled(false);
+  EXPECT_FALSE(audio_track_->enabled());
+
+  EXPECT_CALL(observer, DoOnChanged())
+      .Times(Exactly(1));
+  SetTrackState(audio_track_, MediaStreamTrackInterface::kLive);
+  EXPECT_EQ(MediaStreamTrackInterface::kLive, audio_track_->state());
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/mediastreamhandler.cc b/talk/app/webrtc/mediastreamhandler.cc
new file mode 100644
index 0000000..d120b80
--- /dev/null
+++ b/talk/app/webrtc/mediastreamhandler.cc
@@ -0,0 +1,258 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/mediastreamhandler.h"
+
+#ifdef WEBRTC_RELATIVE_PATH
+#include "modules/video_capture/main/interface/video_capture.h"
+#else
+#include "third_party/webrtc/files/include/video_capture.h"
+#endif
+
+namespace webrtc {
+
+VideoTrackHandler::VideoTrackHandler(VideoTrackInterface* track,
+                                     MediaProviderInterface* provider)
+    : provider_(provider),
+      video_track_(track),
+      state_(track->state()),
+      enabled_(track->enabled()),
+      renderer_(track->GetRenderer()) {
+  video_track_->RegisterObserver(this);
+}
+
+VideoTrackHandler::~VideoTrackHandler() {
+  video_track_->UnregisterObserver(this);
+}
+
+void VideoTrackHandler::OnChanged() {
+  if (state_ != video_track_->state()) {
+    state_ = video_track_->state();
+    OnStateChanged();
+  }
+  if (renderer_.get() != video_track_->GetRenderer()) {
+    renderer_ = video_track_->GetRenderer();
+    OnRendererChanged();
+  }
+  if (enabled_ != video_track_->enabled()) {
+    enabled_ = video_track_->enabled();
+    OnEnabledChanged();
+  }
+}
+
+LocalVideoTrackHandler::LocalVideoTrackHandler(
+    LocalVideoTrackInterface* track,
+    MediaProviderInterface* provider)
+    : VideoTrackHandler(track, provider),
+      local_video_track_(track) {
+}
+
+LocalVideoTrackHandler::~LocalVideoTrackHandler() {
+  // The cricket::VideoRenderer and cricket::VideoCapturer are owned and
+  // deleted by the track. They must be removed from the media stream provider
+  // since it is possible that the track's reference count drops to zero when
+  // local_video_track_ falls out of scope.
+  provider_->SetLocalRenderer(local_video_track_->label(), NULL);
+  provider_->SetCaptureDevice(local_video_track_->label(), NULL);
+}
+
+void LocalVideoTrackHandler::OnRendererChanged() {
+  VideoRendererWrapperInterface* renderer = video_track_->GetRenderer();
+  if (renderer)
+    provider_->SetLocalRenderer(video_track_->label(), renderer->renderer());
+  else
+    provider_->SetLocalRenderer(video_track_->label(), NULL);
+}
+
+void LocalVideoTrackHandler::OnStateChanged() {
+  if (local_video_track_->state() == VideoTrackInterface::kLive) {
+    provider_->SetCaptureDevice(local_video_track_->label(),
+                                local_video_track_->GetVideoCapture());
+    VideoRendererWrapperInterface* renderer = video_track_->GetRenderer();
+    if (renderer)
+      provider_->SetLocalRenderer(video_track_->label(), renderer->renderer());
+    else
+      provider_->SetLocalRenderer(video_track_->label(), NULL);
+  }
+}
+
+void LocalVideoTrackHandler::OnEnabledChanged() {
+  // TODO: What should happen when enabled is changed?
+}
+
+RemoteVideoTrackHandler::RemoteVideoTrackHandler(
+    VideoTrackInterface* track,
+    MediaProviderInterface* provider)
+    : VideoTrackHandler(track, provider),
+      remote_video_track_(track) {
+}
+
+RemoteVideoTrackHandler::~RemoteVideoTrackHandler() {
+  // Since cricket::VideoRenderer is not reference counted
+  // we need to remove the renderer before we are deleted.
+  provider_->SetRemoteRenderer(video_track_->label(), NULL);
+}
+
+
+void RemoteVideoTrackHandler::OnRendererChanged() {
+  VideoRendererWrapperInterface* renderer = video_track_->GetRenderer();
+  if (renderer)
+    provider_->SetRemoteRenderer(video_track_->label(), renderer->renderer());
+  else
+    provider_->SetRemoteRenderer(video_track_->label(), NULL);
+}
+
+void RemoteVideoTrackHandler::OnStateChanged() {
+}
+
+void RemoteVideoTrackHandler::OnEnabledChanged() {
+  // TODO: What should happen when enabled is changed?
+}
+
+MediaStreamHandler::MediaStreamHandler(MediaStreamInterface* stream,
+                                       MediaProviderInterface* provider)
+    : stream_(stream),
+      provider_(provider) {
+}
+
+MediaStreamHandler::~MediaStreamHandler() {
+  for (VideoTrackHandlers::iterator it = video_handlers_.begin();
+       it != video_handlers_.end(); ++it) {
+    delete *it;
+  }
+}
+
+MediaStreamInterface* MediaStreamHandler::stream() {
+  return stream_.get();
+}
+
+void MediaStreamHandler::OnChanged() {
+  // TODO: Implement state change and enabled changed.
+}
+
+
+LocalMediaStreamHandler::LocalMediaStreamHandler(
+    MediaStreamInterface* stream,
+    MediaProviderInterface* provider)
+    : MediaStreamHandler(stream, provider) {
+  VideoTracks* tracklist(stream->video_tracks());
+
+  for (size_t j = 0; j < tracklist->count(); ++j) {
+    LocalVideoTrackInterface* track =
+        static_cast<LocalVideoTrackInterface*>(tracklist->at(j));
+    VideoTrackHandler* handler(new LocalVideoTrackHandler(track, provider));
+    video_handlers_.push_back(handler);
+  }
+}
+
+RemoteMediaStreamHandler::RemoteMediaStreamHandler(
+    MediaStreamInterface* stream,
+    MediaProviderInterface* provider)
+    : MediaStreamHandler(stream, provider) {
+  VideoTracks* tracklist(stream->video_tracks());
+
+  for (size_t j = 0; j < tracklist->count(); ++j) {
+    VideoTrackInterface* track =
+        static_cast<VideoTrackInterface*>(tracklist->at(j));
+    VideoTrackHandler* handler(new RemoteVideoTrackHandler(track, provider));
+    video_handlers_.push_back(handler);
+  }
+}
+
+MediaStreamHandlers::MediaStreamHandlers(MediaProviderInterface* provider)
+    : provider_(provider) {
+}
+
+MediaStreamHandlers::~MediaStreamHandlers() {
+  for (StreamHandlerList::iterator it = remote_streams_handlers_.begin();
+       it != remote_streams_handlers_.end(); ++it) {
+    delete *it;
+  }
+  for (StreamHandlerList::iterator it = local_streams_handlers_.begin();
+       it != local_streams_handlers_.end(); ++it) {
+    delete *it;
+  }
+}
+
+void MediaStreamHandlers::AddRemoteStream(MediaStreamInterface* stream) {
+  RemoteMediaStreamHandler* handler = new RemoteMediaStreamHandler(stream,
+                                                                   provider_);
+  remote_streams_handlers_.push_back(handler);
+}
+
+void MediaStreamHandlers::RemoveRemoteStream(MediaStreamInterface* stream) {
+  StreamHandlerList::iterator it = remote_streams_handlers_.begin();
+  for (; it != remote_streams_handlers_.end(); ++it) {
+    if ((*it)->stream() == stream) {
+      delete *it;
+      break;
+    }
+  }
+  ASSERT(it != remote_streams_handlers_.end());
+  remote_streams_handlers_.erase(it);
+}
+
+void MediaStreamHandlers::CommitLocalStreams(
+    StreamCollectionInterface* streams) {
+  // Iterate over the old list of local streams.
+  // If a stream is not found in the new collection it has been removed.
+  // We cannot erase from the old collection while we iterate over it;
+  // that is what the ugly while (1) loop works around.
+  while (1) {
+    StreamHandlerList::iterator it = local_streams_handlers_.begin();
+    for (; it != local_streams_handlers_.end(); ++it) {
+      if (streams->find((*it)->stream()->label()) == NULL) {
+        delete *it;
+        break;
+      }
+    }
+    if (it != local_streams_handlers_.end()) {
+      local_streams_handlers_.erase(it);
+      continue;
+    }
+    break;
+  }
+
+  // Iterate over the new collection of local streams.
+  // If a stream is not found in the old collection it has been added.
+  for (size_t j = 0; j < streams->count(); ++j) {
+    MediaStreamInterface* stream = streams->at(j);
+    StreamHandlerList::iterator it = local_streams_handlers_.begin();
+    for (; it != local_streams_handlers_.end(); ++it) {
+      if (stream == (*it)->stream())
+        break;
+    }
+    if (it == local_streams_handlers_.end()) {
+      LocalMediaStreamHandler* handler = new LocalMediaStreamHandler(
+          stream, provider_);
+      local_streams_handlers_.push_back(handler);
+    }
+  }
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/mediastreamhandler.h b/talk/app/webrtc/mediastreamhandler.h
new file mode 100644
index 0000000..550a2d4
--- /dev/null
+++ b/talk/app/webrtc/mediastreamhandler.h
@@ -0,0 +1,144 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains classes that listen for changes on MediaStreams and
+// MediaTracks and make sure the appropriate action is taken.
+// Example: If a user sets a renderer on a local video track, the renderer is
+// connected to the appropriate camera.
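+//
+// Illustrative usage sketch (hypothetical names: "provider" is a
+// MediaProviderInterface*, "local_streams" a StreamCollectionInterface* with
+// the streams the application wants to send, and "remote_stream" a
+// MediaStreamInterface*):
+//
+//   MediaStreamHandlers handlers(provider);
+//   handlers.CommitLocalStreams(local_streams);  // Diff against the last commit.
+//   handlers.AddRemoteStream(remote_stream);     // Start handling a remote stream.
+//   handlers.RemoveRemoteStream(remote_stream);  // Stop handling it again.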
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAMHANDLER_H_
+#define TALK_APP_WEBRTC_MEDIASTREAMHANDLER_H_
+
+#include <list>
+#include <vector>
+
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/mediastreamprovider.h"
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/base/thread.h"
+
+namespace webrtc {
+
+// VideoTrackHandler listens to events on a VideoTrack instance and
+// executes the requested changes.
+class VideoTrackHandler : public ObserverInterface {
+ public:
+  VideoTrackHandler(VideoTrackInterface* track,
+                    MediaProviderInterface* provider);
+  virtual ~VideoTrackHandler();
+  virtual void OnChanged();
+
+ protected:
+  virtual void OnRendererChanged() = 0;
+  virtual void OnStateChanged() = 0;
+  virtual void OnEnabledChanged() = 0;
+
+  MediaProviderInterface* provider_;
+  VideoTrackInterface* video_track_;
+
+ private:
+  MediaStreamTrackInterface::TrackState state_;
+  bool enabled_;
+  talk_base::scoped_refptr<VideoRendererWrapperInterface> renderer_;
+};
+
+class LocalVideoTrackHandler : public VideoTrackHandler {
+ public:
+  LocalVideoTrackHandler(LocalVideoTrackInterface* track,
+                         MediaProviderInterface* provider);
+  virtual ~LocalVideoTrackHandler();
+
+ protected:
+  virtual void OnRendererChanged();
+  virtual void OnStateChanged();
+  virtual void OnEnabledChanged();
+
+ private:
+  talk_base::scoped_refptr<LocalVideoTrackInterface> local_video_track_;
+};
+
+class RemoteVideoTrackHandler : public VideoTrackHandler {
+ public:
+  RemoteVideoTrackHandler(VideoTrackInterface* track,
+                          MediaProviderInterface* provider);
+  virtual ~RemoteVideoTrackHandler();
+
+ protected:
+  virtual void OnRendererChanged();
+  virtual void OnStateChanged();
+  virtual void OnEnabledChanged();
+
+ private:
+  talk_base::scoped_refptr<VideoTrackInterface> remote_video_track_;
+};
+
+class MediaStreamHandler : public ObserverInterface {
+ public:
+  MediaStreamHandler(MediaStreamInterface* stream,
+                     MediaProviderInterface* provider);
+  ~MediaStreamHandler();
+  MediaStreamInterface* stream();
+  virtual void OnChanged();
+
+ protected:
+  talk_base::scoped_refptr<MediaStreamInterface> stream_;
+  MediaProviderInterface* provider_;
+  typedef std::vector<VideoTrackHandler*> VideoTrackHandlers;
+  VideoTrackHandlers video_handlers_;
+};
+
+class LocalMediaStreamHandler : public MediaStreamHandler {
+ public:
+  LocalMediaStreamHandler(MediaStreamInterface* stream,
+                          MediaProviderInterface* provider);
+};
+
+class RemoteMediaStreamHandler : public MediaStreamHandler {
+ public:
+  RemoteMediaStreamHandler(MediaStreamInterface* stream,
+                           MediaProviderInterface* provider);
+};
+
+class MediaStreamHandlers {
+ public:
+  explicit MediaStreamHandlers(MediaProviderInterface* provider);
+  ~MediaStreamHandlers();
+  void AddRemoteStream(MediaStreamInterface* stream);
+  void RemoveRemoteStream(MediaStreamInterface* stream);
+  void CommitLocalStreams(StreamCollectionInterface* streams);
+
+ private:
+  typedef std::list<MediaStreamHandler*> StreamHandlerList;
+  StreamHandlerList local_streams_handlers_;
+  StreamHandlerList remote_streams_handlers_;
+  MediaProviderInterface* provider_;
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_MEDIASTREAMHANDLER_H_
+
diff --git a/talk/app/webrtc/mediastreamhandler_unittest.cc b/talk/app/webrtc/mediastreamhandler_unittest.cc
new file mode 100644
index 0000000..d4cf433
--- /dev/null
+++ b/talk/app/webrtc/mediastreamhandler_unittest.cc
@@ -0,0 +1,148 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/mediastreamimpl.h"
+#include "talk/app/webrtc/videotrackimpl.h"
+#include "talk/app/webrtc/mediastreamhandler.h"
+#include "talk/app/webrtc/streamcollectionimpl.h"
+#include "talk/base/thread.h"
+#include "talk/base/gunit.h"
+#include "testing/base/public/gmock.h"
+
+using ::testing::Exactly;
+
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kVideoDeviceName[] = "dummy_video_cam_1";
+
+namespace webrtc {
+
+// Helper class to test MediaStreamHandler.
+class MockMediaProvider : public MediaProviderInterface {
+ public:
+  MOCK_METHOD1(SetCaptureDevice, void(const std::string& name));
+  MOCK_METHOD1(SetLocalRenderer, void(const std::string& name));
+  MOCK_METHOD1(SetRemoteRenderer, void(const std::string& name));
+
+  virtual void SetCaptureDevice(const std::string& name,
+                                cricket::VideoCapturer* camera) {
+    SetCaptureDevice(name);
+  }
+  virtual void SetLocalRenderer(const std::string& name,
+                                cricket::VideoRenderer* renderer) {
+    SetLocalRenderer(name);
+  }
+
+  virtual void SetRemoteRenderer(const std::string& name,
+                                 cricket::VideoRenderer* renderer) {
+    SetRemoteRenderer(name);
+  }
+  ~MockMediaProvider() {}
+};
+
+TEST(MediaStreamHandlerTest, LocalStreams) {
+  // Create a local stream.
+  std::string label(kStreamLabel1);
+  talk_base::scoped_refptr<LocalMediaStreamInterface> stream(
+      MediaStream::Create(label));
+  talk_base::scoped_refptr<LocalVideoTrackInterface>
+      video_track(VideoTrack::CreateLocal(kVideoDeviceName, NULL));
+  EXPECT_TRUE(stream->AddTrack(video_track));
+  talk_base::scoped_refptr<VideoRendererWrapperInterface> renderer(
+      CreateVideoRenderer(NULL));
+  video_track->SetRenderer(renderer);
+
+  MockMediaProvider provider;
+  MediaStreamHandlers handlers(&provider);
+
+  talk_base::scoped_refptr<StreamCollection> collection(
+      StreamCollection::Create());
+  collection->AddStream(stream);
+
+  EXPECT_CALL(provider, SetLocalRenderer(kVideoDeviceName))
+      .Times(Exactly(2));  // SetLocalRenderer will also be called from dtor
+                           // of LocalVideoTrackHandler.
+  EXPECT_CALL(provider, SetCaptureDevice(kVideoDeviceName))
+      .Times(Exactly(2));  // SetCaptureDevice will also be called from dtor
+                           // of LocalVideoTrackHandler.
+  handlers.CommitLocalStreams(collection);
+
+  video_track->set_state(MediaStreamTrackInterface::kLive);
+  // Process posted messages.
+  talk_base::Thread::Current()->ProcessMessages(1);
+
+  collection->RemoveStream(stream);
+  handlers.CommitLocalStreams(collection);
+
+  video_track->set_state(MediaStreamTrackInterface::kEnded);
+  // Process posted messages.
+  talk_base::Thread::Current()->ProcessMessages(1);
+}
+
+TEST(MediaStreamHandlerTest, RemoteStreams) {
+  // Create a local stream. We use a local stream in this test as well because
+  // it is easier to create; LocalMediaStreams inherit from MediaStreams.
+  std::string label(kStreamLabel1);
+  talk_base::scoped_refptr<LocalMediaStreamInterface> stream(
+      MediaStream::Create(label));
+  talk_base::scoped_refptr<LocalVideoTrackInterface>
+      video_track(VideoTrack::CreateLocal(kVideoDeviceName, NULL));
+  EXPECT_TRUE(stream->AddTrack(video_track));
+
+  MockMediaProvider provider;
+  MediaStreamHandlers handlers(&provider);
+
+  handlers.AddRemoteStream(stream);
+
+  EXPECT_CALL(provider, SetRemoteRenderer(kVideoDeviceName))
+      .Times(Exactly(3));  // SetRemoteRenderer is also called from dtor of
+                           // RemoteVideoTrackHandler.
+
+  // Set the renderer once.
+  talk_base::scoped_refptr<VideoRendererWrapperInterface> renderer(
+      CreateVideoRenderer(NULL));
+  video_track->SetRenderer(renderer);
+  talk_base::Thread::Current()->ProcessMessages(1);
+
+  // Change the already set renderer.
+  renderer = CreateVideoRenderer(NULL);
+  video_track->SetRenderer(renderer);
+  talk_base::Thread::Current()->ProcessMessages(1);
+
+  handlers.RemoveRemoteStream(stream);
+
+  // Change the renderer after the stream has been removed from the handler.
+  // This should not trigger a call to SetRemoteRenderer.
+  renderer = CreateVideoRenderer(NULL);
+  video_track->SetRenderer(renderer);
+  // Process posted messages.
+  talk_base::Thread::Current()->ProcessMessages(1);
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/mediastreamimpl.cc b/talk/app/webrtc/mediastreamimpl.cc
new file mode 100644
index 0000000..8920379
--- /dev/null
+++ b/talk/app/webrtc/mediastreamimpl.cc
@@ -0,0 +1,73 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/mediastreamimpl.h"
+#include "talk/base/logging.h"
+
+namespace webrtc {
+
+talk_base::scoped_refptr<MediaStream> MediaStream::Create(
+    const std::string& label) {
+  talk_base::RefCountedObject<MediaStream>* stream =
+      new talk_base::RefCountedObject<MediaStream>(label);
+  return stream;
+}
+
+MediaStream::MediaStream(const std::string& label)
+    : label_(label),
+      ready_state_(MediaStreamInterface::kInitializing),
+      audio_track_list_(
+          new talk_base::RefCountedObject<
+          MediaStreamTrackList<AudioTrackInterface> >()),
+      video_track_list_(
+          new talk_base::RefCountedObject<
+          MediaStreamTrackList<VideoTrackInterface> >()) {
+}
+
+void MediaStream::set_ready_state(
+    MediaStreamInterface::ReadyState new_state) {
+  if (ready_state_ != new_state) {
+    ready_state_ = new_state;
+    Notifier<LocalMediaStreamInterface>::FireOnChanged();
+  }
+}
+
+bool MediaStream::AddTrack(AudioTrackInterface* track) {
+  if (ready_state() != kInitializing)
+    return false;
+  audio_track_list_->AddTrack(track);
+  return true;
+}
+
+bool MediaStream::AddTrack(VideoTrackInterface* track) {
+  if (ready_state() != kInitializing)
+    return false;
+  video_track_list_->AddTrack(track);
+  return true;
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/mediastreamimpl.h b/talk/app/webrtc/mediastreamimpl.h
new file mode 100644
index 0000000..44ede9b
--- /dev/null
+++ b/talk/app/webrtc/mediastreamimpl.h
@@ -0,0 +1,89 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains the implementation of the MediaStreamInterface interface.
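+// Tracks can only be added while the stream is in the kInitializing state.
+// A minimal sketch, assuming "audio_track" is an existing AudioTrackInterface*:
+//
+//   talk_base::scoped_refptr<MediaStream> stream(
+//       MediaStream::Create("stream_label"));
+//   stream->AddTrack(audio_track);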
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAMIMPL_H_
+#define TALK_APP_WEBRTC_MEDIASTREAMIMPL_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/notifierimpl.h"
+
+namespace webrtc {
+class AudioTrack;
+class VideoTrack;
+
+class MediaStream : public Notifier<LocalMediaStreamInterface> {
+ public:
+  template<class T>
+  class MediaStreamTrackList : public MediaStreamTrackListInterface<T> {
+   public:
+    void AddTrack(T* track) {
+      tracks_.push_back(track);
+    }
+    virtual size_t count() { return tracks_.size(); }
+    virtual T* at(size_t index) {
+      return tracks_.at(index);
+    }
+
+   private:
+    std::vector<talk_base::scoped_refptr<T> > tracks_;
+  };
+
+  static talk_base::scoped_refptr<MediaStream> Create(const std::string& label);
+
+  // Implement LocalMediaStreamInterface.
+  virtual bool AddTrack(AudioTrackInterface* track);
+  virtual bool AddTrack(VideoTrackInterface* track);
+  // Implement MediaStreamInterface.
+  virtual std::string label() const { return label_; }
+  virtual MediaStreamTrackListInterface<AudioTrackInterface>* audio_tracks() {
+    return audio_track_list_;
+  }
+  virtual MediaStreamTrackListInterface<VideoTrackInterface>* video_tracks() {
+    return video_track_list_;
+  }
+  virtual ReadyState ready_state() { return ready_state_; }
+  virtual void set_ready_state(ReadyState new_state);
+
+ protected:
+  explicit MediaStream(const std::string& label);
+
+  std::string label_;
+  MediaStreamInterface::ReadyState ready_state_;
+  talk_base::scoped_refptr<MediaStreamTrackList<AudioTrackInterface> >
+      audio_track_list_;
+  talk_base::scoped_refptr<MediaStreamTrackList<VideoTrackInterface> >
+      video_track_list_;
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_MEDIASTREAMIMPL_H_
diff --git a/talk/app/webrtc/mediastreamprovider.h b/talk/app/webrtc/mediastreamprovider.h
new file mode 100644
index 0000000..1cd4962
--- /dev/null
+++ b/talk/app/webrtc/mediastreamprovider.h
@@ -0,0 +1,58 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAMPROVIDER_H_
+#define TALK_APP_WEBRTC_MEDIASTREAMPROVIDER_H_
+
+#include "talk/app/webrtc/mediastream.h"
+
+namespace cricket {
+
+class VideoCapturer;
+class VideoRenderer;
+
+}  // namespace cricket
+
+namespace webrtc {
+
+// Interface for setting media devices on a certain MediaTrack.
+// This interface is called by classes in mediastreamhandler.h to
+// set new devices.
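+//
+// For illustration, a handler holding a MediaProviderInterface* might route a
+// local video track's renderer like this (provider, video_track and renderer
+// are illustrative names):
+//
+//   provider->SetLocalRenderer(video_track->label(), renderer);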
+class MediaProviderInterface {
+ public:
+  virtual void SetCaptureDevice(const std::string& name,
+                                cricket::VideoCapturer* camera) = 0;
+  virtual void SetLocalRenderer(const std::string& name,
+                                cricket::VideoRenderer* renderer) = 0;
+  virtual void SetRemoteRenderer(const std::string& name,
+                                 cricket::VideoRenderer* renderer) = 0;
+ protected:
+  virtual ~MediaProviderInterface() {}
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_MEDIASTREAMPROVIDER_H_
diff --git a/talk/app/webrtc/mediastreamproxy.cc b/talk/app/webrtc/mediastreamproxy.cc
new file mode 100644
index 0000000..7e74c86
--- /dev/null
+++ b/talk/app/webrtc/mediastreamproxy.cc
@@ -0,0 +1,318 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/mediastreamproxy.h"
+#include "talk/base/refcount.h"
+#include "talk/base/scoped_ref_ptr.h"
+
+namespace {
+
+enum {
+  MSG_SET_TRACKLIST_IMPLEMENTATION = 1,
+  MSG_REGISTER_OBSERVER,
+  MSG_UNREGISTER_OBSERVER,
+  MSG_LABEL,
+  MSG_ADD_AUDIO_TRACK,
+  MSG_ADD_VIDEO_TRACK,
+  MSG_READY_STATE,
+  MSG_COUNT,
+  MSG_AT
+};
+
+typedef talk_base::TypedMessageData<std::string*> LabelMessageData;
+typedef talk_base::TypedMessageData<size_t> SizeTMessageData;
+typedef talk_base::TypedMessageData<webrtc::ObserverInterface*>
+    ObserverMessageData;
+typedef talk_base::TypedMessageData<webrtc::MediaStreamInterface::ReadyState>
+    ReadyStateMessageData;
+
+template<typename T>
+class MediaStreamTrackMessageData : public talk_base::MessageData {
+ public:
+  explicit MediaStreamTrackMessageData(T* track)
+      : track_(track),
+        result_(false) {
+  }
+
+  talk_base::scoped_refptr<T> track_;
+  bool result_;
+};
+
+typedef MediaStreamTrackMessageData<webrtc::AudioTrackInterface>
+    AudioTrackMsgData;
+typedef MediaStreamTrackMessageData<webrtc::VideoTrackInterface>
+    VideoTrackMsgData;
+
+template <class TrackType>
+class MediaStreamTrackAtMessageData : public talk_base::MessageData {
+ public:
+  explicit MediaStreamTrackAtMessageData(size_t index)
+      : index_(index) {
+  }
+
+  size_t index_;
+  talk_base::scoped_refptr<TrackType> track_;
+};
+
+class MediaStreamTrackListsMessageData : public talk_base::MessageData {
+ public:
+  talk_base::scoped_refptr<webrtc::AudioTracks> audio_tracks_;
+  talk_base::scoped_refptr<webrtc::VideoTracks> video_tracks_;
+};
+
+}  // namespace anonymous
+
+namespace webrtc {
+
+talk_base::scoped_refptr<MediaStreamProxy> MediaStreamProxy::Create(
+    const std::string& label,
+    talk_base::Thread* signaling_thread) {
+  ASSERT(signaling_thread != NULL);
+  talk_base::RefCountedObject<MediaStreamProxy>* stream =
+      new talk_base::RefCountedObject<MediaStreamProxy>(
+          label, signaling_thread,
+          reinterpret_cast<LocalMediaStreamInterface*>(NULL));
+  return stream;
+}
+
+talk_base::scoped_refptr<MediaStreamProxy> MediaStreamProxy::Create(
+    const std::string& label,
+    talk_base::Thread* signaling_thread,
+    LocalMediaStreamInterface* media_stream_impl) {
+  ASSERT(signaling_thread != NULL);
+  ASSERT(media_stream_impl != NULL);
+  talk_base::RefCountedObject<MediaStreamProxy>* stream =
+      new talk_base::RefCountedObject<MediaStreamProxy>(label, signaling_thread,
+                                                media_stream_impl);
+  return stream;
+}
+
+MediaStreamProxy::MediaStreamProxy(const std::string& label,
+                                   talk_base::Thread* signaling_thread,
+                                   LocalMediaStreamInterface* media_stream_impl)
+    : signaling_thread_(signaling_thread),
+      media_stream_impl_(media_stream_impl),
+      audio_tracks_(new talk_base::RefCountedObject<
+                        MediaStreamTrackListProxy<AudioTrackInterface> >(
+                              signaling_thread_)),
+      video_tracks_(new talk_base::RefCountedObject<
+                        MediaStreamTrackListProxy<VideoTrackInterface> >(
+                            signaling_thread_)) {
+  if (media_stream_impl_ == NULL) {
+    media_stream_impl_ = MediaStream::Create(label);
+  }
+
+  MediaStreamTrackListsMessageData tracklists;
+  Send(MSG_SET_TRACKLIST_IMPLEMENTATION, &tracklists);
+  audio_tracks_->SetImplementation(tracklists.audio_tracks_);
+  video_tracks_->SetImplementation(tracklists.video_tracks_);
+}
+
+std::string MediaStreamProxy::label() const {
+  if (!signaling_thread_->IsCurrent()) {
+    std::string label;
+    LabelMessageData msg(&label);
+    Send(MSG_LABEL, &msg);
+    return label;
+  }
+  return media_stream_impl_->label();
+}
+
+MediaStreamInterface::ReadyState MediaStreamProxy::ready_state() {
+  if (!signaling_thread_->IsCurrent()) {
+    ReadyStateMessageData msg(MediaStreamInterface::kInitializing);
+    Send(MSG_READY_STATE, &msg);
+    return msg.data();
+  }
+  return media_stream_impl_->ready_state();
+}
+
+void MediaStreamProxy::set_ready_state(
+    MediaStreamInterface::ReadyState new_state) {
+  if (!signaling_thread_->IsCurrent()) {
+    // State should only be allowed to be changed from the signaling thread.
+    ASSERT(!"Not Allowed!");
+    return;
+  }
+  media_stream_impl_->set_ready_state(new_state);
+}
+
+bool MediaStreamProxy::AddTrack(AudioTrackInterface* track) {
+  if (!signaling_thread_->IsCurrent()) {
+    AudioTrackMsgData msg(track);
+    Send(MSG_ADD_AUDIO_TRACK, &msg);
+    return msg.result_;
+  }
+  return media_stream_impl_->AddTrack(track);
+}
+
+bool MediaStreamProxy::AddTrack(VideoTrackInterface* track) {
+  if (!signaling_thread_->IsCurrent()) {
+    VideoTrackMsgData msg(track);
+    Send(MSG_ADD_VIDEO_TRACK, &msg);
+    return msg.result_;
+  }
+  return media_stream_impl_->AddTrack(track);
+}
+
+void MediaStreamProxy::RegisterObserver(ObserverInterface* observer) {
+  if (!signaling_thread_->IsCurrent()) {
+    ObserverMessageData msg(observer);
+    Send(MSG_REGISTER_OBSERVER, &msg);
+    return;
+  }
+  media_stream_impl_->RegisterObserver(observer);
+}
+
+void MediaStreamProxy::UnregisterObserver(ObserverInterface* observer) {
+  if (!signaling_thread_->IsCurrent()) {
+    ObserverMessageData msg(observer);
+    Send(MSG_UNREGISTER_OBSERVER, &msg);
+    return;
+  }
+  media_stream_impl_->UnregisterObserver(observer);
+}
+
+void MediaStreamProxy::Send(uint32 id, talk_base::MessageData* data) const {
+  signaling_thread_->Send(const_cast<MediaStreamProxy*>(this), id,
+                          data);
+}
+
+// Implement MessageHandler
+void MediaStreamProxy::OnMessage(talk_base::Message* msg) {
+  talk_base::MessageData* data = msg->pdata;
+  switch (msg->message_id) {
+    case MSG_SET_TRACKLIST_IMPLEMENTATION: {
+      MediaStreamTrackListsMessageData* lists =
+          static_cast<MediaStreamTrackListsMessageData*>(data);
+      lists->audio_tracks_ = media_stream_impl_->audio_tracks();
+      lists->video_tracks_ = media_stream_impl_->video_tracks();
+      break;
+    }
+    case MSG_REGISTER_OBSERVER: {
+      ObserverMessageData* observer = static_cast<ObserverMessageData*>(data);
+      media_stream_impl_->RegisterObserver(observer->data());
+      break;
+    }
+    case MSG_UNREGISTER_OBSERVER: {
+      ObserverMessageData* observer = static_cast<ObserverMessageData*>(data);
+      media_stream_impl_->UnregisterObserver(observer->data());
+      break;
+    }
+    case MSG_LABEL: {
+      LabelMessageData* label = static_cast<LabelMessageData*>(data);
+      *(label->data()) = media_stream_impl_->label();
+      break;
+    }
+    case MSG_ADD_AUDIO_TRACK: {
+      AudioTrackMsgData* track = static_cast<AudioTrackMsgData*>(data);
+      track->result_ = media_stream_impl_->AddTrack(track->track_.get());
+      break;
+    }
+    case MSG_ADD_VIDEO_TRACK: {
+      VideoTrackMsgData* track = static_cast<VideoTrackMsgData*>(data);
+      track->result_ = media_stream_impl_->AddTrack(track->track_.get());
+      break;
+    }
+    case MSG_READY_STATE: {
+      ReadyStateMessageData* state = static_cast<ReadyStateMessageData*>(data);
+      state->data() = media_stream_impl_->ready_state();
+      break;
+    }
+    default:
+      ASSERT(!"Not Implemented!");
+      break;
+  }
+}
+
+template <class T>
+MediaStreamProxy::MediaStreamTrackListProxy<T>::MediaStreamTrackListProxy(
+    talk_base::Thread* signaling_thread)
+    : signaling_thread_(signaling_thread) {
+}
+
+template <class T>
+void MediaStreamProxy::MediaStreamTrackListProxy<T>::SetImplementation(
+    MediaStreamTrackListInterface<T>* track_list) {
+  track_list_ = track_list;
+}
+
+template <class T>
+size_t MediaStreamProxy::MediaStreamTrackListProxy<T>::count() {
+  if (!signaling_thread_->IsCurrent()) {
+    SizeTMessageData msg(0u);
+    Send(MSG_COUNT, &msg);
+    return msg.data();
+  }
+  return track_list_->count();
+}
+
+template <class T>
+T* MediaStreamProxy::MediaStreamTrackListProxy<T>::at(
+    size_t index) {
+  if (!signaling_thread_->IsCurrent()) {
+    MediaStreamTrackAtMessageData<T> msg(index);
+    Send(MSG_AT, &msg);
+    return msg.track_;
+  }
+  return track_list_->at(index);
+}
+
+template <class T>
+void MediaStreamProxy::MediaStreamTrackListProxy<T>::Send(
+    uint32 id, talk_base::MessageData* data) const {
+  signaling_thread_->Send(
+      const_cast<MediaStreamProxy::MediaStreamTrackListProxy<T>*>(
+          this), id, data);
+}
+
+// Implement MessageHandler
+template <class T>
+void MediaStreamProxy::MediaStreamTrackListProxy<T>::OnMessage(
+    talk_base::Message* msg) {
+  talk_base::MessageData* data = msg->pdata;
+  switch (msg->message_id) {
+    case MSG_COUNT: {
+      SizeTMessageData* count = static_cast<SizeTMessageData*>(data);
+      count->data() = track_list_->count();
+      break;
+    }
+    case MSG_AT: {
+      MediaStreamTrackAtMessageData<T>* track =
+          static_cast<MediaStreamTrackAtMessageData<T>*>(data);
+      track->track_ = track_list_->at(track->index_);
+      break;
+    }
+    default:
+      ASSERT(!"Not Implemented!");
+      break;
+  }
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/mediastreamproxy.h b/talk/app/webrtc/mediastreamproxy.h
new file mode 100644
index 0000000..7d1068a
--- /dev/null
+++ b/talk/app/webrtc/mediastreamproxy.h
@@ -0,0 +1,111 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAMPROXY_H_
+#define TALK_APP_WEBRTC_MEDIASTREAMPROXY_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/mediastreamimpl.h"
+#include "talk/base/thread.h"
+
+namespace webrtc {
+using talk_base::scoped_refptr;
+
+// MediaStreamProxy is a proxy for the MediaStream interface. Its purpose is
+// to make sure the MediaStream implementation is only accessed from the
+// signaling thread. It can be used as a proxy for both local and remote
+// MediaStreams.
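+//
+// A minimal sketch (the label is illustrative and audio_track is created
+// elsewhere; when no implementation is supplied, the proxy creates a default
+// MediaStream internally):
+//
+//   talk_base::scoped_refptr<MediaStreamProxy> stream(
+//       MediaStreamProxy::Create("stream_label", signaling_thread));
+//   stream->AddTrack(audio_track);  // Marshalled to the signaling thread.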
+class MediaStreamProxy : public LocalMediaStreamInterface,
+                         public talk_base::MessageHandler {
+ public:
+  static scoped_refptr<MediaStreamProxy> Create(
+      const std::string& label,
+      talk_base::Thread* signaling_thread);
+
+  static scoped_refptr<MediaStreamProxy> Create(
+      const std::string& label,
+      talk_base::Thread* signaling_thread,
+      LocalMediaStreamInterface* media_stream_impl);
+
+  // Implement LocalMediaStreamInterface.
+  virtual bool AddTrack(AudioTrackInterface* track);
+  virtual bool AddTrack(VideoTrackInterface* track);
+
+  // Implement MediaStreamInterface.
+  virtual std::string label() const;
+  virtual AudioTracks* audio_tracks() {
+    return audio_tracks_;
+  }
+  virtual VideoTracks* video_tracks() {
+    return video_tracks_;
+  }
+  virtual ReadyState ready_state();
+  virtual void set_ready_state(ReadyState new_state);
+
+  // Implement Notifier
+  virtual void RegisterObserver(ObserverInterface* observer);
+  virtual void UnregisterObserver(ObserverInterface* observer);
+
+ protected:
+  MediaStreamProxy(const std::string& label,
+                   talk_base::Thread* signaling_thread,
+                   LocalMediaStreamInterface* media_stream_impl);
+
+  template <class T>
+  class MediaStreamTrackListProxy : public MediaStreamTrackListInterface<T>,
+                                    public talk_base::MessageHandler {
+   public:
+    explicit MediaStreamTrackListProxy(talk_base::Thread* signaling_thread);
+
+    void SetImplementation(MediaStreamTrackListInterface<T>* track_list);
+    virtual size_t count();
+    virtual T* at(size_t index);
+
+   private:
+    void Send(uint32 id, talk_base::MessageData* data) const;
+    void OnMessage(talk_base::Message* msg);
+
+    talk_base::scoped_refptr<MediaStreamTrackListInterface<T> > track_list_;
+    mutable talk_base::Thread* signaling_thread_;
+  };
+  typedef MediaStreamTrackListProxy<AudioTrackInterface> AudioTrackListProxy;
+  typedef MediaStreamTrackListProxy<VideoTrackInterface> VideoTrackListProxy;
+
+  void Send(uint32 id, talk_base::MessageData* data) const;
+  // Implement MessageHandler.
+  virtual void OnMessage(talk_base::Message* msg);
+
+  mutable talk_base::Thread* signaling_thread_;
+  scoped_refptr<LocalMediaStreamInterface> media_stream_impl_;
+  scoped_refptr<AudioTrackListProxy> audio_tracks_;
+  scoped_refptr<VideoTrackListProxy> video_tracks_;
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_MEDIASTREAMPROXY_H_
diff --git a/talk/app/webrtc/mediastreamtrackproxy.cc b/talk/app/webrtc/mediastreamtrackproxy.cc
new file mode 100644
index 0000000..afb222e
--- /dev/null
+++ b/talk/app/webrtc/mediastreamtrackproxy.cc
@@ -0,0 +1,396 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/mediastreamtrackproxy.h"
+
+#include "talk/session/phone/videocapturer.h"
+
+namespace {
+
+enum {
+  MSG_REGISTER_OBSERVER = 1,
+  MSG_UNREGISTER_OBSERVER,
+  MSG_LABEL,
+  MSG_ENABLED,
+  MSG_SET_ENABLED,
+  MSG_STATE,
+  MSG_GET_AUDIODEVICE,
+  MSG_GET_VIDEODEVICE,
+  MSG_GET_VIDEORENDERER,
+  MSG_SET_VIDEORENDERER,
+};
+
+typedef talk_base::TypedMessageData<std::string*> LabelMessageData;
+typedef talk_base::TypedMessageData<webrtc::ObserverInterface*>
+    ObserverMessageData;
+typedef talk_base::TypedMessageData
+    <webrtc::MediaStreamTrackInterface::TrackState> TrackStateMessageData;
+typedef talk_base::TypedMessageData<bool> EnableMessageData;
+
+
+class AudioDeviceMessageData : public talk_base::MessageData {
+ public:
+  talk_base::scoped_refptr<webrtc::AudioDeviceModule> audio_device_;
+};
+
+class VideoDeviceMessageData : public talk_base::MessageData {
+ public:
+  cricket::VideoCapturer* video_device_;
+};
+
+class VideoRendererMessageData : public talk_base::MessageData {
+ public:
+  talk_base::scoped_refptr<webrtc::VideoRendererWrapperInterface>
+      video_renderer_;
+};
+
+}  // namespace anonymous
+
+namespace webrtc {
+
+template <class T>
+MediaStreamTrackProxy<T>::MediaStreamTrackProxy(
+    talk_base::Thread* signaling_thread)
+    : signaling_thread_(signaling_thread) {
+}
+
+template <class T>
+void MediaStreamTrackProxy<T>::Init(MediaStreamTrackInterface* track) {
+  track_ = track;
+}
+
+template <class T>
+std::string MediaStreamTrackProxy<T>::kind() const {
+  return track_->kind();
+}
+
+template <class T>
+std::string MediaStreamTrackProxy<T>::label() const {
+  if (!signaling_thread_->IsCurrent()) {
+    std::string label;
+    LabelMessageData msg(&label);
+    Send(MSG_LABEL, &msg);
+    return label;
+  }
+  return track_->label();
+}
+
+template <class T>
+MediaStreamTrackInterface::TrackState MediaStreamTrackProxy<T>::state() const {
+  if (!signaling_thread_->IsCurrent()) {
+    TrackStateMessageData msg(MediaStreamTrackInterface::kInitializing);
+    Send(MSG_STATE, &msg);
+    return msg.data();
+  }
+  return track_->state();
+}
+
+template <class T>
+bool MediaStreamTrackProxy<T>::enabled() const {
+  if (!signaling_thread_->IsCurrent()) {
+    EnableMessageData msg(false);
+    Send(MSG_ENABLED, &msg);
+    return msg.data();
+  }
+  return track_->enabled();
+}
+
+template <class T>
+bool MediaStreamTrackProxy<T>::set_enabled(bool enable) {
+  if (!signaling_thread_->IsCurrent()) {
+    EnableMessageData msg(enable);
+    Send(MSG_SET_ENABLED, &msg);
+    return msg.data();
+  }
+  return track_->set_enabled(enable);
+}
+
+template <class T>
+bool MediaStreamTrackProxy<T>::set_state(
+    MediaStreamTrackInterface::TrackState new_state) {
+  if (!signaling_thread_->IsCurrent()) {
+    // State should only be allowed to be changed from the signaling thread.
+    ASSERT(!"Not Allowed!");
+    return false;
+  }
+  return track_->set_state(new_state);
+}
+
+template <class T>
+void MediaStreamTrackProxy<T>::RegisterObserver(ObserverInterface* observer) {
+  if (!signaling_thread_->IsCurrent()) {
+    ObserverMessageData msg(observer);
+    Send(MSG_REGISTER_OBSERVER, &msg);
+    return;
+  }
+  track_->RegisterObserver(observer);
+}
+
+template <class T>
+void MediaStreamTrackProxy<T>::UnregisterObserver(ObserverInterface* observer) {
+  if (!signaling_thread_->IsCurrent()) {
+    ObserverMessageData msg(observer);
+    Send(MSG_UNREGISTER_OBSERVER, &msg);
+    return;
+  }
+  track_->UnregisterObserver(observer);
+}
+
+template <class T>
+void MediaStreamTrackProxy<T>::Send(uint32 id,
+                                    talk_base::MessageData* data) const {
+  signaling_thread_->Send(const_cast<MediaStreamTrackProxy<T>*>(this), id,
+                          data);
+}
+
+template <class T>
+bool MediaStreamTrackProxy<T>::HandleMessage(talk_base::Message* msg) {
+  talk_base::MessageData* data = msg->pdata;
+  switch (msg->message_id) {
+    case MSG_REGISTER_OBSERVER: {
+      ObserverMessageData* observer = static_cast<ObserverMessageData*>(data);
+      track_->RegisterObserver(observer->data());
+      return true;
+    }
+    case MSG_UNREGISTER_OBSERVER: {
+      ObserverMessageData* observer = static_cast<ObserverMessageData*>(data);
+      track_->UnregisterObserver(observer->data());
+      return true;
+    }
+    case MSG_LABEL: {
+      LabelMessageData* label = static_cast<LabelMessageData*>(data);
+      *(label->data()) = track_->label();
+      return true;
+    }
+    case MSG_SET_ENABLED: {
+      EnableMessageData* enabled = static_cast<EnableMessageData*>(data);
+      enabled->data() = track_->set_enabled(enabled->data());
+      return true;
+    }
+    case MSG_ENABLED: {
+      EnableMessageData* enabled = static_cast<EnableMessageData*>(data);
+      enabled->data() = track_->enabled();
+      return true;
+    }
+    case MSG_STATE: {
+      TrackStateMessageData* state = static_cast<TrackStateMessageData*>(data);
+      state->data() = track_->state();
+      return true;
+    }
+    default:
+      return false;
+  }
+}
+
+AudioTrackProxy::AudioTrackProxy(const std::string& label,
+                                 talk_base::Thread* signaling_thread)
+    : MediaStreamTrackProxy<LocalAudioTrackInterface>(signaling_thread),
+      audio_track_(AudioTrack::CreateRemote(label)) {
+  Init(audio_track_);
+}
+
+AudioTrackProxy::AudioTrackProxy(const std::string& label,
+                                 AudioDeviceModule* audio_device,
+                                 talk_base::Thread* signaling_thread)
+    : MediaStreamTrackProxy<LocalAudioTrackInterface>(signaling_thread),
+      audio_track_(AudioTrack::CreateLocal(label, audio_device)) {
+  Init(audio_track_);
+}
+
+AudioTrackProxy::AudioTrackProxy(LocalAudioTrackInterface* implementation,
+                                 talk_base::Thread* signaling_thread)
+    : MediaStreamTrackProxy<LocalAudioTrackInterface>(signaling_thread),
+      audio_track_(implementation) {
+  Init(audio_track_);
+}
+
+talk_base::scoped_refptr<AudioTrackInterface> AudioTrackProxy::CreateRemote(
+    const std::string& label,
+    talk_base::Thread* signaling_thread) {
+  ASSERT(signaling_thread != NULL);
+  talk_base::RefCountedObject<AudioTrackProxy>* track =
+      new talk_base::RefCountedObject<AudioTrackProxy>(label, signaling_thread);
+  return track;
+}
+
+talk_base::scoped_refptr<LocalAudioTrackInterface> AudioTrackProxy::CreateLocal(
+    const std::string& label,
+    AudioDeviceModule* audio_device,
+    talk_base::Thread* signaling_thread) {
+  ASSERT(signaling_thread != NULL);
+  talk_base::RefCountedObject<AudioTrackProxy>* track =
+      new talk_base::RefCountedObject<AudioTrackProxy>(label,
+                                               audio_device,
+                                               signaling_thread);
+  return track;
+}
+
+talk_base::scoped_refptr<LocalAudioTrackInterface> AudioTrackProxy::CreateLocal(
+    LocalAudioTrackInterface* implementation,
+    talk_base::Thread* signaling_thread) {
+  ASSERT(signaling_thread != NULL);
+  talk_base::RefCountedObject<AudioTrackProxy>* track =
+      new talk_base::RefCountedObject<AudioTrackProxy>(implementation,
+                                               signaling_thread);
+  return track;
+}
+
+AudioDeviceModule* AudioTrackProxy::GetAudioDevice() {
+  if (!signaling_thread_->IsCurrent()) {
+    AudioDeviceMessageData msg;
+    Send(MSG_GET_AUDIODEVICE, &msg);
+    return msg.audio_device_;
+  }
+  return audio_track_->GetAudioDevice();
+}
+
+void AudioTrackProxy::OnMessage(talk_base::Message* msg) {
+  if (!MediaStreamTrackProxy<LocalAudioTrackInterface>::HandleMessage(msg)) {
+    if (msg->message_id == MSG_GET_AUDIODEVICE) {
+      AudioDeviceMessageData* audio_device =
+          static_cast<AudioDeviceMessageData*>(msg->pdata);
+      audio_device->audio_device_ = audio_track_->GetAudioDevice();
+    } else {
+      ASSERT(!"Not Implemented!");
+    }
+  }
+}
+
+VideoTrackProxy::VideoTrackProxy(const std::string& label,
+                                 talk_base::Thread* signaling_thread)
+    : MediaStreamTrackProxy<LocalVideoTrackInterface>(signaling_thread),
+      video_track_(VideoTrack::CreateRemote(label)) {
+  Init(video_track_);
+}
+
+VideoTrackProxy::VideoTrackProxy(const std::string& label,
+                                 VideoCaptureModule* video_device,
+                                 talk_base::Thread* signaling_thread)
+    : MediaStreamTrackProxy<LocalVideoTrackInterface>(signaling_thread),
+      video_track_(VideoTrack::CreateLocal(label, video_device)) {
+  Init(video_track_);
+}
+
+VideoTrackProxy::VideoTrackProxy(LocalVideoTrackInterface* implementation,
+                                 talk_base::Thread* signaling_thread)
+    : MediaStreamTrackProxy<LocalVideoTrackInterface>(signaling_thread),
+      video_track_(implementation) {
+  Init(video_track_);
+}
+
+talk_base::scoped_refptr<VideoTrackInterface> VideoTrackProxy::CreateRemote(
+    const std::string& label,
+    talk_base::Thread* signaling_thread) {
+  ASSERT(signaling_thread != NULL);
+  talk_base::RefCountedObject<VideoTrackProxy>* track =
+      new talk_base::RefCountedObject<VideoTrackProxy>(label, signaling_thread);
+  return track;
+}
+
+talk_base::scoped_refptr<LocalVideoTrackInterface> VideoTrackProxy::CreateLocal(
+    const std::string& label,
+    VideoCaptureModule* video_device,
+    talk_base::Thread* signaling_thread) {
+  ASSERT(signaling_thread != NULL);
+  talk_base::RefCountedObject<VideoTrackProxy>* track =
+      new talk_base::RefCountedObject<VideoTrackProxy>(label, video_device,
+                                                       signaling_thread);
+  return track;
+}
+
+talk_base::scoped_refptr<LocalVideoTrackInterface> VideoTrackProxy::CreateLocal(
+    LocalVideoTrackInterface* implementation,
+    talk_base::Thread* signaling_thread) {
+  ASSERT(signaling_thread != NULL);
+  talk_base::RefCountedObject<VideoTrackProxy>* track =
+      new talk_base::RefCountedObject<VideoTrackProxy>(implementation,
+                                                       signaling_thread);
+  return track;
+}
+
+cricket::VideoCapturer* VideoTrackProxy::GetVideoCapture() {
+  if (!signaling_thread_->IsCurrent()) {
+    VideoDeviceMessageData msg;
+    Send(MSG_GET_VIDEODEVICE, &msg);
+    return msg.video_device_;
+  }
+  return video_track_->GetVideoCapture();
+}
+
+void VideoTrackProxy::SetRenderer(VideoRendererWrapperInterface* renderer) {
+  if (!signaling_thread_->IsCurrent()) {
+    VideoRendererMessageData msg;
+    msg.video_renderer_ = renderer;
+    Send(MSG_SET_VIDEORENDERER, &msg);
+    return;
+  }
+  video_track_->SetRenderer(renderer);
+}
+
+VideoRendererWrapperInterface* VideoTrackProxy::GetRenderer() {
+  if (!signaling_thread_->IsCurrent()) {
+    VideoRendererMessageData msg;
+    Send(MSG_GET_VIDEORENDERER, &msg);
+    return msg.video_renderer_;
+  }
+  return video_track_->GetRenderer();
+}
+
+void VideoTrackProxy::OnMessage(talk_base::Message* msg) {
+  if (!MediaStreamTrackProxy<LocalVideoTrackInterface>::HandleMessage(msg)) {
+    switch (msg->message_id) {
+      case MSG_GET_VIDEODEVICE: {
+        VideoDeviceMessageData* video_device =
+            static_cast<VideoDeviceMessageData*>(msg->pdata);
+        video_device->video_device_ = video_track_->GetVideoCapture();
+        break;
+      }
+      case MSG_GET_VIDEORENDERER: {
+        VideoRendererMessageData* video_renderer =
+            static_cast<VideoRendererMessageData*>(msg->pdata);
+        video_renderer->video_renderer_ = video_track_->GetRenderer();
+        break;
+      }
+      case MSG_SET_VIDEORENDERER: {
+        VideoRendererMessageData* video_renderer =
+            static_cast<VideoRendererMessageData*>(msg->pdata);
+        video_track_->SetRenderer(video_renderer->video_renderer_.get());
+        break;
+      }
+      default:
+        ASSERT(!"Not Implemented!");
+        break;
+    }
+  }
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/mediastreamtrackproxy.h b/talk/app/webrtc/mediastreamtrackproxy.h
new file mode 100644
index 0000000..000f81e
--- /dev/null
+++ b/talk/app/webrtc/mediastreamtrackproxy.h
@@ -0,0 +1,149 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file includes proxy classes for tracks. The purpose is
+// to make sure tracks are only accessed from the signaling thread.
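+//
+// A minimal sketch (the label is illustrative; video_device and
+// signaling_thread are supplied by the caller):
+//
+//   talk_base::scoped_refptr<LocalVideoTrackInterface> track(
+//       VideoTrackProxy::CreateLocal("video_label", video_device,
+//                                    signaling_thread));
+//   track->set_enabled(false);  // Marshalled to the signaling thread.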
+
+#ifndef TALK_APP_WEBRTC_MEDIASTREAMTRACKPROXY_H_
+#define TALK_APP_WEBRTC_MEDIASTREAMTRACKPROXY_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/audiotrackimpl.h"
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/videotrackimpl.h"
+#include "talk/base/thread.h"
+
+namespace cricket {
+
+class VideoCapturer;
+
+}  // namespace cricket
+
+namespace webrtc {
+
+template <class T>
+class MediaStreamTrackProxy : public T,
+                              talk_base::MessageHandler {
+ public:
+  void Init(MediaStreamTrackInterface* track);
+  // Implement MediaStreamTrack.
+
+  virtual std::string kind() const;
+  virtual std::string label() const;
+  virtual bool enabled() const;
+  virtual MediaStreamTrackInterface::TrackState state() const;
+  virtual bool set_enabled(bool enable);
+  virtual bool set_state(MediaStreamTrackInterface::TrackState new_state);
+
+  // Implement Notifier
+  virtual void RegisterObserver(ObserverInterface* observer);
+  virtual void UnregisterObserver(ObserverInterface* observer);
+
+ protected:
+  explicit MediaStreamTrackProxy(talk_base::Thread* signaling_thread);
+
+  void Send(uint32 id, talk_base::MessageData* data) const;
+  // Returns true if the message is handled.
+  bool HandleMessage(talk_base::Message* msg);
+
+  mutable talk_base::Thread* signaling_thread_;
+  MediaStreamTrackInterface* track_;
+};
+
+// AudioTrackProxy is a proxy for the AudioTrackInterface. The purpose is
+// to make sure AudioTrack is only accessed from the signaling thread.
+// It can be used as a proxy for both local and remote audio tracks.
+class AudioTrackProxy : public MediaStreamTrackProxy<LocalAudioTrackInterface> {
+ public:
+  static talk_base::scoped_refptr<AudioTrackInterface> CreateRemote(
+      const std::string& label,
+      talk_base::Thread* signaling_thread);
+  static talk_base::scoped_refptr<LocalAudioTrackInterface> CreateLocal(
+      const std::string& label,
+      AudioDeviceModule* audio_device,
+      talk_base::Thread* signaling_thread);
+  static talk_base::scoped_refptr<LocalAudioTrackInterface> CreateLocal(
+      LocalAudioTrackInterface* implementation,
+      talk_base::Thread* signaling_thread);
+
+  virtual AudioDeviceModule* GetAudioDevice();
+
+ protected:
+  AudioTrackProxy(const std::string& label,
+                  talk_base::Thread* signaling_thread);
+  AudioTrackProxy(const std::string& label,
+                  AudioDeviceModule* audio_device,
+                  talk_base::Thread* signaling_thread);
+  AudioTrackProxy(LocalAudioTrackInterface* implementation,
+                  talk_base::Thread* signaling_thread);
+  // Implement MessageHandler
+  virtual void OnMessage(talk_base::Message* msg);
+
+  talk_base::scoped_refptr<LocalAudioTrackInterface> audio_track_;
+};
+
+// VideoTrackProxy is a proxy for the VideoTrackInterface and
+// LocalVideoTrackInterface. The purpose is
+// to make sure VideoTrack is only accessed from the signaling thread.
+// It can be used as a proxy for both local and remote video tracks.
+class VideoTrackProxy : public MediaStreamTrackProxy<LocalVideoTrackInterface> {
+ public:
+  static talk_base::scoped_refptr<VideoTrackInterface> CreateRemote(
+      const std::string& label,
+      talk_base::Thread* signaling_thread);
+  static talk_base::scoped_refptr<LocalVideoTrackInterface> CreateLocal(
+      const std::string& label,
+      VideoCaptureModule* video_device,
+      talk_base::Thread* signaling_thread);
+  static talk_base::scoped_refptr<LocalVideoTrackInterface> CreateLocal(
+      LocalVideoTrackInterface* implementation,
+      talk_base::Thread* signaling_thread);
+
+  virtual cricket::VideoCapturer* GetVideoCapture();
+  virtual void SetRenderer(VideoRendererWrapperInterface* renderer);
+  virtual VideoRendererWrapperInterface* GetRenderer();
+
+ protected:
+  VideoTrackProxy(const std::string& label,
+                  talk_base::Thread* signaling_thread);
+  VideoTrackProxy(const std::string& label,
+                  VideoCaptureModule* video_device,
+                  talk_base::Thread* signaling_thread);
+  VideoTrackProxy(LocalVideoTrackInterface* implementation,
+                  talk_base::Thread* signaling_thread);
+
+  // Implement MessageHandler
+  virtual void OnMessage(talk_base::Message* msg);
+
+  talk_base::scoped_refptr<LocalVideoTrackInterface> video_track_;
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_MEDIASTREAMTRACKPROXY_H_
diff --git a/talk/app/webrtc/mediatrackimpl.h b/talk/app/webrtc/mediatrackimpl.h
new file mode 100644
index 0000000..e60801d
--- /dev/null
+++ b/talk/app/webrtc/mediatrackimpl.h
@@ -0,0 +1,81 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_MEDIATRACKIMPL_H_
+#define TALK_APP_WEBRTC_MEDIATRACKIMPL_H_
+
+#include <string>
+
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/notifierimpl.h"
+
+namespace webrtc {
+
+// MediaStreamTrack implements the interface common to AudioTrackInterface
+// and VideoTrackInterface.
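+//
+// For example, enabling or disabling a track notifies registered observers
+// only when the value actually changes (observer implements
+// ObserverInterface):
+//
+//   track->RegisterObserver(&observer);
+//   track->set_enabled(false);  // Fires ObserverInterface::OnChanged().
+//   track->set_enabled(false);  // No change, so observers are not notified.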
+template <typename T>
+class MediaStreamTrack : public Notifier<T> {
+ public:
+  typedef typename T::TrackState TypedTrackState;
+
+  virtual std::string label() const { return label_; }
+  virtual MediaStreamTrackInterface::TrackState state() const {
+    return state_;
+  }
+  virtual bool enabled() const { return enabled_; }
+  virtual bool set_enabled(bool enable) {
+    bool fire_on_change = (enable != enabled_);
+    enabled_ = enable;
+    if (fire_on_change) {
+      Notifier<T>::FireOnChanged();
+    }
+    return fire_on_change;
+  }
+  virtual bool set_state(MediaStreamTrackInterface::TrackState new_state) {
+    bool fire_on_change = (state_ != new_state);
+    state_ = new_state;
+    if (fire_on_change)
+      Notifier<T>::FireOnChanged();
+    return true;
+  }
+
+ protected:
+  explicit MediaStreamTrack(const std::string& label)
+      : enabled_(true),
+        label_(label),
+        state_(MediaStreamTrackInterface::kInitializing) {
+  }
+
+ private:
+  bool enabled_;
+  std::string label_;
+  MediaStreamTrackInterface::TrackState state_;
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_MEDIATRACKIMPL_H_
diff --git a/talk/app/webrtc/notifierimpl.h b/talk/app/webrtc/notifierimpl.h
new file mode 100644
index 0000000..bd7c183
--- /dev/null
+++ b/talk/app/webrtc/notifierimpl.h
@@ -0,0 +1,73 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_NOTIFIERIMPL_H_
+#define TALK_APP_WEBRTC_NOTIFIERIMPL_H_
+
+#include <list>
+
+#include "talk/base/common.h"
+#include "talk/app/webrtc/mediastream.h"
+
+namespace webrtc {
+
+// Implement a template version of a notifier.
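+// A minimal sketch of how it is used (FooInterface is a hypothetical
+// interface that declares RegisterObserver and UnregisterObserver):
+//
+//   class Foo : public Notifier<FooInterface> {
+//    public:
+//     void SomethingChanged() { FireOnChanged(); }
+//   };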
+template <class T>
+class Notifier : public T {
+ public:
+  Notifier() {
+  }
+
+  virtual void RegisterObserver(ObserverInterface* observer) {
+    ASSERT(observer != NULL);
+    observers_.push_back(observer);
+  }
+
+  virtual void UnregisterObserver(ObserverInterface* observer) {
+    for (std::list<ObserverInterface*>::iterator it = observers_.begin();
+         it != observers_.end(); ++it) {
+      if (*it == observer) {
+        observers_.erase(it);
+        break;
+      }
+    }
+  }
+
+  void FireOnChanged() {
+    for (std::list<ObserverInterface*>::iterator it = observers_.begin();
+         it != observers_.end(); ++it) {
+      (*it)->OnChanged();
+    }
+  }
+
+ protected:
+  std::list<ObserverInterface*> observers_;
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_NOTIFIERIMPL_H_
diff --git a/talk/app/webrtc/peerconnection.h b/talk/app/webrtc/peerconnection.h
index c28508f..4e2ad0a 100644
--- a/talk/app/webrtc/peerconnection.h
+++ b/talk/app/webrtc/peerconnection.h
@@ -1,6 +1,6 @@
 /*
  * libjingle
- * Copyright 2004--2011, Google Inc.
+ * Copyright 2011, Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
@@ -25,107 +25,245 @@
  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
+// This file contains the PeerConnection interface as defined in
+// http://dev.w3.org/2011/webrtc/editor/webrtc.html#peer-to-peer-connections.
+// Applications must use this interface to implement peer-to-peer connections.
+// The PeerConnectionFactory class provides factory methods to create
+// PeerConnection, MediaStream and media track objects.
+//
+// The following steps are needed to set up a typical call:
+// 1. Create a PeerConnectionFactoryInterface. Check the
+// CreatePeerConnectionFactory functions below for more information about the
+// input parameters.
+// 2. Create a PeerConnection object. Provide a configuration string which
+// points to either a STUN or a TURN server used to generate ICE candidates,
+// and provide an object that implements the PeerConnectionObserver interface.
+// The PeerConnection will now start collecting ICE candidates.
+// 3. Create local MediaStream and MediaTrack objects using the
+// PeerConnectionFactory and add them to the PeerConnection by calling
+// AddStream.
+// 4. Once all MediaStreams have been added to the PeerConnection, call
+// CommitStreamChanges. The PeerConnection then starts generating an offer
+// based on the local MediaStreams.
+// 5. When the PeerConnection has generated the ICE candidates, it calls the
+// observer's OnSignalingMessage callback with the initial offer.
+// 6. When an answer is received from the remote peer, it must be supplied to
+// the PeerConnection by calling ProcessSignalingMessage.
+// At this point the PeerConnection knows the remote capabilities and ICE
+// candidates, and media will start flowing to the remote peer.
+// (A minimal sketch of these steps is given at the end of this comment.)
+
+// The receiver of a call can decide to accept or reject the call.
+// This decision is taken by the application, not the PeerConnection.
+// If the application decides to accept the call:
+// 1. Create a PeerConnectionFactoryInterface if one doesn't already exist.
+// 2. Create a new PeerConnection.
+// 3. Provide the remote offer to the new PeerConnection object by calling
+// ProcessSignalingMessage.
+// 4. The PeerConnection will call the observer function OnAddStream with the
+// remote MediaStream and track information.
+// 5. The PeerConnection will call the observer function OnSignalingMessage
+// with the local ICE candidates in an answer message.
+// 6. The application can add its own MediaStreams by calling AddStream.
+// When all streams have been added, the application must call
+// CommitStreamChanges. Streams can be added at any time after the
+// PeerConnection object has been created.
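+//
+// For illustration only, a minimal sketch of the caller-side setup described
+// in steps 1-6 above. Error handling is omitted; MyObserver is a hypothetical
+// PeerConnectionObserver implementation, and config, audio_device and
+// remote_answer are supplied by the application:
+//
+//   talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory(
+//       CreatePeerConnectionFactory());
+//   MyObserver observer;
+//   talk_base::scoped_refptr<PeerConnectionInterface> pc(
+//       factory->CreatePeerConnection(config, &observer));
+//   talk_base::scoped_refptr<LocalMediaStreamInterface> stream(
+//       factory->CreateLocalMediaStream("stream_label"));
+//   talk_base::scoped_refptr<LocalAudioTrackInterface> audio_track(
+//       factory->CreateLocalAudioTrack("audio_label", audio_device));
+//   stream->AddTrack(audio_track);
+//   pc->AddStream(stream);
+//   pc->CommitStreamChanges();
+//   // The initial offer arrives via MyObserver::OnSignalingMessage. When the
+//   // remote answer is received:
+//   pc->ProcessSignalingMessage(remote_answer);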
+
 #ifndef TALK_APP_WEBRTC_PEERCONNECTION_H_
 #define TALK_APP_WEBRTC_PEERCONNECTION_H_
 
-// TODO - Add a factory class or some kind of PeerConnection manager
-// to support multiple PeerConnection object instantiation. This class will
-// create ChannelManager object and pass it to PeerConnection object. Otherwise
-// each PeerConnection object will have its own ChannelManager hence MediaEngine
-// and VoiceEngine/VideoEngine.
-
 #include <string>
+#include <vector>
 
-namespace cricket {
-class VideoRenderer;
-}
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/base/socketaddress.h"
 
 namespace talk_base {
 class Thread;
 }
 
-namespace webrtc {
+namespace cricket {
+class PortAllocator;
+}
 
+namespace webrtc {
+// MediaStream container interface.
+class StreamCollectionInterface : public talk_base::RefCountInterface {
+ public:
+  virtual size_t count() = 0;
+  virtual MediaStreamInterface* at(size_t index) = 0;
+  virtual MediaStreamInterface* find(const std::string& label) = 0;
+ protected:
+  // Dtor protected as objects shouldn't be deleted via this interface.
+  ~StreamCollectionInterface() {}
+};
+
+// PeerConnection callback interface. Applications should implement these
+// methods.
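+//
+// A minimal sketch of an application-side implementation (the class name is
+// illustrative):
+//
+//   class MyObserver : public PeerConnectionObserver {
+//    public:
+//     virtual void OnError() {}
+//     virtual void OnMessage(const std::string& msg) {}
+//     virtual void OnSignalingMessage(const std::string& msg) {
+//       // Forward msg to the remote peer over the application's own
+//       // signaling channel.
+//     }
+//     virtual void OnStateChange(StateType state_changed) {}
+//     virtual void OnAddStream(MediaStreamInterface* stream) {}
+//     virtual void OnRemoveStream(MediaStreamInterface* stream) {}
+//   };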
 class PeerConnectionObserver {
  public:
-  // serialized signaling message
+  enum StateType {
+    kReadyState,
+    kIceState,
+    kSdpState,
+  };
+
+  virtual void OnError() = 0;
+
+  virtual void OnMessage(const std::string& msg) = 0;
+
+  // Serialized signaling message
   virtual void OnSignalingMessage(const std::string& msg) = 0;
 
-  // Triggered when a remote peer accepts a media connection.
-  virtual void OnAddStream(const std::string& stream_id, bool video) = 0;
+  // Triggered when the ReadyState, SdpState or IceState has changed.
+  virtual void OnStateChange(StateType state_changed) = 0;
 
-  // Triggered when a remote peer closes a media stream.
-  virtual void OnRemoveStream(const std::string& stream_id, bool video) = 0;
+  // Triggered when media is received on a new stream from a remote peer.
+  virtual void OnAddStream(MediaStreamInterface* stream) = 0;
+
+  // Triggered when a remote peer closes a stream.
+  virtual void OnRemoveStream(MediaStreamInterface* stream) = 0;
 
  protected:
   // Dtor protected as objects shouldn't be deleted via this interface.
-  virtual ~PeerConnectionObserver() {}
+  ~PeerConnectionObserver() {}
 };
 
-class PeerConnection {
+
+class PeerConnectionInterface : public talk_base::RefCountInterface {
  public:
   enum ReadyState {
-    NEW = 0,
-    NEGOTIATING,
-    ACTIVE,
-    CLOSED,
+    kNew,
+    kNegotiating,
+    kActive,
+    kClosing,
+    kClosed,
   };
 
-  virtual ~PeerConnection() {}
+  enum SdpState {
+    kSdpNew,
+    kSdpIdle,
+    kSdpWaiting,
+  };
 
-  // Register a listener
-  virtual void RegisterObserver(PeerConnectionObserver* observer) = 0;
+  // Process a signaling message using the ROAP protocol.
+  virtual void ProcessSignalingMessage(const std::string& msg) = 0;
 
-  // SignalingMessage in json format
-  virtual bool SignalingMessage(const std::string& msg) = 0;
+  // Sends the msg over a data stream.
+  virtual bool Send(const std::string& msg) = 0;
 
-  // Asynchronously adds a local stream device to the peer
-  // connection.
-  virtual bool AddStream(const std::string& stream_id, bool video) = 0;
+  // Accessor methods to active local streams.
+  virtual talk_base::scoped_refptr<StreamCollectionInterface>
+      local_streams() = 0;
 
-  // Asynchronously removes a local stream device from the peer
-  // connection. The operation is complete when
-  // PeerConnectionObserver::OnRemoveStream is called.
-  virtual bool RemoveStream(const std::string& stream_id) = 0;
+  // Accessor methods to remote streams.
+  virtual talk_base::scoped_refptr<StreamCollectionInterface>
+      remote_streams() = 0;
 
-  // Info the peerconnection that it is time to return the signaling
-  // information. The operation is complete when
-  // PeerConnectionObserver::OnSignalingMessage is called.
-  virtual bool Connect() = 0;
+  // Add a new local stream.
+  // This function does not trigger any changes to the stream until
+  // CommitStreamChanges is called.
+  virtual void AddStream(LocalMediaStreamInterface* stream) = 0;
 
-  // Remove all the streams and tear down the session.
-  // After the Close() is called, the OnSignalingMessage will be invoked
-  // asynchronously. And before OnSignalingMessage is called,
-  // OnRemoveStream will be called for each stream that was active.
-  // TODO: Add an event such as onclose, or onreadystatechanged
-  // when the readystate reaches the closed state (no more streams in the
-  // peerconnection object.
-  virtual bool Close() = 0;
+  // Remove a local stream and stop sending it.
+  // This function does not trigger any changes to the stream until
+  // CommitStreamChanges is called.
+  virtual void RemoveStream(LocalMediaStreamInterface* stream) = 0;
 
-  // Set the audio input & output devices based on the given device name.
-  // An empty device name means to use the default audio device.
-  virtual bool SetAudioDevice(const std::string& wave_in_device,
-                              const std::string& wave_out_device,
-                              int opts) = 0;
+  // Commit stream changes. This will start sending media on new streams
+  // and stop sending media on removed streams.
+  virtual void CommitStreamChanges() = 0;
 
-  // Set the video renderer for the camera preview.
-  virtual bool SetLocalVideoRenderer(cricket::VideoRenderer* renderer) = 0;
+  // Close the current session. This triggers a Shutdown message to be sent
+  // and the ready state to change to kClosing.
+  // After calling this function no changes can be made to the sending streams.
+  virtual void Close() = 0;
 
-  // Set the video renderer for the specified stream.
-  virtual bool SetVideoRenderer(const std::string& stream_id,
-                                cricket::VideoRenderer* renderer) = 0;
+  // Returns the current ReadyState.
+  virtual ReadyState ready_state() = 0;
 
-  // Set video capture device
-  // For Chromium the cam_device should use the capture session id.
-  // For standalone app, cam_device is the camera name. It will try to
-  // set the default capture device when cam_device is "".
-  virtual bool SetVideoCapture(const std::string& cam_device) = 0;
+  // Returns the current SdpState.
+  virtual SdpState sdp_state() = 0;
 
-  // Returns the state of the PeerConnection object.  See the ReadyState
-  // enum for valid values.
-  virtual ReadyState GetReadyState() = 0;
+ protected:
+  // Dtor protected as objects shouldn't be deleted via this interface.
+  ~PeerConnectionInterface() {}
 };
 
+// Factory interface used for creating the cricket::PortAllocator that is
+// used for ICE negotiation.
+class PortAllocatorFactoryInterface : public talk_base::RefCountInterface {
+ public:
+  struct StunConfiguration {
+    StunConfiguration(const std::string& address, int port)
+        : server(address, port) {}
+    // STUN server address and port.
+    talk_base::SocketAddress server;
+  };
+
+  struct TurnConfiguration {
+    TurnConfiguration(const std::string& address,
+                      int port,
+                      const std::string& user_name,
+                      const std::string& password)
+        : server(address, port),
+          username(user_name),
+          password(password) {}
+    talk_base::SocketAddress server;
+    std::string username;
+    std::string password;
+  };
+
+  virtual cricket::PortAllocator* CreatePortAllocator(
+      const std::vector<StunConfiguration>& stun_servers,
+      const std::vector<TurnConfiguration>& turn_configurations) = 0;
+
+ protected:
+  PortAllocatorFactoryInterface() {}
+  ~PortAllocatorFactoryInterface() {}
+};
+
+// PeerConnectionFactoryInterface is the factory interface used for creating
+// PeerConnection, MediaStream and media track objects.
+// PeerConnectionFactoryInterface creates the required libjingle threads,
+// socket and network manager factory classes for networking.
+// If the application decides to provide its own implementations of these
+// classes, it should use the alternate create method, which accepts threads
+// and a PortAllocatorFactoryInterface as input.
+class PeerConnectionFactoryInterface : public talk_base::RefCountInterface {
+ public:
+  virtual talk_base::scoped_refptr<PeerConnectionInterface>
+      CreatePeerConnection(const std::string& config,
+                           PeerConnectionObserver* observer) = 0;
+
+  virtual talk_base::scoped_refptr<LocalMediaStreamInterface>
+      CreateLocalMediaStream(const std::string& label) = 0;
+
+  virtual talk_base::scoped_refptr<LocalVideoTrackInterface>
+      CreateLocalVideoTrack(const std::string& label,
+                            VideoCaptureModule* video_device) = 0;
+
+  virtual talk_base::scoped_refptr<LocalAudioTrackInterface>
+      CreateLocalAudioTrack(const std::string& label,
+                            AudioDeviceModule* audio_device) = 0;
+
+ protected:
+  // Dtor and ctor protected as objects shouldn't be created or deleted via
+  // this interface.
+  PeerConnectionFactoryInterface() {}
+  ~PeerConnectionFactoryInterface() {} // NOLINT
+};
+
+// Create a new instance of PeerConnectionFactoryInterface.
+talk_base::scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory();
+
+// Create a new instance of PeerConnectionFactoryInterface.
+// Ownership of the arguments is not transferred to this object; they must
+// remain in scope for the lifetime of the PeerConnectionFactoryInterface.
+talk_base::scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory(talk_base::Thread* worker_thread,
+                            talk_base::Thread* signaling_thread,
+                            PortAllocatorFactoryInterface* factory,
+                            AudioDeviceModule* default_adm);
+
 }  // namespace webrtc
 
 #endif  // TALK_APP_WEBRTC_PEERCONNECTION_H_
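
A minimal usage sketch of the interfaces declared above. MyObserver (an implementation of webrtc::PeerConnectionObserver) and capture_module (a webrtc::VideoCaptureModule* obtained elsewhere) are placeholder names, not part of this patch, and error handling is omitted:

  // Hypothetical application code.
  MyObserver observer;
  talk_base::scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
      webrtc::CreatePeerConnectionFactory());
  talk_base::scoped_refptr<webrtc::PeerConnectionInterface> pc(
      factory->CreatePeerConnection("STUN stun.l.google.com:19302", &observer));

  talk_base::scoped_refptr<webrtc::LocalAudioTrackInterface> audio(
      factory->CreateLocalAudioTrack("audio", NULL));
  talk_base::scoped_refptr<webrtc::LocalVideoTrackInterface> video(
      factory->CreateLocalVideoTrack("video", capture_module));
  talk_base::scoped_refptr<webrtc::LocalMediaStreamInterface> stream(
      factory->CreateLocalMediaStream("stream_label"));
  stream->AddTrack(audio);
  stream->AddTrack(video);

  // Stream changes only take effect once committed.
  pc->AddStream(stream);
  pc->CommitStreamChanges();

  // Outgoing signaling messages arrive via MyObserver::OnSignalingMessage();
  // messages from the remote peer are fed back in with
  // pc->ProcessSignalingMessage(msg). When done:
  pc->Close();
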
diff --git a/talk/app/webrtc/peerconnection_unittest.cc b/talk/app/webrtc/peerconnection_unittest.cc
new file mode 100644
index 0000000..6e61dc2
--- /dev/null
+++ b/talk/app/webrtc/peerconnection_unittest.cc
@@ -0,0 +1,346 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <stdio.h>
+
+#include <list>
+
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/app/webrtc/test/fakevideocapturemodule.h"
+#include "talk/base/gunit.h"
+#include "talk/base/scoped_ptr.h"
+#include "talk/base/thread.h"
+#include "talk/session/phone/fakevideorenderer.h"
+#include "talk/session/phone/videorenderer.h"
+
+void GetAllVideoTracks(webrtc::MediaStreamInterface* media_stream,
+                       std::list<webrtc::VideoTrackInterface*>* video_tracks) {
+  webrtc::VideoTracks* track_list = media_stream->video_tracks();
+  for (size_t i = 0; i < track_list->count(); ++i) {
+    webrtc::VideoTrackInterface* track = track_list->at(i);
+    video_tracks->push_back(track);
+  }
+}
+
+class SignalingMessageReceiver {
+ public:
+  virtual void ReceiveMessage(const std::string& msg) = 0;
+
+  virtual int num_rendered_frames() = 0;
+
+  // Makes it possible for the remote side to decide when to start capturing.
+  // This makes it possible to delay capturing until a renderer has been
+  // added.
+  virtual void StartCapturing() = 0;
+
+ protected:
+  SignalingMessageReceiver() {}
+  virtual ~SignalingMessageReceiver() {}
+};
+
+class PeerConnectionP2PTestClient
+    : public webrtc::PeerConnectionObserver,
+      public SignalingMessageReceiver {
+ public:
+  static PeerConnectionP2PTestClient* CreateClient(int id) {
+    PeerConnectionP2PTestClient* client = new PeerConnectionP2PTestClient(id);
+    if (!client->Init()) {
+      delete client;
+      return NULL;
+    }
+    return client;
+  }
+
+  ~PeerConnectionP2PTestClient() {
+  }
+
+  void StartSession() {
+    if (video_track_.get() != NULL) {
+      // Tracks have already been set up.
+      return;
+    }
+    // TODO: the default audio device module is used regardless of
+    // the second parameter to the CreateLocalAudioTrack(..) call. Maybe remove
+    // the second parameter from the API altogether?
+    talk_base::scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track(
+        peer_connection_factory_->CreateLocalAudioTrack("audio_track", NULL));
+
+    CreateLocalVideoTrack();
+
+    talk_base::scoped_refptr<webrtc::LocalMediaStreamInterface> stream =
+        peer_connection_factory_->CreateLocalMediaStream("stream_label");
+
+    stream->AddTrack(audio_track);
+    stream->AddTrack(video_track_);
+
+    peer_connection_->AddStream(stream);
+    peer_connection_->CommitStreamChanges();
+  }
+
+  void StartCapturing() {
+    if (fake_video_capture_module_ != NULL) {
+      fake_video_capture_module_->StartCapturing();
+    }
+  }
+
+  bool SessionActive() {
+    return peer_connection_->ready_state() ==
+        webrtc::PeerConnectionInterface::kActive;
+  }
+
+  void StopSession() {
+    if (fake_video_capture_module_ != NULL) {
+      fake_video_capture_module_->StopCapturing();
+    }
+    // TODO: investigate why calling Close() triggers a crash when
+    // deleting the PeerConnection.
+    // peer_connection_->Close();
+  }
+
+  void set_signaling_message_receiver(
+      SignalingMessageReceiver* signaling_message_receiver) {
+    signaling_message_receiver_ = signaling_message_receiver;
+  }
+
+  bool FramesReceivedCheck(int number_of_frames) {
+    if (number_of_frames > signaling_message_receiver_->num_rendered_frames()) {
+      return false;
+    } else {
+      EXPECT_LT(number_of_frames, fake_video_capture_module_->sent_frames());
+    }
+    return true;
+  }
+
+  // SignalingMessageReceiver callback.
+  virtual void ReceiveMessage(const std::string& msg) {
+    peer_connection_->ProcessSignalingMessage(msg);
+  }
+
+  virtual int num_rendered_frames() {
+    if (fake_video_renderer_ == NULL) {
+      return -1;
+    }
+    return fake_video_renderer_->num_rendered_frames();
+  }
+
+  // PeerConnectionObserver callbacks.
+  virtual void OnError() {}
+  virtual void OnMessage(const std::string&) {}
+  virtual void OnSignalingMessage(const std::string& msg) {
+    if (signaling_message_receiver_ == NULL) {
+      // The remote party may have been deleted.
+      return;
+    }
+    signaling_message_receiver_->ReceiveMessage(msg);
+  }
+  virtual void OnStateChange(StateType /*state_changed*/) {}
+  virtual void OnAddStream(webrtc::MediaStreamInterface* media_stream) {
+    std::list<webrtc::VideoTrackInterface*> video_tracks;
+    GetAllVideoTracks(media_stream, &video_tracks);
+    int track_id = 0;
+    // Currently only one video track is supported.
+    // TODO: enable multiple video tracks.
+    EXPECT_EQ(1u, video_tracks.size());
+    for (std::list<webrtc::VideoTrackInterface*>::iterator iter =
+             video_tracks.begin();
+         iter != video_tracks.end();
+         ++iter) {
+      fake_video_renderer_ = new cricket::FakeVideoRenderer();
+      video_renderer_wrapper_ = webrtc::CreateVideoRenderer(
+          fake_video_renderer_);
+      (*iter)->SetRenderer(video_renderer_wrapper_);
+      track_id++;
+    }
+    // The video renderer has been added. Tell the far end to start capturing
+    // frames. That way the number of captured frames should equal the number
+    // of rendered frames.
+    if (signaling_message_receiver_ != NULL) {
+      signaling_message_receiver_->StartCapturing();
+      return;
+    }
+  }
+  virtual void OnRemoveStream(webrtc::MediaStreamInterface* /*media_stream*/) {
+  }
+
+ private:
+  explicit PeerConnectionP2PTestClient(int id)
+      : id_(id),
+        fake_video_capture_module_(NULL),
+        fake_video_renderer_(NULL),
+        signaling_message_receiver_(NULL) {
+  }
+
+  bool Init() {
+    EXPECT_TRUE(peer_connection_.get() == NULL);
+    EXPECT_TRUE(peer_connection_factory_.get() == NULL);
+    peer_connection_factory_ = webrtc::CreatePeerConnectionFactory();
+    if (peer_connection_factory_.get() == NULL) {
+      return false;
+    }
+
+    const std::string server_configuration = "STUN stun.l.google.com:19302";
+    peer_connection_ = peer_connection_factory_->CreatePeerConnection(
+        server_configuration, this);
+    return peer_connection_.get() != NULL;
+  }
+
+  void GenerateRecordingFileName(int track, std::string* file_name) {
+    if (file_name == NULL) {
+      return;
+    }
+    std::stringstream file_name_stream;
+    file_name_stream << "p2p_test_client_" << id_ << "_videotrack_" << track <<
+        ".yuv";
+    file_name->clear();
+    *file_name = file_name_stream.str();
+  }
+
+  void CreateLocalVideoTrack() {
+    fake_video_capture_module_ = FakeVideoCaptureModule::Create(
+        talk_base::Thread::Current());
+    video_track_ = peer_connection_factory_->CreateLocalVideoTrack(
+        "video_track", fake_video_capture_module_);
+  }
+
+  int id_;
+  talk_base::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+  talk_base::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+      peer_connection_factory_;
+
+  // Owns and ensures that fake_video_capture_module_ is available as long as
+  // this class exists.  It also ensures destruction of the memory associated
+  // with it when this class is deleted.
+  talk_base::scoped_refptr<webrtc::LocalVideoTrackInterface> video_track_;
+  // Needed to keep track of the number of frames sent.
+  FakeVideoCaptureModule* fake_video_capture_module_;
+  // Ensures that fake_video_renderer_ is available as long as this class
+  // exists. It also ensures destruction of the memory associated with it when
+  // this class is deleted.
+  talk_base::scoped_refptr<webrtc::VideoRendererWrapperInterface>
+      video_renderer_wrapper_;
+  // Needed to keep track of the number of frames received.
+  cricket::FakeVideoRenderer* fake_video_renderer_;
+
+  // For remote peer communication.
+  SignalingMessageReceiver* signaling_message_receiver_;
+};
+
+class P2PTestConductor : public testing::Test {
+ public:
+  virtual void SetUp() {
+    EXPECT_TRUE(Init());
+  }
+  // Return true if the session is no longer pending, i.e. if the session is
+  // active or has failed.
+  bool ActivationNotPending() {
+    if (!IsInitialized()) {
+      return true;
+    }
+    return SessionActive();
+  }
+  bool SessionActive() {
+    return initiating_client_->SessionActive() &&
+        receiving_client_->SessionActive();
+  }
+  // Return true if the provided number of frames has been received or if it
+  // is known that this will never occur (e.g. no frames will be sent or
+  // captured).
+  bool FramesNotPending(int frames_to_receive) {
+    if (!IsInitialized()) {
+      return true;
+    }
+    return FramesReceivedCheck(frames_to_receive);
+  }
+  bool FramesReceivedCheck(int frames_received) {
+    return initiating_client_->FramesReceivedCheck(frames_received) &&
+        receiving_client_->FramesReceivedCheck(frames_received);
+  }
+  ~P2PTestConductor() {
+    if (initiating_client_.get() != NULL) {
+      initiating_client_->set_signaling_message_receiver(NULL);
+    }
+    if (receiving_client_.get() != NULL) {
+      receiving_client_->set_signaling_message_receiver(NULL);
+    }
+  }
+
+  bool StartSession() {
+    if (!IsInitialized()) {
+      return false;
+    }
+    initiating_client_->StartSession();
+    receiving_client_->StartSession();
+    return true;
+  }
+
+  bool StopSession() {
+    if (!IsInitialized()) {
+      return false;
+    }
+    initiating_client_->StopSession();
+    receiving_client_->StopSession();
+    return true;
+  }
+
+ private:
+  bool Init() {
+    initiating_client_.reset(PeerConnectionP2PTestClient::CreateClient(0));
+    receiving_client_.reset(PeerConnectionP2PTestClient::CreateClient(1));
+    if ((initiating_client_.get() == NULL) ||
+        (receiving_client_.get() == NULL)) {
+      return false;
+    }
+    initiating_client_->set_signaling_message_receiver(receiving_client_.get());
+    receiving_client_->set_signaling_message_receiver(initiating_client_.get());
+    return true;
+  }
+  bool IsInitialized() const {
+    return (initiating_client_.get() != NULL) &&
+        (receiving_client_.get() != NULL);
+  }
+
+  talk_base::scoped_ptr<PeerConnectionP2PTestClient> initiating_client_;
+  talk_base::scoped_ptr<PeerConnectionP2PTestClient> receiving_client_;
+};
+
+// This test sets up a call between two parties. Both parties send static
+// frames to each other. Once the test is finished, the number of sent frames
+// is compared to the number of received frames.
+TEST_F(P2PTestConductor, LocalP2PTest) {
+  EXPECT_TRUE(StartSession());
+  const int kMaxWaitForActivationMs = 5000;
+  EXPECT_TRUE_WAIT(ActivationNotPending(), kMaxWaitForActivationMs);
+  EXPECT_TRUE(SessionActive());
+
+  const int kEndFrameCount = 10;
+  const int kMaxWaitForFramesMs = 5000;
+  EXPECT_TRUE_WAIT(FramesNotPending(kEndFrameCount), kMaxWaitForFramesMs);
+  EXPECT_TRUE(FramesReceivedCheck(kEndFrameCount));
+  EXPECT_TRUE(StopSession());
+}
diff --git a/talk/app/webrtc/peerconnectionfactory.cc b/talk/app/webrtc/peerconnectionfactory.cc
deleted file mode 100644
index a3dac39..0000000
--- a/talk/app/webrtc/peerconnectionfactory.cc
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * libjingle
- * Copyright 2004--2011, Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- *  1. Redistributions of source code must retain the above copyright notice,
- *     this list of conditions and the following disclaimer.
- *  2. Redistributions in binary form must reproduce the above copyright notice,
- *     this list of conditions and the following disclaimer in the documentation
- *     and/or other materials provided with the distribution.
- *  3. The name of the author may not be used to endorse or promote products
- *     derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "talk/app/webrtc/peerconnectionfactory.h"
-
-#include "talk/app/webrtc/peerconnectionproxy.h"
-#include "talk/base/logging.h"
-#include "talk/p2p/client/basicportallocator.h"
-#include "talk/session/phone/channelmanager.h"
-
-namespace webrtc {
-
-PeerConnectionFactory::PeerConnectionFactory(
-    cricket::MediaEngineInterface* media_engine,
-    cricket::DeviceManagerInterface* device_manager,
-    talk_base::Thread* worker_thread)
-    : initialized_(false),
-      channel_manager_(new cricket::ChannelManager(media_engine,
-                                                   device_manager,
-                                                   worker_thread)) {
-}
-
-PeerConnectionFactory::PeerConnectionFactory(
-    talk_base::Thread* worker_thread)
-    : initialized_(false),
-      channel_manager_(new cricket::ChannelManager(worker_thread)) {
-}
-
-PeerConnectionFactory::~PeerConnectionFactory() {
-}
-
-bool PeerConnectionFactory::Initialize() {
-  ASSERT(channel_manager_.get() != NULL);
-  initialized_ = channel_manager_->Init();
-  return initialized_;
-}
-
-PeerConnection* PeerConnectionFactory::CreatePeerConnection(
-    cricket::PortAllocator* port_allocator,
-    talk_base::Thread* signaling_thread) {
-  PeerConnectionProxy* pc = NULL;
-  if (initialized_) {
-    pc =  new PeerConnectionProxy(
-        port_allocator, channel_manager_.get(), signaling_thread);
-    if (!pc->Init()) {
-      LOG(LERROR) << "Error in initializing PeerConnection";
-      delete pc;
-      pc = NULL;
-    }
-  } else {
-    LOG(LERROR) << "PeerConnectionFactory is not initialize";
-  }
-  return pc;
-}
-
-}  // namespace webrtc
diff --git a/talk/app/webrtc/peerconnectionfactory.h b/talk/app/webrtc/peerconnectionfactory.h
deleted file mode 100644
index ea509d6..0000000
--- a/talk/app/webrtc/peerconnectionfactory.h
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * libjingle
- * Copyright 2004--2011, Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- *  1. Redistributions of source code must retain the above copyright notice,
- *     this list of conditions and the following disclaimer.
- *  2. Redistributions in binary form must reproduce the above copyright notice,
- *     this list of conditions and the following disclaimer in the documentation
- *     and/or other materials provided with the distribution.
- *  3. The name of the author may not be used to endorse or promote products
- *     derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef TALK_APP_WEBRTC_PEERCONNECTIONFACTORY_H_
-#define TALK_APP_WEBRTC_PEERCONNECTIONFACTORY_H_
-
-#include <string>
-#include <vector>
-
-#include "talk/base/scoped_ptr.h"
-
-namespace cricket {
-class ChannelManager;
-class DeviceManagerInterface;
-class MediaEngineInterface;
-class PortAllocator;
-}  // namespace cricket
-
-namespace talk_base {
-class SocketAddress;
-class Thread;
-}  // namespace talk_base
-
-namespace webrtc {
-
-class PeerConnection;
-
-class PeerConnectionFactory {
- public:
-  PeerConnectionFactory(cricket::MediaEngineInterface* media_engine,
-                        cricket::DeviceManagerInterface* device_manager,
-                        talk_base::Thread* worker_thread);
-  PeerConnectionFactory(talk_base::Thread* worker_thread);
-
-  virtual ~PeerConnectionFactory();
-  bool Initialize();
-
-  PeerConnection* CreatePeerConnection(
-      cricket::PortAllocator* port_allocator,
-      talk_base::Thread* signaling_thread);
-
- private:
-  bool initialized_;
-  talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
-};
-
-}  // namespace webrtc
-
-#endif  // TALK_APP_WEBRTC_PEERCONNECTIONFACTORY_H_
-
diff --git a/talk/app/webrtc/peerconnectionfactory_unittest.cc b/talk/app/webrtc/peerconnectionfactory_unittest.cc
new file mode 100644
index 0000000..7a3b15f
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionfactory_unittest.cc
@@ -0,0 +1,88 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/mediastreamimpl.h"
+#include "talk/app/webrtc/peerconnectionfactoryimpl.h"
+#include "talk/app/webrtc/fakeportallocatorfactory.h"
+#include "talk/base/gunit.h"
+#include "talk/base/scoped_ptr.h"
+#include "talk/base/thread.h"
+#include "talk/session/phone/webrtccommon.h"
+#include "talk/session/phone/webrtcvoe.h"
+
+static const char kStunConfiguration[] = "STUN stun.l.google.com:19302";
+
+namespace webrtc {
+
+class NullPeerConnectionObserver : public PeerConnectionObserver {
+ public:
+  virtual void OnError() {}
+  virtual void OnMessage(const std::string& msg) {}
+  virtual void OnSignalingMessage(const std::string& msg) {}
+  virtual void OnStateChange(StateType state_changed) {}
+  virtual void OnAddStream(MediaStreamInterface* stream) {}
+  virtual void OnRemoveStream(MediaStreamInterface* stream) {}
+};
+
+TEST(PeerConnectionFactory, CreatePCUsingInternalModules) {
+  NullPeerConnectionObserver observer;
+  talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory(
+      CreatePeerConnectionFactory());
+  ASSERT_TRUE(factory.get() != NULL);
+  talk_base::scoped_refptr<PeerConnectionInterface> pc1(
+      factory->CreatePeerConnection("", &observer));
+  EXPECT_TRUE(pc1.get() == NULL);
+
+  talk_base::scoped_refptr<PeerConnectionInterface> pc2(
+      factory->CreatePeerConnection(kStunConfiguration, &observer));
+
+  EXPECT_TRUE(pc2.get() != NULL);
+}
+
+TEST(PeerConnectionFactory, CreatePCUsingExternalModules) {
+  talk_base::scoped_refptr<PortAllocatorFactoryInterface> allocator_factory(
+      FakePortAllocatorFactory::Create());
+
+  talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory =
+      CreatePeerConnectionFactory(talk_base::Thread::Current(),
+                                  talk_base::Thread::Current(),
+                                  allocator_factory.get(),
+                                  NULL);
+  ASSERT_TRUE(factory.get() != NULL);
+
+  NullPeerConnectionObserver observer;
+  talk_base::scoped_refptr<webrtc::PeerConnectionInterface> pc1(
+      factory->CreatePeerConnection("", &observer));
+
+  EXPECT_TRUE(pc1.get() == NULL);
+
+  talk_base::scoped_refptr<PeerConnectionInterface> pc2(
+      factory->CreatePeerConnection(kStunConfiguration, &observer));
+  EXPECT_TRUE(pc2.get() != NULL);
+}
+
+}  // namespace webrtc
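
The external-modules path requires the application to supply its own PortAllocatorFactoryInterface implementation (the test above uses FakePortAllocatorFactory). Below is a rough sketch of such an implementation under stated assumptions: the header paths and the cricket::BasicPortAllocator constructor used here are assumptions to be checked against the tree, only the first STUN server is honored, and TURN entries are ignored.

  #include "talk/base/network.h"
  #include "talk/p2p/client/basicportallocator.h"

  class SimplePortAllocatorFactory
      : public webrtc::PortAllocatorFactoryInterface {
   public:
    static talk_base::scoped_refptr<SimplePortAllocatorFactory> Create() {
      return new talk_base::RefCountedObject<SimplePortAllocatorFactory>();
    }

    virtual cricket::PortAllocator* CreatePortAllocator(
        const std::vector<StunConfiguration>& stun_servers,
        const std::vector<TurnConfiguration>& turn_configurations) {
      talk_base::SocketAddress stun_address;
      if (!stun_servers.empty())
        stun_address = stun_servers[0].server;
      // Assumed constructor: a network manager, the STUN address and three
      // unused relay addresses. Verify against basicportallocator.h.
      return new cricket::BasicPortAllocator(&network_manager_, stun_address,
                                             talk_base::SocketAddress(),
                                             talk_base::SocketAddress(),
                                             talk_base::SocketAddress());
    }

   protected:
    SimplePortAllocatorFactory() {}
    ~SimplePortAllocatorFactory() {}

   private:
    talk_base::BasicNetworkManager network_manager_;
  };

Such a factory is then passed, together with the worker and signaling threads, to the four-argument CreatePeerConnectionFactory() overload, just as the test does with its fake.
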
diff --git a/talk/app/webrtc/peerconnectionfactoryimpl.cc b/talk/app/webrtc/peerconnectionfactoryimpl.cc
new file mode 100644
index 0000000..4275a5f
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionfactoryimpl.cc
@@ -0,0 +1,252 @@
+/*
+ * libjingle
+ * Copyright 2004--2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/peerconnectionfactoryimpl.h"
+
+#include "talk/app/webrtc/mediastreamproxy.h"
+#include "talk/app/webrtc/mediastreamtrackproxy.h"
+#include "talk/app/webrtc/peerconnectionimpl.h"
+#include "talk/app/webrtc/portallocatorfactory.h"
+#include "talk/session/phone/dummydevicemanager.h"
+#include "talk/session/phone/webrtcmediaengine.h"
+
+#ifdef WEBRTC_RELATIVE_PATH
+#include "modules/audio_device/main/interface/audio_device.h"
+#else
+#include "third_party/webrtc/files/include/audio_device.h"
+#endif
+
+using talk_base::scoped_refptr;
+
+namespace {
+
+typedef talk_base::TypedMessageData<bool> InitMessageData;
+
+struct CreatePeerConnectionParams : public talk_base::MessageData {
+  CreatePeerConnectionParams(const std::string& configuration,
+                             webrtc::PeerConnectionObserver* observer)
+      : configuration(configuration), observer(observer) {
+  }
+  scoped_refptr<webrtc::PeerConnectionInterface> peerconnection;
+  const std::string& configuration;
+  webrtc::PeerConnectionObserver* observer;
+};
+
+enum {
+  MSG_INIT_FACTORY = 1,
+  MSG_TERMINATE_FACTORY = 2,
+  MSG_CREATE_PEERCONNECTION = 3,
+};
+
+}  // namespace
+
+namespace webrtc {
+
+scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory() {
+  talk_base::RefCountedObject<PeerConnectionFactory>* pc_factory =
+      new talk_base::RefCountedObject<PeerConnectionFactory>();
+
+  if (!pc_factory->Initialize()) {
+    delete pc_factory;
+    pc_factory = NULL;
+  }
+  return pc_factory;
+}
+
+scoped_refptr<PeerConnectionFactoryInterface>
+CreatePeerConnectionFactory(talk_base::Thread* worker_thread,
+                            talk_base::Thread* signaling_thread,
+                            PortAllocatorFactoryInterface* factory,
+                            AudioDeviceModule* default_adm) {
+  talk_base::RefCountedObject<PeerConnectionFactory>* pc_factory =
+      new talk_base::RefCountedObject<PeerConnectionFactory>(
+          worker_thread, signaling_thread, factory, default_adm);
+  if (!pc_factory->Initialize()) {
+    delete pc_factory;
+    pc_factory = NULL;
+  }
+  return pc_factory;
+}
+
+PeerConnectionFactory::PeerConnectionFactory()
+    : owns_ptrs_(true),
+      signaling_thread_(new talk_base::Thread),
+      worker_thread_(new talk_base::Thread) {
+  bool result = signaling_thread_->Start();
+  ASSERT(result);
+  result = worker_thread_->Start();
+  ASSERT(result);
+}
+
+PeerConnectionFactory::PeerConnectionFactory(
+    talk_base::Thread* worker_thread,
+    talk_base::Thread* signaling_thread,
+    PortAllocatorFactoryInterface* port_allocator_factory,
+    AudioDeviceModule* default_adm)
+    : owns_ptrs_(false),
+      signaling_thread_(signaling_thread),
+      worker_thread_(worker_thread),
+      allocator_factory_(port_allocator_factory),
+      default_adm_(default_adm) {
+  ASSERT(worker_thread != NULL);
+  ASSERT(signaling_thread != NULL);
+  ASSERT(allocator_factory_.get() != NULL);
+  // TODO: Currently there is no way of creating an external ADM in the
+  // libjingle source tree, so we can't currently assert that this is non-NULL.
+  // ASSERT(default_adm != NULL);
+}
+
+PeerConnectionFactory::~PeerConnectionFactory() {
+  signaling_thread_->Clear(this);
+  signaling_thread_->Send(this, MSG_TERMINATE_FACTORY);
+  if (owns_ptrs_) {
+    delete signaling_thread_;
+    delete worker_thread_;
+  }
+}
+
+bool PeerConnectionFactory::Initialize() {
+  InitMessageData result(false);
+  signaling_thread_->Send(this, MSG_INIT_FACTORY, &result);
+  return result.data();
+}
+
+void PeerConnectionFactory::OnMessage(talk_base::Message* msg) {
+  switch (msg->message_id) {
+    case MSG_INIT_FACTORY: {
+      InitMessageData* pdata = static_cast<InitMessageData*> (msg->pdata);
+      pdata->data() = Initialize_s();
+     break;
+    }
+    case MSG_TERMINATE_FACTORY: {
+      Terminate_s();
+      break;
+    }
+    case MSG_CREATE_PEERCONNECTION: {
+      CreatePeerConnectionParams* pdata =
+          static_cast<CreatePeerConnectionParams*> (msg->pdata);
+      pdata->peerconnection = CreatePeerConnection_s(pdata->configuration,
+                                                     pdata->observer);
+      break;
+    }
+  }
+}
+
+bool PeerConnectionFactory::Initialize_s() {
+  if (owns_ptrs_) {
+    allocator_factory_ = PortAllocatorFactory::Create(worker_thread_);
+    if (allocator_factory_.get() == NULL)
+      return false;
+  }
+
+  cricket::DummyDeviceManager* device_manager(
+      new cricket::DummyDeviceManager());
+  // TODO: Need to make sure only one VoE is created inside
+  // WebRtcMediaEngine.
+  cricket::WebRtcMediaEngine* webrtc_media_engine(
+      new cricket::WebRtcMediaEngine(default_adm_.get(),
+                                     NULL,   // No secondary adm.
+                                     NULL));  // No vcm available.
+
+  channel_manager_.reset(new cricket::ChannelManager(
+      webrtc_media_engine, device_manager, worker_thread_));
+  if (!channel_manager_->Init()) {
+    return false;
+  }
+  return true;
+}
+
+// Terminate what we created on the signaling thread.
+void PeerConnectionFactory::Terminate_s() {
+  channel_manager_.reset(NULL);
+  if (owns_ptrs_) {
+    allocator_factory_ = NULL;
+  }
+}
+
+scoped_refptr<PeerConnectionInterface>
+PeerConnectionFactory::CreatePeerConnection(
+    const std::string& configuration,
+    PeerConnectionObserver* observer) {
+  CreatePeerConnectionParams params(configuration, observer);
+  signaling_thread_->Send(this, MSG_CREATE_PEERCONNECTION, &params);
+  return params.peerconnection;
+}
+
+scoped_refptr<PeerConnectionInterface>
+PeerConnectionFactory::CreatePeerConnection_s(
+    const std::string& configuration,
+    PeerConnectionObserver* observer) {
+  talk_base::RefCountedObject<PeerConnection>* pc(
+      new talk_base::RefCountedObject<PeerConnection>(this));
+  if (!pc->Initialize(configuration, observer)) {
+    delete pc;
+    pc = NULL;
+  }
+  return pc;
+}
+
+scoped_refptr<LocalMediaStreamInterface>
+PeerConnectionFactory::CreateLocalMediaStream(
+      const std::string& label) {
+  return MediaStreamProxy::Create(label, signaling_thread_);
+}
+
+scoped_refptr<LocalVideoTrackInterface>
+PeerConnectionFactory::CreateLocalVideoTrack(
+    const std::string& label,
+    VideoCaptureModule* video_device) {
+  return VideoTrackProxy::CreateLocal(label, video_device,
+                                      signaling_thread_);
+}
+
+scoped_refptr<LocalAudioTrackInterface>
+PeerConnectionFactory::CreateLocalAudioTrack(
+    const std::string& label,
+    AudioDeviceModule* audio_device) {
+  return AudioTrackProxy::CreateLocal(label, audio_device,
+                                      signaling_thread_);
+}
+
+cricket::ChannelManager* PeerConnectionFactory::channel_manager() {
+  return channel_manager_.get();
+}
+
+talk_base::Thread* PeerConnectionFactory::signaling_thread() {
+  return signaling_thread_;
+}
+
+talk_base::Thread* PeerConnectionFactory::worker_thread() {
+  return worker_thread_;
+}
+
+PortAllocatorFactoryInterface* PeerConnectionFactory::port_allocator_factory() {
+  return allocator_factory_.get();
+}
+
+}  // namespace webrtc
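
Every public call on the factory is marshalled onto the signaling thread with talk_base::Thread::Send() and a MessageData payload, as Initialize() above shows. The following is a stripped-down sketch of that pattern; Worker and kMsgDoWork are illustrative names, not part of this patch:

  #include "talk/base/messagehandler.h"
  #include "talk/base/thread.h"

  enum { kMsgDoWork = 1 };

  class Worker : public talk_base::MessageHandler {
   public:
    explicit Worker(talk_base::Thread* thread) : thread_(thread) {}

    // Blocks the caller until OnMessage has run on |thread_| and then
    // returns the result, just like PeerConnectionFactory::Initialize.
    bool DoWork() {
      talk_base::TypedMessageData<bool> result(false);
      thread_->Send(this, kMsgDoWork, &result);
      return result.data();
    }

    virtual void OnMessage(talk_base::Message* msg) {
      if (msg->message_id == kMsgDoWork) {
        talk_base::TypedMessageData<bool>* data =
            static_cast<talk_base::TypedMessageData<bool>*>(msg->pdata);
        data->data() = true;  // The real work would happen here.
      }
    }

   private:
    talk_base::Thread* thread_;
  };

Asynchronous calls such as CommitStreamChanges() use Thread::Post() instead and delete the MessageData inside OnMessage, as the PeerConnection implementation further below does.
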
diff --git a/talk/app/webrtc/peerconnectionfactoryimpl.h b/talk/app/webrtc/peerconnectionfactoryimpl.h
new file mode 100644
index 0000000..9a64f58
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionfactoryimpl.h
@@ -0,0 +1,94 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+#ifndef TALK_APP_WEBRTC_PEERCONNECTIONFACTORYIMPL_H_
+#define TALK_APP_WEBRTC_PEERCONNECTIONFACTORYIMPL_H_
+
+#include <string>
+
+#include "talk/base/scoped_ptr.h"
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/base/thread.h"
+#include "talk/session/phone/channelmanager.h"
+
+namespace webrtc {
+
+class PeerConnectionFactory : public PeerConnectionFactoryInterface,
+                              public talk_base::MessageHandler {
+ public:
+  talk_base::scoped_refptr<PeerConnectionInterface> CreatePeerConnection(
+      const std::string& config,
+      PeerConnectionObserver* observer);
+  bool Initialize();
+
+  virtual talk_base::scoped_refptr<LocalMediaStreamInterface>
+      CreateLocalMediaStream(const std::string& label);
+
+  virtual talk_base::scoped_refptr<LocalVideoTrackInterface>
+      CreateLocalVideoTrack(const std::string& label,
+                            VideoCaptureModule* video_device);
+
+  virtual talk_base::scoped_refptr<LocalAudioTrackInterface>
+      CreateLocalAudioTrack(const std::string& label,
+                            AudioDeviceModule* audio_device);
+
+  virtual cricket::ChannelManager* channel_manager();
+  virtual talk_base::Thread* signaling_thread();
+  virtual talk_base::Thread* worker_thread();
+  virtual PortAllocatorFactoryInterface* port_allocator_factory();
+
+ protected:
+  PeerConnectionFactory();
+  PeerConnectionFactory(
+      talk_base::Thread* worker_thread,
+      talk_base::Thread* signaling_thread,
+      PortAllocatorFactoryInterface* port_allocator_factory,
+      AudioDeviceModule* default_adm);
+  virtual ~PeerConnectionFactory();
+
+ private:
+  bool Initialize_s();
+  void Terminate_s();
+  talk_base::scoped_refptr<PeerConnectionInterface> CreatePeerConnection_s(
+      const std::string& configuration,
+      PeerConnectionObserver* observer);
+  // Implements talk_base::MessageHandler.
+  void OnMessage(talk_base::Message* msg);
+
+  bool owns_ptrs_;
+  talk_base::Thread* signaling_thread_;
+  talk_base::Thread* worker_thread_;
+  talk_base::scoped_refptr<PortAllocatorFactoryInterface> allocator_factory_;
+  // External Audio device used for audio playback.
+  talk_base::scoped_refptr<AudioDeviceModule> default_adm_;
+  talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_PEERCONNECTIONFACTORYIMPL_H_
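
The protected constructor/destructor pattern used by PeerConnectionFactory (and by the interfaces it implements) means instances can only be created through talk_base::RefCountedObject and released through scoped_refptr, which is what CreatePeerConnectionFactory() does. A generic sketch of the idiom, with Foo as a placeholder name and the talk_base header paths assumed:

  #include "talk/base/refcount.h"
  #include "talk/base/scoped_ref_ptr.h"

  class Foo : public talk_base::RefCountInterface {
   public:
    void DoSomething() {}

   protected:
    Foo() {}
    ~Foo() {}
  };

  // RefCountedObject<Foo> derives from Foo, so it can reach the protected
  // constructor and destructor; users only ever hold a scoped_refptr.
  talk_base::scoped_refptr<Foo> foo(new talk_base::RefCountedObject<Foo>());
  foo->DoSomething();  // Deleted when the last reference goes away.
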
diff --git a/talk/app/webrtc/peerconnectionimpl.cc b/talk/app/webrtc/peerconnectionimpl.cc
index 28e4685..d5607e1 100644
--- a/talk/app/webrtc/peerconnectionimpl.cc
+++ b/talk/app/webrtc/peerconnectionimpl.cc
@@ -1,6 +1,6 @@
 /*
  * libjingle
- * Copyright 2004--2011, Google Inc.
+ * Copyright 2011, Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
@@ -27,199 +27,356 @@
 
 #include "talk/app/webrtc/peerconnectionimpl.h"
 
-#include "talk/app/webrtc/webrtcjson.h"
-#include "talk/app/webrtc/webrtcsession.h"
-#include "talk/base/basicpacketsocketfactory.h"
-#include "talk/base/helpers.h"
+#include <vector>
+
+#include "talk/app/webrtc/mediastreamhandler.h"
+#include "talk/app/webrtc/streamcollectionimpl.h"
 #include "talk/base/logging.h"
 #include "talk/base/stringencode.h"
-#include "talk/p2p/base/session.h"
-#include "talk/p2p/client/basicportallocator.h"
+#include "talk/session/phone/channelmanager.h"
+
+namespace {
+
+// The number of tokens in the config string.
+static const size_t kConfigTokens = 2;
+static const size_t kServiceCount = 5;
+// The default STUN port.
+static const int kDefaultPort = 3478;
+
+// NOTE: Must be in the same order as the ServiceType enum.
+static const char* kValidServiceTypes[kServiceCount] = {
+    "STUN", "STUNS", "TURN", "TURNS", "INVALID" };
+
+enum ServiceType {
+  STUN,     // Indicates a STUN server.
+  STUNS,    // Indicates a STUN server used with a TLS session.
+  TURN,     // Indicates a TURN server.
+  TURNS,    // Indicates a TURN server used with a TLS session.
+  INVALID,  // Unknown.
+};
+
+enum {
+  MSG_COMMITSTREAMCHANGES = 1,
+  MSG_PROCESSSIGNALINGMESSAGE = 2,
+  MSG_RETURNREMOTEMEDIASTREAMS = 3,
+  MSG_CLOSE = 4,
+  MSG_READYSTATE = 5,
+  MSG_SDPSTATE = 6,
+  MSG_TERMINATE = 7
+};
+
+typedef webrtc::PortAllocatorFactoryInterface::StunConfiguration
+    StunConfiguration;
+typedef webrtc::PortAllocatorFactoryInterface::TurnConfiguration
+    TurnConfiguration;
+
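+// Accepts a single "<type> <address>[:<port>]" entry, for example
+// "STUN stun.l.google.com:19302" or "TURN turn.example.com" (the default
+// port 3478 is then used). STUNS and TURNS entries are currently rejected.
+// The TURN host name above is only an illustrative placeholder.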
+static bool ParseConfigString(const std::string& config,
+                              std::vector<StunConfiguration>* stun_config,
+                              std::vector<TurnConfiguration>* turn_config) {
+  std::vector<std::string> tokens;
+  talk_base::tokenize(config, ' ', &tokens);
+
+  if (tokens.size() != kConfigTokens) {
+    LOG(WARNING) << "Invalid config string";
+    return false;
+  }
+
+  ServiceType service_type = INVALID;
+
+  const std::string& type = tokens[0];
+  for (size_t i = 0; i < kServiceCount; ++i) {
+    if (type.compare(kValidServiceTypes[i]) == 0) {
+      service_type = static_cast<ServiceType>(i);
+      break;
+    }
+  }
+
+  if (service_type == INVALID) {
+    LOG(WARNING) << "Invalid service type: " << type;
+    return false;
+  }
+  std::string service_address = tokens[1];
+
+  int port;
+  tokens.clear();
+  talk_base::tokenize(service_address, ':', &tokens);
+  if (tokens.size() != kConfigTokens) {
+    port = kDefaultPort;
+  } else {
+    port = talk_base::FromString<int>(tokens[1]);
+    if (port <= 0 || port > 0xffff) {
+      LOG(WARNING) << "Invalid port: " << tokens[1];
+      return false;
+    }
+  }
+
+  // TODO: Currently the specification does not tell us how to parse
+  // multiple addresses, username and password from the configuration string.
+  switch (service_type) {
+    case STUN:
+      stun_config->push_back(StunConfiguration(service_address, port));
+      break;
+    case TURN:
+      turn_config->push_back(TurnConfiguration(service_address, port, "", ""));
+      break;
+    case TURNS:
+    case STUNS:
+    case INVALID:
+    default:
+      ASSERT(!"Configuration not supported");
+      return false;
+  }
+  return true;
+}
+
+struct SignalingParams : public talk_base::MessageData {
+  SignalingParams(const std::string& msg,
+                  webrtc::StreamCollectionInterface* local_streams)
+      : msg(msg),
+        local_streams(local_streams) {}
+  const std::string msg;
+  talk_base::scoped_refptr<webrtc::StreamCollectionInterface> local_streams;
+};
+
+struct StreamCollectionParams : public talk_base::MessageData {
+  explicit StreamCollectionParams(webrtc::StreamCollectionInterface* streams)
+      : streams(streams) {}
+  talk_base::scoped_refptr<webrtc::StreamCollectionInterface> streams;
+};
+
+struct ReadyStateMessage : public talk_base::MessageData {
+  ReadyStateMessage() : state(webrtc::PeerConnectionInterface::kNew) {}
+  webrtc::PeerConnectionInterface::ReadyState state;
+};
+
+struct SdpStateMessage : public talk_base::MessageData {
+  SdpStateMessage() : state(webrtc::PeerConnectionInterface::kSdpNew) {}
+  webrtc::PeerConnectionInterface::SdpState state;
+};
+
+}  // namespace
 
 namespace webrtc {
 
-
-PeerConnectionImpl::PeerConnectionImpl(
-    cricket::PortAllocator* port_allocator,
-    cricket::ChannelManager* channel_manager,
-    talk_base::Thread* signaling_thread)
-  : port_allocator_(port_allocator),
-    channel_manager_(channel_manager),
-    signaling_thread_(signaling_thread),
-    event_callback_(NULL),
-    session_(NULL) {
+PeerConnection::PeerConnection(PeerConnectionFactory* factory)
+    : factory_(factory),
+      observer_(NULL),
+      ready_state_(kNew),
+      sdp_state_(kSdpNew),
+      local_media_streams_(StreamCollection::Create()) {
 }
 
-PeerConnectionImpl::~PeerConnectionImpl() {
+PeerConnection::~PeerConnection() {
+  signaling_thread()->Clear(this);
+  signaling_thread()->Send(this, MSG_TERMINATE);
 }
 
-bool PeerConnectionImpl::Init() {
-  std::string sid;
-  talk_base::CreateRandomString(8, &sid);
-  const bool incoming = false;
-  // default outgoing direction
-  session_.reset(CreateMediaSession(sid, incoming));
-  if (session_.get() == NULL) {
-    ASSERT(false && "failed to initialize a session");
+// Clean up what needs to be cleaned up on the signaling thread.
+void PeerConnection::Terminate_s() {
+  stream_handler_.reset();
+  signaling_.reset();
+  session_.reset();
+  port_allocator_.reset();
+}
+
+bool PeerConnection::Initialize(const std::string& configuration,
+                                PeerConnectionObserver* observer) {
+  ASSERT(observer != NULL);
+  if (!observer)
     return false;
-  }
-  return true;
-}
+  observer_ = observer;
+  std::vector<PortAllocatorFactoryInterface::StunConfiguration> stun_config;
+  std::vector<PortAllocatorFactoryInterface::TurnConfiguration> turn_config;
 
-void PeerConnectionImpl::RegisterObserver(PeerConnectionObserver* observer) {
-  // This assert is to catch cases where two observer pointers are registered.
-  // We only support one and if another is to be used, the current one must be
-  // cleared first.
-  ASSERT(observer == NULL || event_callback_ == NULL);
-  event_callback_ = observer;
-}
-
-bool PeerConnectionImpl::SignalingMessage(
-    const std::string& signaling_message) {
-  // Deserialize signaling message
-  cricket::SessionDescription* incoming_sdp = NULL;
-  std::vector<cricket::Candidate> candidates;
-  if (!ParseJsonSignalingMessage(signaling_message,
-                                 &incoming_sdp, &candidates)) {
+  if (!ParseConfigString(configuration, &stun_config, &turn_config))
     return false;
-  }
 
-  bool ret = false;
-  if (GetReadyState() == NEW) {
-    // set direction to incoming, as message received first
-    session_->set_incoming(true);
-    ret = session_->OnInitiateMessage(incoming_sdp, candidates);
-  } else {
-    ret = session_->OnRemoteDescription(incoming_sdp, candidates);
-  }
-  return ret;
+  port_allocator_.reset(factory_->port_allocator_factory()->CreatePortAllocator(
+      stun_config, turn_config));
+
+  session_.reset(new WebRtcSession(factory_->channel_manager(),
+                                   factory_->signaling_thread(),
+                                   factory_->worker_thread(),
+                                   port_allocator_.get()));
+  signaling_.reset(new PeerConnectionSignaling(factory_->signaling_thread(),
+                                               session_.get()));
+  stream_handler_.reset(new MediaStreamHandlers(session_.get()));
+
+  signaling_->SignalNewPeerConnectionMessage.connect(
+      this, &PeerConnection::OnNewPeerConnectionMessage);
+  signaling_->SignalRemoteStreamAdded.connect(
+      this, &PeerConnection::OnRemoteStreamAdded);
+  signaling_->SignalRemoteStreamRemoved.connect(
+      this, &PeerConnection::OnRemoteStreamRemoved);
+  signaling_->SignalStateChange.connect(
+      this, &PeerConnection::OnSignalingStateChange);
+  // Register with WebRtcSession
+  session_->RegisterObserver(signaling_.get());
+
+  // Initialize the WebRtcSession. It creates transport channels etc.
+  const bool result = session_->Initialize();
+  if (result)
+    ChangeReadyState(PeerConnectionInterface::kNegotiating);
+  return result;
 }
 
-WebRtcSession* PeerConnectionImpl::CreateMediaSession(
-    const std::string& id, bool incoming) {
-  ASSERT(port_allocator_ != NULL);
-  WebRtcSession* session = new WebRtcSession(id, incoming,
-      port_allocator_, channel_manager_, signaling_thread_);
-
-  if (session->Initiate()) {
-    session->SignalAddStream.connect(
-        this,
-        &PeerConnectionImpl::OnAddStream);
-    session->SignalRemoveStream.connect(
-        this,
-        &PeerConnectionImpl::OnRemoveStream);
-    session->SignalLocalDescription.connect(
-        this,
-        &PeerConnectionImpl::OnLocalDescription);
-    session->SignalFailedCall.connect(
-        this,
-        &PeerConnectionImpl::OnFailedCall);
-  } else {
-    delete session;
-    session = NULL;
-  }
-  return session;
+talk_base::scoped_refptr<StreamCollectionInterface>
+PeerConnection::local_streams() {
+  return local_media_streams_;
 }
 
-bool PeerConnectionImpl::AddStream(const std::string& stream_id, bool video) {
-  bool ret = false;
-  if (session_->HasStream(stream_id)) {
-    ASSERT(false && "A stream with this name already exists");
-  } else {
-    if (!video) {
-      ret = !session_->HasAudioChannel() &&
-            session_->CreateVoiceChannel(stream_id);
-    } else {
-      ret = !session_->HasVideoChannel() &&
-            session_->CreateVideoChannel(stream_id);
+talk_base::scoped_refptr<StreamCollectionInterface>
+PeerConnection::remote_streams() {
+  StreamCollectionParams msg(NULL);
+  signaling_thread()->Send(this, MSG_RETURNREMOTEMEDIASTREAMS, &msg);
+  return msg.streams;
+}
+
+void PeerConnection::ProcessSignalingMessage(const std::string& msg) {
+  SignalingParams* parameter(new SignalingParams(
+      msg, StreamCollection::Create(local_media_streams_)));
+  signaling_thread()->Post(this, MSG_PROCESSSIGNALINGMESSAGE, parameter);
+}
+
+void PeerConnection::AddStream(LocalMediaStreamInterface* local_stream) {
+  local_media_streams_->AddStream(local_stream);
+}
+
+void PeerConnection::RemoveStream(LocalMediaStreamInterface* remove_stream) {
+  local_media_streams_->RemoveStream(remove_stream);
+}
+
+void PeerConnection::CommitStreamChanges() {
+  StreamCollectionParams* msg(new StreamCollectionParams(
+          StreamCollection::Create(local_media_streams_)));
+  signaling_thread()->Post(this, MSG_COMMITSTREAMCHANGES, msg);
+}
+
+void PeerConnection::Close() {
+  signaling_thread()->Send(this, MSG_CLOSE);
+}
+
+PeerConnectionInterface::ReadyState PeerConnection::ready_state() {
+  ReadyStateMessage msg;
+  signaling_thread()->Send(this, MSG_READYSTATE, &msg);
+  return msg.state;
+}
+
+PeerConnectionInterface::SdpState PeerConnection::sdp_state() {
+  SdpStateMessage msg;
+  signaling_thread()->Send(this, MSG_SDPSTATE, &msg);
+  return msg.state;
+}
+
+void PeerConnection::OnMessage(talk_base::Message* msg) {
+  talk_base::MessageData* data = msg->pdata;
+  switch (msg->message_id) {
+    case MSG_COMMITSTREAMCHANGES: {
+      if (ready_state_ != PeerConnectionInterface::kClosed &&
+          ready_state_ != PeerConnectionInterface::kClosing) {
+        StreamCollectionParams* param(
+            static_cast<StreamCollectionParams*> (data));
+        signaling_->CreateOffer(param->streams);
+        stream_handler_->CommitLocalStreams(param->streams);
+      }
+      delete data;  // Because it is Posted.
+      break;
     }
-  }
-  return ret;
-}
-
-bool PeerConnectionImpl::RemoveStream(const std::string& stream_id) {
-  return session_->RemoveStream(stream_id);
-}
-
-void PeerConnectionImpl::OnLocalDescription(
-    const cricket::SessionDescription* desc,
-    const std::vector<cricket::Candidate>& candidates) {
-  if (!desc) {
-    LOG(WARNING) << "no local SDP ";
-    return;
-  }
-
-  std::string message;
-  if (GetJsonSignalingMessage(desc, candidates, &message)) {
-    if (event_callback_) {
-      event_callback_->OnSignalingMessage(message);
+    case MSG_PROCESSSIGNALINGMESSAGE: {
+      if (ready_state_ != PeerConnectionInterface::kClosed) {
+        SignalingParams* params(static_cast<SignalingParams*> (data));
+        signaling_->ProcessSignalingMessage(params->msg, params->local_streams);
+      }
+      delete data;  // Because it is Posted.
+      break;
     }
+    case MSG_RETURNREMOTEMEDIASTREAMS: {
+      StreamCollectionParams* param(
+          static_cast<StreamCollectionParams*> (data));
+      param->streams = StreamCollection::Create(signaling_->remote_streams());
+      break;
+    }
+    case MSG_CLOSE: {
+      if (ready_state_ != PeerConnectionInterface::kClosed) {
+        ChangeReadyState(PeerConnectionInterface::kClosing);
+        signaling_->SendShutDown();
+      }
+      break;
+    }
+    case MSG_READYSTATE: {
+      ReadyStateMessage* msg(static_cast<ReadyStateMessage*> (data));
+      msg->state = ready_state_;
+      break;
+    }
+    case MSG_SDPSTATE: {
+      SdpStateMessage* msg(static_cast<SdpStateMessage*> (data));
+      msg->state = sdp_state_;
+      break;
+    }
+    case MSG_TERMINATE: {
+      Terminate_s();
+      break;
+    }
+    default:
+      ASSERT(!"NOT IMPLEMENTED");
+      break;
   }
 }
 
-void PeerConnectionImpl::OnFailedCall() {
-  // TODO: implement.
+void PeerConnection::OnNewPeerConnectionMessage(const std::string& message) {
+  observer_->OnSignalingMessage(message);
 }
 
-bool PeerConnectionImpl::SetAudioDevice(const std::string& wave_in_device,
-                                        const std::string& wave_out_device,
-                                        int opts) {
-  return channel_manager_->SetAudioOptions(wave_in_device,
-                                           wave_out_device,
-                                           opts);
+void PeerConnection::OnRemoteStreamAdded(MediaStreamInterface* remote_stream) {
+  stream_handler_->AddRemoteStream(remote_stream);
+  observer_->OnAddStream(remote_stream);
 }
 
-bool PeerConnectionImpl::SetLocalVideoRenderer(
-    cricket::VideoRenderer* renderer) {
-  return channel_manager_->SetLocalRenderer(renderer);
+void PeerConnection::OnRemoteStreamRemoved(
+    MediaStreamInterface* remote_stream) {
+  stream_handler_->RemoveRemoteStream(remote_stream);
+  observer_->OnRemoveStream(remote_stream);
 }
 
-bool PeerConnectionImpl::SetVideoRenderer(const std::string& stream_id,
-                                          cricket::VideoRenderer* renderer) {
-  return session_->SetVideoRenderer(stream_id, renderer);
-}
-
-bool PeerConnectionImpl::SetVideoCapture(const std::string& cam_device) {
-  return channel_manager_->SetVideoOptions(cam_device);
-}
-
-bool PeerConnectionImpl::Connect() {
-  return session_->Connect();
-}
-
-// TODO - Close is not used anymore, should be removed.
-bool PeerConnectionImpl::Close() {
-  session_->RemoveAllStreams();
-  return true;
-}
-
-void PeerConnectionImpl::OnAddStream(const std::string& stream_id,
-                                     bool video) {
-  if (event_callback_) {
-    event_callback_->OnAddStream(stream_id, video);
+void PeerConnection::OnSignalingStateChange(
+    PeerConnectionSignaling::State state) {
+  switch (state) {
+    case PeerConnectionSignaling::kInitializing:
+      break;
+    case PeerConnectionSignaling::kIdle:
+      if (ready_state_ == PeerConnectionInterface::kNegotiating)
+        ChangeReadyState(PeerConnectionInterface::kActive);
+      ChangeSdpState(PeerConnectionInterface::kSdpIdle);
+      break;
+    case PeerConnectionSignaling::kWaitingForAnswer:
+      ChangeSdpState(PeerConnectionInterface::kSdpWaiting);
+      break;
+    case PeerConnectionSignaling::kWaitingForOK:
+      ChangeSdpState(PeerConnectionInterface::kSdpWaiting);
+      break;
+    case PeerConnectionSignaling::kShutingDown:
+      ChangeReadyState(PeerConnectionInterface::kClosing);
+      break;
+    case PeerConnectionSignaling::kShutdownComplete:
+      ChangeReadyState(PeerConnectionInterface::kClosed);
+      signaling_thread()->Post(this, MSG_TERMINATE);
+      break;
+    default:
+      ASSERT(!"NOT IMPLEMENTED");
+      break;
   }
 }
 
-void PeerConnectionImpl::OnRemoveStream(const std::string& stream_id,
-                                        bool video) {
-  if (event_callback_) {
-    event_callback_->OnRemoveStream(stream_id, video);
-  }
+void PeerConnection::ChangeReadyState(
+    PeerConnectionInterface::ReadyState ready_state) {
+  ready_state_ = ready_state;
+  observer_->OnStateChange(PeerConnectionObserver::kReadyState);
 }
 
-PeerConnectionImpl::ReadyState PeerConnectionImpl::GetReadyState() {
-  ReadyState ready_state;
-  cricket::BaseSession::State state = session_->state();
-  if (state == cricket::BaseSession::STATE_INIT) {
-    ready_state = NEW;
-  } else if (state == cricket::BaseSession::STATE_INPROGRESS) {
-    ready_state = ACTIVE;
-  } else if (state == cricket::BaseSession::STATE_DEINIT) {
-    ready_state = CLOSED;
-  } else {
-    ready_state = NEGOTIATING;
-  }
-  return ready_state;
+void PeerConnection::ChangeSdpState(
+    PeerConnectionInterface::SdpState sdp_state) {
+  sdp_state_ = sdp_state;
+  observer_->OnStateChange(PeerConnectionObserver::kSdpState);
 }
 
 }  // namespace webrtc
diff --git a/talk/app/webrtc/peerconnectionimpl.h b/talk/app/webrtc/peerconnectionimpl.h
index 6ff2f25..2686cf2 100644
--- a/talk/app/webrtc/peerconnectionimpl.h
+++ b/talk/app/webrtc/peerconnectionimpl.h
@@ -1,6 +1,6 @@
 /*
  * libjingle
- * Copyright 2004--2011, Google Inc.
+ * Copyright 2011, Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
@@ -28,68 +28,83 @@
 #ifndef TALK_APP_WEBRTC_PEERCONNECTIONIMPL_H_
 #define TALK_APP_WEBRTC_PEERCONNECTIONIMPL_H_
 
+#include <map>
 #include <string>
-#include <vector>
 
 #include "talk/app/webrtc/peerconnection.h"
-#include "talk/base/sigslot.h"
+#include "talk/app/webrtc/peerconnectionfactoryimpl.h"
+#include "talk/app/webrtc/peerconnectionsignaling.h"
+#include "talk/app/webrtc/streamcollectionimpl.h"
+#include "talk/app/webrtc/webrtcsession.h"
 #include "talk/base/scoped_ptr.h"
-#include "talk/base/thread.h"
-#include "talk/session/phone/channelmanager.h"
-
-namespace cricket {
-class ChannelManager;
-class PortAllocator;
-class SessionDescription;
-}
+#include "talk/p2p/client/httpportallocator.h"
 
 namespace webrtc {
-class WebRtcSession;
+class MediaStreamHandlers;
 
-class PeerConnectionImpl : public PeerConnection,
-                           public sigslot::has_slots<> {
+// PeerConnection is the implementation of the PeerConnectionInterface.
+// It uses PeerConnectionSignaling and WebRtcSession to provide
+// the PeerConnection functionality.
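+//
+// Example usage (a sketch mirroring peerconnectionimpl_unittest.cc; the
+// observer, the port allocator factory and the local stream are assumed to
+// already exist):
+//
+//   talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory(
+//       CreatePeerConnectionFactory(talk_base::Thread::Current(),
+//                                   talk_base::Thread::Current(),
+//                                   port_allocator_factory.get(), NULL));
+//   talk_base::scoped_refptr<PeerConnectionInterface> pc(
+//       factory->CreatePeerConnection("STUN stun.l.google.com:19302",
+//                                     &observer));
+//   pc->AddStream(local_stream);   // local_stream is a
+//                                  // LocalMediaStreamInterface*.
+//   pc->CommitStreamChanges();     // Triggers a ROAP offer to the remote peer.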
+class PeerConnection : public PeerConnectionInterface,
+                       public talk_base::MessageHandler,
+                       public sigslot::has_slots<> {
  public:
-  PeerConnectionImpl(cricket::PortAllocator* port_allocator,
-                     cricket::ChannelManager* channel_manager,
-                     talk_base::Thread* signaling_thread);
-  virtual ~PeerConnectionImpl();
+  explicit PeerConnection(PeerConnectionFactory* factory);
 
-  // PeerConnection interfaces
-  virtual void RegisterObserver(PeerConnectionObserver* observer);
-  virtual bool SignalingMessage(const std::string& msg);
-  virtual bool AddStream(const std::string& stream_id, bool video);
-  virtual bool RemoveStream(const std::string& stream_id);
-  virtual bool Connect();
-  virtual bool Close();
-  virtual bool SetAudioDevice(const std::string& wave_in_device,
-                              const std::string& wave_out_device, int opts);
-  virtual bool SetLocalVideoRenderer(cricket::VideoRenderer* renderer);
-  virtual bool SetVideoRenderer(const std::string& stream_id,
-                                cricket::VideoRenderer* renderer);
-  virtual bool SetVideoCapture(const std::string& cam_device);
-  virtual ReadyState GetReadyState();
+  bool Initialize(const std::string& configuration,
+                  PeerConnectionObserver* observer);
 
-  cricket::ChannelManager* channel_manager() {
-    return channel_manager_;
+  virtual ~PeerConnection();
+
+  virtual void ProcessSignalingMessage(const std::string& msg);
+  virtual bool Send(const std::string& msg) {
+    // TODO: implement
+    ASSERT(false);
+    return false;
   }
-
-  // Callbacks from PeerConnectionImplCallbacks
-  void OnAddStream(const std::string& stream_id, bool video);
-  void OnRemoveStream(const std::string& stream_id, bool video);
-  void OnLocalDescription(
-      const cricket::SessionDescription* desc,
-      const std::vector<cricket::Candidate>& candidates);
-  void OnFailedCall();
-  bool Init();
+  virtual talk_base::scoped_refptr<StreamCollectionInterface> local_streams();
+  virtual talk_base::scoped_refptr<StreamCollectionInterface> remote_streams();
+  virtual void AddStream(LocalMediaStreamInterface* stream);
+  virtual void RemoveStream(LocalMediaStreamInterface* stream);
+  virtual void CommitStreamChanges();
+  virtual void Close();
+  virtual ReadyState ready_state();
+  virtual SdpState sdp_state();
 
  private:
-  WebRtcSession* CreateMediaSession(const std::string& id, bool incoming);
+  // Implement talk_base::MessageHandler.
+  void OnMessage(talk_base::Message* msg);
 
-  cricket::PortAllocator* port_allocator_;
-  cricket::ChannelManager* channel_manager_;
-  talk_base::Thread* signaling_thread_;
-  PeerConnectionObserver* event_callback_;
+  // Signals from PeerConnectionSignaling.
+  void OnNewPeerConnectionMessage(const std::string& message);
+  void OnRemoteStreamAdded(MediaStreamInterface* remote_stream);
+  void OnRemoteStreamRemoved(MediaStreamInterface* remote_stream);
+  void OnSignalingStateChange(PeerConnectionSignaling::State state);
+
+  void ChangeReadyState(PeerConnectionInterface::ReadyState ready_state);
+  void ChangeSdpState(PeerConnectionInterface::SdpState sdp_state);
+  void Terminate_s();
+
+  talk_base::Thread* signaling_thread() {
+    return factory_->signaling_thread();
+  }
+
+  // Storing the factory as a scoped reference pointer ensures that the memory
+  // in the PeerConnectionFactoryImpl remains available as long as the
+  // PeerConnection is running. The factory is passed to PeerConnection as a
+  // raw pointer, but since the reference counting is implemented in
+  // PeerConnectionFactoryInterface, all instances created using the raw
+  // pointer will refer to the same reference count.
+  talk_base::scoped_refptr<PeerConnectionFactory> factory_;
+  PeerConnectionObserver* observer_;
+  ReadyState ready_state_;
+  SdpState sdp_state_;
+  talk_base::scoped_refptr<StreamCollection> local_media_streams_;
+
+  talk_base::scoped_ptr<cricket::PortAllocator> port_allocator_;
   talk_base::scoped_ptr<WebRtcSession> session_;
+  talk_base::scoped_ptr<PeerConnectionSignaling> signaling_;
+  talk_base::scoped_ptr<MediaStreamHandlers> stream_handler_;
 };
 
 }  // namespace webrtc
diff --git a/talk/app/webrtc/peerconnectionimpl_unittest.cc b/talk/app/webrtc/peerconnectionimpl_unittest.cc
new file mode 100644
index 0000000..e690b2c
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionimpl_unittest.cc
@@ -0,0 +1,255 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/fakeportallocatorfactory.h"
+#include "talk/app/webrtc/mediastreamimpl.h"
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/app/webrtc/peerconnectionimpl.h"
+#include "talk/app/webrtc/roapmessages.h"
+#include "talk/base/scoped_ptr.h"
+#include "talk/base/thread.h"
+#include "talk/base/gunit.h"
+
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kStreamLabel2[] = "local_stream_2";
+static const char kStunConfiguration[] = "STUN stun.l.google.com:19302";
+static const uint32 kTimeout = 5000U;
+
+using talk_base::scoped_ptr;
+using talk_base::scoped_refptr;
+using webrtc::FakePortAllocatorFactory;
+using webrtc::LocalMediaStreamInterface;
+using webrtc::LocalVideoTrackInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionObserver;
+using webrtc::PortAllocatorFactoryInterface;
+using webrtc::RoapMessageBase;
+using webrtc::RoapOffer;
+
+
+// Create ROAP message for shutdown.
+static std::string CreateShutdownMessage() {
+  webrtc::RoapShutdown shutdown("dummy_session", "", "", 1);
+  return shutdown.Serialize();
+}
+
+// Create a ROAP answer message.
+// The session description in the answer is set to the same as in the offer.
+static std::string CreateAnswerMessage(const RoapMessageBase& msg) {
+  webrtc::RoapOffer offer(msg);
+  EXPECT_TRUE(offer.Parse());
+  webrtc::RoapAnswer answer(offer.offer_session_id(), "dummy_session",
+                            offer.session_token(), offer.response_token(),
+                            offer.seq(), offer.ReleaseSessionDescription(),
+                            offer.candidates());
+  return answer.Serialize();
+}
+
+// Create ROAP message to answer ok to a ROAP shutdown or ROAP answer message.
+static std::string CreateOkMessage(const RoapMessageBase& msg) {
+  webrtc::RoapOk ok(msg.offer_session_id(), "dummy_session",
+                    msg.session_token(), msg.response_token(), msg.seq());
+  return ok.Serialize();
+}
+
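+// Test observer that records the ready state, the SDP state, the last
+// received signaling message and the last added/removed remote stream.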
+class MockPeerConnectionObserver : public PeerConnectionObserver {
+ public:
+  void SetPeerConnectionInterface(PeerConnectionInterface* pc) {
+    pc_ = pc;
+    state_ = pc_->ready_state();
+    sdp_state_ = pc_->sdp_state();
+  }
+  virtual void OnError() {}
+  virtual void OnMessage(const std::string& msg) {}
+  virtual void OnSignalingMessage(const std::string& msg) {
+    EXPECT_TRUE(last_message_.Parse(msg));
+  }
+  virtual void OnStateChange(StateType state_changed) {
+    if (pc_.get() == NULL)
+      return;
+    switch (state_changed) {
+      case kReadyState:
+        state_ = pc_->ready_state();
+        break;
+      case kSdpState:
+        sdp_state_ = pc_->sdp_state();
+        break;
+      case kIceState:
+        ADD_FAILURE();
+        break;
+      default:
+        ADD_FAILURE();
+        break;
+    }
+  }
+  virtual void OnAddStream(MediaStreamInterface* stream) {
+    last_added_stream_ = stream;
+  }
+  virtual void OnRemoveStream(MediaStreamInterface* stream) {
+    last_removed_stream_ = stream;
+  }
+
+  // Returns the label of the last added stream.
+  // Empty string if no stream has been added.
+  std::string GetLastAddedStreamLabel() {
+    if (last_added_stream_.get())
+      return last_added_stream_->label();
+    return "";
+  }
+  std::string GetLastRemovedStreamLabel() {
+    if (last_removed_stream_.get())
+      return last_removed_stream_->label();
+    return "";
+  }
+
+  talk_base::scoped_refptr<PeerConnectionInterface> pc_;
+  RoapMessageBase last_message_;
+  PeerConnectionInterface::ReadyState state_;
+  PeerConnectionInterface::SdpState sdp_state_;
+
+ private:
+  scoped_refptr<MediaStreamInterface> last_added_stream_;
+  scoped_refptr<MediaStreamInterface> last_removed_stream_;
+};
+
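+// Test fixture that creates a PeerConnection using a fake port allocator
+// factory and the mock observer above.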
+class PeerConnectionImplTest : public testing::Test {
+ protected:
+  virtual void SetUp() {
+    port_allocator_factory_ = FakePortAllocatorFactory::Create();
+
+    pc_factory_ = webrtc::CreatePeerConnectionFactory(
+        talk_base::Thread::Current(), talk_base::Thread::Current(),
+        port_allocator_factory_.get(), NULL);
+    ASSERT_TRUE(pc_factory_.get() != NULL);
+    pc_ = pc_factory_->CreatePeerConnection(kStunConfiguration, &observer_);
+    ASSERT_TRUE(pc_.get() != NULL);
+    observer_.SetPeerConnectionInterface(pc_.get());
+    EXPECT_EQ(PeerConnectionInterface::kNegotiating, observer_.state_);
+  }
+
+  void AddStream(const std::string& label) {
+    // Create a local stream.
+    scoped_refptr<LocalMediaStreamInterface> stream(
+        pc_factory_->CreateLocalMediaStream(label));
+    scoped_refptr<LocalVideoTrackInterface> video_track(
+        pc_factory_->CreateLocalVideoTrack(label, NULL));
+    stream->AddTrack(video_track.get());
+    pc_->AddStream(stream);
+    pc_->CommitStreamChanges();
+
+    EXPECT_EQ_WAIT(PeerConnectionInterface::kSdpWaiting, observer_.sdp_state_,
+                   kTimeout);
+    // Wait for the ICE agent to find the candidates and send an offer.
+    EXPECT_EQ_WAIT(RoapMessageBase::kOffer, observer_.last_message_.type(),
+                   kTimeout);
+  }
+
+  scoped_refptr<PortAllocatorFactoryInterface> port_allocator_factory_;
+  scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory_;
+  scoped_refptr<PeerConnectionInterface> pc_;
+  MockPeerConnectionObserver observer_;
+};
+
+TEST_F(PeerConnectionImplTest, AddStream) {
+  AddStream(kStreamLabel1);
+  ASSERT_EQ(1u, pc_->local_streams()->count());
+  EXPECT_EQ(kStreamLabel1, pc_->local_streams()->at(0)->label());
+
+  EXPECT_EQ_WAIT(PeerConnectionInterface::kNegotiating, observer_.state_,
+                 kTimeout);
+  pc_->ProcessSignalingMessage(CreateAnswerMessage(observer_.last_message_));
+  EXPECT_EQ_WAIT(PeerConnectionInterface::kActive, observer_.state_, kTimeout);
+  EXPECT_EQ_WAIT(PeerConnectionInterface::kSdpIdle, observer_.sdp_state_,
+                 kTimeout);
+  // Since we answer with the same session description as we offered, we can
+  // check that OnAddStream has been called.
+  EXPECT_EQ(kStreamLabel1, observer_.GetLastAddedStreamLabel());
+  ASSERT_EQ(1u, pc_->remote_streams()->count());
+  EXPECT_EQ(kStreamLabel1, pc_->remote_streams()->at(0)->label());
+}
+
+TEST_F(PeerConnectionImplTest, DISABLED_UpdateStream) {
+  AddStream(kStreamLabel1);
+  WAIT(PeerConnectionInterface::kNegotiating == observer_.state_, kTimeout);
+  pc_->ProcessSignalingMessage(CreateAnswerMessage(observer_.last_message_));
+  WAIT(PeerConnectionInterface::kActive ==  observer_.state_, kTimeout);
+  WAIT(PeerConnectionInterface::kSdpIdle == observer_.sdp_state_, kTimeout);
+
+  AddStream(kStreamLabel2);
+  ASSERT_EQ(2u, pc_->local_streams()->count());
+  EXPECT_EQ(kStreamLabel2, pc_->local_streams()->at(1)->label());
+  EXPECT_EQ_WAIT(PeerConnectionInterface::kSdpWaiting, observer_.sdp_state_,
+                 kTimeout);
+  EXPECT_EQ(PeerConnectionInterface::kActive, observer_.state_);
+  pc_->ProcessSignalingMessage(CreateAnswerMessage(observer_.last_message_));
+  EXPECT_EQ_WAIT(PeerConnectionInterface::kSdpIdle, observer_.sdp_state_,
+                 kTimeout);
+  // Since we answer with the same session description as we offered, we can
+  // check that OnAddStream has been called.
+  EXPECT_EQ(kStreamLabel2, observer_.GetLastAddedStreamLabel());
+  ASSERT_EQ(2u, pc_->remote_streams()->count());
+  EXPECT_EQ(kStreamLabel2, pc_->remote_streams()->at(1)->label());
+
+  pc_->RemoveStream(static_cast<LocalMediaStreamInterface*>(
+      pc_->local_streams()->at(1)));
+  pc_->CommitStreamChanges();
+  EXPECT_EQ_WAIT(PeerConnectionInterface::kSdpWaiting, observer_.sdp_state_,
+                 kTimeout);
+  pc_->ProcessSignalingMessage(CreateAnswerMessage(observer_.last_message_));
+  EXPECT_EQ_WAIT(PeerConnectionInterface::kSdpIdle, observer_.sdp_state_,
+                 kTimeout);
+  EXPECT_EQ(kStreamLabel2, observer_.GetLastRemovedStreamLabel());
+  EXPECT_EQ(1u, pc_->local_streams()->count());
+}
+
+TEST_F(PeerConnectionImplTest, SendClose) {
+  pc_->Close();
+  EXPECT_EQ(RoapMessageBase::kShutdown, observer_.last_message_.type());
+  EXPECT_EQ(PeerConnectionInterface::kClosing, observer_.state_);
+  pc_->ProcessSignalingMessage(CreateOkMessage(observer_.last_message_));
+  EXPECT_EQ_WAIT(PeerConnectionInterface::kClosed, observer_.state_, kTimeout);
+}
+
+TEST_F(PeerConnectionImplTest, ReceiveClose) {
+  pc_->ProcessSignalingMessage(CreateShutdownMessage());
+  EXPECT_EQ_WAIT(RoapMessageBase::kOk, observer_.last_message_.type(),
+                 kTimeout);
+  EXPECT_EQ(PeerConnectionInterface::kClosed, observer_.state_);
+}
+
+TEST_F(PeerConnectionImplTest, ReceiveCloseWhileExpectingAnswer) {
+  AddStream(kStreamLabel1);
+
+  // Receive the shutdown message.
+  pc_->ProcessSignalingMessage(CreateShutdownMessage());
+  EXPECT_EQ_WAIT(RoapMessageBase::kOk, observer_.last_message_.type(),
+                 kTimeout);
+  EXPECT_EQ(PeerConnectionInterface::kClosed, observer_.state_);
+}
diff --git a/talk/app/webrtc/peerconnectionproxy.cc b/talk/app/webrtc/peerconnectionproxy.cc
deleted file mode 100644
index fca3ad4..0000000
--- a/talk/app/webrtc/peerconnectionproxy.cc
+++ /dev/null
@@ -1,313 +0,0 @@
-/*
- * libjingle
- * Copyright 2004--2011, Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- *  1. Redistributions of source code must retain the above copyright notice,
- *     this list of conditions and the following disclaimer.
- *  2. Redistributions in binary form must reproduce the above copyright notice,
- *     this list of conditions and the following disclaimer in the documentation
- *     and/or other materials provided with the distribution.
- *  3. The name of the author may not be used to endorse or promote products
- *     derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "talk/app/webrtc/peerconnectionproxy.h"
-
-#include "talk/app/webrtc/peerconnectionimpl.h"
-#include "talk/base/logging.h"
-
-namespace webrtc {
-
-enum {
-  MSG_WEBRTC_ADDSTREAM = 1,
-  MSG_WEBRTC_CLOSE,
-  MSG_WEBRTC_CONNECT,
-  MSG_WEBRTC_INIT,
-  MSG_WEBRTC_REGISTEROBSERVER,
-  MSG_WEBRTC_RELEASE,
-  MSG_WEBRTC_REMOVESTREAM,
-  MSG_WEBRTC_SETAUDIODEVICE,
-  MSG_WEBRTC_SETLOCALRENDERER,
-  MSG_WEBRTC_SETVIDEOCAPTURE,
-  MSG_WEBRTC_SETVIDEORENDERER,
-  MSG_WEBRTC_SIGNALINGMESSAGE,
-  MSG_WEBRTC_GETREADYSTATE,
-};
-
-struct AddStreamParams : public talk_base::MessageData {
-  AddStreamParams(const std::string& stream_id, bool video)
-      : stream_id(stream_id),
-        video(video),
-        result(false) {}
-
-  std::string stream_id;
-  bool video;
-  bool result;
-};
-
-struct RemoveStreamParams : public talk_base::MessageData {
-  explicit RemoveStreamParams(const std::string& stream_id)
-      : stream_id(stream_id),
-        result(false) {}
-
-  std::string stream_id;
-  bool result;
-};
-
-struct SignalingMsgParams : public talk_base::MessageData {
-  explicit SignalingMsgParams(const std::string& signaling_message)
-      : signaling_message(signaling_message),
-        result(false) {}
-
-  std::string signaling_message;
-  bool result;
-};
-
-struct SetAudioDeviceParams : public talk_base::MessageData {
-  SetAudioDeviceParams(const std::string& wave_in_device,
-                       const std::string& wave_out_device,
-                       int opts)
-      : wave_in_device(wave_in_device), wave_out_device(wave_out_device),
-        opts(opts), result(false) {}
-
-  std::string wave_in_device;
-  std::string wave_out_device;
-  int opts;
-  bool result;
-};
-
-struct SetLocalRendererParams : public talk_base::MessageData {
-  explicit SetLocalRendererParams(cricket::VideoRenderer* renderer)
-      : renderer(renderer), result(false) {}
-
-  cricket::VideoRenderer* renderer;
-  bool result;
-};
-
-struct SetVideoRendererParams : public talk_base::MessageData {
-  SetVideoRendererParams(const std::string& stream_id,
-                         cricket::VideoRenderer* renderer)
-      : stream_id(stream_id), renderer(renderer), result(false) {}
-
-  std::string stream_id;
-  cricket::VideoRenderer* renderer;
-  bool result;
-};
-
-struct SetVideoCaptureParams : public talk_base::MessageData {
-  explicit SetVideoCaptureParams(const std::string& cam_device)
-      : cam_device(cam_device), result(false) {}
-
-  std::string cam_device;
-  bool result;
-};
-
-struct RegisterObserverParams : public talk_base::MessageData {
-  explicit RegisterObserverParams(PeerConnectionObserver* observer)
-      : observer(observer), result(false) {}
-
-  PeerConnectionObserver* observer;
-  bool result;
-};
-
-struct ResultParams : public talk_base::MessageData {
-  ResultParams()
-      : result(false) {}
-
-  bool result;
-};
-
-PeerConnectionProxy::PeerConnectionProxy(
-    cricket::PortAllocator* port_allocator,
-    cricket::ChannelManager* channel_manager,
-      talk_base::Thread* signaling_thread)
-  : peerconnection_impl_(new PeerConnectionImpl(port_allocator,
-                             channel_manager, signaling_thread)),
-    signaling_thread_(signaling_thread) {
-}
-
-PeerConnectionProxy::~PeerConnectionProxy() {
-  ResultParams params;
-  Send(MSG_WEBRTC_RELEASE, &params);
-}
-
-bool PeerConnectionProxy::Init() {
-  ResultParams params;
-  return (Send(MSG_WEBRTC_INIT, &params) && params.result);
-}
-
-void PeerConnectionProxy::RegisterObserver(PeerConnectionObserver* observer) {
-  RegisterObserverParams params(observer);
-  Send(MSG_WEBRTC_REGISTEROBSERVER, &params);
-}
-
-bool PeerConnectionProxy::SignalingMessage(
-    const std::string& signaling_message) {
-  SignalingMsgParams params(signaling_message);
-  return (Send(MSG_WEBRTC_SIGNALINGMESSAGE, &params) && params.result);
-}
-
-bool PeerConnectionProxy::AddStream(const std::string& stream_id, bool video) {
-  AddStreamParams params(stream_id, video);
-  return (Send(MSG_WEBRTC_ADDSTREAM, &params) && params.result);
-}
-
-bool PeerConnectionProxy::RemoveStream(const std::string& stream_id) {
-  RemoveStreamParams params(stream_id);
-  return (Send(MSG_WEBRTC_REMOVESTREAM, &params) && params.result);
-}
-
-bool PeerConnectionProxy::SetAudioDevice(const std::string& wave_in_device,
-                                         const std::string& wave_out_device,
-                                         int opts) {
-  SetAudioDeviceParams params(wave_in_device, wave_out_device, opts);
-  return (Send(MSG_WEBRTC_SETAUDIODEVICE, &params) && params.result);
-}
-
-bool PeerConnectionProxy::SetLocalVideoRenderer(
-    cricket::VideoRenderer* renderer) {
-  SetLocalRendererParams params(renderer);
-  return (Send(MSG_WEBRTC_SETLOCALRENDERER, &params) && params.result);
-}
-
-bool PeerConnectionProxy::SetVideoRenderer(const std::string& stream_id,
-                                          cricket::VideoRenderer* renderer) {
-  SetVideoRendererParams params(stream_id, renderer);
-  return (Send(MSG_WEBRTC_SETVIDEORENDERER, &params) && params.result);
-}
-
-bool PeerConnectionProxy::SetVideoCapture(const std::string& cam_device) {
-  SetVideoCaptureParams params(cam_device);
-  return (Send(MSG_WEBRTC_SETVIDEOCAPTURE, &params) && params.result);
-}
-
-PeerConnection::ReadyState PeerConnectionProxy::GetReadyState() {
-  PeerConnection::ReadyState ready_state = NEW;
-  Send(MSG_WEBRTC_GETREADYSTATE,
-       reinterpret_cast<talk_base::MessageData*>(&ready_state));
-  return ready_state;
-}
-
-bool PeerConnectionProxy::Connect() {
-  ResultParams params;
-  return (Send(MSG_WEBRTC_CONNECT, &params) && params.result);
-}
-
-bool PeerConnectionProxy::Close() {
-  ResultParams params;
-  return (Send(MSG_WEBRTC_CLOSE, &params) && params.result);
-}
-
-bool PeerConnectionProxy::Send(uint32 id, talk_base::MessageData* data) {
-  if (!signaling_thread_)
-    return false;
-  signaling_thread_->Send(this, id, data);
-  return true;
-}
-
-void PeerConnectionProxy::OnMessage(talk_base::Message* message) {
-  talk_base::MessageData* data = message->pdata;
-  switch (message->message_id) {
-    case MSG_WEBRTC_ADDSTREAM: {
-      AddStreamParams* params = reinterpret_cast<AddStreamParams*>(data);
-      params->result = peerconnection_impl_->AddStream(
-          params->stream_id, params->video);
-      break;
-    }
-    case MSG_WEBRTC_SIGNALINGMESSAGE: {
-      SignalingMsgParams* params =
-          reinterpret_cast<SignalingMsgParams*>(data);
-      params->result = peerconnection_impl_->SignalingMessage(
-          params->signaling_message);
-      break;
-    }
-    case MSG_WEBRTC_REMOVESTREAM: {
-      RemoveStreamParams* params = reinterpret_cast<RemoveStreamParams*>(data);
-      params->result = peerconnection_impl_->RemoveStream(
-          params->stream_id);
-      break;
-    }
-    case MSG_WEBRTC_SETAUDIODEVICE: {
-      SetAudioDeviceParams* params =
-          reinterpret_cast<SetAudioDeviceParams*>(data);
-      params->result = peerconnection_impl_->SetAudioDevice(
-          params->wave_in_device, params->wave_out_device, params->opts);
-      break;
-    }
-    case MSG_WEBRTC_SETLOCALRENDERER: {
-      SetLocalRendererParams* params =
-          reinterpret_cast<SetLocalRendererParams*>(data);
-      params->result = peerconnection_impl_->SetLocalVideoRenderer(
-          params->renderer);
-      break;
-    }
-    case MSG_WEBRTC_SETVIDEOCAPTURE: {
-      SetVideoCaptureParams* params =
-          reinterpret_cast<SetVideoCaptureParams*>(data);
-      params->result = peerconnection_impl_->SetVideoCapture(
-          params->cam_device);
-      break;
-    }
-    case MSG_WEBRTC_GETREADYSTATE: {
-      PeerConnection::ReadyState* ready_state =
-          reinterpret_cast<PeerConnection::ReadyState*>(data);
-      *ready_state = peerconnection_impl_->GetReadyState();
-      break;
-    }
-    case MSG_WEBRTC_SETVIDEORENDERER: {
-      SetVideoRendererParams* params =
-          reinterpret_cast<SetVideoRendererParams*>(data);
-      params->result = peerconnection_impl_->SetVideoRenderer(
-          params->stream_id, params->renderer);
-      break;
-    }
-    case MSG_WEBRTC_CONNECT: {
-      ResultParams* params =
-          reinterpret_cast<ResultParams*>(data);
-      params->result = peerconnection_impl_->Connect();
-      break;
-    }
-    case MSG_WEBRTC_CLOSE: {
-      ResultParams* params =
-          reinterpret_cast<ResultParams*>(data);
-      params->result = peerconnection_impl_->Close();
-      break;
-    }
-    case MSG_WEBRTC_INIT: {
-      ResultParams* params =
-          reinterpret_cast<ResultParams*>(data);
-      params->result = peerconnection_impl_->Init();
-      break;
-    }
-    case MSG_WEBRTC_REGISTEROBSERVER: {
-      RegisterObserverParams* params =
-          reinterpret_cast<RegisterObserverParams*>(data);
-      peerconnection_impl_->RegisterObserver(params->observer);
-      break;
-    }
-    case MSG_WEBRTC_RELEASE: {
-      peerconnection_impl_.reset();
-      break;
-    }
-    default: {
-      ASSERT(false);
-      break;
-    }
-  }
-}
-
-}  // namespace webrtc
diff --git a/talk/app/webrtc/peerconnectionproxy.h b/talk/app/webrtc/peerconnectionproxy.h
deleted file mode 100644
index e83b4ec..0000000
--- a/talk/app/webrtc/peerconnectionproxy.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * libjingle
- * Copyright 2004--2011, Google Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- *  1. Redistributions of source code must retain the above copyright notice,
- *     this list of conditions and the following disclaimer.
- *  2. Redistributions in binary form must reproduce the above copyright notice,
- *     this list of conditions and the following disclaimer in the documentation
- *     and/or other materials provided with the distribution.
- *  3. The name of the author may not be used to endorse or promote products
- *     derived from this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
- * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
- * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
- * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef TALK_APP_WEBRTC_PEERCONNECTIONPROXY_H_
-#define TALK_APP_WEBRTC_PEERCONNECTIONPROXY_H_
-
-#include <string>
-
-#include "talk/app/webrtc/peerconnection.h"
-#include "talk/base/scoped_ptr.h"
-#include "talk/base/thread.h"
-
-namespace cricket {
-class ChannelManager;
-class PortAllocator;
-}
-
-namespace webrtc {
-
-class PeerConnectionImpl;
-
-class PeerConnectionProxy : public PeerConnection,
-                            public talk_base::MessageHandler {
- public:
-  PeerConnectionProxy(cricket::PortAllocator* port_allocator,
-                      cricket::ChannelManager* channel_manager,
-                      talk_base::Thread* signaling_thread);
-  virtual ~PeerConnectionProxy();
-
-  // PeerConnection interface implementation.
-  virtual void RegisterObserver(PeerConnectionObserver* observer);
-  virtual bool SignalingMessage(const std::string& msg);
-  virtual bool AddStream(const std::string& stream_id, bool video);
-  virtual bool RemoveStream(const std::string& stream_id);
-  virtual bool Connect();
-  virtual bool Close();
-  virtual bool SetAudioDevice(const std::string& wave_in_device,
-                              const std::string& wave_out_device, int opts);
-  virtual bool SetLocalVideoRenderer(cricket::VideoRenderer* renderer);
-  virtual bool SetVideoRenderer(const std::string& stream_id,
-                                cricket::VideoRenderer* renderer);
-  virtual bool SetVideoCapture(const std::string& cam_device);
-  virtual ReadyState GetReadyState();
-
- private:
-  bool Init();
-  bool Send(uint32 id, talk_base::MessageData* data);
-  virtual void OnMessage(talk_base::Message* message);
-
-  talk_base::scoped_ptr<PeerConnectionImpl> peerconnection_impl_;
-  talk_base::Thread* signaling_thread_;
-
-  friend class PeerConnectionFactory;
-};
-}
-
-#endif  // TALK_APP_WEBRTC_PEERCONNECTIONPROXY_H_
diff --git a/talk/app/webrtc/peerconnectionsignaling.cc b/talk/app/webrtc/peerconnectionsignaling.cc
new file mode 100644
index 0000000..9accf05
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionsignaling.cc
@@ -0,0 +1,576 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/peerconnectionsignaling.h"
+
+#include <utility>
+
+#include "talk/app/webrtc/mediastreamproxy.h"
+#include "talk/app/webrtc/mediastreamtrackproxy.h"
+#include "talk/app/webrtc/sessiondescriptionprovider.h"
+#include "talk/app/webrtc/streamcollectionimpl.h"
+#include "talk/base/helpers.h"
+#include "talk/base/logging.h"
+#include "talk/base/messagequeue.h"
+#include "talk/session/phone/channelmanager.h"
+
+using talk_base::scoped_refptr;
+
+namespace webrtc {
+
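+// Ids of the messages posted to the signaling thread; see OnMessage().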
+enum {
+  MSG_SEND_QUEUED_OFFER = 1,
+  MSG_GENERATE_ANSWER = 2,
+};
+
+// Verifies that a SessionDescription contains at least one valid media content
+// and a valid codec.
+static bool VerifyAnswer(const cricket::SessionDescription* answer_desc) {
+  // We need to verify that at least one media content with
+  // a codec is available.
+  const cricket::ContentInfo* audio_content =
+      GetFirstAudioContent(answer_desc);
+  if (audio_content) {
+    const cricket::AudioContentDescription* audio_desc =
+        static_cast<const cricket::AudioContentDescription*>(
+            audio_content->description);
+    if (audio_desc->codecs().size() > 0) {
+      return true;
+    }
+  }
+  const cricket::ContentInfo* video_content =
+      GetFirstVideoContent(answer_desc);
+  if (video_content) {
+    const cricket::VideoContentDescription* video_desc =
+        static_cast<const cricket::VideoContentDescription*>(
+            video_content->description);
+    if (video_desc->codecs().size() > 0) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// Fills a MediaSessionOptions struct with the MediaTracks we want to send,
+// given the local MediaStreams.
+static void InitMediaSessionOptions(
+    cricket::MediaSessionOptions* options,
+    StreamCollectionInterface* local_streams) {
+  // In order to be able to receive video, has_video should always be true,
+  // even if there are no video tracks.
+  options->has_video = true;
+  for (size_t i = 0; i < local_streams->count(); ++i) {
+    MediaStreamInterface* stream = local_streams->at(i);
+
+    scoped_refptr<AudioTracks> audio_tracks(stream->audio_tracks());
+    // For each audio track in the stream, add it to the MediaSessionOptions.
+    for (size_t j = 0; j < audio_tracks->count(); ++j) {
+      scoped_refptr<MediaStreamTrackInterface> track(audio_tracks->at(j));
+      options->AddStream(cricket::MEDIA_TYPE_AUDIO, track->label(),
+                         stream->label());
+    }
+
+    scoped_refptr<VideoTracks> video_tracks(stream->video_tracks());
+    // For each video track in the stream, add it to the MediaSessionOptions.
+    for (size_t j = 0; j <  video_tracks->count(); ++j) {
+      scoped_refptr<MediaStreamTrackInterface> track(video_tracks->at(j));
+      options->AddStream(cricket::MEDIA_TYPE_VIDEO, track->label(),
+                         stream->label());
+    }
+  }
+}
+
+PeerConnectionSignaling::PeerConnectionSignaling(
+    talk_base::Thread* signaling_thread,
+    SessionDescriptionProvider* provider)
+    : signaling_thread_(signaling_thread),
+      provider_(provider),
+      state_(kInitializing),
+      received_pre_offer_(false),
+      remote_streams_(StreamCollection::Create()),
+      local_streams_(StreamCollection::Create()) {
+}
+
+PeerConnectionSignaling::~PeerConnectionSignaling() {}
+
+void PeerConnectionSignaling::OnCandidatesReady(
+    const cricket::Candidates& candidates) {
+  if (!VERIFY(state_ == kInitializing))
+    return;
+  // Store the candidates.
+  candidates_ = candidates;
+  // If we have a queued remote offer, we need to handle it first.
+  if (received_pre_offer_) {
+    received_pre_offer_ = false;
+    ChangeState(kWaitingForOK);
+    signaling_thread_->Post(this, MSG_GENERATE_ANSWER);
+  } else if (!queued_local_streams_.empty()) {
+    // Else if we have local queued offers.
+    ChangeState(kWaitingForAnswer);
+    signaling_thread_->Post(this, MSG_SEND_QUEUED_OFFER);
+  } else {
+    ChangeState(kIdle);
+  }
+}
+
+void PeerConnectionSignaling::ChangeState(State new_state) {
+  state_ = new_state;
+  SignalStateChange(state_);
+}
+
+void PeerConnectionSignaling::ProcessSignalingMessage(
+    const std::string& message,
+    StreamCollectionInterface* local_streams) {
+  ASSERT(talk_base::Thread::Current() == signaling_thread_);
+
+  RoapSession::ParseResult result = roap_session_.Parse(message);
+
+  // Signal an error message and return if a message arrives after shutdown is
+  // complete, or if anything other than an ok message arrives during shutdown.
+  // No other messages from the remote peer can be processed in these states.
+  if (state_ == kShutdownComplete ||
+      (state_ == kShutingDown && result != RoapSession::kOk)) {
+    SignalNewPeerConnectionMessage(roap_session_.CreateErrorMessage(kNoMatch));
+    return;
+  }
+
+  switch (result) {
+    case RoapSession::kOffer: {
+      queued_local_streams_.clear();
+      queued_local_streams_.push_back(local_streams);
+
+      // If we are still initializing, we need to wait before we can handle
+      // the offer. Queue it and handle it when the state changes.
+      if (state_ == kInitializing) {
+        received_pre_offer_ = true;
+        break;
+      }
+
+      if (state_ == kWaitingForAnswer) {
+        // The message was received out of order, or glare occurred and the
+        // decision was to use the incoming offer.
+        LOG(LS_INFO) << "Received offer while waiting for answer.";
+        // Be nice and handle this offer instead of the pending offer.
+        signaling_thread_->Clear(this, MSG_SEND_QUEUED_OFFER);
+      }
+      // Post a task to generate the answer.
+      signaling_thread_->Post(this, MSG_GENERATE_ANSWER);
+      ChangeState(kWaitingForOK);
+      break;
+    }
+    case RoapSession::kAnswerMoreComing: {
+      // We ignore this message for now and wait for the complete result.
+      LOG(LS_INFO) << "Received answer more coming.";
+      break;
+    }
+    case RoapSession::kAnswer: {
+      if (state_ != kWaitingForAnswer) {
+        LOG(LS_WARNING) << "Received an unexpected answer.";
+        return;
+      }
+
+      // Hand over the remote session description and the remote candidates
+      // from the parsed ROAP message to the provider_.
+      // Ownership of the session description is transferred from
+      // roap_session_ to the provider_.
+      const cricket::SessionDescription* remote_desc =
+          provider_->SetRemoteSessionDescription(
+              roap_session_.ReleaseRemoteDescription(),
+              roap_session_.RemoteCandidates());
+      // Let the provider know that the negotiation is done and that both the
+      // local and remote session descriptions are now valid.
+      provider_->NegotiationDone();
+
+      // Update the list of known remote MediaStreams.
+      UpdateRemoteStreams(remote_desc);
+      // Pop the first item of queued StreamCollections containing local
+      // MediaStreams that have just been negotiated.
+      scoped_refptr<StreamCollectionInterface> streams(
+          queued_local_streams_.front());
+      queued_local_streams_.pop_front();
+      // Update the state of the local MediaStreams.
+      UpdateSendingLocalStreams(remote_desc, streams);
+
+      // Let the remote peer know we have received the answer.
+      SignalNewPeerConnectionMessage(roap_session_.CreateOk());
+      // Check if we have more offers waiting in the queue.
+      if (!queued_local_streams_.empty()) {
+        // Send the next offer.
+        signaling_thread_->Post(this, MSG_SEND_QUEUED_OFFER);
+      } else {
+        ChangeState(kIdle);
+      }
+      break;
+    }
+    case RoapSession::kOk: {
+      if (state_ == kWaitingForOK) {
+        // Let the provider know the negotiation is done.
+        provider_->NegotiationDone();
+
+        scoped_refptr<StreamCollectionInterface> streams(
+            queued_local_streams_.front());
+        queued_local_streams_.pop_front();
+        // Update the state of the local streams.
+        UpdateSendingLocalStreams(local_desc_, streams);
+        ChangeState(kIdle);
+        // Check if we have an updated offer waiting in the queue.
+        if (!queued_local_streams_.empty())
+          signaling_thread_->Post(this, MSG_SEND_QUEUED_OFFER);
+      } else if (state_ == kShutingDown) {
+        ChangeState(kShutdownComplete);
+      }
+      break;
+    }
+    case RoapSession::kConflict: {
+      SignalNewPeerConnectionMessage(roap_session_.CreateErrorMessage(
+          kConflict));
+      break;
+    }
+    case RoapSession::kDoubleConflict: {
+      SignalNewPeerConnectionMessage(roap_session_.CreateErrorMessage(
+          kDoubleConflict));
+
+      // Recreate the offer with new sequence values etc.
+      ChangeState(kWaitingForAnswer);
+      signaling_thread_->Post(this, MSG_SEND_QUEUED_OFFER);
+      break;
+    }
+    case RoapSession::kError: {
+      if (roap_session_.RemoteError() != kConflict &&
+          roap_session_.RemoteError() != kDoubleConflict) {
+        SignalErrorMessageReceived(roap_session_.RemoteError());
+        // An error has occurred that we can't do anything about.
+        // Reset the state and wait for user action.
+        signaling_thread_->Clear(this);
+        queued_local_streams_.clear();
+        ChangeState(kIdle);
+      }
+      break;
+    }
+    case RoapSession::kShutDown: {
+      DoShutDown();
+      SignalNewPeerConnectionMessage(roap_session_.CreateOk());
+      ChangeState(kShutdownComplete);
+      break;
+    }
+    case RoapSession::kInvalidMessage: {
+      SignalNewPeerConnectionMessage(roap_session_.CreateErrorMessage(
+          kNoMatch));
+      return;
+    }
+  }
+}
+
+void PeerConnectionSignaling::CreateOffer(
+    StreamCollectionInterface* local_streams) {
+  if (!VERIFY(talk_base::Thread::Current() == signaling_thread_ &&
+              state_ != kShutingDown && state_ != kShutdownComplete)) {
+    return;
+  }
+
+  queued_local_streams_.push_back(local_streams);
+  if (state_ == kIdle) {
+    // Check if we can send a new offer.
+    // Only one offer is allowed at a time.
+    ChangeState(kWaitingForAnswer);
+    signaling_thread_->Post(this, MSG_SEND_QUEUED_OFFER);
+  }
+}
+
+void PeerConnectionSignaling::SendShutDown() {
+  DoShutDown();
+  SignalNewPeerConnectionMessage(roap_session_.CreateShutDown());
+}
+
+// Implement talk_base::MessageHandler.
+void PeerConnectionSignaling::OnMessage(talk_base::Message* msg) {
+  switch (msg->message_id) {
+    case MSG_SEND_QUEUED_OFFER:
+      CreateOffer_s();
+      break;
+    case MSG_GENERATE_ANSWER:
+      CreateAnswer_s();
+      break;
+    default:
+      ASSERT(!"Invalid value in switch statement.");
+      break;
+  }
+}
+
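+// Creates an offer based on the first queued collection of local streams and
+// signals it to the remote peer as a ROAP offer message.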
+void PeerConnectionSignaling::CreateOffer_s() {
+  ASSERT(!queued_local_streams_.empty());
+  scoped_refptr<StreamCollectionInterface> local_streams(
+      queued_local_streams_.front());
+  cricket::MediaSessionOptions options;
+  InitMediaSessionOptions(&options, local_streams);
+
+  const cricket::SessionDescription* local_desc =
+      provider_->ProvideOffer(options);
+
+  SignalNewPeerConnectionMessage(roap_session_.CreateOffer(local_desc,
+                                                           candidates_));
+}
+
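+// Stops further signaling: clears queued offers and answers, tells the
+// provider that the remote session description is gone and marks all known
+// remote streams as removed.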
+void PeerConnectionSignaling::DoShutDown() {
+  ChangeState(kShutingDown);
+  signaling_thread_->Clear(this);  // Don't send queued offers or answers.
+  queued_local_streams_.clear();
+  provider_->SetRemoteSessionDescription(NULL, cricket::Candidates());
+  provider_->NegotiationDone();
+  UpdateRemoteStreams(NULL);
+}
+
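+// Generates an answer to the most recently received offer and signals it to
+// the remote peer, or signals an error message if no usable media content was
+// negotiated.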
+void PeerConnectionSignaling::CreateAnswer_s() {
+  // Let the provider know about the remote offer.
+  // The provider takes ownership and returns a pointer for us to use.
+  const cricket::SessionDescription* remote_desc =
+      provider_->SetRemoteSessionDescription(
+          roap_session_.ReleaseRemoteDescription(),
+          roap_session_.RemoteCandidates());
+
+  scoped_refptr<StreamCollectionInterface> streams(
+      queued_local_streams_.back());
+  // Clean up all queued collections of local streams except the last one.
+  // The last one is kept until the ok message is received for this answer and
+  // is needed for updating the state of the local streams.
+  queued_local_streams_.erase(queued_local_streams_.begin(),
+                              --queued_local_streams_.end());
+
+  // Create a MediaSessionOptions object with the sources we want to send.
+  cricket::MediaSessionOptions options;
+  InitMediaSessionOptions(&options, streams);
+  // Create a local session description based on this.
+  local_desc_ = provider_->ProvideAnswer(options);
+
+  if (!VerifyAnswer(local_desc_)) {
+    SignalNewPeerConnectionMessage(roap_session_.CreateErrorMessage(kRefused));
+    return;
+  }
+
+  UpdateRemoteStreams(remote_desc);
+  ChangeState(kWaitingForOK);
+  SignalNewPeerConnectionMessage(roap_session_.CreateAnswer(local_desc_,
+                                                            candidates_));
+}
+
+// Updates or creates remote MediaStream objects given a
+// remote SessionDescription.
+// If the remote SessionDescription contains new remote MediaStreams,
+// SignalRemoteStreamAdded is triggered. If a remote MediaStream is missing
+// from the remote SessionDescription, SignalRemoteStreamRemoved is triggered.
+void PeerConnectionSignaling::UpdateRemoteStreams(
+    const cricket::SessionDescription* remote_desc) {
+  talk_base::scoped_refptr<StreamCollection> current_streams(
+      StreamCollection::Create());
+
+  const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
+  if (audio_content) {
+    const cricket::AudioContentDescription* audio_desc =
+        static_cast<const cricket::AudioContentDescription*>(
+            audio_content->description);
+
+    for (cricket::StreamParamsVec::const_iterator it =
+             audio_desc->streams().begin();
+         it != audio_desc->streams().end(); ++it) {
+      MediaStreamInterface* old_stream = remote_streams_->find(it->sync_label);
+      scoped_refptr<MediaStreamProxy> new_stream(static_cast<MediaStreamProxy*>(
+          current_streams->find(it->sync_label)));
+
+      if (old_stream == NULL) {
+        if (new_stream == NULL) {
+          // New stream
+          new_stream = MediaStreamProxy::Create(it->sync_label,
+                                                signaling_thread_);
+          current_streams->AddStream(new_stream);
+        }
+        scoped_refptr<AudioTrackInterface> track(
+            AudioTrackProxy::CreateRemote(it->name, signaling_thread_));
+        track->set_state(MediaStreamTrackInterface::kLive);
+        new_stream->AddTrack(track);
+      } else {
+        current_streams->AddStream(old_stream);
+      }
+    }
+  }
+
+  const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
+  if (video_content) {
+    const cricket::VideoContentDescription* video_desc =
+        static_cast<const cricket::VideoContentDescription*>(
+            video_content->description);
+
+    for (cricket::StreamParamsVec::const_iterator it =
+             video_desc->streams().begin();
+         it != video_desc->streams().end(); ++it) {
+      MediaStreamInterface* old_stream = remote_streams_->find(it->sync_label);
+      scoped_refptr<MediaStreamProxy> new_stream(static_cast<MediaStreamProxy*>(
+          current_streams->find(it->sync_label)));
+      if (old_stream == NULL) {
+        if (new_stream == NULL) {
+          // New stream
+          new_stream = MediaStreamProxy::Create(it->sync_label,
+                                                signaling_thread_);
+          current_streams->AddStream(new_stream);
+        }
+        scoped_refptr<VideoTrackInterface> track(
+            VideoTrackProxy::CreateRemote(it->name, signaling_thread_));
+        new_stream->AddTrack(track);
+        track->set_state(MediaStreamTrackInterface::kLive);
+      } else {
+        current_streams->AddStream(old_stream);
+      }
+    }
+  }
+
+  // Iterate current_streams to find all new streams.
+  // Change the state of the new stream and SignalRemoteStreamAdded.
+  for (size_t i = 0; i < current_streams->count(); ++i) {
+    MediaStreamInterface* new_stream = current_streams->at(i);
+    MediaStreamInterface* old_stream = remote_streams_->find(
+        new_stream->label());
+    if (old_stream != NULL) continue;
+
+    new_stream->set_ready_state(MediaStreamInterface::kLive);
+    SignalRemoteStreamAdded(new_stream);
+  }
+
+  // Iterate the old list of remote streams.
+  // If a stream is not found in the new list, it has been removed.
+  // Change the state of the removed stream and SignalRemoteStreamRemoved.
+  for (size_t i = 0; i < remote_streams_->count(); ++i) {
+    MediaStreamInterface* old_stream = remote_streams_->at(i);
+    MediaStreamInterface* new_stream = current_streams->find(
+        old_stream->label());
+    if (new_stream != NULL) continue;
+
+    old_stream->set_ready_state(MediaStreamInterface::kEnded);
+    scoped_refptr<AudioTracks> audio_tracklist(old_stream->audio_tracks());
+    for (size_t j = 0; j < audio_tracklist->count(); ++j) {
+      audio_tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
+    }
+    scoped_refptr<VideoTracks> video_tracklist(old_stream->video_tracks());
+    for (size_t j = 0; j < video_tracklist->count(); ++j) {
+      video_tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
+    }
+    SignalRemoteStreamRemoved(old_stream);
+  }
+  // Prepare for next offer.
+  remote_streams_ = current_streams;
+}
+
+// Updates the state of all local streams we have just negotiated. If the
+// negotiation succeeded, the state is changed to kLive; if the negotiation
+// failed, the state is changed to kEnded.
+void PeerConnectionSignaling::UpdateSendingLocalStreams(
+    const cricket::SessionDescription* answer_desc,
+    StreamCollectionInterface* negotiated_streams) {
+  talk_base::scoped_refptr<StreamCollection> current_local_streams(
+      StreamCollection::Create());
+
+  for (size_t i = 0; i < negotiated_streams->count(); ++i) {
+    scoped_refptr<MediaStreamInterface> stream(negotiated_streams->at(i));
+    scoped_refptr<AudioTracks> audiotracklist(stream->audio_tracks());
+    scoped_refptr<VideoTracks> videotracklist(stream->video_tracks());
+
+    bool stream_ok = false;  // A stream is ok if at least one track succeeds.
+    // Update each track based on its type.
+    for (size_t j = 0; j < audiotracklist->count(); ++j) {
+      scoped_refptr<MediaStreamTrackInterface> track(audiotracklist->at(j));
+      const cricket::ContentInfo* audio_content =
+          GetFirstAudioContent(answer_desc);
+      if (!audio_content) {  // The remote does not accept audio.
+        track->set_state(MediaStreamTrackInterface::kFailed);
+        continue;
+      }
+
+      const cricket::AudioContentDescription* audio_desc =
+          static_cast<const cricket::AudioContentDescription*>(
+              audio_content->description);
+      if (audio_desc->codecs().size() <= 0) {
+        // No common codec; mark the track as failed and skip it.
+        track->set_state(MediaStreamTrackInterface::kFailed);
+        continue;
+      }
+      track->set_state(MediaStreamTrackInterface::kLive);
+      stream_ok = true;
+    }
+
+    for (size_t j = 0; j < videotracklist->count(); ++j) {
+      scoped_refptr<MediaStreamTrackInterface> track(videotracklist->at(j));
+      const cricket::ContentInfo* video_content =
+          GetFirstVideoContent(answer_desc);
+      if (!video_content) {  // The remote does not accept video.
+        track->set_state(MediaStreamTrackInterface::kFailed);
+        continue;
+      }
+
+      const cricket::VideoContentDescription* video_desc =
+          static_cast<const cricket::VideoContentDescription*>(
+              video_content->description);
+      if (video_desc->codecs().size() <= 0) {
+        // No common codec; mark the track as failed and skip it.
+        track->set_state(MediaStreamTrackInterface::kFailed);
+        continue;
+      }
+      track->set_state(MediaStreamTrackInterface::kLive);
+      stream_ok = true;
+    }
+
+    if (stream_ok) {
+      // We have successfully negotiated to send this stream.
+      // Change the stream and store it as successfully negotiated.
+      stream->set_ready_state(MediaStreamInterface::kLive);
+      current_local_streams->AddStream(stream);
+    } else {
+      stream->set_ready_state(MediaStreamInterface::kEnded);
+    }
+  }
+
+  // Iterate the old list of local streams.
+  // If a stream is not found in the new list, it has been removed.
+  // Change the state of the removed stream and all its tracks to kEnded.
+  for (size_t i = 0; i < local_streams_->count(); ++i) {
+    MediaStreamInterface* old_stream = local_streams_->at(i);
+    MediaStreamInterface* new_stream =
+        negotiated_streams->find(old_stream->label());
+
+    if (new_stream != NULL) continue;
+
+    old_stream->set_ready_state(MediaStreamInterface::kEnded);
+    scoped_refptr<AudioTracks> audio_tracklist(old_stream->audio_tracks());
+    for (size_t j = 0; j < audio_tracklist->count(); ++j) {
+      audio_tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
+    }
+    scoped_refptr<VideoTracks> video_tracklist(old_stream->video_tracks());
+    for (size_t j = 0; j < video_tracklist->count(); ++j) {
+      video_tracklist->at(j)->set_state(MediaStreamTrackInterface::kEnded);
+    }
+  }
+
+  // Store the successfully negotiated streams for the next update.
+  local_streams_ = current_local_streams;
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/peerconnectionsignaling.h b/talk/app/webrtc/peerconnectionsignaling.h
new file mode 100644
index 0000000..14188a9
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionsignaling.h
@@ -0,0 +1,262 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains classes used for handling signaling between
+// two PeerConnections.
+
+#ifndef TALK_APP_WEBRTC_PEERCONNECTIONSIGNALING_H_
+#define TALK_APP_WEBRTC_PEERCONNECTIONSIGNALING_H_
+
+#include <list>
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/roaperrorcodes.h"
+#include "talk/app/webrtc/roapsession.h"
+#include "talk/app/webrtc/webrtcsessionobserver.h"
+#include "talk/base/messagehandler.h"
+#include "talk/base/scoped_ptr.h"
+#include "talk/base/scoped_ref_ptr.h"
+#include "talk/base/sigslot.h"
+
+namespace cricket {
+class ChannelManager;
+class Candidate;
+typedef std::vector<Candidate> Candidates;
+}
+
+namespace talk_base {
+class Thread;
+}
+
+namespace webrtc {
+
+class SessionDescriptionProvider;
+class StreamCollection;
+class StreamCollectionInterface;
+class MediaStreamInterface;
+
+// PeerConnectionSignaling is a class responsible for handling signaling
+// between two PeerConnection objects. It creates remote MediaStream objects
+// when the remote peer signals that it wants to send a new MediaStream. It
+// changes the state of local MediaStreams and their tracks when a remote peer
+// is ready to receive media.
+//
+// PeerConnectionSignaling is Thread-compatible and all non-const methods are
+// expected to be called on the signaling thread.
+//
+// Note that before PeerConnectionSignaling can process an answer or create an
+// offer OnCandidatesReady has to be called. The last request to create an offer
+// or process an answer will be processed after OnCandidatesReady has been
+// called.
+//
+// Call CreateOffer to negotiate new local streams to send.
+// Call ProcessSignalingMessage when a new message has been received from the
+// remote peer. This might result in one or more signals being triggered to
+// indicate changes in the offer from the remote peer or a detected error.
+// PeerConnectionSignaling creates offers and answers asynchronously on the
+// signaling thread.
+//
+// Example usage: Creating an offer with one audio track.
+//
+// class ProviderImpl : public SessionDescriptionProvider {
+//  ...
+// };
+//
+// class MessageObserver : public sigslot::has_slots<> {
+//  public:
+//   void OnSignalingMessage(const std::string& smessage) { ... }
+// };
+//
+// ProviderImpl impl;
+// MessageObserver observer;
+// PeerConnectionSignaling pc(talk_base::Thread::Current(), &impl);
+//
+// // Connect the member function MessageObserver::OnSignalingMessage to the
+// // signal SignalNewPeerConnectionMessage.
+// pc.SignalNewPeerConnectionMessage.connect(
+//     &observer, &MessageObserver::OnSignalingMessage);
+//
+// // Initialize PeerConnectionSignaling by providing the candidates for
+// // this session.
+// pc.OnCandidatesReady(candidates);
+// // Create an offer with one stream with one audio track.
+// AudioTrack audio;
+// MediaStream local_stream1;
+// local_stream1.AddTrack(&audio);
+// StreamCollection local_streams;
+// local_streams.AddStream(&local_stream1);
+// pc.CreateOffer(&local_streams);
+// // When the offer has been created, OnSignalingMessage is called
+// // with the offer in a string. Provide this offer to the remote
+// // PeerConnection. The remote PeerConnection will then respond with an answer
+// // string. Provide this answer string to PeerConnectionSignaling.
+// pc.ProcessSignalingMessage(remote_message, &local_streams);
+
+class PeerConnectionSignaling : public WebRtcSessionObserver,
+                                public talk_base::MessageHandler {
+ public:
+  enum State {
+    // Awaiting the local candidates.
+    kInitializing,
+    // Ready to send a new offer or receive a new offer.
+    kIdle,
+    // An offer has been sent and an answer is expected.
+    kWaitingForAnswer,
+    // An answer has been sent and an OK message is expected.
+    kWaitingForOK,
+    // SendShutdown has been called. No more messages are processed.
+    kShutingDown,
+    // A shutdown message has been received or the remote peer has answered OK
+    // to a sent shutdown message.
+    kShutdownComplete,
+  };
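+
+  // Informal sketch of the progression implied by the states above (derived
+  // from the state comments and the unit tests, not an exhaustive list of
+  // transitions): an offerer typically moves kInitializing -> kIdle ->
+  // kWaitingForAnswer -> kIdle, an answerer kIdle -> kWaitingForOK -> kIdle,
+  // and a shutdown ends with kShutingDown -> kShutdownComplete.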
+
+  // Constructs a PeerConnectionSignaling instance.
+  // signaling_thread - the thread from which all signals will be triggered.
+  // All calls to methods are also expected to be made on this thread.
+  // provider - Implementation of the SessionDescriptionProvider interface.
+  // This interface provides methods for returning local offer and answer
+  // session descriptions as well as functions for receiving events about
+  // negotiation completion and received remote session descriptions.
+  PeerConnectionSignaling(talk_base::Thread* signaling_thread,
+                          SessionDescriptionProvider* provider);
+  virtual ~PeerConnectionSignaling();
+
+  // Processes a received offer/answer from the remote peer.
+  // local_streams must be the collection of streams the PeerConnection
+  // currently wants to send.
+  void ProcessSignalingMessage(const std::string& message,
+                               StreamCollectionInterface* local_streams);
+
+  // Creates an offer containing all tracks in local_streams.
+  // When the offer is ready it is signaled by SignalNewPeerConnectionMessage.
+  // When the remote peer is ready to receive media on a stream, the state of
+  // the local streams will change to kLive.
+  void CreateOffer(StreamCollectionInterface* local_streams);
+
+  // Creates a ShutDown message to be sent to the remote peer.
+  // When the message is ready it is signaled by SignalNewPeerConnectionMessage.
+  // After calling this no more offers or answers to offers can be created.
+  void SendShutDown();
+
+  // Implements WebRtcSessionObserver interface.
+  // OnCandidatesReady is called when local candidates have been collected.
+  // This tells PeerConnectionSignaling that it is ready to respond to offers
+  // and create offer messages.
+  virtual void OnCandidatesReady(const cricket::Candidates& candidates);
+
+  // Returns all current remote MediaStreams.
+  StreamCollection* remote_streams() { return remote_streams_.get(); }
+
+  // Returns the current state.
+  State GetState() const { return state_; }
+
+  // A new ROAP message is ready to be sent. The listener to this signal is
+  // supposed to deliver this message to the remote peer.
+  sigslot::signal1<const std::string&> SignalNewPeerConnectionMessage;
+
+  // A new remote stream has been discovered.
+  sigslot::signal1<MediaStreamInterface*> SignalRemoteStreamAdded;
+
+  // A remote stream is no longer available.
+  sigslot::signal1<MediaStreamInterface*> SignalRemoteStreamRemoved;
+
+  // The signaling state has changed.
+  sigslot::signal1<State> SignalStateChange;
+
+  // Remote PeerConnection sent an error message.
+  sigslot::signal1<RoapErrorCode> SignalErrorMessageReceived;
+
+ private:
+  typedef std::list<talk_base::scoped_refptr<StreamCollectionInterface> >
+          StreamCollectionList;
+
+  // Implements talk_base::MessageHandler.
+  virtual void OnMessage(talk_base::Message* msg);
+
+  // Changes the State and triggers the SignalStateChange signal.
+  void ChangeState(State new_state);
+
+  // Creates an offer on the signaling_thread_.
+  // This is either initiated by CreateOffer or OnCandidatesReady.
+  void CreateOffer_s();
+
+  // Creates an answer on the signaling thread.
+  // This is either initiated by ProcessSignalingMessage when a remote offer
+  // has been received or by OnCandidatesReady.
+  void CreateAnswer_s();
+
+  // Notifies the provider_ and the active remote media streams
+  // about the shutdown.
+  // This is either initiated by ProcessSignalingMessage when a remote shutdown
+  // message has been received or by a call to SendShutDown.
+  void DoShutDown();
+
+  // Creates and destroys remote media streams based on remote_desc.
+  void UpdateRemoteStreams(const cricket::SessionDescription* remote_desc);
+
+  // Updates the state of local streams based on the answer_desc and the streams
+  // that have been negotiated in negotiated_streams.
+  void UpdateSendingLocalStreams(
+      const cricket::SessionDescription* answer_desc,
+      StreamCollectionInterface* negotiated_streams);
+
+  talk_base::Thread* signaling_thread_;
+  SessionDescriptionProvider* provider_;
+  State state_;
+
+  // Flag indicating that an offer was received while PeerConnectionSignaling
+  // was in the kInitializing state.
+  bool received_pre_offer_;
+
+  // LocalStreams queued for later use if ProcessSignalingMessage or CreateOffer
+  // is called while PeerConnectionSignaling is in kInitializing state or
+  // CreateOffer is called while PeerConnectionSignaling is currently sending
+  // an offer.
+  StreamCollectionList queued_local_streams_;
+
+  // Currently known remote MediaStreams.
+  talk_base::scoped_refptr<StreamCollection> remote_streams_;
+
+  // Weak reference to the local session description of the local MediaStreams
+  // being negotiated.
+  const cricket::SessionDescription* local_desc_;
+
+  // Local MediaStreams being negotiated.
+  talk_base::scoped_refptr<StreamCollection> local_streams_;
+
+  // The set of local transport candidates used in negotiation.
+  // This is set by OnCandidatesReady.
+  cricket::Candidates candidates_;
+
+  // roap_session_ holds the ROAP-specific session state and is used for
+  // creating and parsing ROAP messages.
+  RoapSession roap_session_;
+
+  DISALLOW_COPY_AND_ASSIGN(PeerConnectionSignaling);
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_PEERCONNECTIONSIGNALING_H_
diff --git a/talk/app/webrtc/peerconnectionsignaling_unittest.cc b/talk/app/webrtc/peerconnectionsignaling_unittest.cc
new file mode 100644
index 0000000..7b4d3a0
--- /dev/null
+++ b/talk/app/webrtc/peerconnectionsignaling_unittest.cc
@@ -0,0 +1,568 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <map>
+#include <string>
+#include <utility>
+
+#include "talk/app/webrtc/audiotrackimpl.h"
+#include "talk/app/webrtc/mediastreamimpl.h"
+#include "talk/app/webrtc/peerconnectionsignaling.h"
+#include "talk/app/webrtc/sessiondescriptionprovider.h"
+#include "talk/app/webrtc/streamcollectionimpl.h"
+#include "talk/app/webrtc/videotrackimpl.h"
+#include "talk/base/gunit.h"
+#include "talk/base/scoped_ptr.h"
+#include "talk/base/thread.h"
+#include "talk/session/phone/channelmanager.h"
+
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kStreamLabel2[] = "local_stream_2";
+static const char kAudioTrackLabel1[] = "local_audio_1";
+static const char kAudioTrackLabel2[] = "local_audio_2";
+static const char kVideoTrackLabel1[] = "local_video_1";
+
+namespace webrtc {
+
+typedef std::map<std::string, talk_base::scoped_refptr<MediaStreamInterface> >
+    MediaStreamMap;
+typedef std::pair<std::string, talk_base::scoped_refptr<MediaStreamInterface> >
+    RemotePair;
+
+// MockSignalingObserver implements functions for listening to all signals
+// from a PeerConnectionSignaling instance.
+// The method AnswerPeer can be used to forward messages from one
+// PeerConnectionSignaling instance to another.
+class MockSignalingObserver : public sigslot::has_slots<> {
+ public:
+  MockSignalingObserver()
+      : last_error_(-1),  // Initialize last_error_ to unused error code.
+        state_(PeerConnectionSignaling::kInitializing),
+        remote_peer_(NULL) {
+  }
+
+  virtual ~MockSignalingObserver() {}
+
+  // A new remote stream has been discovered.
+  virtual void OnRemoteStreamAdded(MediaStreamInterface* remote_stream) {
+    EXPECT_EQ(MediaStreamInterface::kLive, remote_stream->ready_state());
+    remote_media_streams_.insert(RemotePair(remote_stream->label(),
+                                            remote_stream));
+  }
+
+  // Remote stream is no longer available.
+  virtual void OnRemoteStreamRemoved(MediaStreamInterface* remote_stream) {
+    EXPECT_TRUE(remote_media_streams_.find(remote_stream->label()) !=
+                remote_media_streams_.end());
+    EXPECT_EQ(MediaStreamInterface::kEnded, remote_stream->ready_state());
+    remote_media_streams_.erase(remote_stream->label());
+  }
+
+  virtual void OnStateChange(PeerConnectionSignaling::State state) {
+    state_ = state;
+  }
+
+  virtual void OnErrorReceived(RoapErrorCode error) {
+    last_error_ = error;
+  }
+
+  void OnSignalingMessage(const std::string& smessage) {
+    last_message_ = smessage;
+    if (remote_peer_) {
+      remote_peer_->ProcessSignalingMessage(smessage, remote_local_collection_);
+    }
+  }
+
+  // Tell this object to answer the remote_peer.
+  // remote_local_collection is the local collection the remote peer wants to
+  // send in an answer.
+  void AnswerPeer(PeerConnectionSignaling* remote_peer,
+                  StreamCollection* remote_local_collection) {
+    remote_peer_ = remote_peer;
+    remote_local_collection_ = remote_local_collection;
+  }
+
+  void CancelAnswerPeer() {
+    remote_peer_ = NULL;
+    remote_local_collection_ = NULL;  // Drop our reference.
+  }
+
+  MediaStreamInterface* RemoteStream(const std::string& label) {
+    MediaStreamMap::iterator it = remote_media_streams_.find(label);
+    if (it != remote_media_streams_.end())
+      return it->second;
+    return NULL;
+  }
+
+  std::string last_message_;
+  int last_error_;
+  PeerConnectionSignaling::State state_;
+
+ private:
+  MediaStreamMap remote_media_streams_;
+  talk_base::scoped_refptr<StreamCollection> remote_local_collection_;
+  PeerConnectionSignaling* remote_peer_;
+};
+
+// Mock implementation of SessionDescriptionProvider.
+// PeerConnectionSignaling uses this object to create session descriptions.
+class MockSessionDescriptionProvider : public SessionDescriptionProvider {
+ public:
+  explicit MockSessionDescriptionProvider(
+      cricket::ChannelManager* channel_manager)
+      : update_session_description_counter_(0),
+        session_description_factory_(
+          new cricket::MediaSessionDescriptionFactory(channel_manager)) {
+  }
+  virtual const cricket::SessionDescription* ProvideOffer(
+      const cricket::MediaSessionOptions& options) {
+    local_desc_.reset(session_description_factory_->CreateOffer(
+        options, local_desc_.get()));
+    return local_desc_.get();
+  }
+
+  virtual const cricket::SessionDescription* SetRemoteSessionDescription(
+      const cricket::SessionDescription* remote_offer,
+      const cricket::Candidates& remote_candidates) {
+    remote_desc_.reset(remote_offer);
+    return remote_desc_.get();
+  }
+
+  virtual const cricket::SessionDescription* ProvideAnswer(
+      const cricket::MediaSessionOptions& options) {
+    local_desc_.reset(session_description_factory_->CreateAnswer(
+        remote_desc_.get(), options, local_desc_.get()));
+    return local_desc_.get();
+  }
+
+  virtual void NegotiationDone() {
+    ++update_session_description_counter_;
+  }
+
+  size_t update_session_description_counter_;
+
+ protected:
+  talk_base::scoped_ptr<cricket::MediaSessionDescriptionFactory>
+      session_description_factory_;
+  talk_base::scoped_ptr<const cricket::SessionDescription> local_desc_;
+  talk_base::scoped_ptr<const cricket::SessionDescription> remote_desc_;
+};
+
+// PeerConnectionSignalingTest creates two PeerConnectionSignaling instances
+// and connects the signals to two MockSignalingObservers.
+// This is used to test the signaling between two peers.
+class PeerConnectionSignalingTest: public testing::Test {
+ protected:
+  virtual void SetUp() {
+    channel_manager_.reset(new cricket::ChannelManager(
+        talk_base::Thread::Current()));
+    EXPECT_TRUE(channel_manager_->Init());
+    provider1_.reset(new MockSessionDescriptionProvider(
+        channel_manager_.get()));
+    provider2_.reset(new MockSessionDescriptionProvider(
+        channel_manager_.get()));
+
+    signaling1_.reset(new PeerConnectionSignaling(
+        talk_base::Thread::Current(), provider1_.get()));
+    observer1_.reset(new MockSignalingObserver());
+    signaling1_->SignalNewPeerConnectionMessage.connect(
+        observer1_.get(), &MockSignalingObserver::OnSignalingMessage);
+    signaling1_->SignalRemoteStreamAdded.connect(
+        observer1_.get(), &MockSignalingObserver::OnRemoteStreamAdded);
+    signaling1_->SignalRemoteStreamRemoved.connect(
+        observer1_.get(), &MockSignalingObserver::OnRemoteStreamRemoved);
+    signaling1_->SignalErrorMessageReceived.connect(
+        observer1_.get(), &MockSignalingObserver::OnErrorReceived);
+    signaling1_->SignalStateChange.connect(
+        observer1_.get(), &MockSignalingObserver::OnStateChange);
+
+    signaling2_.reset(new PeerConnectionSignaling(
+        talk_base::Thread::Current(), provider2_.get()));
+    observer2_.reset(new MockSignalingObserver());
+    signaling2_->SignalNewPeerConnectionMessage.connect(
+        observer2_.get(), &MockSignalingObserver::OnSignalingMessage);
+    signaling2_->SignalRemoteStreamAdded.connect(
+        observer2_.get(), &MockSignalingObserver::OnRemoteStreamAdded);
+    signaling2_->SignalRemoteStreamRemoved.connect(
+        observer2_.get(), &MockSignalingObserver::OnRemoteStreamRemoved);
+    signaling2_->SignalErrorMessageReceived.connect(
+        observer2_.get(), &MockSignalingObserver::OnErrorReceived);
+    signaling2_->SignalStateChange.connect(
+        observer2_.get(), &MockSignalingObserver::OnStateChange);
+  }
+
+  // Creates a collection of streams to be sent on signaling1_.
+  talk_base::scoped_refptr<StreamCollection> CreateLocalCollection1() {
+    std::string label(kStreamLabel1);
+    talk_base::scoped_refptr<LocalMediaStreamInterface> stream1(
+        MediaStream::Create(label));
+
+    // Add a local audio track.
+    talk_base::scoped_refptr<LocalAudioTrackInterface>
+        audio_track(AudioTrack::CreateLocal(kAudioTrackLabel1, NULL));
+    stream1->AddTrack(audio_track);
+
+    // Add a local video track.
+    talk_base::scoped_refptr<LocalVideoTrackInterface>
+        video_track(VideoTrack::CreateLocal(kVideoTrackLabel1, NULL));
+    stream1->AddTrack(video_track);
+
+    talk_base::scoped_refptr<StreamCollection> local_collection1(
+        StreamCollection::Create());
+    local_collection1->AddStream(stream1);
+
+    return local_collection1;
+  }
+
+  talk_base::scoped_refptr<StreamCollection> CreateLocalCollection2() {
+    std::string label(kStreamLabel2);
+    talk_base::scoped_refptr<LocalMediaStreamInterface> stream2(
+        MediaStream::Create(label));
+
+    // Add a local audio track.
+    talk_base::scoped_refptr<LocalAudioTrackInterface>
+        audio_track(AudioTrack::CreateLocal(kAudioTrackLabel2, NULL));
+    stream2->AddTrack(audio_track);
+
+    talk_base::scoped_refptr<StreamCollection> local_collection2(
+        StreamCollection::Create());
+    local_collection2->AddStream(stream2);
+
+    return local_collection2;
+  }
+
+  void VerifyStreamStates(StreamCollection* collection,
+                          MediaStreamInterface::ReadyState state,
+                          MediaStreamTrackInterface::TrackState track_state) {
+    for (size_t i = 0; i < collection->count(); ++i) {
+      MediaStreamInterface* stream = collection->at(i);
+      EXPECT_EQ(state, stream->ready_state());
+      for (size_t j = 0; j < stream->audio_tracks()->count(); ++j) {
+        AudioTrackInterface* audio = stream->audio_tracks()->at(j);
+        EXPECT_EQ(track_state, audio->state());
+      }
+      for (size_t j = 0; j < stream->video_tracks()->count(); ++j) {
+        VideoTrackInterface* video = stream->video_tracks()->at(j);
+        EXPECT_EQ(track_state, video->state());
+      }
+    }
+  }
+
+  // Initializes and sets up a simple call between signaling1_ and signaling2_.
+  // signaling1_ sends a stream with label kStreamLabel1 to signaling2_.
+  void SetUpOneWayCall() {
+    // Initialize signaling1_ and signaling2_ by providing the candidates.
+    signaling1_->OnCandidatesReady(candidates_);
+    signaling2_->OnCandidatesReady(candidates_);
+
+    // Create a local stream collection to be sent on signaling1_.
+    talk_base::scoped_refptr<StreamCollection> local_collection1(
+        CreateLocalCollection1());
+
+    talk_base::scoped_refptr<StreamCollection> local_collection2(
+        StreamCollection::Create());
+
+    // Connect all messages sent from signaling1_ to be received on signaling2_
+    observer1_->AnswerPeer(signaling2_.get(), local_collection2);
+    // Connect all messages sent from Peer2 to be received on Peer1
+    observer2_->AnswerPeer(signaling1_.get(), local_collection1);
+
+    signaling1_->CreateOffer(local_collection1);
+    EXPECT_EQ(PeerConnectionSignaling::kWaitingForAnswer,
+              signaling1_->GetState());
+    EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling2_->GetState());
+
+    // Process posted messages to generate the offer and the answer to the
+    // offer.
+    talk_base::Thread::Current()->ProcessMessages(1);
+    talk_base::Thread::Current()->ProcessMessages(1);
+
+    // Make sure everything is set up.
+    EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling1_->GetState());
+    EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling2_->GetState());
+
+    EXPECT_TRUE(observer2_->RemoteStream(kStreamLabel1) != NULL);
+    EXPECT_EQ(0u, signaling1_->remote_streams()->count());
+    EXPECT_EQ(1u, signaling2_->remote_streams()->count());
+  }
+
+  cricket::Candidates candidates_;
+  talk_base::scoped_ptr<MockSignalingObserver> observer1_;
+  talk_base::scoped_ptr<MockSignalingObserver> observer2_;
+  talk_base::scoped_ptr<MockSessionDescriptionProvider> provider1_;
+  talk_base::scoped_ptr<MockSessionDescriptionProvider> provider2_;
+  talk_base::scoped_ptr<PeerConnectionSignaling> signaling1_;
+  talk_base::scoped_ptr<PeerConnectionSignaling> signaling2_;
+  talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
+};
+
+TEST_F(PeerConnectionSignalingTest, SimpleOneWayCall) {
+  // Peer 1 creates a collection with one audio track and one video track.
+  talk_base::scoped_refptr<StreamCollection> local_collection1(
+      CreateLocalCollection1());
+
+  // Verify that the local stream is now initializing.
+  VerifyStreamStates(local_collection1.get(),
+                     MediaStreamInterface::kInitializing,
+                     MediaStreamTrackInterface::kInitializing);
+
+  // Peer 2 only receives. Create an empty collection.
+  talk_base::scoped_refptr<StreamCollection> local_collection2(
+      StreamCollection::Create());
+
+  // Connect all messages sent from Peer1 to be received on Peer2
+  observer1_->AnswerPeer(signaling2_.get(), local_collection2);
+  // Connect all messages sent from Peer2 to be received on Peer1
+  observer2_->AnswerPeer(signaling1_.get(), local_collection1);
+
+  // Peer 1 generates the offer. It is not sent since there are no
+  // local candidates ready.
+  signaling1_->CreateOffer(local_collection1);
+
+  // Process posted messages.
+  talk_base::Thread::Current()->ProcessMessages(1);
+  EXPECT_EQ(PeerConnectionSignaling::kInitializing, signaling1_->GetState());
+
+  // Initialize signaling1_ by providing the candidates.
+  signaling1_->OnCandidatesReady(candidates_);
+  EXPECT_EQ(PeerConnectionSignaling::kWaitingForAnswer,
+            signaling1_->GetState());
+  // Process posted messages to allow signaling1_ to send the offer.
+  talk_base::Thread::Current()->ProcessMessages(1);
+
+  // Verify that signaling2_ is still not initialized,
+  // even though it has received an offer.
+  EXPECT_EQ(PeerConnectionSignaling::kInitializing, signaling2_->GetState());
+
+  // Provide the candidates to signaling2_ and let it process the offer.
+  signaling2_->OnCandidatesReady(candidates_);
+  talk_base::Thread::Current()->ProcessMessages(1);
+
+  // Verify that the offer/answer have been exchanged and the state is good.
+  EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling1_->GetState());
+  EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling2_->GetState());
+
+  // Verify that the local stream is now sending.
+  VerifyStreamStates(local_collection1, MediaStreamInterface::kLive,
+                     MediaStreamTrackInterface::kLive);
+
+  // Verify that PeerConnection2 is aware of the sending stream.
+  EXPECT_TRUE(observer2_->RemoteStream(kStreamLabel1) != NULL);
+
+  // Verify that both peers have updated the session descriptions.
+  EXPECT_EQ(1u, provider1_->update_session_description_counter_);
+  EXPECT_EQ(1u, provider2_->update_session_description_counter_);
+}
+
+TEST_F(PeerConnectionSignalingTest, Glare) {
+  // Setup a call.
+  SetUpOneWayCall();
+
+  // Stop sending all messages automatically between Peer 1 and Peer 2.
+  observer1_->CancelAnswerPeer();
+  observer2_->CancelAnswerPeer();
+
+  // Create an empty collection for Peer 1.
+  talk_base::scoped_refptr<StreamCollection> local_collection1(
+      StreamCollection::Create());
+  // Create a collection for Peer 2.
+  talk_base::scoped_refptr<StreamCollection> local_collection2(
+      CreateLocalCollection2());
+
+  // Peer 1 creates an updated offer.
+  signaling1_->CreateOffer(local_collection1);
+  // Peer 2 creates an updated offer.
+  signaling2_->CreateOffer(local_collection2);
+
+  // Process posted messages.
+  talk_base::Thread::Current()->ProcessMessages(1);
+  talk_base::Thread::Current()->ProcessMessages(1);
+
+  std::string offer_1 = observer1_->last_message_;
+  std::string offer_2 = observer2_->last_message_;
+  EXPECT_EQ(PeerConnectionSignaling::kWaitingForAnswer,
+            signaling1_->GetState());
+  EXPECT_EQ(PeerConnectionSignaling::kWaitingForAnswer,
+            signaling2_->GetState());
+
+  // Connect all messages sent from Peer 1 to be received on Peer 2
+  observer1_->AnswerPeer(signaling2_.get(), local_collection2);
+  // Connect all messages sent from Peer 2 to be received on Peer 1
+  observer2_->AnswerPeer(signaling1_.get(), local_collection1);
+
+  // Deliver each offer to the opposite peer to create the glare condition.
+  signaling1_->ProcessSignalingMessage(offer_2, local_collection1);
+  signaling2_->ProcessSignalingMessage(offer_1, local_collection2);
+
+  talk_base::Thread::Current()->ProcessMessages(1);
+  talk_base::Thread::Current()->ProcessMessages(1);
+
+  // Make sure all is good.
+  EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling1_->GetState());
+  EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling2_->GetState());
+
+  // Verify that Peer 1 is receiving kStreamLabel2.
+  EXPECT_TRUE(observer1_->RemoteStream(kStreamLabel2) != NULL);
+  // Verify that Peer 2 doesn't receive any streams
+  // since the stream has been removed.
+  EXPECT_TRUE(observer2_->RemoteStream(kStreamLabel1) == NULL);
+
+  // Verify that both peers have updated the session descriptions.
+  EXPECT_EQ(2u, provider1_->update_session_description_counter_);
+  EXPECT_EQ(2u, provider2_->update_session_description_counter_);
+}
+
+TEST_F(PeerConnectionSignalingTest, AddRemoveStream) {
+  // Initialize signaling1_ and signaling2_ by providing the candidates.
+  signaling1_->OnCandidatesReady(candidates_);
+  signaling2_->OnCandidatesReady(candidates_);
+  // Create a local stream.
+  std::string label(kStreamLabel1);
+  talk_base::scoped_refptr<LocalMediaStreamInterface> stream(
+      MediaStream::Create(label));
+
+  // Add a local audio track.
+  talk_base::scoped_refptr<LocalAudioTrackInterface>
+      audio_track(AudioTrack::CreateLocal(kAudioTrackLabel1, NULL));
+  stream->AddTrack(audio_track);
+
+  // Add a local video track.
+  talk_base::scoped_refptr<LocalVideoTrackInterface>
+      video_track(VideoTrack::CreateLocal(kVideoTrackLabel1, NULL));
+  stream->AddTrack(video_track);
+
+  // Peer 1 creates an empty collection.
+  talk_base::scoped_refptr<StreamCollection> local_collection1(
+      StreamCollection::Create());
+
+  // Peer 2 creates an empty collection.
+  talk_base::scoped_refptr<StreamCollection> local_collection2(
+      StreamCollection::Create());
+
+  // Connect all messages sent from Peer1 to be received on Peer2
+  observer1_->AnswerPeer(signaling2_.get(), local_collection2);
+  // Connect all messages sent from Peer2 to be received on Peer1
+  observer2_->AnswerPeer(signaling1_.get(), local_collection1);
+
+  // Peer 1 creates an empty offer and sends it to Peer2.
+  signaling1_->CreateOffer(local_collection1);
+  // Process posted messages.
+  talk_base::Thread::Current()->ProcessMessages(1);
+  talk_base::Thread::Current()->ProcessMessages(1);
+
+  // Verify that both peers have updated the session descriptions.
+  EXPECT_EQ(1u, provider1_->update_session_description_counter_);
+  EXPECT_EQ(1u, provider2_->update_session_description_counter_);
+
+  // Peer2 adds a stream.
+  local_collection2->AddStream(stream);
+
+  signaling2_->CreateOffer(local_collection2);
+  talk_base::Thread::Current()->ProcessMessages(1);
+  talk_base::Thread::Current()->ProcessMessages(1);
+
+  // Verify that the PeerConnection 2 local stream is now sending.
+  VerifyStreamStates(local_collection2, MediaStreamInterface::kLive,
+                     MediaStreamTrackInterface::kLive);
+
+  // Verify that PeerConnection1 is aware of the sending stream.
+  EXPECT_TRUE(observer1_->RemoteStream(label) != NULL);
+
+  // Verify that both peers have updated the session descriptions.
+  EXPECT_EQ(2u, provider1_->update_session_description_counter_);
+  EXPECT_EQ(2u, provider2_->update_session_description_counter_);
+
+  // Remove the stream
+  local_collection2->RemoveStream(stream);
+
+  signaling2_->CreateOffer(local_collection2);
+  talk_base::Thread::Current()->ProcessMessages(1);
+  talk_base::Thread::Current()->ProcessMessages(1);
+
+  // Verify that PeerConnection1 is not aware of the sending stream.
+  EXPECT_TRUE(observer1_->RemoteStream(label) == NULL);
+
+  // Verify that the PeerConnection 2 local stream is now ended.
+  VerifyStreamStates(local_collection2, MediaStreamInterface::kEnded,
+                     MediaStreamTrackInterface::kEnded);
+
+  // Verify that both peers have updated the session descriptions.
+  EXPECT_EQ(3u, provider1_->update_session_description_counter_);
+  EXPECT_EQ(3u, provider2_->update_session_description_counter_);
+}
+
+TEST_F(PeerConnectionSignalingTest, ShutDown) {
+  // Setup a call.
+  SetUpOneWayCall();
+
+  signaling1_->SendShutDown();
+
+  EXPECT_EQ_WAIT(PeerConnectionSignaling::kShutdownComplete,
+                 signaling1_->GetState(), 10);
+  EXPECT_EQ_WAIT(PeerConnectionSignaling::kShutdownComplete,
+                 signaling2_->GetState(), 10);
+
+  EXPECT_EQ(0u, signaling1_->remote_streams()->count());
+  EXPECT_EQ(0u, signaling2_->remote_streams()->count());
+  EXPECT_TRUE(observer2_->RemoteStream(kStreamLabel1) == NULL);
+  EXPECT_EQ(PeerConnectionSignaling::kShutdownComplete, observer1_->state_);
+  EXPECT_EQ(PeerConnectionSignaling::kShutdownComplete, observer2_->state_);
+
+  // Verify that both peers have updated the session descriptions.
+  EXPECT_EQ(2u, provider1_->update_session_description_counter_);
+  EXPECT_EQ(2u, provider2_->update_session_description_counter_);
+}
+
+TEST_F(PeerConnectionSignalingTest, ReceiveError) {
+  // Initialize signaling1_
+  signaling1_->OnCandidatesReady(candidates_);
+
+  talk_base::scoped_refptr<StreamCollection> local_collection1(
+      CreateLocalCollection1());
+
+  signaling1_->CreateOffer(local_collection1);
+  talk_base::Thread::Current()->ProcessMessages(1);
+  EXPECT_EQ(PeerConnectionSignaling::kWaitingForAnswer,
+            signaling1_->GetState());
+
+  RoapSession roap_session;
+  roap_session.Parse(observer1_->last_message_);
+  signaling1_->ProcessSignalingMessage(roap_session.CreateErrorMessage(
+      kNoMatch), local_collection1);
+  EXPECT_EQ(kNoMatch, observer1_->last_error_);
+
+  // Check that signaling has cleaned up.
+  EXPECT_EQ(PeerConnectionSignaling::kIdle, signaling1_->GetState());
+
+  signaling1_->CreateOffer(local_collection1);
+  talk_base::Thread::Current()->ProcessMessages(1);
+  EXPECT_EQ(PeerConnectionSignaling::kWaitingForAnswer,
+            signaling1_->GetState());
+}
+
+}  // namespace webrtc
+
diff --git a/talk/app/webrtc/portallocatorfactory.cc b/talk/app/webrtc/portallocatorfactory.cc
new file mode 100644
index 0000000..a77122a
--- /dev/null
+++ b/talk/app/webrtc/portallocatorfactory.cc
@@ -0,0 +1,86 @@
+/*
+ * libjingle
+ * Copyright 2004--2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/portallocatorfactory.h"
+
+#include "talk/base/network.h"
+#include "talk/base/basicpacketsocketfactory.h"
+#include "talk/base/thread.h"
+#include "talk/p2p/client/httpportallocator.h"
+
+static const char kUserAgent[] = "PeerConnection User Agent";
+
+namespace webrtc {
+
+using cricket::HttpPortAllocator;
+using talk_base::scoped_ptr;
+
+talk_base::scoped_refptr<PortAllocatorFactoryInterface>
+PortAllocatorFactory::Create(
+    talk_base::Thread* worker_thread) {
+  talk_base::RefCountedObject<PortAllocatorFactory>* allocator =
+        new talk_base::RefCountedObject<PortAllocatorFactory>(worker_thread);
+  return allocator;
+}
+
+PortAllocatorFactory::PortAllocatorFactory(talk_base::Thread* worker_thread)
+    : network_manager_(new talk_base::BasicNetworkManager()),
+      socket_factory_(new talk_base::BasicPacketSocketFactory(worker_thread)) {
+}
+
+PortAllocatorFactory::~PortAllocatorFactory() {}
+
+cricket::PortAllocator* PortAllocatorFactory::CreatePortAllocator(
+    const std::vector<StunConfiguration>& stun,
+    const std::vector<TurnConfiguration>& turn) {
+
+  scoped_ptr<HttpPortAllocator> allocator(new HttpPortAllocator(
+      network_manager_.get(), socket_factory_.get(), kUserAgent));
+
+  std::vector<talk_base::SocketAddress> stun_hosts;
+  typedef std::vector<StunConfiguration>::const_iterator StunIt;
+  for (StunIt stun_it = stun.begin(); stun_it != stun.end(); ++stun_it) {
+    stun_hosts.push_back(stun_it->server);
+  }
+  allocator->SetStunHosts(stun_hosts);
+
+  std::vector<std::string> relay_hosts;
+  typedef std::vector<TurnConfiguration>::const_iterator TurnIt;
+  for (TurnIt turn_it = turn.begin(); turn_it != turn.end(); ++turn_it) {
+    relay_hosts.push_back(turn_it->server.hostname());
+  }
+  allocator->SetRelayHosts(relay_hosts);
+
+  // Currently we can only set the password of one relay server: the password
+  // of the first server is used. The user name cannot currently be set.
+  // TODO: See above limitations.
+  if (turn.size() > 0)
+    allocator->SetRelayToken(turn[0].password);
+  return allocator.release();
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/portallocatorfactory.h b/talk/app/webrtc/portallocatorfactory.h
new file mode 100644
index 0000000..1b0d753
--- /dev/null
+++ b/talk/app/webrtc/portallocatorfactory.h
@@ -0,0 +1,70 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file defines the default implementation of
+// PortAllocatorFactoryInterface.
+// This implementation creates instances of cricket::HttpPortAllocator and uses
+// the BasicNetworkManager and BasicPacketSocketFactory.
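+//
+// Rough usage sketch (a minimal, illustrative example; StunConfiguration and
+// TurnConfiguration are assumed to be the configuration types declared on
+// PortAllocatorFactoryInterface in peerconnection.h):
+//
+//   talk_base::scoped_refptr<PortAllocatorFactoryInterface> factory(
+//       PortAllocatorFactory::Create(worker_thread));
+//   std::vector<PortAllocatorFactoryInterface::StunConfiguration> stun_config;
+//   std::vector<PortAllocatorFactoryInterface::TurnConfiguration> turn_config;
+//   talk_base::scoped_ptr<cricket::PortAllocator> allocator(
+//       factory->CreatePortAllocator(stun_config, turn_config));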
+
+#ifndef TALK_APP_WEBRTC_PORTALLOCATORFACTORY_H_
+#define TALK_APP_WEBRTC_PORTALLOCATORFACTORY_H_
+
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/base/scoped_ptr.h"
+
+namespace cricket {
+class PortAllocator;
+}
+
+namespace talk_base {
+class BasicNetworkManager;
+class BasicPacketSocketFactory;
+}
+
+namespace webrtc {
+
+class PortAllocatorFactory : public PortAllocatorFactoryInterface {
+ public:
+  static talk_base::scoped_refptr<PortAllocatorFactoryInterface> Create(
+      talk_base::Thread* worker_thread);
+
+  virtual cricket::PortAllocator* CreatePortAllocator(
+      const std::vector<StunConfiguration>& stun,
+      const std::vector<TurnConfiguration>& turn);
+
+ protected:
+  explicit PortAllocatorFactory(talk_base::Thread* worker_thread);
+  ~PortAllocatorFactory();
+
+ private:
+  talk_base::scoped_ptr<talk_base::BasicNetworkManager> network_manager_;
+  talk_base::scoped_ptr<talk_base::BasicPacketSocketFactory> socket_factory_;
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_PORTALLOCATORFACTORY_H_
diff --git a/talk/app/webrtc/roaperrorcodes.h b/talk/app/webrtc/roaperrorcodes.h
new file mode 100644
index 0000000..d13230d
--- /dev/null
+++ b/talk/app/webrtc/roaperrorcodes.h
@@ -0,0 +1,48 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains an enum of the possible error codes used in ROAP.
+// The meaning of the error codes is defined in
+// http://tools.ietf.org/html/draft-jennings-rtcweb-signaling-01.
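+//
+// The enum values below map index-for-index onto the errorType strings
+// ("NOMATCH", "TIMEOUT", ...) serialized by roapmessages.cc.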
+
+#ifndef TALK_APP_WEBRTC_ROAPERRORCODES_H_
+#define TALK_APP_WEBRTC_ROAPERRORCODES_H_
+
+namespace webrtc {
+
+enum RoapErrorCode {
+  kNoMatch,
+  kTimeout,
+  kRefused,
+  kConflict,
+  kDoubleConflict,
+  kFailed
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_ROAPERRORCODES_H_
diff --git a/talk/app/webrtc/roapmessages.cc b/talk/app/webrtc/roapmessages.cc
new file mode 100644
index 0000000..e0528a7
--- /dev/null
+++ b/talk/app/webrtc/roapmessages.cc
@@ -0,0 +1,293 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/roapmessages.h"
+
+#include "talk/app/webrtc/webrtcsdp.h"
+#include "talk/base/json.h"
+
+namespace webrtc {
+
+using cricket::Candidate;
+using cricket::SessionDescription;
+
+// ROAP message types. Must match the enum RoapMessageType.
+static const char* kMessageTypes[] = {
+  "OFFER",
+  "ANSWER",
+  "OK",
+  "SHUTDOWN",
+  "ERROR",
+};
+
+// ROAP error messages. Must match the enum RoapErrorCode.
+static const char* kErrorMessages[] = {
+  "NOMATCH",
+  "TIMEOUT",
+  "REFUSED",
+  "CONFLICT",
+  "DOUBLECONFLICT",
+  "FAILED",
+};
+
+// ROAP JSON keys.
+static const char kOffererSessionId[] = "offererSessionId";
+static const char kAnswererSessionId[] = "answererSessionId";
+static const char kSetSessionToken[] = "setSessionToken";
+static const char kSetResponseToken[] = "setResponseToken";
+static const char kResponseToken[] = "responseToken";
+static const char kSessionToken[] = "sessionToken";
+static const char kMessageType[] = "messageType";
+static const char kSequenceNumber[] = "seq";
+static const char kSessionDescription[] = "sdp";
+static const char kErrorType[] = "errorType";
+static const char kTieBreaker[] = "tieBreaker";
+static const char kMoreComing[] = "moreComing";
+
+RoapMessageBase::RoapMessageBase() : type_(kInvalid), seq_(0) {
+}
+
+RoapMessageBase::RoapMessageBase(RoapMessageType type,
+                                 const std::string& offer_session_id,
+                                 const std::string& answer_session_id,
+                                 const std::string& session_token,
+                                 const std::string& response_token,
+                                 uint32 seq)
+    : type_(type),
+      offer_session_id_(offer_session_id),
+      answer_session_id_(answer_session_id),
+      session_token_(session_token),
+      response_token_(response_token),
+      seq_(seq) {
+}
+
+bool RoapMessageBase::Parse(const std::string& message) {
+  Json::Reader reader;
+  if (!reader.parse(message, jmessage_))
+    return false;
+
+  std::string message_type;
+  GetStringFromJsonObject(jmessage_, kMessageType, &message_type);
+  if (message_type.empty())
+    return false;
+  bool valid_message_type = false;
+  for (int i = 0; i < kInvalid; i++) {
+    if (message_type == kMessageTypes[i]) {
+      type_ = static_cast<RoapMessageType>(i);
+      valid_message_type = true;
+      break;
+    }
+  }
+  if (!valid_message_type)
+    return false;
+
+  // Parse offererSessionId. Allow error messages to not have an
+  // offererSessionId.
+  if (!GetStringFromJsonObject(jmessage_, kOffererSessionId,
+                               &offer_session_id_) ||
+      offer_session_id_.empty()) {
+    if (type_ != kError)
+      return false;
+  }
+
+  // answererSessionId does not necessarily need to exist in MessageBase.
+  GetStringFromJsonObject(jmessage_, kAnswererSessionId, &answer_session_id_);
+  // setSessionToken and setResponseToken are not required.
+  GetStringFromJsonObject(jmessage_, kSetSessionToken, &session_token_);
+  GetStringFromJsonObject(jmessage_, kSetResponseToken, &response_token_);
+
+  unsigned int temp_seq;
+  if (!GetUIntFromJsonObject(jmessage_, kSequenceNumber, &temp_seq)) {
+    return false;
+  }
+  if (temp_seq > 0xFFFFFFFF)
+    return false;
+  seq_ = static_cast<uint32>(temp_seq);
+
+  return true;
+}
+
+std::string RoapMessageBase::Serialize() {
+  Json::Value message;
+  SerializeElement(&message);
+  Json::StyledWriter writer;
+  return writer.write(message);
+}
+
+void RoapMessageBase::SerializeElement(Json::Value* message) {
+  ASSERT(message != NULL);
+  (*message)[kMessageType] = kMessageTypes[type_];
+  (*message)[kOffererSessionId] = offer_session_id_;
+  if (!answer_session_id_.empty())
+    (*message)[kAnswererSessionId] = answer_session_id_;
+  if (!session_token_.empty())
+    (*message)[kSessionToken] = session_token_;
+  if (!response_token_.empty())
+    (*message)[kResponseToken] = response_token_;
+  (*message)[kSequenceNumber] = seq_;
+}
+
+RoapOffer::RoapOffer(const std::string& offer_session_id,
+                     const std::string& answer_session_id,
+                     const std::string& session_token,
+                     uint32 seq,
+                     uint32 tie_breaker,
+                     const SessionDescription* desc,
+                     const std::vector<cricket::Candidate>& candidates)
+    : RoapMessageBase(kOffer, offer_session_id, answer_session_id,
+                      session_token, "", seq),
+      tie_breaker_(tie_breaker),
+      desc_(desc),
+      candidates_(candidates) {
+}
+
+RoapOffer::RoapOffer(const RoapMessageBase& base)
+    : RoapMessageBase(base),
+      desc_(NULL) {}
+
+bool RoapOffer::Parse() {
+  if (!GetUIntFromJsonObject(jmessage_, kTieBreaker, &tie_breaker_)) {
+    return false;
+  }
+
+  std::string sdp_message;
+  if (!GetStringFromJsonObject(jmessage_, kSessionDescription, &sdp_message))
+    return false;
+
+  parsed_desc_.reset(new cricket::SessionDescription());
+  return SdpDeserialize(sdp_message, parsed_desc_.get(),
+                        &candidates_);
+}
+
+void RoapOffer::SerializeElement(Json::Value* message) {
+  ASSERT(message != NULL);
+  RoapMessageBase::SerializeElement(message);
+  (*message)[kTieBreaker] = tie_breaker_;
+  (*message)[kSessionDescription] = SdpSerialize(*desc_, candidates_);
+}
+
+RoapAnswer::RoapAnswer(const std::string& offer_session_id,
+                       const std::string& answer_session_id,
+                       const std::string& session_token,
+                       const std::string& response_token,
+                       uint32 seq,
+                       const SessionDescription* desc,
+                       const std::vector<Candidate>& candidates)
+    : RoapMessageBase(kAnswer, offer_session_id, answer_session_id,
+                      session_token, response_token, seq),
+      desc_(desc),
+      candidates_(candidates) {
+}
+
+RoapAnswer::RoapAnswer(const RoapMessageBase& base)
+    : RoapMessageBase(base),
+      more_coming_(false),
+      desc_(NULL) {}
+
+bool RoapAnswer::Parse() {
+  std::string more;
+  if (GetStringFromJsonObject(jmessage_, kMoreComing, &more) && more == "true")
+    more_coming_ = true;
+
+  std::string sdp_message;
+  if (!GetStringFromJsonObject(jmessage_, kSessionDescription, &sdp_message))
+    return false;
+
+  parsed_desc_.reset(new cricket::SessionDescription());
+  return SdpDeserialize(sdp_message, parsed_desc_.get(), &candidates_);
+}
+
+void RoapAnswer::SerializeElement(Json::Value* message) {
+  ASSERT(message != NULL);
+  RoapMessageBase::SerializeElement(message);
+
+  (*message)[kSessionDescription] = SdpSerialize(*desc_, candidates_);
+}
+
+RoapError::RoapError(const RoapMessageBase& base)
+    : RoapMessageBase(base), error_(kFailed) {
+}
+
+RoapError::RoapError(const std::string& offer_session_id,
+                     const std::string& answer_session_id,
+                     const std::string& session_token,
+                     const std::string& response_token,
+                     uint32 seq,
+                     RoapErrorCode error)
+    : RoapMessageBase(kError, offer_session_id, answer_session_id,
+                      session_token, response_token, seq),
+      error_(error) {
+}
+
+bool RoapError::Parse() {
+  std::string error_string;
+  GetStringFromJsonObject(jmessage_, kErrorType, &error_string);
+  if (error_string.empty())
+    return false;
+  for (int i = 0; i < ARRAY_SIZE(kErrorMessages); i++) {
+    if (error_string == kErrorMessages[i]) {
+      error_ = static_cast<RoapErrorCode>(i);
+      return true;
+    }
+  }
+  return false;
+}
+
+void RoapError::SerializeElement(Json::Value* message) {
+  ASSERT(message != NULL);
+  ASSERT(error_ < ARRAY_SIZE(kErrorMessages));
+  RoapMessageBase::SerializeElement(message);
+
+  (*message)[kErrorType] = kErrorMessages[error_];
+}
+
+RoapOk::RoapOk(const RoapMessageBase& base)
+    : RoapMessageBase(base) {
+}
+
+RoapOk::RoapOk(const std::string& offer_session_id,
+               const std::string& answer_session_id,
+               const std::string& session_token,
+               const std::string& response_token,
+               uint32 seq)
+    : RoapMessageBase(kOk, offer_session_id, answer_session_id, session_token,
+                      response_token, seq) {
+}
+
+RoapShutdown::RoapShutdown(const RoapMessageBase& base)
+    : RoapMessageBase(base) {
+}
+
+RoapShutdown::RoapShutdown(const std::string& offer_session_id,
+                           const std::string& answer_session_id,
+                           const std::string& session_token,
+                           uint32 seq)
+    : RoapMessageBase(kShutdown, offer_session_id, answer_session_id,
+                      session_token, "", seq) {
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/roapmessages.h b/talk/app/webrtc/roapmessages.h
new file mode 100644
index 0000000..fe99e1f
--- /dev/null
+++ b/talk/app/webrtc/roapmessages.h
@@ -0,0 +1,202 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains classes for parsing and serializing ROAP messages.
+// The ROAP messages are defined in
+// http://tools.ietf.org/html/draft-jennings-rtcweb-signaling-01.
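+//
+// Rough usage sketch for the receiving side (a minimal, illustrative example;
+// |incoming| stands for a received ROAP message string):
+//
+//   RoapMessageBase base;
+//   if (base.Parse(incoming) && base.type() == RoapMessageBase::kOffer) {
+//     RoapOffer offer(base);
+//     if (offer.Parse()) {
+//       talk_base::scoped_ptr<const cricket::SessionDescription> desc(
+//           offer.ReleaseSessionDescription());  // Ownership is transferred.
+//     }
+//   }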
+
+#ifndef TALK_APP_WEBRTC_ROAPMESSAGES_H_
+#define TALK_APP_WEBRTC_ROAPMESSAGES_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/roaperrorcodes.h"
+#include "talk/base/basictypes.h"
+#include "talk/base/json.h"
+#include "talk/base/scoped_ptr.h"
+#include "talk/p2p/base/candidate.h"
+#include "talk/p2p/base/sessiondescription.h"
+
+namespace webrtc {
+
+class RoapMessageBase {
+ public:
+  enum RoapMessageType {
+    kOffer = 0,
+    kAnswer = 1,
+    kOk = 2,
+    kShutdown = 3,
+    kError = 4,
+    kInvalid = 5,
+  };
+  RoapMessageBase();
+  RoapMessageBase(RoapMessageType type,
+                  const std::string& offer_session_id,
+                  const std::string& answer_session_id,
+                  const std::string& session_token,
+                  const std::string& response_token,
+                  uint32 seq);
+
+  bool Parse(const std::string& message);
+  std::string Serialize();
+
+  RoapMessageType type() const { return type_; }
+  const std::string& offer_session_id() const { return offer_session_id_; }
+  const std::string& answer_session_id() const { return answer_session_id_; }
+  const std::string& session_token() const { return session_token_; }
+  const std::string& response_token() const { return response_token_; }
+  uint32 seq() const { return seq_; }
+
+ protected:
+  virtual void SerializeElement(Json::Value* message);
+  Json::Value jmessage_;  // Contains the parsed json message.
+
+ private:
+  RoapMessageType type_;
+  std::string offer_session_id_;
+  std::string answer_session_id_;
+  std::string session_token_;
+  std::string response_token_;
+  uint32 seq_;
+};
+
+class RoapAnswer : public RoapMessageBase {
+ public:
+  explicit RoapAnswer(const RoapMessageBase& base);
+  // Note that the SessionDescription desc is used as a weak reference.
+  // The user of this class must ensure that desc outlives an instance of this
+  // object.
+  RoapAnswer(const std::string& offer_session_id,
+             const std::string& answer_session_id,
+             const std::string& session_token,
+             const std::string& response_token,
+             uint32 seq,
+             const cricket::SessionDescription* desc,
+             const std::vector<cricket::Candidate>& candidates);
+  bool Parse();
+
+  // Returns the remote SessionDescription if the session description has been
+  // parsed; ownership is transferred to the caller.
+  // Returns NULL otherwise.
+  const cricket::SessionDescription* ReleaseSessionDescription() {
+    return parsed_desc_.release();
+  }
+  const std::vector<cricket::Candidate>& candidates() const {
+    return candidates_;
+  }
+  bool more_coming() const { return more_coming_; }
+
+ protected:
+  virtual void SerializeElement(Json::Value* message);
+
+ private:
+  bool more_coming_;
+  // Session description parsed in an offer.
+  talk_base::scoped_ptr<cricket::SessionDescription> parsed_desc_;
+  // Weak ref to a session description provided in the constructor.
+  const cricket::SessionDescription* desc_;
+  std::vector<cricket::Candidate> candidates_;
+};
+
+class RoapOffer : public RoapMessageBase {
+ public:
+  explicit RoapOffer(const RoapMessageBase& base);
+  // Note that the SessionDescription desc is used as a weak reference.
+  // The user of this class must ensure that desc outlives an instance of this
+  // object.
+  RoapOffer(const std::string& offer_session_id,
+            const std::string& answer_session_id,
+            const std::string& session_token,
+            uint32 seq,
+            uint32 tie_breaker,
+            const cricket::SessionDescription* desc,
+            const std::vector<cricket::Candidate>& candidates);
+  bool Parse();
+
+  uint32 tie_breaker() const { return tie_breaker_; }
+  // Returns the remote SessionDescription if the session description has been
+  // parsed; ownership is transferred to the caller.
+  // Returns NULL otherwise.
+  const cricket::SessionDescription* ReleaseSessionDescription() {
+    return parsed_desc_.release();
+  }
+  const std::vector<cricket::Candidate>& candidates() { return candidates_; }
+
+ protected:
+  virtual void SerializeElement(Json::Value* message);
+
+ private:
+  uint32 tie_breaker_;
+  // Session description parsed in an offer.
+  talk_base::scoped_ptr<cricket::SessionDescription> parsed_desc_;
+  // Weak reference to a session description provided in the constructor.
+  const cricket::SessionDescription* desc_;
+  std::vector<cricket::Candidate> candidates_;
+};
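+
+// Illustrative sketch of the serialize/parse round trip (|desc| and
+// |candidates| are placeholders owned by the caller). Because the
+// SessionDescription is only referenced weakly, |desc| must stay alive until
+// Serialize() has been called:
+//
+//   RoapOffer offer("offer_1", "", "", 1, 100, desc, candidates);
+//   std::string json = offer.Serialize();
+//   RoapMessageBase base;
+//   if (base.Parse(json) && base.type() == RoapMessageBase::kOffer) {
+//     RoapOffer parsed(base);
+//     parsed.Parse();  // ReleaseSessionDescription() is valid after this.
+//   }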
+
+class RoapError : public RoapMessageBase {
+ public:
+  explicit RoapError(const RoapMessageBase& base);
+  RoapError(const std::string& offer_session_id,
+            const std::string& answer_session_id,
+            const std::string& session_token,
+            const std::string& response_token,
+            uint32 seq,
+            RoapErrorCode error);
+  bool Parse();
+  RoapErrorCode error() const { return error_; }
+
+ protected:
+  virtual void SerializeElement(Json::Value* message);
+
+ private:
+  RoapErrorCode error_;
+};
+
+class RoapOk : public RoapMessageBase {
+ public:
+  explicit RoapOk(const RoapMessageBase& base);
+  RoapOk(const std::string& offer_session_id,
+         const std::string& answer_session_id,
+         const std::string& session_token,
+         const std::string& response_token,
+         uint32 seq);
+};
+
+class RoapShutdown : public RoapMessageBase {
+ public:
+  explicit RoapShutdown(const RoapMessageBase& base);
+  RoapShutdown(const std::string& offer_session_id,
+               const std::string& answer_session_id,
+               const std::string& session_token,
+               uint32 seq);
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_ROAPMESSAGES_H_
diff --git a/talk/app/webrtc/roapmessages_unittest.cc b/talk/app/webrtc/roapmessages_unittest.cc
new file mode 100644
index 0000000..b432667
--- /dev/null
+++ b/talk/app/webrtc/roapmessages_unittest.cc
@@ -0,0 +1,227 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/roapmessages.h"
+#include "talk/base/gunit.h"
+#include "talk/base/logging.h"
+#include "talk/p2p/base/transport.h"
+#include "talk/session/phone/mediasession.h"
+
+using cricket::Candidates;
+using cricket::AudioContentDescription;
+using cricket::SessionDescription;
+using cricket::StreamParams;
+using cricket::VideoContentDescription;
+
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kStream1Cname[] = "stream_1_cname";
+static const char kAudioTrackLabel1[] = "local_audio_1";
+static const uint32 kAudioTrack1Ssrc = 1;
+
+static const char kOfferSessionId[] = "offer_1";
+static const char kAnswerSessionId[] = "answer_1";
+static const char kSessionToken[] = "session_1";
+
+static const char kOfferReference[] =
+    "{\n"
+    "   \"answererSessionId\" : \"answer_1\",\n"
+    "   \"messageType\" : \"OFFER\",\n"
+    "   \"offererSessionId\" : \"offer_1\",\n"
+    "   \"sdp\" : \"v=0\\r\\n"
+    "o=- 0 0 IN IP4 127.0.0.1\\r\\n"
+    "s=\\r\\n"
+    "t=0 0\\r\\n"
+    "m=audio 0 RTP/AVPF\\r\\n"
+    "a=mid:audio\\r\\n"
+    "a=rtcp-mux\\r\\n"
+    "a=ssrc:1 cname:stream_1_cname mslabel:local_stream_1 "
+        "label:local_audio_1\\r\\n"
+    "\",\n"  // End of sdp.
+    "   \"seq\" : 1,\n"
+    "   \"tieBreaker\" : 0\n"
+    "}\n";
+
+static const char kAnswerReference[] =
+    "{\n"
+    "   \"answererSessionId\" : \"answer_1\",\n"
+    "   \"messageType\" : \"ANSWER\",\n"
+    "   \"offererSessionId\" : \"offer_1\",\n"
+    "   \"sdp\" : \"v=0\\r\\n"
+    "o=- 0 0 IN IP4 127.0.0.1\\r\\n"
+    "s=\\r\\n"
+    "t=0 0\\r\\n"
+    "m=audio 0 RTP/AVPF\\r\\n"
+    "a=mid:audio\\r\\n"
+    "a=rtcp-mux\\r\\n"
+    "a=ssrc:1 cname:stream_1_cname mslabel:local_stream_1 "
+        "label:local_audio_1\\r\\n"
+    "\",\n"  // End of sdp.
+    "   \"seq\" : 1\n"
+    "}\n";
+
+static const char kOkReference[] =
+    "{\n"
+    "   \"answererSessionId\" : \"answer_1\",\n"
+    "   \"messageType\" : \"OK\",\n"
+    "   \"offererSessionId\" : \"offer_1\",\n"
+    "   \"seq\" : 1\n"
+    "}\n";
+
+static const char kShutdownReference[] =
+    "{\n"
+    "   \"answererSessionId\" : \"answer_1\",\n"
+    "   \"messageType\" : \"SHUTDOWN\",\n"
+    "   \"offererSessionId\" : \"offer_1\",\n"
+    "   \"seq\" : 1\n"
+    "}\n";
+
+static const char kErrorReference[] =
+    "{\n"
+    "   \"answererSessionId\" : \"answer_1\",\n"
+    "   \"errorType\" : \"TIMEOUT\",\n"
+    "   \"messageType\" : \"ERROR\",\n"
+    "   \"offererSessionId\" : \"offer_1\",\n"
+    "   \"seq\" : 1\n"
+    "}\n";
+
+// RoapMessageTest creates a session description that matches the
+// reference messages above.
+class RoapMessageTest: public testing::Test {
+ public:
+  void SetUp() {
+    talk_base::scoped_ptr<AudioContentDescription> audio(
+        new AudioContentDescription());
+    audio->set_rtcp_mux(true);
+    StreamParams audio_stream;
+    audio_stream.name = kAudioTrackLabel1;
+    audio_stream.cname = kStream1Cname;
+    audio_stream.sync_label = kStreamLabel1;
+    audio_stream.ssrcs.push_back(kAudioTrack1Ssrc);
+    audio->AddStream(audio_stream);
+    desc1_.AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
+                      audio.release());
+  }
+
+ protected:
+  cricket::SessionDescription desc1_;
+  cricket::Candidates empty_candidates_;
+};
+
+static bool CompareRoapBase(const webrtc::RoapMessageBase& base1,
+                            const webrtc::RoapMessageBase& base2) {
+  return base1.type() == base2.type() &&
+      base1.offer_session_id() == base2.offer_session_id() &&
+      base1.answer_session_id() == base2.answer_session_id() &&
+      base1.session_token() == base2.session_token() &&
+      base1.response_token() == base2.response_token() &&
+      base1.seq() == base2.seq();
+}
+
+static bool CompareRoapOffer(const webrtc::RoapOffer& offer1,
+                             const webrtc::RoapOffer& offer2) {
+  return CompareRoapBase(offer1, offer2) &&
+      offer1.tie_breaker() == offer2.tie_breaker();
+}
+
+static bool CompareRoapAnswer(const webrtc::RoapAnswer& answer1,
+                              const webrtc::RoapAnswer& answer2) {
+  return CompareRoapBase(answer1, answer2) &&
+      answer1.more_coming() == answer2.more_coming();
+}
+
+static bool CompareRoapError(const webrtc::RoapError& error1,
+                             const webrtc::RoapError& error2) {
+  return CompareRoapBase(error1, error2) &&
+      error1.error() == error2.error();
+}
+
+TEST_F(RoapMessageTest, RoapOffer) {
+  webrtc::RoapOffer offer(kOfferSessionId, kAnswerSessionId, "", 1, 0, &desc1_,
+                          empty_candidates_);
+  std::string offer_string = offer.Serialize();
+  EXPECT_TRUE(kOfferReference == offer_string);
+
+  webrtc::RoapMessageBase base;
+  EXPECT_TRUE(base.Parse(kOfferReference));
+  EXPECT_EQ(webrtc::RoapMessageBase::kOffer, base.type());
+  webrtc::RoapOffer parsed_offer(base);
+  EXPECT_TRUE(parsed_offer.Parse());
+  EXPECT_TRUE(CompareRoapOffer(offer, parsed_offer));
+}
+
+TEST_F(RoapMessageTest, RoapAnswer) {
+  webrtc::RoapAnswer answer(kOfferSessionId, kAnswerSessionId, "", "", 1,
+                            &desc1_, empty_candidates_);
+  std::string answer_string = answer.Serialize();
+  EXPECT_TRUE(kAnswerReference == answer_string);
+
+  webrtc::RoapMessageBase base;
+  EXPECT_TRUE(base.Parse(kAnswerReference));
+  EXPECT_EQ(webrtc::RoapMessageBase::kAnswer, base.type());
+  webrtc::RoapAnswer parsed_answer(base);
+  EXPECT_TRUE(parsed_answer.Parse());
+  EXPECT_TRUE(CompareRoapAnswer(answer, parsed_answer));
+}
+
+TEST_F(RoapMessageTest, RoapOk) {
+  webrtc::RoapOk ok(kOfferSessionId, kAnswerSessionId, "", "", 1);
+  std::string ok_string = ok.Serialize();
+  EXPECT_TRUE(kOkReference == ok_string);
+
+  webrtc::RoapMessageBase base;
+  EXPECT_TRUE(base.Parse(kOkReference));
+  EXPECT_EQ(webrtc::RoapMessageBase::kOk, base.type());
+  webrtc::RoapOk parsed_ok(base);
+  EXPECT_TRUE(CompareRoapBase(ok, parsed_ok));
+}
+
+TEST_F(RoapMessageTest, RoapShutdown) {
+  webrtc::RoapShutdown shutdown(kOfferSessionId, kAnswerSessionId, "", 1);
+  std::string shutdown_string = shutdown.Serialize();
+  EXPECT_TRUE(kShutdownReference == shutdown_string);
+
+  webrtc::RoapMessageBase base;
+  EXPECT_TRUE(base.Parse(kShutdownReference));
+  EXPECT_EQ(webrtc::RoapMessageBase::kShutdown, base.type());
+  webrtc::RoapShutdown parsed_shutdown(base);
+  EXPECT_TRUE(CompareRoapBase(shutdown, parsed_shutdown));
+}
+
+TEST_F(RoapMessageTest, RoapError) {
+  webrtc::RoapError error(kOfferSessionId, kAnswerSessionId, "", "", 1,
+                          webrtc::kTimeout);
+  std::string error_string = error.Serialize();
+  EXPECT_TRUE(kErrorReference == error_string);
+
+  webrtc::RoapMessageBase base;
+  EXPECT_TRUE(base.Parse(kErrorReference));
+  EXPECT_EQ(webrtc::RoapMessageBase::kError, base.type());
+  webrtc::RoapError parsed_error(base);
+  EXPECT_TRUE(parsed_error.Parse());
+  EXPECT_TRUE(CompareRoapError(error, parsed_error));
+}
diff --git a/talk/app/webrtc/roapsession.cc b/talk/app/webrtc/roapsession.cc
new file mode 100644
index 0000000..6d76b0c
--- /dev/null
+++ b/talk/app/webrtc/roapsession.cc
@@ -0,0 +1,286 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/roapsession.h"
+
+#include "talk/app/webrtc/roapmessages.h"
+#include "talk/base/helpers.h"
+#include "talk/base/logging.h"
+
+namespace webrtc {
+
+static const uint32 kMaxTieBreaker = 0xFFFFFFFE;
+
+static std::string CreateLocalId(const std::string& remote_id) {
+  std::string local_id;
+  do {
+    talk_base::CreateRandomString(32, &local_id);
+    ASSERT(!local_id.empty());
+  } while (local_id == remote_id);
+  return local_id;
+}
+
+RoapSession::RoapSession()
+  : seq_(0),
+    waiting_for_answer_(false),
+    received_seq_(0) {
+}
+
+std::string RoapSession::CreateOffer(
+    const SessionDescription* desc,
+    const std::vector<Candidate>& candidates) {
+  if (local_id_.empty()) {
+    local_id_ = CreateLocalId(remote_id_);
+  }
+
+  do {
+    local_tie_breaker_ = talk_base::CreateRandomNonZeroId();
+  } while (local_tie_breaker_ > kMaxTieBreaker);
+
+  RoapOffer offer(local_id_, remote_id_, session_token_, ++seq_,
+                  local_tie_breaker_, desc, candidates);
+  waiting_for_answer_ = true;
+  return offer.Serialize();
+}
+
+std::string RoapSession::CreateAnswer(
+    const SessionDescription* desc,
+    const std::vector<Candidate>& candidates) {
+  ASSERT(!remote_id_.empty());
+  if (local_id_.empty()) {
+    local_id_ = CreateLocalId(remote_id_);
+  }
+
+  RoapAnswer answer(remote_id_, local_id_, session_token_, response_token_,
+                    seq_, desc, candidates);
+  response_token_.clear();
+  return answer.Serialize();
+}
+
+std::string RoapSession::CreateOk() {
+  ASSERT(!remote_id_.empty());
+
+  if (local_id_.empty()) {
+    local_id_ = CreateLocalId(remote_id_);
+  }
+  RoapOk ok(remote_id_, local_id_, session_token_, response_token_, seq_);
+  response_token_.clear();
+  return ok.Serialize();
+}
+
+std::string RoapSession::CreateShutDown() {
+  if (local_id_.empty()) {
+    local_id_ = CreateLocalId(remote_id_);
+  }
+  RoapShutdown shutdown(local_id_, remote_id_, session_token_, ++seq_);
+  return shutdown.Serialize();
+}
+
+std::string RoapSession::CreateErrorMessage(RoapErrorCode error) {
+  if (local_id_.empty()) {
+    local_id_ = CreateLocalId(remote_id_);
+  }
+
+  RoapError message(received_offer_id_, local_id_, session_token_,
+                    response_token_, received_seq_, error);
+  response_token_.clear();
+  return message.Serialize();
+}
+
+RoapSession::ParseResult RoapSession::Parse(
+    const std::string& msg) {
+  RoapMessageBase message;
+  if (!message.Parse(msg)) {
+    LOG(LS_ERROR) << "Parse failed. Invalid Roap message?";
+    return kInvalidMessage;
+  }
+
+  received_offer_id_ = message.offer_session_id();
+  received_answer_id_ = message.answer_session_id();
+  received_seq_ = message.seq();
+  session_token_ = message.session_token();
+  response_token_ = message.response_token();
+  ParseResult result = kInvalidMessage;
+
+  switch (message.type()) {
+    case RoapMessageBase::kOffer: {
+      RoapOffer offer(message);
+      if (!offer.Parse()) {
+        LOG(LS_ERROR) << "Parse failed. Invalid Offer message?";
+        return kInvalidMessage;
+      }
+      result = ValidateOffer(&offer);
+      break;
+    }
+    case RoapMessageBase::kAnswer: {
+      RoapAnswer answer(message);
+      if (!answer.Parse()) {
+        LOG(LS_ERROR) << "Parse failed. Invalid Answer message?";
+        result = kInvalidMessage;
+      } else {
+        result = ValidateAnswer(&answer);
+      }
+      break;
+    }
+    case RoapMessageBase::kOk: {
+      result = ValidateOk(message);
+      break;
+    }
+    case RoapMessageBase::kShutdown: {
+      // Always accept shutdown messages.
+      if (remote_id_.empty()) {
+        remote_id_ = message.offer_session_id();
+      }
+      seq_ = message.seq();
+      result = kShutDown;
+      break;
+    }
+    case RoapMessageBase::kError: {
+      RoapError error(message);
+      if (!error.Parse()) {
+        LOG(LS_ERROR) << "Parse failed. Invalid Error message?";
+        result = kInvalidMessage;
+      } else if (ValidateError(error) == kError) {
+        result = kError;
+      }  // else ignore this error message.
+      break;
+    }
+    default: {
+      ASSERT(!"Unknown message type.");
+      LOG(LS_ERROR) << "Received unknown message.";
+      result = kInvalidMessage;
+      break;
+    }
+  }
+  return result;
+}
+
+RoapSession::ParseResult RoapSession::ValidateOffer(
+    RoapOffer* received_offer) {
+  // Check if the incoming OFFER has an answererSessionId; if not, it is an
+  // initial offer. If our outstanding OFFER is also an initial offer, this
+  // is an error.
+  if (received_offer->answer_session_id().empty() &&
+      remote_id_.empty() && waiting_for_answer_) {
+    return kInvalidMessage;
+  }
+
+  if (remote_id_.empty()) {
+    remote_id_ = received_offer->offer_session_id();
+  }
+
+  // Check that the message belongs to this session.
+  bool result =
+      received_offer->offer_session_id() == remote_id_ &&
+      received_offer->answer_session_id() == local_id_;
+
+  if (!result || received_offer->seq() < seq_) {
+    return kInvalidMessage;  // Wrong session or an old seq.
+  }
+
+  if (waiting_for_answer_) {
+    if (received_offer->seq() != seq_) {
+      return kInvalidMessage;
+    }
+    // Glare.
+    if (received_offer->tie_breaker() < local_tie_breaker_) {
+      return kConflict;
+    } else if (received_offer->tie_breaker() == local_tie_breaker_) {
+      return kDoubleConflict;
+    }
+  }
+  // The seq is ok, or the remote offer won the glare resolution.
+  seq_ = received_offer->seq();
+  remote_desc_.reset(received_offer->ReleaseSessionDescription());
+  remote_candidates_ = received_offer->candidates();
+  return kOffer;
+}
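+
+// Worked example of the glare rule above (sketch): if our outstanding offer
+// used tie breaker 5 and the received offer carries tie breaker 7, the
+// received offer wins and kOffer is returned; a received tie breaker of 3
+// yields kConflict, and an equal value yields kDoubleConflict.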
+
+RoapSession::ParseResult RoapSession::ValidateAnswer(
+    RoapAnswer* received_answer) {
+  if (remote_id_.empty()) {
+    remote_id_ = received_answer->answer_session_id();
+  }
+  bool result =
+      received_answer->offer_session_id() == local_id_ &&
+      received_answer->seq() == seq_ &&
+      received_answer->answer_session_id() == remote_id_;
+  if (!result) {
+    return kInvalidMessage;
+  }
+
+  remote_desc_.reset(received_answer->ReleaseSessionDescription());
+  remote_candidates_ = received_answer->candidates();
+  if (received_answer->more_coming()) {
+    return kAnswerMoreComing;
+  }
+  waiting_for_answer_ = false;
+  return kAnswer;
+}
+
+RoapSession::ParseResult RoapSession::ValidateOk(
+    const RoapMessageBase& message) {
+  if (remote_id_.empty()) {
+    remote_id_ = message.answer_session_id();
+  }
+  bool result =
+      message.offer_session_id() == local_id_ &&
+      message.seq() == seq_ &&
+      message.answer_session_id() == remote_id_;
+  if (!result) {
+    return kInvalidMessage;
+  }
+  return kOk;
+}
+
+RoapSession::ParseResult RoapSession::ValidateError(
+    const RoapError& message) {
+  bool result =
+      message.offer_session_id() == local_id_ && message.seq() == seq_;
+
+  if (!result) {
+    return kInvalidMessage;
+  }
+  waiting_for_answer_ = false;
+  remote_error_ = message.error();
+  return kError;
+}
+
+const SessionDescription* RoapSession::ReleaseRemoteDescription() {
+  return remote_desc_.release();
+}
+
+const std::vector<Candidate>& RoapSession::RemoteCandidates() {
+  return remote_candidates_;
+}
+
+RoapErrorCode RoapSession::RemoteError() {
+  return remote_error_;
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/roapsession.h b/talk/app/webrtc/roapsession.h
new file mode 100644
index 0000000..c5712a8
--- /dev/null
+++ b/talk/app/webrtc/roapsession.h
@@ -0,0 +1,121 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains a class used for creating and parsing ROAP messages
+// as defined in http://tools.ietf.org/html/draft-jennings-rtcweb-signaling-01.
+// RoapSession is responsible for keeping track of the ROAP-specific
+// attributes of a single session, such as offererSessionId, but not for the
+// logic that decides when to create a specific message.
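+//
+// Illustrative usage sketch (|local_desc|, |answer_desc| and |candidates| are
+// placeholders owned by the caller, not defined in this file):
+//
+//   webrtc::RoapSession caller;
+//   webrtc::RoapSession callee;
+//   std::string offer = caller.CreateOffer(local_desc, candidates);
+//   if (callee.Parse(offer) == webrtc::RoapSession::kOffer) {
+//     std::string answer = callee.CreateAnswer(answer_desc, candidates);
+//     caller.Parse(answer);  // Expected to return kAnswer.
+//   }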
+
+#ifndef TALK_APP_WEBRTC_ROAPSESSION_H_
+#define TALK_APP_WEBRTC_ROAPSESSION_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/roaperrorcodes.h"
+#include "talk/base/basictypes.h"
+#include "talk/base/scoped_ptr.h"
+#include "talk/p2p/base/candidate.h"
+#include "talk/p2p/base/sessiondescription.h"
+
+namespace webrtc {
+
+using cricket::Candidate;
+using cricket::SessionDescription;
+
+class RoapAnswer;
+class RoapError;
+class RoapMessageBase;
+class RoapOffer;
+
+class RoapSession {
+ public:
+  // ParseResult is the result of parsing a message.
+  // It can be either an identified message type or a detected error.
+  enum ParseResult {
+    kOffer,
+    kAnswerMoreComing,  // More coming flag set. The SDP contains candidates.
+    kAnswer,
+    kOk,
+    kShutDown,
+    kError,
+    // The values below are errors that can occur during parsing.
+    kConflict,  // Conflict detected during parsing of offer.
+    kDoubleConflict,  // Double conflict detected during parsing of offer.
+    kInvalidMessage  // The parsed message is invalid.
+  };
+
+  RoapSession();
+
+  // Creates a ROAP offer message based on the provided session description
+  // and candidates. This updates the session state, such as the sequence
+  // number, and creates a local session id if one does not exist.
+  std::string CreateOffer(const SessionDescription* desc,
+                          const std::vector<Candidate>& candidates);
+
+  // Creates a ROAP answer message based on the provided session description and
+  // candidates. An offer must have been parsed before this function can be
+  // called.
+  std::string CreateAnswer(const SessionDescription* desc,
+                           const std::vector<Candidate>& candidates);
+  std::string CreateOk();
+  std::string CreateShutDown();
+  std::string CreateErrorMessage(RoapErrorCode error);
+  ParseResult Parse(const std::string& msg);
+  RoapErrorCode RemoteError();
+  // Get remote SessionDescription. The ownership is transferred to the caller.
+  const SessionDescription* ReleaseRemoteDescription();
+  const std::vector<Candidate>& RemoteCandidates();
+
+ private:
+  ParseResult ValidateOffer(RoapOffer* received_offer);
+  ParseResult ValidateAnswer(RoapAnswer* received_answer);
+  ParseResult ValidateOk(const RoapMessageBase& message);
+  ParseResult ValidateError(const RoapError& message);
+
+  uint32 seq_;  // Sequence number of current message exchange.
+  // offererSessionId or answererSessionId of the local peer.
+  std::string local_id_;
+  // offererSessionId or answererSessionId of the remote peer.
+  std::string remote_id_;
+  uint32 local_tie_breaker_;  // tieBreaker of last sent offer.
+  bool waiting_for_answer_;
+
+  // offererSessionId of the last received message.
+  std::string received_offer_id_;
+  // answererSessionId of the last received message.
+  std::string received_answer_id_;
+  uint32 received_seq_;  // Sequence number of last received message.
+  std::string session_token_;
+  std::string response_token_;
+
+  talk_base::scoped_ptr<const SessionDescription> remote_desc_;
+  std::vector<Candidate> remote_candidates_;
+
+  RoapErrorCode remote_error_;
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_ROAPSESSION_H_
diff --git a/talk/app/webrtc/roapsession_unittest.cc b/talk/app/webrtc/roapsession_unittest.cc
new file mode 100644
index 0000000..33cb7ab
--- /dev/null
+++ b/talk/app/webrtc/roapsession_unittest.cc
@@ -0,0 +1,350 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/roapmessages.h"
+#include "talk/app/webrtc/roapsession.h"
+#include "talk/base/gunit.h"
+#include "talk/base/logging.h"
+#include "talk/base/scoped_ptr.h"
+#include "talk/p2p/base/transport.h"
+#include "talk/session/phone/mediasession.h"
+
+using cricket::AudioContentDescription;
+using cricket::Candidates;
+using cricket::ContentInfo;
+using cricket::SessionDescription;
+using cricket::VideoContentDescription;
+using webrtc::RoapMessageBase;
+using webrtc::RoapSession;
+using webrtc::RoapOffer;
+
+// MediaStream 1
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kStream1Cname[] = "stream_1_cname";
+static const char kAudioTrackLabel1[] = "local_audio_1";
+static const uint32 kAudioTrack1Ssrc = 1;
+static const char kVideoTrackLabel1[] = "local_video_1";
+static const uint32 kVideoTrack1Ssrc = 2;
+static const char kVideoTrackLabel2[] = "local_video_2";
+static const uint32 kVideoTrack2Ssrc = 3;
+
+// MediaStream 2
+static const char kStreamLabel2[] = "local_stream_2";
+static const char kStream2Cname[] = "stream_2_cname";
+static const char kAudioTrackLabel2[] = "local_audio_2";
+static const uint32 kAudioTrack2Ssrc = 4;
+static const char kVideoTrackLabel3[] = "local_video_3";
+static const uint32 kVideoTrack3Ssrc = 5;
+
+class RoapSessionTest: public testing::Test {
+ public:
+  void SetUp() {
+    talk_base::scoped_ptr<AudioContentDescription> audio(
+        new AudioContentDescription());
+    audio->set_rtcp_mux(true);
+    cricket::StreamParams audio_stream1;
+    audio_stream1.name = kAudioTrackLabel1;
+    audio_stream1.cname = kStream1Cname;
+    audio_stream1.sync_label = kStreamLabel1;
+    audio_stream1.ssrcs.push_back(kAudioTrack1Ssrc);
+    audio->AddStream(audio_stream1);
+    desc1_.AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
+                      audio.release());
+
+    talk_base::scoped_ptr<VideoContentDescription> video(
+        new VideoContentDescription());
+
+    cricket::StreamParams video_stream1;
+    video_stream1.name = kVideoTrackLabel1;
+    video_stream1.cname = kStream1Cname;
+    video_stream1.sync_label = kStreamLabel1;
+    video_stream1.ssrcs.push_back(kVideoTrack1Ssrc);
+    video->AddStream(video_stream1);
+
+    cricket::StreamParams video_stream2;
+    video_stream2.name = kVideoTrackLabel2;
+    video_stream2.cname = kStream1Cname;
+    video_stream2.sync_label = kStreamLabel1;
+    video_stream2.ssrcs.push_back(kVideoTrack2Ssrc);
+    video->AddStream(video_stream2);
+    desc1_.AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP,
+                      video.release());
+
+    audio.reset(new AudioContentDescription());
+    audio->set_rtcp_mux(true);
+    cricket::StreamParams audio_stream2;
+    audio_stream2.name = kAudioTrackLabel2;
+    audio_stream2.cname = kStream2Cname;
+    audio_stream2.sync_label = kStreamLabel2;
+    audio_stream2.ssrcs.push_back(kAudioTrack2Ssrc);
+    audio->AddStream(audio_stream2);
+    desc2_.AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
+                      audio.release());
+
+    video.reset(new VideoContentDescription());
+    cricket::StreamParams video_stream3;
+    video_stream3.name = kVideoTrackLabel3;
+    video_stream3.cname = kStream2Cname;
+    video_stream3.sync_label = kStreamLabel2;
+    video_stream3.ssrcs.push_back(kVideoTrack3Ssrc);
+    video->AddStream(video_stream3);
+    desc2_.AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP,
+                      video.release());
+
+    int port = 1234;
+    talk_base::SocketAddress address("127.0.0.1", port++);
+    cricket::Candidate candidate1("video_rtcp", "udp", address, 1,
+        "user_video_rtcp", "password_video_rtcp", "local", "eth0", 0);
+    address.SetPort(port++);
+    cricket::Candidate candidate2("video_rtp", "udp", address, 1,
+        "user_video_rtp", "password_video_rtp", "local", "eth0", 0);
+    address.SetPort(port++);
+    cricket::Candidate candidate3("rtp", "udp", address, 1,
+        "user_rtp", "password_rtp", "local", "eth0", 0);
+    address.SetPort(port++);
+    cricket::Candidate candidate4("rtcp", "udp", address, 1,
+        "user_rtcp", "password_rtcp", "local", "eth0", 0);
+
+    candidates_.push_back(candidate1);
+    candidates_.push_back(candidate2);
+    candidates_.push_back(candidate3);
+    candidates_.push_back(candidate4);
+  }
+
+  bool CompareSessionDescription(const SessionDescription* desc1,
+                                 const SessionDescription* desc2) {
+    const ContentInfo* audio_1 = desc1->GetContentByName("audio");
+    const AudioContentDescription* audio_desc_1 =
+        static_cast<const AudioContentDescription*>(audio_1->description);
+    const ContentInfo* video_1 = desc1->GetContentByName("video");
+    const VideoContentDescription* video_desc_1 =
+        static_cast<const VideoContentDescription*>(video_1->description);
+
+    const ContentInfo* audio_2 = desc2->GetContentByName("audio");
+    const AudioContentDescription* audio_desc_2 =
+        static_cast<const AudioContentDescription*>(audio_2->description);
+    const ContentInfo* video_2 = desc2->GetContentByName("video");
+    const VideoContentDescription* video_desc_2 =
+        static_cast<const VideoContentDescription*>(video_2->description);
+
+    // Check that all streams are equal. We only check that the number of
+    // codecs is the same and leave it to other unit tests to verify
+    // parsing / serialization of the session description.
+    return audio_desc_1->codecs().size() == audio_desc_2->codecs().size() &&
+        audio_desc_1->streams() == audio_desc_2->streams() &&
+        video_desc_1->codecs().size() == video_desc_2->codecs().size() &&
+        video_desc_1->streams() == video_desc_2->streams();
+  }
+
+  bool CompareCandidates(const Candidates& c1, const Candidates& c2) {
+    if (c1.size() != c2.size())
+      return false;
+
+    Candidates::const_iterator it1 = c1.begin();
+    for (; it1 != c1.end(); ++it1) {
+      // It is ok if the order in the vector has changed.
+      Candidates::const_iterator it2 = c2.begin();
+      for (; it2 != c2.end(); ++it2) {
+        if (it1->IsEquivalent(*it2)) {
+          break;
+        }
+      }
+      if (it2 == c2.end())
+        return false;
+    }
+    return true;
+  }
+
+ protected:
+  cricket::SessionDescription desc1_;
+  cricket::SessionDescription desc2_;
+  cricket::Candidates candidates_;
+};
+
+TEST_F(RoapSessionTest, OfferAnswer) {
+  RoapSession roap_session1;
+  RoapSession roap_session2;
+
+  std::string offer_message = roap_session1.CreateOffer(&desc1_, candidates_);
+
+  // Check that the offer can be parsed by another peer.
+  EXPECT_EQ(RoapSession::kOffer, roap_session2.Parse(offer_message));
+  talk_base::scoped_ptr<const cricket::SessionDescription> received_offer(
+      roap_session2.ReleaseRemoteDescription());
+
+  ASSERT_TRUE(received_offer.get() != NULL);
+  EXPECT_TRUE(CompareSessionDescription(&desc1_, received_offer.get()));
+  EXPECT_TRUE(CompareCandidates(candidates_, roap_session2.RemoteCandidates()));
+
+  std::string answer_message = roap_session2.CreateAnswer(&desc2_, candidates_);
+
+  EXPECT_EQ(RoapSession::kAnswer, roap_session1.Parse(answer_message));
+  talk_base::scoped_ptr<const cricket::SessionDescription> received_answer(
+      roap_session1.ReleaseRemoteDescription());
+
+  EXPECT_TRUE(CompareSessionDescription(&desc2_, received_answer.get()));
+  EXPECT_FALSE(CompareSessionDescription(received_offer.get(),
+                                         received_answer.get()));
+  EXPECT_TRUE(CompareCandidates(candidates_, roap_session1.RemoteCandidates()));
+}
+
+TEST_F(RoapSessionTest, InvalidInitialization) {
+  RoapSession roap_session1;
+  RoapSession roap_session2;
+
+  std::string offer_message1 = roap_session1.CreateOffer(&desc1_, candidates_);
+  std::string offer_message2 = roap_session2.CreateOffer(&desc2_, candidates_);
+
+  // It is an error to receive an initial offer if you have sent an
+  // initial offer.
+  EXPECT_EQ(RoapSession::kInvalidMessage,
+            roap_session1.Parse(offer_message2));
+
+  EXPECT_EQ(RoapSession::kInvalidMessage,
+            roap_session2.Parse(offer_message1));
+}
+
+TEST_F(RoapSessionTest, Glare) {
+  RoapSession roap_session1;
+  RoapSession roap_session2;
+
+  // Setup. Need to exchange an offer and an answer in order to test for glare.
+  std::string offer_message1 = roap_session1.CreateOffer(&desc1_, candidates_);
+
+  roap_session2.Parse(offer_message1);
+  talk_base::scoped_ptr<const SessionDescription> received_offer(
+      roap_session2.ReleaseRemoteDescription());
+  std::string answer_message2 = roap_session2.CreateAnswer(&desc2_,
+                                                           candidates_);
+  roap_session1.Parse(answer_message2);
+
+  // OK, we should now have all we need. Create a glare condition by
+  // updating the offer simultaneously.
+  offer_message1 = roap_session1.CreateOffer(&desc2_, candidates_);
+  std::string offer_message2 = roap_session2.CreateOffer(&desc1_, candidates_);
+
+  EXPECT_TRUE(
+      (RoapSession::kOffer == roap_session1.Parse(offer_message2) &&
+      RoapSession::kConflict == roap_session2.Parse(offer_message1)) ||
+      (RoapSession::kOffer == roap_session2.Parse(offer_message1) &&
+      RoapSession::kConflict == roap_session1.Parse(offer_message2)));
+}
+
+// Test Glare resolution by setting different TieBreakers.
+TEST_F(RoapSessionTest, TieBreaker) {
+  RoapSession roap_session1;
+  RoapSession roap_session2;
+
+  // Offer 1
+  std::string offer_message1 = roap_session1.CreateOffer(&desc1_, candidates_);
+
+  EXPECT_EQ(RoapSession::kOffer, roap_session2.Parse(offer_message1));
+  talk_base::scoped_ptr<const SessionDescription> received_offer(
+      roap_session2.ReleaseRemoteDescription());
+  std::string answer_message2 = roap_session2.CreateAnswer(&desc2_,
+                                                           candidates_);
+
+  EXPECT_EQ(RoapSession::kAnswer, roap_session1.Parse(answer_message2));
+
+  // OK, we should now have all we need. Create a double conflict condition.
+  offer_message1 = roap_session1.CreateOffer(&desc2_, candidates_);
+  RoapMessageBase message_base;
+  EXPECT_TRUE(message_base.Parse(offer_message1));
+  RoapOffer message_offer(message_base);
+  EXPECT_TRUE(message_offer.Parse());
+  RoapOffer double_conflict_offer(message_offer.answer_session_id(),
+                                  message_offer.offer_session_id(),
+                                  "",
+                                  message_offer.seq(),
+                                  message_offer.tie_breaker(),
+                                  &desc1_,
+                                  candidates_);
+  EXPECT_EQ(RoapSession::kDoubleConflict,
+            roap_session1.Parse(double_conflict_offer.Serialize()));
+
+  RoapOffer losing_offer(message_offer.answer_session_id(),
+                         message_offer.offer_session_id(),
+                         "",
+                         message_offer.seq(),
+                         0,
+                         &desc1_,
+                         candidates_);
+  EXPECT_EQ(RoapSession::kConflict,
+            roap_session1.Parse(losing_offer.Serialize()));
+
+  RoapOffer winning_offer(message_offer.answer_session_id(),
+                          message_offer.offer_session_id(),
+                          "",
+                          message_offer.seq(),
+                          0xFFFFFFFF,
+                          &desc1_,
+                          candidates_);
+  EXPECT_EQ(RoapSession::kOffer,
+            roap_session1.Parse(winning_offer.Serialize()));
+}
+
+TEST_F(RoapSessionTest, ShutDownOk) {
+  RoapSession roap_session1;
+  std::string shutdown = roap_session1.CreateShutDown();
+
+  RoapSession roap_session2;
+  EXPECT_EQ(RoapSession::kShutDown, roap_session2.Parse(shutdown));
+
+  std::string ok_message = roap_session2.CreateOk();
+  EXPECT_EQ(RoapSession::kOk, roap_session1.Parse(ok_message));
+}
+
+TEST_F(RoapSessionTest, ErrorMessageCreation) {
+  RoapSession roap_session1;
+  RoapSession roap_session2;
+
+  std::string message = roap_session1.CreateErrorMessage(webrtc::kNoMatch);
+  EXPECT_EQ(RoapSession::kError, roap_session2.Parse(message));
+  EXPECT_EQ(webrtc::kNoMatch, roap_session2.RemoteError());
+
+  message = roap_session1.CreateErrorMessage(webrtc::kTimeout);
+  EXPECT_EQ(RoapSession::kError, roap_session2.Parse(message));
+  EXPECT_EQ(webrtc::kTimeout, roap_session2.RemoteError());
+
+  message = roap_session1.CreateErrorMessage(webrtc::kRefused);
+  EXPECT_EQ(RoapSession::kError, roap_session2.Parse(message));
+  EXPECT_EQ(webrtc::kRefused, roap_session2.RemoteError());
+
+  message = roap_session1.CreateErrorMessage(webrtc::kConflict);
+  EXPECT_EQ(RoapSession::kError, roap_session2.Parse(message));
+  EXPECT_EQ(webrtc::kConflict, roap_session2.RemoteError());
+
+  message = roap_session1.CreateErrorMessage(webrtc::kDoubleConflict);
+  EXPECT_EQ(RoapSession::kError, roap_session2.Parse(message));
+  EXPECT_EQ(webrtc::kDoubleConflict, roap_session2.RemoteError());
+
+  message = roap_session1.CreateErrorMessage(webrtc::kFailed);
+  EXPECT_EQ(RoapSession::kError, roap_session2.Parse(message));
+  EXPECT_EQ(webrtc::kFailed, roap_session2.RemoteError());
+}
diff --git a/talk/app/webrtc/sessiondescriptionprovider.h b/talk/app/webrtc/sessiondescriptionprovider.h
new file mode 100644
index 0000000..f0a7645
--- /dev/null
+++ b/talk/app/webrtc/sessiondescriptionprovider.h
@@ -0,0 +1,58 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_SESSIONDESCRIPTIONPROVIDER_H_
+#define TALK_APP_WEBRTC_SESSIONDESCRIPTIONPROVIDER_H_
+
+#include "talk/session/phone/mediasession.h"
+#include "talk/p2p/base/candidate.h"
+#include "talk/p2p/base/sessiondescription.h"
+
+namespace webrtc {
+
+class SessionDescriptionProvider {
+ public:
+  virtual const cricket::SessionDescription* ProvideOffer(
+      const cricket::MediaSessionOptions& options) = 0;
+
+  // Takes ownership of remote_offer.
+  virtual const cricket::SessionDescription* SetRemoteSessionDescription(
+      const cricket::SessionDescription* remote_offer,
+      const std::vector<cricket::Candidate>& remote_candidates) = 0;
+
+  virtual const cricket::SessionDescription* ProvideAnswer(
+      const cricket::MediaSessionOptions& options) = 0;
+
+  virtual void NegotiationDone() = 0;
+
+ protected:
+  virtual ~SessionDescriptionProvider() {}
+};
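+
+// Illustrative call order for the answering side (a sketch only; |provider|,
+// |options|, |remote_offer| and |remote_candidates| are placeholders):
+//
+//   provider->SetRemoteSessionDescription(remote_offer, remote_candidates);
+//   const cricket::SessionDescription* answer =
+//       provider->ProvideAnswer(options);
+//   provider->NegotiationDone();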
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_SESSIONDESCRIPTIONPROVIDER_H_
diff --git a/talk/app/webrtc/streamcollectionimpl.h b/talk/app/webrtc/streamcollectionimpl.h
new file mode 100644
index 0000000..76df3fd
--- /dev/null
+++ b/talk/app/webrtc/streamcollectionimpl.h
@@ -0,0 +1,103 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_STREAMCOLLECTIONIMPL_H_
+#define TALK_APP_WEBRTC_STREAMCOLLECTIONIMPL_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/app/webrtc/peerconnection.h"
+
+namespace webrtc {
+
+// Implementation of StreamCollectionInterface.
+class StreamCollection : public StreamCollectionInterface {
+ public:
+  static talk_base::scoped_refptr<StreamCollection> Create() {
+    talk_base::RefCountedObject<StreamCollection>* implementation =
+         new talk_base::RefCountedObject<StreamCollection>();
+    return implementation;
+  }
+
+  static talk_base::scoped_refptr<StreamCollection> Create(
+      StreamCollection* streams) {
+    talk_base::RefCountedObject<StreamCollection>* implementation =
+         new talk_base::RefCountedObject<StreamCollection>(streams);
+    return implementation;
+  }
+
+  virtual size_t count() {
+    return media_streams_.size();
+  }
+
+  virtual MediaStreamInterface* at(size_t index) {
+    return media_streams_.at(index);
+  }
+
+  virtual MediaStreamInterface* find(const std::string& label) {
+    for (StreamVector::iterator it = media_streams_.begin();
+         it != media_streams_.end(); ++it) {
+      if ((*it)->label().compare(label) == 0) {
+        return (*it);
+      }
+    }
+    return NULL;
+  }
+
+  void AddStream(MediaStreamInterface* stream) {
+    for (StreamVector::iterator it = media_streams_.begin();
+         it != media_streams_.end(); ++it) {
+      if ((*it)->label().compare(stream->label()) == 0)
+        return;
+    }
+    media_streams_.push_back(stream);
+  }
+
+  void RemoveStream(MediaStreamInterface* remove_stream) {
+    for (StreamVector::iterator it = media_streams_.begin();
+         it != media_streams_.end(); ++it) {
+      if ((*it)->label().compare(remove_stream->label()) == 0) {
+        media_streams_.erase(it);
+        break;
+      }
+    }
+  }
+
+ protected:
+  StreamCollection() {}
+  explicit StreamCollection(StreamCollection* original)
+      : media_streams_(original->media_streams_) {
+  }
+  typedef std::vector<talk_base::scoped_refptr<MediaStreamInterface> >
+      StreamVector;
+  StreamVector media_streams_;
+};
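+
+// Illustrative usage sketch (|stream| is a placeholder MediaStreamInterface*
+// kept alive by its own reference count):
+//
+//   talk_base::scoped_refptr<StreamCollection> streams(
+//       StreamCollection::Create());
+//   streams->AddStream(stream);
+//   MediaStreamInterface* found = streams->find(stream->label());
+//   // AddStream ignores duplicate labels, so |found| == |stream|.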
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_STREAMCOLLECTIONIMPL_H_
diff --git a/talk/app/webrtc/videorendererimpl.cc b/talk/app/webrtc/videorendererimpl.cc
new file mode 100644
index 0000000..6355e4b
--- /dev/null
+++ b/talk/app/webrtc/videorendererimpl.cc
@@ -0,0 +1,58 @@
+/*
+ * libjingle
+ * Copyright 2004--2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/mediastreamimpl.h"
+
+#include "talk/session/phone/videorenderer.h"
+
+namespace webrtc {
+
+// VideoRendererImpl takes ownership of the wrapped cricket::VideoRenderer.
+class VideoRendererImpl : public VideoRendererWrapperInterface {
+ public:
+  explicit VideoRendererImpl(cricket::VideoRenderer* renderer)
+      : renderer_(renderer) {
+  }
+  virtual cricket::VideoRenderer* renderer() {
+    return renderer_;
+  }
+ protected:
+  ~VideoRendererImpl() {
+    delete renderer_;
+  }
+ private:
+  cricket::VideoRenderer* renderer_;
+};
+
+talk_base::scoped_refptr<VideoRendererWrapperInterface> CreateVideoRenderer(
+    cricket::VideoRenderer* renderer) {
+  talk_base::RefCountedObject<VideoRendererImpl>* r =
+      new talk_base::RefCountedObject<VideoRendererImpl>(renderer);
+  return r;
+}
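+
+// Illustrative usage sketch (|MyRenderer| is a placeholder subclass of
+// cricket::VideoRenderer; the wrapper deletes it when the last reference
+// goes away):
+//
+//   talk_base::scoped_refptr<VideoRendererWrapperInterface> wrapper(
+//       CreateVideoRenderer(new MyRenderer()));
+//   cricket::VideoRenderer* raw = wrapper->renderer();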
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/videotrackimpl.cc b/talk/app/webrtc/videotrackimpl.cc
new file mode 100644
index 0000000..87f797b
--- /dev/null
+++ b/talk/app/webrtc/videotrackimpl.cc
@@ -0,0 +1,85 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+#include "talk/app/webrtc/videotrackimpl.h"
+
+#include <string>
+
+#include "talk/session/phone/webrtcvideocapturer.h"
+
+namespace webrtc {
+
+static const char kVideoTrackKind[] = "video";
+
+VideoTrack::VideoTrack(const std::string& label)
+    : MediaStreamTrack<LocalVideoTrackInterface>(label),
+      video_device_(NULL) {
+}
+
+VideoTrack::VideoTrack(const std::string& label,
+                       VideoCaptureModule* video_device)
+    : MediaStreamTrack<LocalVideoTrackInterface>(label),
+      video_device_(NULL) {
+  cricket::WebRtcVideoCapturer* video_device_impl =
+      new cricket::WebRtcVideoCapturer;
+  video_device_impl->Init(video_device);
+  video_device_.reset(video_device_impl);
+}
+
+void VideoTrack::SetRenderer(VideoRendererWrapperInterface* renderer) {
+  video_renderer_ = renderer;
+  Notifier<LocalVideoTrackInterface>::FireOnChanged();
+}
+
+VideoRendererWrapperInterface* VideoTrack::GetRenderer() {
+  return video_renderer_.get();
+}
+
+// Get the VideoCapture device associated with this track.
+cricket::VideoCapturer* VideoTrack::GetVideoCapture() {
+  return video_device_.get();
+}
+
+std::string VideoTrack::kind() const {
+  return kVideoTrackKind;
+}
+
+talk_base::scoped_refptr<VideoTrack> VideoTrack::CreateRemote(
+    const std::string& label) {
+  talk_base::RefCountedObject<VideoTrack>* track =
+      new talk_base::RefCountedObject<VideoTrack>(label);
+  return track;
+}
+
+talk_base::scoped_refptr<VideoTrack> VideoTrack::CreateLocal(
+    const std::string& label,
+    VideoCaptureModule* video_device) {
+  talk_base::RefCountedObject<VideoTrack>* track =
+      new talk_base::RefCountedObject<VideoTrack>(label, video_device);
+  return track;
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/videotrackimpl.h b/talk/app/webrtc/videotrackimpl.h
new file mode 100644
index 0000000..a8de715
--- /dev/null
+++ b/talk/app/webrtc/videotrackimpl.h
@@ -0,0 +1,80 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_VIDEOTRACKIMPL_H_
+#define TALK_APP_WEBRTC_VIDEOTRACKIMPL_H_
+
+#include <string>
+
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/mediatrackimpl.h"
+#include "talk/app/webrtc/notifierimpl.h"
+#include "talk/base/scoped_ptr.h"
+#include "talk/base/scoped_ref_ptr.h"
+
+#ifdef WEBRTC_RELATIVE_PATH
+#include "modules/video_capture/main/interface/video_capture.h"
+#else
+#include "third_party/webrtc/files/include/video_capture.h"
+#endif
+
+namespace cricket {
+
+class VideoCapturer;
+
+}  // namespace cricket
+
+namespace webrtc {
+
+class VideoTrack : public MediaStreamTrack<LocalVideoTrackInterface> {
+ public:
+  // Creates a VideoTrack that represents a remote video track.
+  static talk_base::scoped_refptr<VideoTrack> CreateRemote(
+      const std::string& label);
+  // Creates a VideoTrack that represents a local video track.
+  static talk_base::scoped_refptr<VideoTrack> CreateLocal(
+      const std::string& label,
+      VideoCaptureModule* video_device);
+
+  virtual cricket::VideoCapturer* GetVideoCapture();
+  virtual void SetRenderer(VideoRendererWrapperInterface* renderer);
+  VideoRendererWrapperInterface* GetRenderer();
+
+  virtual std::string kind() const;
+
+ protected:
+  explicit VideoTrack(const std::string& label);
+  VideoTrack(const std::string& label, VideoCaptureModule* video_device);
+
+ private:
+  talk_base::scoped_ptr<cricket::VideoCapturer> video_device_;
+  talk_base::scoped_refptr<VideoRendererWrapperInterface> video_renderer_;
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_VIDEOTRACKIMPL_H_
diff --git a/talk/app/webrtc/webrtc.scons b/talk/app/webrtc/webrtc.scons
index 271d413..2ba1b1f 100644
--- a/talk/app/webrtc/webrtc.scons
+++ b/talk/app/webrtc/webrtc.scons
@@ -6,22 +6,44 @@
 # local sources
 talk.Library(
   env,
-  name = 'webrtc',
+  name = 'peerconnection',
   srcs = [
+    'audiotrackimpl.cc',
+    'mediastreamhandler.cc',
+    'mediastreamimpl.cc',
+    'mediastreamproxy.cc',
+    'mediastreamtrackproxy.cc',
+    'peerconnectionfactoryimpl.cc',
     'peerconnectionimpl.cc',
-    'peerconnectionproxy.cc',
-    'peerconnectionfactory.cc',
+    'peerconnectionsignaling.cc',
+    'portallocatorfactory.cc',
+    'roapmessages.cc',
+    'roapsession.cc',
+    'videorendererimpl.cc',
+    'videotrackimpl.cc',
     'webrtcjson.cc',
+    'webrtcsdp.cc',
     'webrtcsession.cc',
   ],
 )
 
 talk.Unittest(
   env,
-  name = 'webrtc',
+  name = 'peerconnection',
   srcs = [
+    'test/fakevideocapturemodule.cc',
+    'test/fileframesource.cc',
+    'test/i420framesource.cc',
+    'test/staticframesource.cc',
+    'mediastream_unittest.cc',
+    'mediastreamhandler_unittest.cc',
+    'peerconnectionimpl_unittest.cc',
     'peerconnection_unittest.cc',
-    'unittest_utilities.cc',
+    'peerconnectionfactory_unittest.cc',
+    'peerconnectionsignaling_unittest.cc',
+    'roapmessages_unittest.cc',
+    'roapsession_unittest.cc',
+    'webrtcsdp_unittest.cc',
     'webrtcsession_unittest.cc',
   ],
   libs = [
@@ -29,32 +51,20 @@
     'expat',
     'jpeg',
     'json',
-    'webrtc',
     'p2p',
     'phone',
     'srtp',
-    'xmpp',
     'xmllite',
-    'yuvscaler'
+    'xmpp',
+    'yuvscaler',
+    'peerconnection',
   ],
-  include_talk_media_libs = True,
+  win_link_flags = [('', '/nodefaultlib:libcmt')[env.Bit('debug')]],
+  lin_libs = [
+    'sound',
+  ],
   mac_libs = [
     'crypto',
     'ssl',
   ],
-  mac_FRAMEWORKS = [
-    'Foundation',
-    'IOKit',
-    'QTKit',
-  ],
-  win_link_flags = [('', '/nodefaultlib:libcmt')[env.Bit('debug')]],
-  lin_libs = [
-    'rt',
-    'dl',
-    'sound',
-    'X11',
-    'Xext',
-    'Xfixes',
-    'Xrandr'
-  ],
-)
+ )
diff --git a/talk/app/webrtc/webrtcjson.cc b/talk/app/webrtc/webrtcjson.cc
index b071746..2aaac97 100644
--- a/talk/app/webrtc/webrtcjson.cc
+++ b/talk/app/webrtc/webrtcjson.cc
@@ -1,6 +1,6 @@
 /*
  * libjingle
- * Copyright 2004--2011, Google Inc.
+ * Copyright 2011, Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
@@ -27,19 +27,16 @@
 
 #include "talk/app/webrtc/webrtcjson.h"
 
-#ifdef WEBRTC_RELATIVE_PATH
-#include "json/json.h"
-#else
-#include "third_party/jsoncpp/json.h"
-#endif
+#include <stdio.h>
+#include <string>
+#include <vector>
 
-// TODO: Remove webrtcsession.h once we can get size from signaling.
-// webrtcsession.h is for kDefaultVideoCodecWidth and kDefaultVideoCodecHeight.
-#include "talk/app/webrtc/webrtcsession.h"
 #include "talk/base/json.h"
 #include "talk/base/logging.h"
 #include "talk/base/stringutils.h"
 #include "talk/session/phone/codec.h"
+#include "talk/session/phone/cryptoparams.h"
+#include "talk/session/phone/mediasession.h"
 #include "talk/session/phone/mediasessionclient.h"
 
 namespace webrtc {
@@ -49,37 +46,49 @@
 static std::vector<Json::Value> ReadValues(const Json::Value& value,
                                            const std::string& key);
 
-static bool BuildMediaMessage(
+static void BuildContent(
+    const cricket::SessionDescription* sdp,
     const cricket::ContentInfo& content_info,
     const std::vector<cricket::Candidate>& candidates,
     bool video,
-    Json::Value* value);
+    Json::Value* content);
 
-static bool BuildRtpMapParams(
-    const cricket::ContentInfo& audio_offer,
-    bool video,
-    std::vector<Json::Value>* rtpmap);
+static void BuildCandidate(const std::vector<cricket::Candidate>& candidates,
+                           bool video,
+                           std::vector<Json::Value>* jcandidates);
 
-static bool BuildAttributes(const std::vector<cricket::Candidate>& candidates,
-                            bool video,
-                            std::vector<Json::Value>* jcandidates);
+static void BuildRtpMapParams(const cricket::ContentInfo& content_info,
+                              bool video,
+                              std::vector<Json::Value>* rtpmap);
 
-static std::string Serialize(const Json::Value& value);
-static bool Deserialize(const std::string& message, Json::Value* value);
+static void BuildCrypto(const cricket::ContentInfo& content_info,
+                        bool video,
+                        std::vector<Json::Value>* cryptos);
 
-static bool ParseRtcpMux(const Json::Value& value);
+static void BuildTrack(const cricket::SessionDescription* sdp,
+                       bool video,
+                       std::vector<Json::Value>* track);
+
+bool ParseContent(const Json::Value& jmessage,
+    cricket::SessionDescription* sdp,
+    std::vector<cricket::Candidate>* candidates);
+
 static bool ParseAudioCodec(const Json::Value& value,
                             cricket::AudioContentDescription* content);
 static bool ParseVideoCodec(const Json::Value& value,
                             cricket::VideoContentDescription* content);
-static bool ParseIceCandidates(const Json::Value& value,
-                               std::vector<cricket::Candidate>* candidates);
+static bool ParseCrypto(const Json::Value& content,
+                        cricket::MediaContentDescription* desc);
 
-static Json::Value ReadValue(const Json::Value& value, const std::string& key);
-static std::string ReadString(const Json::Value& value, const std::string& key);
-static uint32 ReadUInt(const Json::Value& value, const std::string& key);
+static bool ParseCandidates(const Json::Value& content,
+                            std::vector<cricket::Candidate>* candidates);
+
+static bool ParseTrack(const Json::Value& content,
+    cricket::MediaContentDescription* content_desc);
 
 static void Append(Json::Value* object, const std::string& key, bool value);
+static void Append(Json::Value* object, const std::string& key,
+                   const char* value);
 static void Append(Json::Value* object, const std::string& key, int value);
 static void Append(Json::Value* object, const std::string& key,
                    const std::string& value);
@@ -90,83 +99,91 @@
                    const std::string& key,
                    const std::vector<Json::Value>& values);
 
-bool GetJsonSignalingMessage(
+void JsonSerializeSessionDescription(
     const cricket::SessionDescription* sdp,
     const std::vector<cricket::Candidate>& candidates,
-    std::string* signaling_message) {
+    Json::Value* message) {
+
   const cricket::ContentInfo* audio_content = GetFirstAudioContent(sdp);
   const cricket::ContentInfo* video_content = GetFirstVideoContent(sdp);
 
-  std::vector<Json::Value> media;
+  std::vector<Json::Value> together;
+  together.push_back("audio");
+  together.push_back("video");
+
+  std::vector<Json::Value> contents;
+
   if (audio_content) {
-    Json::Value value;
-    BuildMediaMessage(*audio_content, candidates, false, &value);
-    media.push_back(value);
+    Json::Value content;
+    BuildContent(sdp, *audio_content, candidates, false, &content);
+    contents.push_back(content);
   }
 
   if (video_content) {
-    Json::Value value;
-    BuildMediaMessage(*video_content, candidates, true, &value);
-    media.push_back(value);
+    Json::Value content;
+    BuildContent(sdp, *video_content, candidates, true, &content);
+    contents.push_back(content);
   }
 
-  Json::Value signal;
-  Append(&signal, "media", media);
-
-  // Now serialize.
-  *signaling_message = Serialize(signal);
-
-  return true;
+  Append(message, "content", contents);
+  Append(message, "TOGETHER", together);
 }
 
-bool BuildMediaMessage(
+void BuildContent(
+    const cricket::SessionDescription* sdp,
     const cricket::ContentInfo& content_info,
     const std::vector<cricket::Candidate>& candidates,
     bool video,
-    Json::Value* params) {
+    Json::Value* content) {
+  std::string label("media");
+  // TODO: Use enum instead of bool video to prepare for other
+  // media types such as the data media stream.
   if (video) {
-    Append(params, "label", 2);  // always video 2
+    Append(content, label, "video");
   } else {
-    Append(params, "label", 1);  // always audio 1
+    Append(content, label, "audio");
   }
 
   const cricket::MediaContentDescription* media_info =
-  static_cast<const cricket::MediaContentDescription*> (
-      content_info.description);
+      static_cast<const cricket::MediaContentDescription*> (
+          content_info.description);
   if (media_info->rtcp_mux()) {
-    Append(params, "rtcp_mux", true);
+    Append(content, "rtcp_mux", true);
   }
 
+  // rtpmap
   std::vector<Json::Value> rtpmap;
-  if (!BuildRtpMapParams(content_info, video, &rtpmap)) {
-    return false;
-  }
+  BuildRtpMapParams(content_info, video, &rtpmap);
+  Append(content, "rtpmap", rtpmap);
 
-  Append(params, "rtpmap", rtpmap);
+  // crypto
+  std::vector<Json::Value> crypto;
+  BuildCrypto(content_info, video, &crypto);
+  Append(content, "crypto", crypto);
 
-  Json::Value attributes;
+  // candidate
   std::vector<Json::Value> jcandidates;
+  BuildCandidate(candidates, video, &jcandidates);
+  Append(content, "candidate", jcandidates);
 
-  if (!BuildAttributes(candidates, video, &jcandidates)) {
-    return false;
-  }
-  Append(&attributes, "candidate", jcandidates);
-  Append(params, "attributes", attributes);
-  return true;
+  // track
+  std::vector<Json::Value> track;
+  BuildTrack(sdp, video, &track);
+  Append(content, "track", track);
 }
 
-bool BuildRtpMapParams(const cricket::ContentInfo& content_info,
+void BuildRtpMapParams(const cricket::ContentInfo& content_info,
                        bool video,
                        std::vector<Json::Value>* rtpmap) {
   if (!video) {
-    const cricket::AudioContentDescription* audio_offer =
+    const cricket::AudioContentDescription* audio =
         static_cast<const cricket::AudioContentDescription*>(
             content_info.description);
 
     std::vector<cricket::AudioCodec>::const_iterator iter =
-        audio_offer->codecs().begin();
+        audio->codecs().begin();
     std::vector<cricket::AudioCodec>::const_iterator iter_end =
-        audio_offer->codecs().end();
+        audio->codecs().end();
     for (; iter != iter_end; ++iter) {
       Json::Value codec;
       std::string codec_str(std::string("audio/").append(iter->name));
@@ -178,14 +195,14 @@
       rtpmap->push_back(codec_id);
     }
   } else {
-    const cricket::VideoContentDescription* video_offer =
+    const cricket::VideoContentDescription* video =
         static_cast<const cricket::VideoContentDescription*>(
             content_info.description);
 
     std::vector<cricket::VideoCodec>::const_iterator iter =
-        video_offer->codecs().begin();
+        video->codecs().begin();
     std::vector<cricket::VideoCodec>::const_iterator iter_end =
-        video_offer->codecs().end();
+        video->codecs().end();
     for (; iter != iter_end; ++iter) {
       Json::Value codec;
       std::string codec_str(std::string("video/").append(iter->name));
@@ -195,12 +212,29 @@
       rtpmap->push_back(codec_id);
     }
   }
-  return true;
 }
 
-bool BuildAttributes(const std::vector<cricket::Candidate>& candidates,
-                     bool video,
-                     std::vector<Json::Value>* jcandidates) {
+void BuildCrypto(const cricket::ContentInfo& content_info,
+                 bool video,
+                 std::vector<Json::Value>* cryptos) {
+  const cricket::MediaContentDescription* content_desc =
+      static_cast<const cricket::MediaContentDescription*>(
+          content_info.description);
+  std::vector<cricket::CryptoParams>::const_iterator iter =
+      content_desc->cryptos().begin();
+  std::vector<cricket::CryptoParams>::const_iterator iter_end =
+      content_desc->cryptos().end();
+  for (; iter != iter_end; ++iter) {
+    Json::Value crypto;
+    Append(&crypto, "cipher_suite", iter->cipher_suite);
+    Append(&crypto, "key_params", iter->key_params);
+    cryptos->push_back(crypto);
+  }
+}
+
+void BuildCandidate(const std::vector<cricket::Candidate>& candidates,
+                    bool video,
+                    std::vector<Json::Value>* jcandidates) {
   std::vector<cricket::Candidate>::const_iterator iter =
       candidates.begin();
   std::vector<cricket::Candidate>::const_iterator iter_end =
@@ -210,95 +244,117 @@
                   (!iter->name().compare("video_rtp")))) ||
         (!video && (!iter->name().compare("rtp") ||
                    (!iter->name().compare("rtcp"))))) {
-      Json::Value candidate;
-      Append(&candidate, "component", kIceComponent);
-      Append(&candidate, "foundation", kIceFoundation);
-      Append(&candidate, "generation", iter->generation());
-      Append(&candidate, "proto", iter->protocol());
-      Append(&candidate, "priority", iter->preference_str());
-      Append(&candidate, "ip", iter->address().IPAsString());
-      Append(&candidate, "port", iter->address().PortAsString());
-      Append(&candidate, "type", iter->type());
-      Append(&candidate, "name", iter->name());
-      Append(&candidate, "network_name", iter->network_name());
-      Append(&candidate, "username", iter->username());
-      Append(&candidate, "password", iter->password());
-      jcandidates->push_back(candidate);
+      Json::Value jcandidate;
+      Append(&jcandidate, "component", kIceComponent);
+      Append(&jcandidate, "foundation", kIceFoundation);
+      Append(&jcandidate, "generation", iter->generation());
+      Append(&jcandidate, "proto", iter->protocol());
+      Append(&jcandidate, "priority", iter->preference_str());
+      Append(&jcandidate, "ip", iter->address().IPAsString());
+      Append(&jcandidate, "port", iter->address().PortAsString());
+      Append(&jcandidate, "type", iter->type());
+      Append(&jcandidate, "name", iter->name());
+      Append(&jcandidate, "network_name", iter->network_name());
+      Append(&jcandidate, "username", iter->username());
+      Append(&jcandidate, "password", iter->password());
+      jcandidates->push_back(jcandidate);
     }
   }
-  return true;
 }
 
-std::string Serialize(const Json::Value& value) {
-  Json::StyledWriter writer;
-  return writer.write(value);
-}
+void BuildTrack(const cricket::SessionDescription* sdp,
+                bool video,
+                std::vector<Json::Value>* tracks) {
+  const cricket::ContentInfo* content;
+  if (video)
+    content = GetFirstVideoContent(sdp);
+  else
+    content = GetFirstAudioContent(sdp);
 
-bool Deserialize(const std::string& message, Json::Value* value) {
-  Json::Reader reader;
-  return reader.parse(message, *value);
-}
+  if (!content)
+    return;
 
-bool ParseJsonSignalingMessage(const std::string& signaling_message,
-                               cricket::SessionDescription** sdp,
-                               std::vector<cricket::Candidate>* candidates) {
-  ASSERT(!(*sdp));  // expect this to be NULL
-  // first deserialize message
-  Json::Value value;
-  if (!Deserialize(signaling_message, &value)) {
-    return false;
+  const cricket::MediaContentDescription* desc =
+      static_cast<const cricket::MediaContentDescription*>(
+          content->description);
+  for (cricket::StreamParamsVec::const_iterator it = desc->streams().begin();
+       it != desc->streams().end();
+       ++it) {
+    // TODO: Support ssrc groups.
+    Json::Value track;
+    Append(&track, "ssrc", it->ssrcs[0]);
+    Append(&track, "cname", it->cname);
+    Append(&track, "stream_label", it->sync_label);
+    Append(&track, "label", it->name);
+    tracks->push_back(track);
   }
+}
 
-  // get media objects
-  std::vector<Json::Value> mlines = ReadValues(value, "media");
-  if (mlines.empty()) {
-    // no m-lines found
+bool JsonDeserializeSessionDescription(
+    const Json::Value& message,
+    cricket::SessionDescription* sdp,
+    std::vector<cricket::Candidate>* candidates) {
+
+  ASSERT(sdp != NULL);
+  ASSERT(candidates != NULL);
+
+  if (sdp == NULL || candidates == NULL)
     return false;
-  }
 
-  *sdp = new cricket::SessionDescription();
-
-  // get codec information
-  for (size_t i = 0; i < mlines.size(); ++i) {
-    if (mlines[i]["label"].asInt() == 1) {
+  // Get content
+  std::vector<Json::Value> contents = ReadValues(message, "content");
+  if (contents.size() == 0)
+    return false;
+  for (size_t i = 0; i < contents.size(); ++i) {
+    Json::Value content = contents[i];
+    // candidates
+    if (!ParseCandidates(content, candidates))
+      return false;
+    // rtcp_mux
+    bool rtcp_mux;
+    if (!GetBoolFromJsonObject(content, "rtcp_mux", &rtcp_mux))
+      rtcp_mux = false;
+    // rtpmap
+    if (content["media"].asString().compare("audio") == 0) {
       cricket::AudioContentDescription* audio_content =
           new cricket::AudioContentDescription();
-      ParseAudioCodec(mlines[i], audio_content);
-      audio_content->set_rtcp_mux(ParseRtcpMux(mlines[i]));
+      if (!ParseAudioCodec(content, audio_content))
+        return false;
+      audio_content->set_rtcp_mux(rtcp_mux);
       audio_content->SortCodecs();
-      (*sdp)->AddContent(cricket::CN_AUDIO,
-                         cricket::NS_JINGLE_RTP, audio_content);
-      ParseIceCandidates(mlines[i], candidates);
-    } else {
+      // crypto
+      if (!ParseCrypto(content, audio_content))
+        return false;
+      // tracks
+      if (!ParseTrack(content, audio_content))
+        return false;
+      (sdp)->AddContent(cricket::CN_AUDIO,
+                        cricket::NS_JINGLE_RTP, audio_content);
+    } else if (content["media"].asString().compare("video") == 0) {
       cricket::VideoContentDescription* video_content =
           new cricket::VideoContentDescription();
-      ParseVideoCodec(mlines[i], video_content);
-
-      video_content->set_rtcp_mux(ParseRtcpMux(mlines[i]));
+      if (!ParseVideoCodec(content, video_content))
+        return false;
+      video_content->set_rtcp_mux(rtcp_mux);
       video_content->SortCodecs();
-      (*sdp)->AddContent(cricket::CN_VIDEO,
-                         cricket::NS_JINGLE_RTP, video_content);
-      ParseIceCandidates(mlines[i], candidates);
+      // crypto
+      if (!ParseCrypto(content, video_content))
+        return false;
+      if (!ParseTrack(content, video_content))
+        return false;
+      (sdp)->AddContent(cricket::CN_VIDEO,
+                        cricket::NS_JINGLE_RTP, video_content);
     }
   }
   return true;
 }
 
-bool ParseRtcpMux(const Json::Value& value) {
-  Json::Value rtcp_mux(ReadValue(value, "rtcp_mux"));
-  if (!rtcp_mux.empty()) {
-    if (rtcp_mux.asBool()) {
-      return true;
-    }
-  }
-  return false;
-}
-
 bool ParseAudioCodec(const Json::Value& value,
                      cricket::AudioContentDescription* content) {
   std::vector<Json::Value> rtpmap(ReadValues(value, "rtpmap"));
+  // When there are no codecs in common, rtpmap can be empty.
   if (rtpmap.empty())
-    return false;
+    return true;
 
   std::vector<Json::Value>::const_iterator iter =
       rtpmap.begin();
@@ -309,11 +365,13 @@
     std::string pltype(iter->begin().memberName());
     talk_base::FromString(pltype, &codec.id);
     Json::Value codec_info((*iter)[pltype]);
-    std::string codec_name(ReadString(codec_info, "codec"));
+    std::string codec_name;
+    if (!GetStringFromJsonObject(codec_info, "codec", &codec_name))
+      continue;
     std::vector<std::string> tokens;
     talk_base::split(codec_name, '/', &tokens);
     codec.name = tokens[1];
-    codec.clockrate = ReadUInt(codec_info, "clockrate");
+    GetIntFromJsonObject(codec_info, "clockrate", &codec.clockrate);
     content->AddCodec(codec);
   }
 
@@ -323,8 +381,9 @@
 bool ParseVideoCodec(const Json::Value& value,
                      cricket::VideoContentDescription* content) {
   std::vector<Json::Value> rtpmap(ReadValues(value, "rtpmap"));
+  // When there are no codecs in common, rtpmap can be empty.
   if (rtpmap.empty())
-    return false;
+    return true;
 
   std::vector<Json::Value>::const_iterator iter =
       rtpmap.begin();
@@ -338,22 +397,14 @@
     std::vector<std::string> tokens;
     talk_base::split(codec_info["codec"].asString(), '/', &tokens);
     codec.name = tokens[1];
-    // TODO: Remove once we can get size from signaling message.
-    codec.width = WebRtcSession::kDefaultVideoCodecWidth;
-    codec.height = WebRtcSession::kDefaultVideoCodecHeight;
     content->AddCodec(codec);
   }
   return true;
 }
 
-bool ParseIceCandidates(const Json::Value& value,
-                        std::vector<cricket::Candidate>* candidates) {
-  Json::Value attributes(ReadValue(value, "attributes"));
-  std::string ice_pwd(ReadString(attributes, "ice-pwd"));
-  std::string ice_ufrag(ReadString(attributes, "ice-ufrag"));
-
-  std::vector<Json::Value> jcandidates(ReadValues(attributes, "candidate"));
-
+bool ParseCandidates(const Json::Value& content,
+                     std::vector<cricket::Candidate>* candidates) {
+  std::vector<Json::Value> jcandidates(ReadValues(content, "candidate"));
   std::vector<Json::Value>::const_iterator iter =
       jcandidates.begin();
   std::vector<Json::Value>::const_iterator iter_end =
@@ -414,6 +465,68 @@
   return true;
 }
 
+bool ParseCrypto(const Json::Value& content,
+                 cricket::MediaContentDescription* desc) {
+  std::vector<Json::Value> jcryptos(ReadValues(content, "crypto"));
+  std::vector<Json::Value>::const_iterator iter =
+      jcryptos.begin();
+  std::vector<Json::Value>::const_iterator iter_end =
+      jcryptos.end();
+  for (; iter != iter_end; ++iter) {
+    cricket::CryptoParams crypto;
+
+    std::string cipher_suite;
+    if (!GetStringFromJsonObject(*iter, "cipher_suite", &cipher_suite))
+      return false;
+    crypto.cipher_suite = cipher_suite;
+
+    std::string key_params;
+    if (!GetStringFromJsonObject(*iter, "key_params", &key_params))
+      return false;
+    crypto.key_params = key_params;
+
+    desc->AddCrypto(crypto);
+  }
+  return true;
+}
+
+bool ParseTrack(const Json::Value& content,
+    cricket::MediaContentDescription* content_desc) {
+  ASSERT(content_desc != NULL);
+  if (!content_desc)
+    return false;
+
+  std::vector<Json::Value> tracks(ReadValues(content, "track"));
+  std::vector<Json::Value>::const_iterator iter =
+      tracks.begin();
+  std::vector<Json::Value>::const_iterator iter_end =
+      tracks.end();
+  for (; iter != iter_end; ++iter) {
+    uint32 ssrc;
+    std::string label;
+    std::string cname;
+    std::string stream_label;
+    if (!GetUIntFromJsonObject(*iter, "ssrc", &ssrc))
+      return false;
+    // label is optional; it will be an empty string if it doesn't exist.
+    GetStringFromJsonObject(*iter, "label", &label);
+    if (!GetStringFromJsonObject(*iter, "cname", &cname))
+      return false;
+    // stream_label is optional; it defaults to cname if it doesn't exist.
+    GetStringFromJsonObject(*iter, "stream_label", &stream_label);
+    if (stream_label.empty())
+      stream_label = cname;
+    cricket::StreamParams stream;
+    stream.name = label;
+    stream.cname = cname;
+    stream.sync_label = stream_label;
+    stream.ssrcs.push_back(ssrc);
+    content_desc->AddStream(stream);
+  }
+  return true;
+}
+
 std::vector<Json::Value> ReadValues(
     const Json::Value& value, const std::string& key) {
   std::vector<Json::Value> objects;
@@ -423,22 +536,14 @@
   return objects;
 }
 
-Json::Value ReadValue(const Json::Value& value, const std::string& key) {
-  return value[key];
-}
-
-std::string ReadString(const Json::Value& value, const std::string& key) {
-  return value[key].asString();
-}
-
-uint32 ReadUInt(const Json::Value& value, const std::string& key) {
-  return value[key].asUInt();
-}
-
 void Append(Json::Value* object, const std::string& key, bool value) {
   (*object)[key] = Json::Value(value);
 }
 
+void Append(Json::Value* object, const std::string& key, const char* value) {
+  (*object)[key] = Json::Value(value);
+}
+
 void Append(Json::Value* object, const std::string& key, int value) {
   (*object)[key] = Json::Value(value);
 }
diff --git a/talk/app/webrtc/webrtcjson.h b/talk/app/webrtc/webrtcjson.h
index d923f83..069f4b6 100644
--- a/talk/app/webrtc/webrtcjson.h
+++ b/talk/app/webrtc/webrtcjson.h
@@ -1,6 +1,6 @@
 /*
  * libjingle
- * Copyright 2004--2011, Google Inc.
+ * Copyright 2011, Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
@@ -29,31 +29,33 @@
 #define TALK_APP_WEBRTC_WEBRTCJSON_H_
 
 #include <string>
+#include <vector>
+
+#ifdef WEBRTC_RELATIVE_PATH
+#include "json/json.h"
+#else
+#include "third_party/jsoncpp/json.h"
+#endif
 
 #include "talk/p2p/base/candidate.h"
-#include "talk/session/phone/codec.h"
-
-namespace Json {
-class Value;
-}
+#include "talk/p2p/base/sessiondescription.h"
 
 namespace cricket {
-class AudioContentDescription;
-class VideoContentDescription;
-struct ContentInfo;
 class SessionDescription;
 }
 
 namespace webrtc {
 
-bool GetJsonSignalingMessage(
+void JsonSerializeSessionDescription(
     const cricket::SessionDescription* sdp,
     const std::vector<cricket::Candidate>& candidates,
-    std::string* signaling_message);
+    Json::Value* message);
 
-bool ParseJsonSignalingMessage(const std::string& signaling_message,
-                               cricket::SessionDescription** sdp,
-                               std::vector<cricket::Candidate>* candidates);
-}
+bool JsonDeserializeSessionDescription(
+    const Json::Value& message,
+    cricket::SessionDescription* sdp,
+    std::vector<cricket::Candidate>* candidates);
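+
+// Example usage (an illustrative sketch only; the variable names below are
+// hypothetical and not part of this API):
+//
+//   cricket::SessionDescription desc;
+//   std::vector<cricket::Candidate> candidates;
+//   // ... populate desc and candidates ...
+//   Json::Value message;
+//   JsonSerializeSessionDescription(&desc, candidates, &message);
+//
+//   cricket::SessionDescription parsed_desc;
+//   std::vector<cricket::Candidate> parsed_candidates;
+//   if (!JsonDeserializeSessionDescription(message, &parsed_desc,
+//                                          &parsed_candidates)) {
+//     // Handle the malformed message.
+//   }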
+
+}  // namespace webrtc
 
 #endif  // TALK_APP_WEBRTC_WEBRTCJSON_H_
diff --git a/talk/app/webrtc/webrtcsdp.cc b/talk/app/webrtc/webrtcsdp.cc
new file mode 100644
index 0000000..2fcb959
--- /dev/null
+++ b/talk/app/webrtc/webrtcsdp.cc
@@ -0,0 +1,707 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "talk/app/webrtc/webrtcsdp.h"
+
+#include <stdio.h>
+#include <string>
+#include <vector>
+
+#include "talk/base/logging.h"
+#include "talk/base/stringutils.h"
+#include "talk/p2p/base/relayport.h"
+#include "talk/p2p/base/stunport.h"
+#include "talk/p2p/base/udpport.h"
+#include "talk/session/phone/codec.h"
+#include "talk/session/phone/cryptoparams.h"
+#include "talk/session/phone/mediasession.h"
+#include "talk/session/phone/mediasessionclient.h"
+
+using cricket::AudioContentDescription;
+using cricket::Candidate;
+using cricket::ContentDescription;
+using cricket::CryptoParams;
+using cricket::MediaContentDescription;
+using cricket::MediaType;
+using cricket::StreamParams;
+using cricket::VideoContentDescription;
+using talk_base::SocketAddress;
+
+namespace webrtc {
+
+// Line prefix
+static const int kLinePrefixLength = 2;
+static const char kLinePrefixVersion[] = "v=";
+static const char kLinePrefixOrigin[] = "o=";
+static const char kLinePrefixSessionName[] = "s=";
+static const char kLinePrefixSessionInfo[] = "i=";
+static const char kLinePrefixSessionUri[] = "u=";
+static const char kLinePrefixSessionEmail[] = "e=";
+static const char kLinePrefixSessionPhone[] = "p=";
+static const char kLinePrefixSessionConnection[] = "c=";
+static const char kLinePrefixSessionBandwidth[] = "b=";
+static const char kLinePrefixTiming[] = "t=";
+static const char kLinePrefixRepeatTimes[] = "r=";
+static const char kLinePrefixTimeZone[] = "z=";
+static const char kLinePrefixEncryptionKey[] = "k=";
+static const char kLinePrefixMedia[] = "m=";
+static const char kLinePrefixAttributes[] = "a=";
+
+// Attributes
+static const char kAttributeMid[] = "mid:";
+static const char kAttributeRtcpMux[] = "rtcp-mux";
+static const char kAttributeSsrc[] = "ssrc:";
+static const char kAttributeCname[] = "cname:";
+static const char kAttributeMslabel[] = "mslabel:";
+static const char kAttributeLabel[] = "label:";
+static const char kAttributeCrypto[] = "crypto:";
+static const char kAttributeCandidate[] = "candidate:";
+static const char kAttributeCandidateTyp[] = "typ";
+static const char kAttributeCandidateName[] = "name";
+static const char kAttributeCandidateNetworkName[] = "network_name";
+static const char kAttributeCandidateUsername[] = "username";
+static const char kAttributeCandidatePassword[] = "password";
+static const char kAttributeCandidateGeneration[] = "generation";
+static const char kAttributeRtpmap[] = "rtpmap:";
+
+// Candidate
+static const char kCandidateHost[] = "host";
+static const char kCandidateSrflx[] = "srflx";
+// TODO: Figure out how to map prflx to a cricket candidate type.
+// static const char kCandidatePrflx[] = "prflx";
+static const char kCandidateRelay[] = "relay";
+
+static const char kSdpDelimiter = ' ';
+static const char kLineBreak[] = "\r\n";
+
+// TODO: Generate the Session and Time description
+// instead of hardcoding.
+static const char kSessionVersion[] = "v=0";
+static const char kSessionOrigin[] = "o=- 0 0 IN IP4 127.0.0.1";
+static const char kSessionName[] = "s=";
+static const char kTimeDescription[] = "t=0 0";
+static const char kAttrGroup[] = "a=group:BUNDLE audio video";
+static const int kIceComponent = 1;
+static const int kIceFoundation = 1;
+static const char kMediaTypeVideo[] = "video";
+static const char kMediaTypeAudio[] = "audio";
+
+static void BuildMediaDescription(const cricket::ContentInfo& content_info,
+                                  const std::vector<Candidate>& candidates,
+                                  const MediaType media_type,
+                                  std::string* message);
+static void BuildRtpMap(const MediaContentDescription* media_desc,
+                        const MediaType media_type,
+                        std::string* message);
+static void BuildCandidate(const std::vector<Candidate>& candidates,
+                           const MediaType media_type,
+                           std::string* message);
+
+static bool ParseSessionDescription(const std::string& message, size_t* pos);
+static bool ParseTimeDescription(const std::string& message, size_t* pos);
+static bool ParseMediaDescription(const std::string& message, size_t* pos,
+                                  cricket::SessionDescription* desc,
+                                  std::vector<Candidate>* candidates);
+static bool ParseContent(const std::string& message,
+                         const MediaType media_type,
+                         size_t* pos,
+                         ContentDescription* content,
+                         std::vector<Candidate>* candidates);
+
+// Helper functions
+#define LOG_PREFIX_PARSING_ERROR(line_prefix) LOG(LS_ERROR) \
+    << "Failed to parse the \"" << line_prefix << "\" line";
+
+#define LOG_LINE_PARSING_ERROR(line) LOG(LS_ERROR) \
+    << "Failed to parse line:" << line;
+
+static bool AddLine(const std::string& line, std::string* message) {
+  if (!message)
+    return false;
+
+  message->append(line);
+  message->append(kLineBreak);
+  return true;
+}
+
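+// Extracts the next line (terminated by "\n" or "\r\n") starting at *pos and
+// advances *pos past the terminator. Returns false if no complete line
+// remains.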
+static bool GetLine(const std::string& message,
+                    size_t* pos,
+                    std::string* line) {
+  size_t line_begin = *pos;
+  size_t line_end = message.find('\n', line_begin);
+  if (line_end == std::string::npos) {
+    return false;
+  }
+  // Update *pos to the start of the next line.
+  *pos = line_end + 1;
+  if (line_end > 0 && (message.at(line_end - 1) == '\r')) {
+    --line_end;
+  }
+  *line = message.substr(line_begin, (line_end - line_begin));
+  return true;
+}
+
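+// Extracts the next line only if it starts with the given two-character
+// line prefix |type|; returns false otherwise.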
+static bool GetLineWithPrefix(const std::string& message, size_t* pos,
+                              std::string* line, const char* type) {
+  if (message.compare(*pos, kLinePrefixLength, type) != 0) {
+    return false;
+  }
+
+  if (!GetLine(message, pos, line))
+    return false;
+
+  return true;
+}
+
+static bool HasPrefix(const std::string& line,
+                      const std::string& prefix,
+                      size_t pos) {
+  return (line.compare(pos, prefix.size(), prefix) == 0);
+}
+
+static bool HasPrefix(const std::string& line,
+                      const std::string& prefix) {
+  return HasPrefix(line, prefix, 0);
+}
+
+static bool HasAttribute(const std::string& line,
+                         const std::string& attribute) {
+  return (line.compare(kLinePrefixLength, attribute.size(), attribute) == 0);
+}
+
+std::string SdpSerialize(const cricket::SessionDescription& desc,
+                         const std::vector<Candidate>& candidates) {
+  std::string message;
+
+  // Session Description.
+  AddLine(kSessionVersion, &message);
+  AddLine(kSessionOrigin, &message);
+  AddLine(kSessionName, &message);
+
+  // Time Description.
+  AddLine(kTimeDescription, &message);
+
+  const cricket::ContentInfo* audio_content = GetFirstAudioContent(&desc);
+  const cricket::ContentInfo* video_content = GetFirstVideoContent(&desc);
+
+  // Group
+  if (audio_content && video_content)
+    AddLine(kAttrGroup, &message);
+
+  // Media Description
+  if (audio_content) {
+    BuildMediaDescription(*audio_content, candidates,
+        cricket::MEDIA_TYPE_AUDIO, &message);
+  }
+
+  if (video_content) {
+    BuildMediaDescription(*video_content, candidates,
+        cricket::MEDIA_TYPE_VIDEO, &message);
+  }
+
+  return message;
+}
+
+bool SdpDeserialize(const std::string& message,
+                    cricket::SessionDescription* desc,
+                    std::vector<Candidate>* candidates) {
+  size_t current_pos = 0;
+
+  // Session Description
+  if (!ParseSessionDescription(message, &current_pos)) {
+    return false;
+  }
+
+  // Time Description
+  if (!ParseTimeDescription(message, &current_pos)) {
+    return false;
+  }
+
+  // Media Description
+  if (!ParseMediaDescription(message, &current_pos, desc, candidates)) {
+    return false;
+  }
+
+  return true;
+}
+
+void BuildMediaDescription(const cricket::ContentInfo& content_info,
+                           const std::vector<Candidate>& candidates,
+                           const MediaType media_type,
+                           std::string* message) {
+  ASSERT(message != NULL);
+  // TODO: Rethink if we should use sprintfn instead of stringstream.
+  // According to the style guide, streams should only be used for logging.
+  // http://google-styleguide.googlecode.com/svn/
+  // trunk/cppguide.xml?showone=Streams#Streams
+  std::ostringstream os;
+  const MediaContentDescription* media_desc =
+      static_cast<const MediaContentDescription*> (
+          content_info.description);
+  ASSERT(media_desc != NULL);
+
+  // m=<media> <port> <proto> <fmt>
+  // fmt is a list of payload type numbers that MAY be used in the session.
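+  // For example (matching the reference SDP in webrtcsdp_unittest.cc):
+  //   m=audio 0 RTP/AVPF 103 104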
+  const char* type = NULL;
+  if (media_type == cricket::MEDIA_TYPE_AUDIO)
+    type = kMediaTypeAudio;
+  else if (media_type == cricket::MEDIA_TYPE_VIDEO)
+    type = kMediaTypeVideo;
+  else
+    ASSERT(false);
+
+  std::string fmt;
+  if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    const VideoContentDescription* video_desc =
+        static_cast<const VideoContentDescription*>(media_desc);
+    for (std::vector<cricket::VideoCodec>::const_iterator it =
+             video_desc->codecs().begin();
+         it != video_desc->codecs().end(); ++it) {
+      fmt.append(" ");
+      fmt.append(talk_base::ToString<int>(it->id));
+    }
+  } else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    const AudioContentDescription* audio_desc =
+        static_cast<const AudioContentDescription*>(media_desc);
+    for (std::vector<cricket::AudioCodec>::const_iterator it =
+             audio_desc->codecs().begin();
+         it != audio_desc->codecs().end(); ++it) {
+      fmt.append(" ");
+      fmt.append(talk_base::ToString<int>(it->id));
+    }
+  }
+  const int port = 0;
+  const char* proto = "RTP/AVPF";
+  os.str("");
+  os << kLinePrefixMedia << type << " " << port << " " << proto << fmt;
+  AddLine(os.str(), message);
+
+  // a=mid:<media>
+  os.str("");
+  os << kLinePrefixAttributes << kAttributeMid << type;
+  AddLine(os.str(), message);
+
+  // a=rtcp-mux
+  if (media_desc->rtcp_mux()) {
+    os.str("");
+    os << kLinePrefixAttributes << kAttributeRtcpMux;
+    AddLine(os.str(), message);
+  }
+
+  // a=crypto:<tag> <crypto-suite> <key-params> [<session-params>]
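+  // For example (key parameters elided):
+  //   a=crypto:1 AES_CM_128_HMAC_SHA1_32 inline:<key-params>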
+  for (std::vector<CryptoParams>::const_iterator it =
+           media_desc->cryptos().begin();
+       it != media_desc->cryptos().end(); ++it) {
+    os.str("");
+    os << kLinePrefixAttributes << kAttributeCrypto << it->tag << " "
+       << it->cipher_suite << " "
+       << it->key_params << " "
+       << it->session_params;
+    AddLine(os.str(), message);
+  }
+
+  // a=rtpmap:<payload type> <encoding name>/<clock rate>
+  // [/<encodingparameters>]
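+  // For example:
+  //   a=rtpmap:103 ISAC/16000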
+  BuildRtpMap(media_desc, media_type, message);
+
+  // rfc5245
+  // a=candidate:<foundation> <component-id> <transport> <priority>
+  // <connection-address> <port> typ <candidate-types>
+  // [raddr <connection-address>] [rport <port>]
+  BuildCandidate(candidates, media_type, message);
+
+  // draft - Mechanisms for Media Source Selection in SDP
+  // a=ssrc:<ssrc-id> <attribute>:<value>
+  // a=ssrc:<ssrc-id> cname:<value> mslabel:<value> label:<value>
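+  // For example:
+  //   a=ssrc:1 cname:stream_1_cname mslabel:local_stream_1 label:local_audio_1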
+  for (cricket::StreamParamsVec::const_iterator it =
+           media_desc->streams().begin();
+       it != media_desc->streams().end(); ++it) {
+    // Require that the track belongs to a media stream, i.e. that the
+    // sync_label is set. This extra check is necessary since the
+    // MediaContentDescription always contains a StreamParams entry with an
+    // ssrc even if no track or media stream has been created.
+    if (it->sync_label.empty()) continue;
+
+    os.str("");
+    os << kLinePrefixAttributes << kAttributeSsrc << it->ssrcs[0] << " "
+       << kAttributeCname << it->cname << " "
+       << kAttributeMslabel << it->sync_label << " "
+       << kAttributeLabel << it->name;
+    AddLine(os.str(), message);
+  }
+}
+
+void BuildRtpMap(const MediaContentDescription* media_desc,
+                 const MediaType media_type,
+                 std::string* message) {
+  ASSERT(message != NULL);
+  ASSERT(media_desc != NULL);
+  std::ostringstream os;
+  if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+    const VideoContentDescription* video_desc =
+        static_cast<const VideoContentDescription*>(media_desc);
+    for (std::vector<cricket::VideoCodec>::const_iterator it =
+             video_desc->codecs().begin();
+         it != video_desc->codecs().end(); ++it) {
+      // a=rtpmap:<payload type> <encoding name>/<clock rate>
+      // [/<encodingparameters>]
+      os.str("");
+      os << kLinePrefixAttributes << kAttributeRtpmap << it->id << " "
+         << it->name << "/" << 0;
+      AddLine(os.str(), message);
+    }
+  } else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+    const AudioContentDescription* audio_desc =
+        static_cast<const AudioContentDescription*>(media_desc);
+    for (std::vector<cricket::AudioCodec>::const_iterator it =
+             audio_desc->codecs().begin();
+         it != audio_desc->codecs().end(); ++it) {
+      // a=rtpmap:<payload type> <encoding name>/<clock rate>
+      // [/<encodingparameters>]
+      os.str("");
+      os << kLinePrefixAttributes << kAttributeRtpmap << it->id << " "
+         << it->name << "/" << it->clockrate;
+      AddLine(os.str(), message);
+    }
+  }
+}
+
+void BuildCandidate(const std::vector<Candidate>& candidates,
+                    const MediaType media_type,
+                    std::string* message) {
+  std::ostringstream os;
+  for (std::vector<Candidate>::const_iterator it = candidates.begin();
+       it != candidates.end(); ++it) {
+    // a=candidate:<foundation> <component-id> <transport> <priority>
+    // <connection-address> <port> typ <candidate-types>
+    // [raddr <connection-address>] [rport <port>]
+    // *(SP extension-att-name SP extension-att-value)
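+    // For example:
+    //   a=candidate:1 1 udp 1 127.0.0.1 1234 typ host name rtp network_name
+    //     eth0 username user_rtp password password_rtp generation 0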
+    if (((media_type == cricket::MEDIA_TYPE_VIDEO) &&
+         (it->name() == "video_rtcp" || it->name() == "video_rtp")) ||
+        ((media_type == cricket::MEDIA_TYPE_AUDIO) &&
+         (it->name() == "rtp" || it->name() == "rtcp"))) {
+      std::string type;
+      // Map the cricket candidate type to "host" / "srflx" / "prflx" / "relay"
+      if (it->type() == cricket::LOCAL_PORT_TYPE) {
+        type = kCandidateHost;
+      } else if (it->type() == cricket::STUN_PORT_TYPE) {
+        type = kCandidateSrflx;
+      } else if (it->type() == cricket::RELAY_PORT_TYPE) {
+        type = kCandidateRelay;
+      } else {
+        ASSERT(false);
+      }
+      os.str("");
+      os << kLinePrefixAttributes << kAttributeCandidate
+         << kIceFoundation << " " << kIceComponent << " "
+         << it->protocol() << " " << it->preference_str() << " "
+         << it->address().IPAsString() << " "
+         << it->address().PortAsString() << " "
+         << kAttributeCandidateTyp << " " << type << " "
+         << kAttributeCandidateName << " " << it->name() << " "
+         << kAttributeCandidateNetworkName << " " << it->network_name() << " "
+         << kAttributeCandidateUsername << " " << it->username() << " "
+         << kAttributeCandidatePassword << " " << it->password() << " "
+         << kAttributeCandidateGeneration << " " << it->generation();
+      AddLine(os.str(), message);
+    }
+  }
+}
+
+bool ParseSessionDescription(const std::string& message, size_t* pos) {
+  std::string line;
+
+  // v=  (protocol version)
+  if (!GetLineWithPrefix(message, pos, &line, kLinePrefixVersion)) {
+    LOG_PREFIX_PARSING_ERROR(kLinePrefixVersion);
+    return false;
+  }
+  // o=  (originator and session identifier)
+  if (!GetLineWithPrefix(message, pos, &line, kLinePrefixOrigin)) {
+    LOG_PREFIX_PARSING_ERROR(kLinePrefixOrigin);
+    return false;
+  }
+  // s=  (session name)
+  if (!GetLineWithPrefix(message, pos, &line, kLinePrefixSessionName)) {
+    LOG_PREFIX_PARSING_ERROR(kLinePrefixSessionName);
+    return false;
+  }
+
+  // Optional lines
+  // Those are the optional lines, so shouldn't return false if not present.
+  // i=* (session information)
+  GetLineWithPrefix(message, pos, &line, kLinePrefixSessionInfo);
+
+  // u=* (URI of description)
+  GetLineWithPrefix(message, pos, &line, kLinePrefixSessionUri);
+
+  // e=* (email address)
+  GetLineWithPrefix(message, pos, &line, kLinePrefixSessionEmail);
+
+  // p=* (phone number)
+  GetLineWithPrefix(message, pos, &line, kLinePrefixSessionPhone);
+
+  // c=* (connection information -- not required if included in
+  //      all media)
+  GetLineWithPrefix(message, pos, &line, kLinePrefixSessionConnection);
+
+  // b=* (zero or more bandwidth information lines)
+  while (GetLineWithPrefix(message, pos, &line, kLinePrefixSessionBandwidth)) {
+    // Skip zero or more b lines.
+  }
+
+  return true;
+}
+
+bool ParseTimeDescription(const std::string& message, size_t* pos) {
+  std::string line;
+  // One or more time descriptions ("t=" and "r=" lines; see below)
+  // t=  (time the session is active)
+  // r=* (zero or more repeat times)
+  // Ensure there's at least one time description
+  if (!GetLineWithPrefix(message, pos, &line, kLinePrefixTiming)) {
+    LOG_PREFIX_PARSING_ERROR(kLinePrefixTiming);
+    return false;
+  }
+
+  while (GetLineWithPrefix(message, pos, &line, kLinePrefixRepeatTimes)) {
+    // Skip zero or more r lines.
+  }
+
+  // Go through the rest of the time descriptions
+  while (GetLineWithPrefix(message, pos, &line, kLinePrefixTiming)) {
+    while (GetLineWithPrefix(message, pos, &line, kLinePrefixRepeatTimes)) {
+      // Skip zero or more r lines.
+    }
+  }
+
+  // z=* (time zone adjustments)
+  GetLineWithPrefix(message, pos, &line, kLinePrefixTimeZone);
+
+  // k=* (encryption key)
+  GetLineWithPrefix(message, pos, &line, kLinePrefixEncryptionKey);
+
+  // a=* (zero or more session attribute lines)
+  while (GetLineWithPrefix(message, pos, &line, kLinePrefixAttributes)) {
+    // TODO: Parse the a=group:BUNDLE attribute.
+  }
+
+  return true;
+}
+
+bool ParseMediaDescription(const std::string& message, size_t* pos,
+                           cricket::SessionDescription* desc,
+                           std::vector<Candidate>* candidates) {
+  ASSERT(desc != NULL);
+  ASSERT(candidates != NULL);
+
+  std::string line;
+
+  // Zero or more media descriptions
+  // m=<media> <port> <proto> <fmt>
+  while (GetLineWithPrefix(message, pos, &line, kLinePrefixMedia)) {
+    MediaType media_type = cricket::MEDIA_TYPE_VIDEO;
+    ContentDescription* content = NULL;
+    if (HasAttribute(line, kMediaTypeVideo)) {
+      media_type = cricket::MEDIA_TYPE_VIDEO;
+      content = new VideoContentDescription();
+      desc->AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP, content);
+    } else if (HasAttribute(line, kMediaTypeAudio)) {
+      media_type = cricket::MEDIA_TYPE_AUDIO;
+      content = new AudioContentDescription();
+      desc->AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP, content);
+    } else {
+      LOG(LS_WARNING) << "Unsupported media type: " << line;
+    }
+
+    if (!ParseContent(message, media_type, pos, content, candidates))
+      return false;
+  }
+  return true;
+}
+
+bool ParseContent(const std::string& message,
+                  const MediaType media_type,
+                  size_t* pos,
+                  ContentDescription* content,
+                  std::vector<Candidate>* candidates) {
+  ASSERT(candidates != NULL);
+  std::string line;
+  // Loop until the next m line
+  while (!HasPrefix(message, kLinePrefixMedia, *pos)) {
+    if (!GetLine(message, pos, &line)) {
+      if (*pos >= message.size())
+        return true;  // Done parsing
+      else
+        return false;
+    }
+
+    if (!content) {
+      // Unsupported media type, just skip it.
+      continue;
+    }
+
+    if (!HasPrefix(line, kLinePrefixAttributes)) {
+      // TODO: Handle other lines if needed.
+      continue;
+    }
+
+    MediaContentDescription* media_desc =
+        static_cast<MediaContentDescription*> (content);
+
+    std::vector<std::string> fields;
+    talk_base::split(line.substr(kLinePrefixLength), kSdpDelimiter, &fields);
+
+    if (HasAttribute(line, kAttributeMid)) {
+      continue;
+    } else if (HasAttribute(line, kAttributeRtcpMux)) {
+      media_desc->set_rtcp_mux(true);
+    } else if (HasAttribute(line, kAttributeSsrc)) {
+      // a=ssrc:<ssrc-id> cname:<value> mslabel:<value> label:<value>
+      uint32 ssrc = 0;
+      std::string cname;
+      std::string mslabel;
+      std::string label;
+      for (std::vector<std::string>::const_iterator it = fields.begin();
+           it != fields.end(); ++it) {
+        if (HasPrefix(*it, kAttributeSsrc)) {
+          ASSERT(it == fields.begin());
+          ssrc = talk_base::FromString<uint32>(
+                     it->substr(strlen(kAttributeSsrc)));
+        } else if (HasPrefix(*it, kAttributeCname)) {
+          cname = it->substr(strlen(kAttributeCname));
+        } else if (HasPrefix(*it, kAttributeMslabel)) {
+          mslabel = it->substr(strlen(kAttributeMslabel));
+        } else if (HasPrefix(*it, kAttributeLabel)) {
+          label = it->substr(strlen(kAttributeLabel));
+        }
+      }
+      StreamParams stream;
+      stream.name = label;
+      stream.cname = cname;
+      stream.sync_label = mslabel;
+      stream.ssrcs.push_back(ssrc);
+      media_desc->AddStream(stream);
+    } else if (HasAttribute(line, kAttributeCrypto)) {
+      // a=crypto:<tag> <crypto-suite> <key-params> [<session-params>]
+      if (fields.size() < 3) {  // 3 mandatory fields
+        LOG_LINE_PARSING_ERROR(line);
+        return false;
+      }
+      int tag = talk_base::FromString<int>(
+          fields[0].substr(strlen(kAttributeCrypto)));
+      const std::string crypto_suite = fields[1];
+      const std::string key_params = fields[2];
+      media_desc->AddCrypto(CryptoParams(tag, crypto_suite, key_params, ""));
+    } else if (HasAttribute(line, kAttributeCandidate)) {
+      // a=candidate:<foundation> <component-id> <transport> <priority>
+      // <connection-address> <port> typ <candidate-types>
+      // [raddr <connection-address>] [rport <port>]
+      // *(SP extension-att-name SP extension-att-value)
+      // 8 mandatory fields
+      if (fields.size() < 8 || (fields[6] != kAttributeCandidateTyp)) {
+        LOG_LINE_PARSING_ERROR(line);
+        return false;
+      }
+      const std::string transport = fields[2];
+      const float priority = talk_base::FromString<float>(fields[3]);
+      const std::string connection_address = fields[4];
+      const int port = talk_base::FromString<int>(fields[5]);
+      std::string candidate_type;
+      const std::string type = fields[7];
+      if (type == kCandidateHost) {
+        candidate_type = cricket::LOCAL_PORT_TYPE;
+      } else if (type == kCandidateSrflx) {
+        candidate_type = cricket::STUN_PORT_TYPE;
+      } else if (type == kCandidateRelay) {
+        candidate_type = cricket::RELAY_PORT_TYPE;
+      } else {
+        LOG(LS_ERROR) << "Unsupported candidate type from line: " << line;
+        return false;
+      }
+
+      // extension
+      std::string name;
+      std::string network_name;
+      std::string username;
+      std::string password;
+      uint32 generation = 0;
+      for (size_t i = 8; i < (fields.size() - 1); ++i) {
+        const std::string field = fields.at(i);
+        if (field == kAttributeCandidateName) {
+          name = fields.at(++i);
+        } else if (field == kAttributeCandidateNetworkName) {
+          network_name = fields.at(++i);
+        } else if (field == kAttributeCandidateUsername) {
+          username = fields.at(++i);
+        } else if (field == kAttributeCandidatePassword) {
+          password = fields.at(++i);
+        } else if (field == kAttributeCandidateGeneration) {
+          generation = talk_base::FromString<uint32>(fields.at(++i));
+        }
+      }
+
+      SocketAddress address(connection_address, port);
+      Candidate candidate(name, transport, address, priority, username,
+          password, candidate_type, network_name, generation);
+      candidates->push_back(candidate);
+    } else if (HasAttribute(line, kAttributeRtpmap)) {
+      // a=rtpmap:<payload type> <encoding name>/<clock rate>
+      // [/<encodingparameters>]
+      // 2 mandatory fields
+      if (fields.size() < 2) {
+        LOG_LINE_PARSING_ERROR(line);
+        return false;
+      }
+      const int payload_type = talk_base::FromString<int>(
+          fields[0].substr(strlen(kAttributeRtpmap)));
+      const std::string encoder = fields[1];
+      const size_t pos = encoder.find("/");
+      if (pos == std::string::npos)
+        return false;
+      const std::string encoding_name = encoder.substr(0, pos);
+      const int clock_rate =
+          talk_base::FromString<int>(encoder.substr(pos + 1));
+      if (media_type == cricket::MEDIA_TYPE_VIDEO) {
+        VideoContentDescription* video_desc =
+            static_cast<VideoContentDescription*>(media_desc);
+        video_desc->AddCodec(cricket::VideoCodec(payload_type, encoding_name,
+                                                 0, 0, 0, 0));
+      } else if (media_type == cricket::MEDIA_TYPE_AUDIO) {
+        AudioContentDescription* audio_desc =
+            static_cast<AudioContentDescription*>(media_desc);
+        audio_desc->AddCodec(cricket::AudioCodec(payload_type, encoding_name,
+                                                 clock_rate, 0, 0, 0));
+      }
+    } else {
+      LOG(LS_WARNING) << "Unsupported line: " << line;
+    }
+  }
+  return true;
+}
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/webrtcsdp.h b/talk/app/webrtc/webrtcsdp.h
new file mode 100644
index 0000000..32131b1
--- /dev/null
+++ b/talk/app/webrtc/webrtcsdp.h
@@ -0,0 +1,69 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file contains functions for parsing and serializing SDP messages.
+// The related RFCs and drafts include:
+// * RFC 4566 - SDP
+// * RFC 5245 - ICE
+// * RFC 3388 - Grouping of Media Lines in SDP
+// * RFC 4568 - SDP Security Descriptions for Media Streams
+// * draft-lennox-mmusic-sdp-source-selection-02 -
+//   Mechanisms for Media Source Selection in SDP
+
+#ifndef TALK_APP_WEBRTC_WEBRTCSDP_H_
+#define TALK_APP_WEBRTC_WEBRTCSDP_H_
+
+#include <string>
+#include <vector>
+
+#include "talk/p2p/base/candidate.h"
+
+namespace cricket {
+class SessionDescription;
+}
+
+namespace webrtc {
+
+// Serializes the passed-in SessionDescription and Candidates to an SDP string.
+// desc - The SessionDescription object to be serialized.
+// candidates - The set of Candidate objects to be serialized.
+// return - SDP string serialized from the arguments.
+std::string SdpSerialize(const cricket::SessionDescription& desc,
+                         const std::vector<cricket::Candidate>& candidates);
+
+// Deserializes the passed-in SDP string to a SessionDescription and Candidates.
+// message - SDP string to be deserialized.
+// desc - The SessionDescription object deserialized from the SDP string.
+// candidates - The set of Candidates deserialized from the SDP string.
+// return - true on success, false on failure.
+bool SdpDeserialize(const std::string& message,
+                    cricket::SessionDescription* desc,
+                    std::vector<cricket::Candidate>* candidates);
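+//
+// Example usage (an illustrative sketch only; the variable names below are
+// hypothetical and not part of this API):
+//
+//   cricket::SessionDescription desc;
+//   std::vector<cricket::Candidate> candidates;
+//   // ... populate desc and candidates ...
+//   std::string sdp = SdpSerialize(desc, candidates);
+//
+//   cricket::SessionDescription parsed_desc;
+//   std::vector<cricket::Candidate> parsed_candidates;
+//   if (!SdpDeserialize(sdp, &parsed_desc, &parsed_candidates)) {
+//     // Handle the malformed SDP string.
+//   }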
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_WEBRTCSDP_H_
diff --git a/talk/app/webrtc/webrtcsdp_unittest.cc b/talk/app/webrtc/webrtcsdp_unittest.cc
new file mode 100644
index 0000000..2fd1e6b
--- /dev/null
+++ b/talk/app/webrtc/webrtcsdp_unittest.cc
@@ -0,0 +1,293 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <string>
+
+#include "talk/app/webrtc/webrtcsdp.h"
+#include "talk/base/gunit.h"
+#include "talk/base/logging.h"
+#include "talk/base/scoped_ptr.h"
+#include "talk/base/stringutils.h"
+#include "talk/p2p/base/constants.h"
+#include "talk/session/phone/mediasession.h"
+
+typedef std::vector<cricket::Candidate> Candidates;
+using cricket::AudioCodec;
+using cricket::AudioContentDescription;
+using cricket::ContentInfo;
+using cricket::CryptoParams;
+using cricket::SessionDescription;
+using cricket::StreamParams;
+using cricket::VideoCodec;
+using cricket::VideoContentDescription;
+
+// Reference SDP string used by the tests below.
+static const char kSdpString[] =
+    "v=0\r\n"
+    "o=- 0 0 IN IP4 127.0.0.1\r\n"
+    "s=\r\n"
+    "t=0 0\r\n"
+    "a=group:BUNDLE audio video\r\n"
+    "m=audio 0 RTP/AVPF 103 104\r\n"
+    "a=mid:audio\r\n"
+    "a=rtcp-mux\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_32 "
+    "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32 \r\n"
+    "a=rtpmap:103 ISAC/16000\r\n"
+    "a=rtpmap:104 ISAC/32000\r\n"
+    "a=candidate:1 1 udp 1 127.0.0.1 1234 typ host name rtp network_name "
+    "eth0 username user_rtp password password_rtp generation 0\r\n"
+    "a=candidate:1 1 udp 1 127.0.0.1 1235 typ host name rtcp network_name "
+    "eth0 username user_rtcp password password_rtcp generation 0\r\n"
+    "a=ssrc:1 cname:stream_1_cname mslabel:local_stream_1 "
+    "label:local_audio_1\r\n"
+    "a=ssrc:4 cname:stream_2_cname mslabel:local_stream_2 "
+    "label:local_audio_2\r\n"
+    "m=video 0 RTP/AVPF 120\r\n"
+    "a=mid:video\r\n"
+    "a=crypto:1 AES_CM_128_HMAC_SHA1_80 "
+    "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32 \r\n"
+    "a=rtpmap:120 VP8/0\r\n"
+    "a=candidate:1 1 udp 1 127.0.0.1 1236 typ host name video_rtcp "
+    "network_name eth0 username user_video_rtcp password password_video_rtcp "
+    "generation 0\r\n"
+    "a=candidate:1 1 udp 1 127.0.0.1 1237 typ host name video_rtp "
+    "network_name eth0 username user_video_rtp password password_video_rtp "
+    "generation 0\r\n"
+    "a=ssrc:2 cname:stream_1_cname mslabel:local_stream_1 "
+    "label:local_video_1\r\n"
+    "a=ssrc:3 cname:stream_1_cname mslabel:local_stream_1 "
+    "label:local_video_2\r\n"
+    "a=ssrc:5 cname:stream_2_cname mslabel:local_stream_2 "
+    "label:local_video_3\r\n";
+
+static const char kSdpDestroyer[] = "!@#$%^&";
+
+// MediaStream 1
+static const char kStreamLabel1[] = "local_stream_1";
+static const char kStream1Cname[] = "stream_1_cname";
+static const char kAudioTrackLabel1[] = "local_audio_1";
+static const uint32 kAudioTrack1Ssrc = 1;
+static const char kVideoTrackLabel1[] = "local_video_1";
+static const uint32 kVideoTrack1Ssrc = 2;
+static const char kVideoTrackLabel2[] = "local_video_2";
+static const uint32 kVideoTrack2Ssrc = 3;
+
+// MediaStream 2
+static const char kStreamLabel2[] = "local_stream_2";
+static const char kStream2Cname[] = "stream_2_cname";
+static const char kAudioTrackLabel2[] = "local_audio_2";
+static const uint32 kAudioTrack2Ssrc = 4;
+static const char kVideoTrackLabel3[] = "local_video_3";
+static const uint32 kVideoTrack3Ssrc = 5;
+
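+// Test fixture that builds a cricket::SessionDescription and a set of
+// candidates expected to match the reference kSdpString above, so that
+// serialization and deserialization can be checked in both directions.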
+class WebRtcSdpTest : public testing::Test {
+ public:
+  WebRtcSdpTest() {
+    // AudioContentDescription
+    talk_base::scoped_ptr<AudioContentDescription> audio(
+        new AudioContentDescription());
+    audio->set_rtcp_mux(true);
+    StreamParams audio_stream1;
+    audio_stream1.name = kAudioTrackLabel1;
+    audio_stream1.cname = kStream1Cname;
+    audio_stream1.sync_label = kStreamLabel1;
+    audio_stream1.ssrcs.push_back(kAudioTrack1Ssrc);
+    audio->AddStream(audio_stream1);
+    StreamParams audio_stream2;
+    audio_stream2.name = kAudioTrackLabel2;
+    audio_stream2.cname = kStream2Cname;
+    audio_stream2.sync_label = kStreamLabel2;
+    audio_stream2.ssrcs.push_back(kAudioTrack2Ssrc);
+    audio->AddStream(audio_stream2);
+    audio->AddCrypto(CryptoParams(1, "AES_CM_128_HMAC_SHA1_32",
+        "inline:NzB4d1BINUAvLEw6UzF3WSJ+PSdFcGdUJShpX1Zj|2^20|1:32", ""));
+    audio->AddCodec(AudioCodec(103, "ISAC", 16000, 0, 0, 0));
+    audio->AddCodec(AudioCodec(104, "ISAC", 32000, 0, 0, 0));
+    desc_.AddContent(cricket::CN_AUDIO, cricket::NS_JINGLE_RTP,
+                     audio.release());
+
+    // VideoContentDescription
+    talk_base::scoped_ptr<VideoContentDescription> video(
+        new VideoContentDescription());
+    StreamParams video_stream1;
+    video_stream1.name = kVideoTrackLabel1;
+    video_stream1.cname = kStream1Cname;
+    video_stream1.sync_label = kStreamLabel1;
+    video_stream1.ssrcs.push_back(kVideoTrack1Ssrc);
+    video->AddStream(video_stream1);
+    StreamParams video_stream2;
+    video_stream2.name = kVideoTrackLabel2;
+    video_stream2.cname = kStream1Cname;
+    video_stream2.sync_label = kStreamLabel1;
+    video_stream2.ssrcs.push_back(kVideoTrack2Ssrc);
+    video->AddStream(video_stream2);
+    StreamParams video_stream3;
+    video_stream3.name = kVideoTrackLabel3;
+    video_stream3.cname = kStream2Cname;
+    video_stream3.sync_label = kStreamLabel2;
+    video_stream3.ssrcs.push_back(kVideoTrack3Ssrc);
+    video->AddStream(video_stream3);
+    video->AddCrypto(CryptoParams(1, "AES_CM_128_HMAC_SHA1_80",
+        "inline:d0RmdmcmVCspeEc3QGZiNWpVLFJhQX1cfHAwJSoj|2^20|1:32", ""));
+    video->AddCodec(VideoCodec(120, "VP8", 352, 288, 30, 0));
+    desc_.AddContent(cricket::CN_VIDEO, cricket::NS_JINGLE_RTP,
+                     video.release());
+
+    int port = 1234;
+    talk_base::SocketAddress address("127.0.0.1", port++);
+    cricket::Candidate candidate1("rtp", "udp", address, 1,
+        "user_rtp", "password_rtp", "local", "eth0", 0);
+    address.SetPort(port++);
+    cricket::Candidate candidate2("rtcp", "udp", address, 1,
+        "user_rtcp", "password_rtcp", "local", "eth0", 0);
+    address.SetPort(port++);
+    cricket::Candidate candidate3("video_rtcp", "udp", address, 1,
+        "user_video_rtcp", "password_video_rtcp", "local", "eth0", 0);
+    address.SetPort(port++);
+    cricket::Candidate candidate4("video_rtp", "udp", address, 1,
+        "user_video_rtp", "password_video_rtp", "local", "eth0", 0);
+
+    candidates_.push_back(candidate1);
+    candidates_.push_back(candidate2);
+    candidates_.push_back(candidate3);
+    candidates_.push_back(candidate4);
+  }
+
+  bool CompareSessionDescription(const SessionDescription& desc1,
+                                 const SessionDescription& desc2) {
+    const ContentInfo* ac1 = desc1.GetContentByName("audio");
+    const AudioContentDescription* acd1 =
+        static_cast<const AudioContentDescription*>(ac1->description);
+    const ContentInfo* vc1 = desc1.GetContentByName("video");
+    const VideoContentDescription* vcd1 =
+        static_cast<const VideoContentDescription*>(vc1->description);
+
+    const ContentInfo* ac2 = desc2.GetContentByName("audio");
+    const AudioContentDescription* acd2 =
+        static_cast<const AudioContentDescription*>(ac2->description);
+    const ContentInfo* vc2 = desc2.GetContentByName("video");
+    const VideoContentDescription* vcd2 =
+        static_cast<const VideoContentDescription*>(vc2->description);
+
+    // rtcp_mux
+    EXPECT_EQ(acd1->rtcp_mux(), acd2->rtcp_mux());
+    EXPECT_EQ(vcd1->rtcp_mux(), vcd2->rtcp_mux());
+
+    // cryptos
+    EXPECT_EQ(acd1->cryptos().size(), acd2->cryptos().size());
+    EXPECT_EQ(vcd1->cryptos().size(), vcd2->cryptos().size());
+    for (size_t i = 0; i < acd1->cryptos().size(); ++i) {
+      const CryptoParams c1 = acd1->cryptos().at(i);
+      const CryptoParams c2 = acd2->cryptos().at(i);
+      EXPECT_TRUE(c1.Matches(c2));
+    }
+    for (size_t i = 0; i < vcd1->cryptos().size(); ++i) {
+      const CryptoParams c1 = vcd1->cryptos().at(i);
+      const CryptoParams c2 = vcd2->cryptos().at(i);
+      EXPECT_TRUE(c1.Matches(c2));
+    }
+
+    // codecs
+    EXPECT_EQ(acd1->codecs().size(), acd2->codecs().size());
+    EXPECT_EQ(vcd1->codecs().size(), vcd2->codecs().size());
+    for (size_t i = 0; i < acd1->codecs().size(); ++i) {
+      const AudioCodec c1 = acd1->codecs().at(i);
+      const AudioCodec c2 = acd2->codecs().at(i);
+      EXPECT_TRUE(c1.Matches(c2));
+    }
+    for (size_t i = 0; i < vcd1->codecs().size(); ++i) {
+      const VideoCodec c1 = vcd1->codecs().at(i);
+      const VideoCodec c2 = vcd2->codecs().at(i);
+      EXPECT_TRUE(c1.Matches(c2));
+    }
+
+    // streams
+    EXPECT_EQ(acd1->streams(), acd2->streams());
+    EXPECT_EQ(vcd1->streams(), vcd2->streams());
+
+    return true;
+  }
+
+  bool CompareCandidates(const Candidates& cs1, const Candidates& cs2) {
+    EXPECT_EQ(cs1.size(), cs2.size());
+
+    for (size_t i = 0; i < cs1.size(); ++i) {
+      const cricket::Candidate c1 = cs1.at(i);
+      const cricket::Candidate c2 = cs2.at(i);
+      EXPECT_TRUE(c1.IsEquivalent(c2));
+    }
+    return true;
+  }
+
+  bool ReplaceAndTryToParse(const char* search, const char* replace) {
+    SessionDescription desc;
+    std::vector<cricket::Candidate> candidates;
+    std::string sdp = kSdpString;
+    talk_base::replace_substrs(search, strlen(search), replace,
+        strlen(replace), &sdp);
+    return webrtc::SdpDeserialize(sdp, &desc, &candidates);
+  }
+
+ protected:
+  SessionDescription desc_;
+  Candidates candidates_;
+};
+
+TEST_F(WebRtcSdpTest, Serialize) {
+  std::string message = webrtc::SdpSerialize(desc_, candidates_);
+  LOG(LS_INFO) << "SDP: " << message;
+  EXPECT_EQ(std::string(kSdpString), message);
+}
+
+TEST_F(WebRtcSdpTest, Deserialize) {
+  SessionDescription desc;
+  std::vector<cricket::Candidate> candidates;
+  // Deserialize
+  EXPECT_TRUE(webrtc::SdpDeserialize(kSdpString, &desc, &candidates));
+  // Verify
+  LOG(LS_INFO) << "SDP: " << webrtc::SdpSerialize(desc, candidates);
+  EXPECT_TRUE(CompareSessionDescription(desc_, desc));
+  EXPECT_TRUE(CompareCandidates(candidates_, candidates));
+}
+
+TEST_F(WebRtcSdpTest, DeserializeBrokenSdp) {
+  // Broken session description
+  EXPECT_EQ(false, ReplaceAndTryToParse("v=", kSdpDestroyer));
+  EXPECT_EQ(false, ReplaceAndTryToParse("o=", kSdpDestroyer));
+  EXPECT_EQ(false, ReplaceAndTryToParse("s=", kSdpDestroyer));
+  // Broken time description
+  EXPECT_EQ(false, ReplaceAndTryToParse("t=", kSdpDestroyer));
+
+  // Missing group and mid lines are not treated as errors.
+  EXPECT_EQ(true, ReplaceAndTryToParse("a=group:BUNDLE audio video\r\n", ""));
+  EXPECT_EQ(true, ReplaceAndTryToParse("a=mid:audio\r\n", ""));
+  EXPECT_EQ(true, ReplaceAndTryToParse("a=mid:video\r\n", ""));
+
+  // Broken media description
+  EXPECT_EQ(true, ReplaceAndTryToParse("video 0 RTP/AVPF", kSdpDestroyer));
+}
diff --git a/talk/app/webrtc/webrtcsession.cc b/talk/app/webrtc/webrtcsession.cc
index 634da77..120a28e 100644
--- a/talk/app/webrtc/webrtcsession.cc
+++ b/talk/app/webrtc/webrtcsession.cc
@@ -1,6 +1,6 @@
 /*
  * libjingle
- * Copyright 2004--2011, Google Inc.
+ * Copyright 2011, Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
@@ -27,18 +27,17 @@
 
 #include "talk/app/webrtc/webrtcsession.h"
 
-#include <string>
-#include <vector>
-
-#include "talk/base/common.h"
-#include "talk/base/scoped_ptr.h"
-#include "talk/p2p/base/constants.h"
-#include "talk/p2p/base/sessiondescription.h"
-#include "talk/p2p/base/p2ptransport.h"
+#include "talk/app/webrtc/mediastream.h"
+#include "talk/app/webrtc/peerconnection.h"
+#include "talk/app/webrtc/peerconnectionsignaling.h"
+#include "talk/base/helpers.h"
+#include "talk/base/logging.h"
 #include "talk/session/phone/channel.h"
 #include "talk/session/phone/channelmanager.h"
-#include "talk/session/phone/mediasessionclient.h"
-#include "talk/session/phone/voicechannel.h"
+#include "talk/session/phone/mediasession.h"
+#include "talk/session/phone/videocapturer.h"
+
+using cricket::MediaContentDescription;
 
 namespace webrtc {
 
@@ -46,492 +45,294 @@
   MSG_CANDIDATE_TIMEOUT = 101,
 };
 
-static const int kAudioMonitorPollFrequency = 100;
-static const int kMonitorPollFrequency = 1000;
-
-// We allow 30 seconds to establish a connection; beyond that we consider
-// it an error
+// We allow 30 seconds to establish a connection; beyond that it is an error.
 static const int kCallSetupTimeout = 30 * 1000;
-// A loss of connectivity is probably due to the Internet connection going
-// down, and it might take a while to come back on wireless networks, so we
-// use a longer timeout for that.
-static const int kCallLostTimeout = 60 * 1000;
+// The session accepts one candidate per transport channel and drops any other
+// candidates generated for that channel. During session initialization one
+// cricket::VoiceChannel and one cricket::VideoChannel are created with RTCP
+// enabled, giving four transport channels and hence four allowed candidates.
+static const size_t kAllowedCandidates = 4;
+// TODO - These are magic strings used by cricket::VideoChannel.
+// They should be moved to a common place.
+static const std::string kRtpVideoChannelStr = "video_rtp";
+static const std::string kRtcpVideoChannelStr = "video_rtcp";
 
-static const char kVideoStream[] = "video_rtp";
-static const char kAudioStream[] = "rtp";
-
-static const int kDefaultVideoCodecId = 100;
-static const int kDefaultVideoCodecFramerate = 30;
-static const char kDefaultVideoCodecName[] = "VP8";
-
-WebRtcSession::WebRtcSession(const std::string& id,
-                             bool incoming,
-                             cricket::PortAllocator* allocator,
-                             cricket::ChannelManager* channelmgr,
-                             talk_base::Thread* signaling_thread)
-    : BaseSession(signaling_thread, channelmgr->worker_thread(),
-                  allocator, id, "", !incoming),
-      transport_(NULL),
-      channel_manager_(channelmgr),
-      transports_writable_(false),
-      muted_(false),
-      camera_muted_(false),
-      setup_timeout_(kCallSetupTimeout),
-      signaling_thread_(signaling_thread),
-      incoming_(incoming),
-      port_allocator_(allocator),
-      desc_factory_(channel_manager_) {
+WebRtcSession::WebRtcSession(cricket::ChannelManager* channel_manager,
+                             talk_base::Thread* signaling_thread,
+                             talk_base::Thread* worker_thread,
+                             cricket::PortAllocator* port_allocator)
+    : cricket::BaseSession(signaling_thread, worker_thread, port_allocator,
+          talk_base::ToString(talk_base::CreateRandomId()),
+          cricket::NS_JINGLE_RTP, true),
+      channel_manager_(channel_manager),
+      observer_(NULL),
+      session_desc_factory_(channel_manager) {
 }
 
 WebRtcSession::~WebRtcSession() {
-  RemoveAllStreams();
-  // TODO: Do we still need Terminate?
-  // if (state_ != STATE_RECEIVEDTERMINATE) {
-  //   Terminate();
-  // }
-  if (transport_) {
-    delete transport_;
-    transport_ = NULL;
+  Terminate();
+}
+
+bool WebRtcSession::Initialize() {
+  return CreateChannels();
+}
+
+void WebRtcSession::Terminate() {
+  if (voice_channel_.get()) {
+    channel_manager_->DestroyVoiceChannel(voice_channel_.release());
+  }
+  if (video_channel_.get()) {
+    channel_manager_->DestroyVideoChannel(video_channel_.release());
   }
 }
 
-bool WebRtcSession::Initiate() {
-  const cricket::VideoCodec default_codec(kDefaultVideoCodecId,
-      kDefaultVideoCodecName, kDefaultVideoCodecWidth, kDefaultVideoCodecHeight,
-      kDefaultVideoCodecFramerate, 0);
-  channel_manager_->SetDefaultVideoEncoderConfig(
-      cricket::VideoEncoderConfig(default_codec));
-
-  if (signaling_thread_ == NULL)
+bool WebRtcSession::CreateChannels() {
+  voice_channel_.reset(channel_manager_->CreateVoiceChannel(
+      this, cricket::CN_AUDIO, true));
+  if (!voice_channel_.get()) {
+    LOG(LS_ERROR) << "Failed to create voice channel";
     return false;
+  }
 
-  transport_ = CreateTransport();
-
-  if (transport_ == NULL)
+  video_channel_.reset(channel_manager_->CreateVideoChannel(
+      this, cricket::CN_VIDEO, true, voice_channel_.get()));
+  if (!video_channel_.get()) {
+    LOG(LS_ERROR) << "Failed to create video channel";
     return false;
+  }
 
-  transport_->set_allow_local_ips(true);
-
-  // start transports
-  transport_->SignalRequestSignaling.connect(
-      this, &WebRtcSession::OnRequestSignaling);
-  transport_->SignalCandidatesReady.connect(
-      this, &WebRtcSession::OnCandidatesReady);
-  transport_->SignalWritableState.connect(
-      this, &WebRtcSession::OnWritableState);
-  // Limit the amount of time that setting up a call may take.
-  StartTransportTimeout(kCallSetupTimeout);
+  // TransportProxies and TransportChannels will be created when
+  // CreateVoiceChannel and CreateVideoChannel are called.
+  // Try connecting all transport channels. This is necessary to generate
+  // ICE candidates.
+  SpeculativelyConnectAllTransportChannels();
   return true;
 }
 
-cricket::Transport* WebRtcSession::CreateTransport() {
-  ASSERT(signaling_thread()->IsCurrent());
-  return new cricket::P2PTransport(
-      talk_base::Thread::Current(),
-      channel_manager_->worker_thread(), port_allocator());
-}
-
-bool WebRtcSession::CreateVoiceChannel(const std::string& stream_id) {
-  // RTCP disabled
-  cricket::VoiceChannel* voice_channel =
-      channel_manager_->CreateVoiceChannel(this, stream_id, true);
-  if (voice_channel == NULL) {
-    LOG(LERROR) << "Unable to create voice channel.";
-    return false;
-  }
-  StreamInfo* stream_info = new StreamInfo(stream_id);
-  stream_info->channel = voice_channel;
-  stream_info->video = false;
-  streams_.push_back(stream_info);
-  return true;
-}
-
-bool WebRtcSession::CreateVideoChannel(const std::string& stream_id) {
-  // RTCP disabled
-  cricket::VideoChannel* video_channel =
-      channel_manager_->CreateVideoChannel(this, stream_id, true, NULL);
-  if (video_channel == NULL) {
-    LOG(LERROR) << "Unable to create video channel.";
-    return false;
-  }
-  StreamInfo* stream_info = new StreamInfo(stream_id);
-  stream_info->channel = video_channel;
-  stream_info->video = true;
-  streams_.push_back(stream_info);
-  return true;
-}
-
-cricket::TransportChannel* WebRtcSession::CreateChannel(
-    const std::string& content_name,
-    const std::string& name) {
-  if (!transport_) {
-    return NULL;
-  }
-  std::string type;
-  if (content_name.compare(kVideoStream) == 0) {
-    type = cricket::NS_GINGLE_VIDEO;
-  } else {
-    type = cricket::NS_GINGLE_AUDIO;
-  }
-  cricket::TransportChannel* transport_channel =
-      transport_->CreateChannel(name, type);
-  ASSERT(transport_channel != NULL);
-  return transport_channel;
-}
-
-cricket::TransportChannel* WebRtcSession::GetChannel(
-    const std::string& content_name, const std::string& name) {
-  if (!transport_)
-    return NULL;
-
-  return transport_->GetChannel(name);
-}
-
-void WebRtcSession::DestroyChannel(
-    const std::string& content_name, const std::string& name) {
-  if (!transport_)
-    return;
-
-  transport_->DestroyChannel(name);
-}
-
-void WebRtcSession::OnMessage(talk_base::Message* message) {
-  switch (message->message_id) {
-    case MSG_CANDIDATE_TIMEOUT:
-      if (transport_->writable()) {
-        // This should never happen: The timout triggered even
-        // though a call was successfully set up.
-        ASSERT(false);
-      }
-      SignalFailedCall();
-      break;
-    default:
-      cricket::BaseSession::OnMessage(message);
-      break;
-  }
-}
-
-bool WebRtcSession::Connect() {
-  if (streams_.empty()) {
-    // nothing to initiate
-    return false;
-  }
-  // lets connect all the transport channels created before for this session
-  transport_->ConnectChannels();
-
-  // create an offer now. This is to call SetState
-  // Actual offer will be send when OnCandidatesReady callback received
-  cricket::SessionDescription* offer = CreateOffer();
-  set_local_description(offer);
-  SetState((incoming()) ? STATE_SENTACCEPT : STATE_SENTINITIATE);
-
-  // Enable all the channels
-  EnableAllStreams();
-  SetVideoCapture(true);
-  return true;
-}
-
-bool WebRtcSession::SetVideoRenderer(const std::string& stream_id,
-                                     cricket::VideoRenderer* renderer) {
-  bool ret = false;
-  StreamMap::iterator iter;
-  for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
-    StreamInfo* stream_info = (*iter);
-    if (stream_info->stream_id.compare(stream_id) == 0) {
-      ASSERT(stream_info->channel != NULL);
-      ASSERT(stream_info->video);
-      cricket::VideoChannel* channel = static_cast<cricket::VideoChannel*>(
-          stream_info->channel);
-      ret = channel->SetRenderer(0, renderer);
-      break;
+void WebRtcSession::SetRemoteCandidates(
+    const cricket::Candidates& candidates) {
+  // First partition the candidates between the proxies. During channel
+  // creation we created the CN_AUDIO (audio) and CN_VIDEO (video) proxies.
+  cricket::Candidates audio_candidates;
+  cricket::Candidates video_candidates;
+  for (cricket::Candidates::const_iterator citer = candidates.begin();
+       citer != candidates.end(); ++citer) {
+    if (((*citer).name().compare(kRtpVideoChannelStr) == 0) ||
+        ((*citer).name().compare(kRtcpVideoChannelStr)) == 0) {
+      // Candidate names for the video RTP and RTCP channels.
+      video_candidates.push_back(*citer);
+    } else {
+      // Candidates for the audio RTP and RTCP channels; their channel
+      // names are "rtp" and "rtcp".
+      audio_candidates.push_back(*citer);
     }
   }
-  return ret;
+
+  if (!audio_candidates.empty()) {
+    cricket::TransportProxy* audio_proxy = GetTransportProxy(cricket::CN_AUDIO);
+    if (audio_proxy) {
+      // CompleteNegotiation will set the actual impl in the proxy.
+      if (!audio_proxy->negotiated())
+        audio_proxy->CompleteNegotiation();
+      // TODO - Add an interface to TransportProxy to accept a
+      // remote candidate list.
+      audio_proxy->impl()->OnRemoteCandidates(audio_candidates);
+    } else {
+      LOG(LS_INFO) << "No audio TransportProxy exists";
+    }
+  }
+
+  if (!video_candidates.empty()) {
+    cricket::TransportProxy* video_proxy = GetTransportProxy(cricket::CN_VIDEO);
+    if (video_proxy) {
+      // CompleteNegotiation will set the actual impl in the proxy.
+      if (!video_proxy->negotiated())
+        video_proxy->CompleteNegotiation();
+      // TODO - Add an interface to TransportProxy to accept a
+      // remote candidate list.
+      video_proxy->impl()->OnRemoteCandidates(video_candidates);
+    } else {
+      LOG(LS_INFO) << "No video TransportProxy exists";
+    }
+  }
 }
 
-bool WebRtcSession::SetVideoCapture(bool capture) {
-  channel_manager_->SetVideoCapture(capture);
-  return true;
+void WebRtcSession::OnTransportRequestSignaling(
+    cricket::Transport* transport) {
+  ASSERT(signaling_thread()->IsCurrent());
+  transport->OnSignalingReady();
 }
 
-bool WebRtcSession::RemoveStream(const std::string& stream_id) {
+void WebRtcSession::OnTransportConnecting(cricket::Transport* transport) {
+  ASSERT(signaling_thread()->IsCurrent());
+  // Start monitoring the writable state of the transport.
+  OnTransportWritable(transport);
+}
+
+void WebRtcSession::OnTransportWritable(cricket::Transport* transport) {
+  ASSERT(signaling_thread()->IsCurrent());
+  // If the transport is not in a writable state, start a timer to monitor
+  // the state. If the transport doesn't become writable within 30 seconds,
+  // we assume the call can't continue.
+  signaling_thread()->Clear(this, MSG_CANDIDATE_TIMEOUT);
+  if (transport->HasChannels() && !transport->writable()) {
+    signaling_thread()->PostDelayed(
+        kCallSetupTimeout, this, MSG_CANDIDATE_TIMEOUT);
+  }
+}
+
+void WebRtcSession::OnTransportCandidatesReady(
+    cricket::Transport* transport, const cricket::Candidates& candidates) {
+  ASSERT(signaling_thread()->IsCurrent());
+  // Drop additional candidates for the same channel;
+  // local_candidates_ will have one candidate per channel.
+  if (local_candidates_.size() == kAllowedCandidates)
+    return;
+  InsertTransportCandidates(candidates);
+  if (local_candidates_.size() == kAllowedCandidates && observer_) {
+    observer_->OnCandidatesReady(local_candidates_);
+  }
+}
+
+void WebRtcSession::OnTransportChannelGone(cricket::Transport* transport,
+                                           const std::string& name) {
+  ASSERT(signaling_thread()->IsCurrent());
+}
+
+void WebRtcSession::OnMessage(talk_base::Message* msg) {
+  switch (msg->message_id) {
+    case MSG_CANDIDATE_TIMEOUT:
+      LOG(LS_ERROR) << "Transport is not in writable state.";
+      SignalError();
+      break;
+    default:
+      break;
+  }
+}
+
+void WebRtcSession::InsertTransportCandidates(
+    const cricket::Candidates& candidates) {
+  for (cricket::Candidates::const_iterator citer = candidates.begin();
+       citer != candidates.end(); ++citer) {
+    // Find candidates by name; if this channel name does not already exist
+    // in the local candidate list, store the candidate.
+    if (!CheckCandidate((*citer).name())) {
+      local_candidates_.push_back(*citer);
+    }
+  }
+}
+
+// Checks whether a transport candidate is already available for the transport
+// channel, as only one cricket::Candidate is allowed per channel.
+bool WebRtcSession::CheckCandidate(const std::string& name) {
   bool ret = false;
-  StreamMap::iterator iter;
-  for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
-    StreamInfo* sinfo = (*iter);
-    if (sinfo->stream_id.compare(stream_id) == 0) {
-      if (!sinfo->video) {
-        cricket::VoiceChannel* channel = static_cast<cricket::VoiceChannel*> (
-            sinfo->channel);
-        channel->Enable(false);
-        // Note: If later the channel is used by multiple streams, then we
-        // should not destroy the channel until all the streams are removed.
-        channel_manager_->DestroyVoiceChannel(channel);
-      } else {
-        cricket::VideoChannel* channel = static_cast<cricket::VideoChannel*> (
-            sinfo->channel);
-        channel->Enable(false);
-        // Note: If later the channel is used by multiple streams, then we
-        // should not destroy the channel until all the streams are removed.
-        channel_manager_->DestroyVideoChannel(channel);
-      }
-      // channel and transport will be deleted in
-      // DestroyVoiceChannel/DestroyVideoChannel
-      streams_.erase(iter);
+  for (cricket::Candidates::iterator iter = local_candidates_.begin();
+       iter != local_candidates_.end(); ++iter) {
+    if ((*iter).name().compare(name) == 0) {
       ret = true;
       break;
     }
   }
-  if (!ret) {
-    LOG(LERROR) << "No streams found for stream id " << stream_id;
-    // TODO: trigger onError callback
-  }
   return ret;
 }
 
-void WebRtcSession::EnableAllStreams() {
-  StreamMap::const_iterator i;
-  for (i = streams_.begin(); i != streams_.end(); ++i) {
-    cricket::BaseChannel* channel = (*i)->channel;
-    if (channel)
-      channel->Enable(true);
+void WebRtcSession::SetCaptureDevice(const std::string& name,
+                                     cricket::VideoCapturer* camera) {
+  // Should be called from the signaling thread.
+  ASSERT(signaling_thread()->IsCurrent());
+
+  // TODO: Refactor this when there is support for multiple cameras.
+  const uint32 dummy_ssrc = 0;
+  if (!channel_manager_->SetVideoCapturer(camera, dummy_ssrc)) {
+    LOG(LS_ERROR) << "Failed to set capture device.";
+    return;
   }
+
+  // Actually associate the video capture module with the ViE channel.
+  channel_manager_->SetVideoOptions("");
 }
 
-void WebRtcSession::RemoveAllStreams() {
-  SetState(STATE_RECEIVEDTERMINATE);
-
-  // signaling_thread_->Post(this, MSG_RTC_REMOVEALLSTREAMS);
-  // First build a list of streams to remove and then remove them.
-  // The reason we do this is that if we remove the streams inside the
-  // loop, a stream might get removed while we're enumerating and the iterator
-  // will become invalid (and we crash).
-  // streams_ entry will be removed from ChannelManager callback method
-  // DestroyChannel
-  std::vector<std::string> streams_to_remove;
-  StreamMap::iterator iter;
-  for (iter = streams_.begin(); iter != streams_.end(); ++iter)
-    streams_to_remove.push_back((*iter)->stream_id);
-
-  for (std::vector<std::string>::iterator i = streams_to_remove.begin();
-       i != streams_to_remove.end(); ++i) {
-    RemoveStream(*i);
-  }
+void WebRtcSession::SetLocalRenderer(const std::string& name,
+                                     cricket::VideoRenderer* renderer) {
+  ASSERT(signaling_thread()->IsCurrent());
+  // TODO: Fix SetLocalRenderer.
+  //video_channel_->SetLocalRenderer(0, renderer);
 }
 
-bool WebRtcSession::HasStream(const std::string& stream_id) const {
-  StreamMap::const_iterator iter;
-  for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
-    StreamInfo* sinfo = (*iter);
-    if (stream_id.compare(sinfo->stream_id) == 0) {
-      return true;
-    }
-  }
-  return false;
+void WebRtcSession::SetRemoteRenderer(const std::string& name,
+                                      cricket::VideoRenderer* renderer) {
+  ASSERT(signaling_thread()->IsCurrent());
+
+  // TODO: Only ssrc = 0 is supported at the moment, i.e. only one channel.
+  video_channel_->SetRenderer(0, renderer);
 }
 
-bool WebRtcSession::HasChannel(bool video) const {
-  StreamMap::const_iterator iter;
-  for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
-    StreamInfo* sinfo = (*iter);
-    if (sinfo->video == video) {
-      return true;
-    }
-  }
-  return false;
-}
-
-bool WebRtcSession::HasAudioChannel() const {
-  return HasChannel(false);
-}
-
-bool WebRtcSession::HasVideoChannel() const {
-  return HasChannel(true);
-}
-
-void WebRtcSession::OnRequestSignaling(cricket::Transport* transport) {
-  transport->OnSignalingReady();
-}
-
-void WebRtcSession::OnWritableState(cricket::Transport* transport) {
-  ASSERT(transport == transport_);
-  const bool transports_writable = transport_->writable();
-  if (transports_writable) {
-    if (transports_writable != transports_writable_) {
-      signaling_thread_->Clear(this, MSG_CANDIDATE_TIMEOUT);
-    } else {
-      // At one point all channels were writable and we had full connectivity,
-      // but then we lost it. Start the timeout again to kill the call if it
-      // doesn't come back.
-      StartTransportTimeout(kCallLostTimeout);
-    }
-    transports_writable_ = transports_writable;
-  }
-  NotifyTransportState();
-  return;
-}
-
-void WebRtcSession::StartTransportTimeout(int timeout) {
-  talk_base::Thread::Current()->PostDelayed(timeout, this,
-                                            MSG_CANDIDATE_TIMEOUT,
-                                            NULL);
-}
-
-void WebRtcSession::NotifyTransportState() {
-}
-
-bool WebRtcSession::OnInitiateMessage(
-    cricket::SessionDescription* offer,
-    const std::vector<cricket::Candidate>& candidates) {
-  if (!offer) {
-    LOG(LERROR) << "No SessionDescription from peer";
-    return false;
-  }
-
-  // Get capabilities from offer before generating an answer to it.
-  cricket::MediaSessionOptions options;
-  if (GetFirstAudioContent(offer))
-    options.has_audio = true;
-  if (GetFirstVideoContent(offer))
-    options.has_video = true;
-
-  talk_base::scoped_ptr<cricket::SessionDescription> answer;
-  answer.reset(CreateAnswer(offer, options));
-
-  if (!answer.get()) {
-    return false;
-  }
-
-  const cricket::ContentInfo* audio_content = GetFirstAudioContent(
-      answer.get());
-  const cricket::ContentInfo* video_content = GetFirstVideoContent(
-      answer.get());
-
-  if (!audio_content && !video_content) {
-    return false;
-  }
-
-  bool ret = true;
-  if (audio_content) {
-    ret = !HasAudioChannel() &&
-          CreateVoiceChannel(audio_content->name);
-    if (!ret) {
-      LOG(LERROR) << "Failed to create voice channel for "
-                  << audio_content->name;
-      return false;
-    }
-  }
-
-  if (video_content) {
-    ret = !HasVideoChannel() &&
-          CreateVideoChannel(video_content->name);
-    if (!ret) {
-      LOG(LERROR) << "Failed to create video channel for "
-                  << video_content->name;
-      return false;
-    }
-  }
-  // Provide remote candidates to the transport
-  transport_->OnRemoteCandidates(candidates);
-
-  set_remote_description(offer);
-  SetState(STATE_RECEIVEDINITIATE);
-
-  transport_->ConnectChannels();
-  EnableAllStreams();
-
-  set_local_description(answer.release());
-
-  // AddStream called only once with Video label
-  if (video_content) {
-    SignalAddStream(video_content->name, true);
-  } else {
-    SignalAddStream(audio_content->name, false);
-  }
-  SetState(STATE_SENTACCEPT);
-  return true;
-}
-
-bool WebRtcSession::OnRemoteDescription(
-    cricket::SessionDescription* desc,
-    const std::vector<cricket::Candidate>& candidates) {
-  if (state() == STATE_SENTACCEPT ||
-      state() == STATE_RECEIVEDACCEPT ||
-      state() == STATE_INPROGRESS) {
-    transport_->OnRemoteCandidates(candidates);
-    return true;
-  }
-  // Session description is always accepted.
-  set_remote_description(desc);
-  SetState(STATE_RECEIVEDACCEPT);
-  // Will trigger OnWritableState() if successful.
-  transport_->OnRemoteCandidates(candidates);
-
-  if (!incoming()) {
-    // Trigger OnAddStream callback at the initiator
-    const cricket::ContentInfo* video_content = GetFirstVideoContent(desc);
-    if (video_content && !SendSignalAddStream(true)) {
-      LOG(LERROR) << "Video stream unexpected in answer.";
-      return false;
-    } else {
-      const cricket::ContentInfo* audio_content = GetFirstAudioContent(desc);
-      if (audio_content && !SendSignalAddStream(false)) {
-        LOG(LERROR) << "Audio stream unexpected in answer.";
-        return false;
-      }
-    }
-  }
-  return true;
-}
-
-// Send the SignalAddStream with the stream_id based on the content type.
-bool WebRtcSession::SendSignalAddStream(bool video) {
-  StreamMap::const_iterator iter;
-  for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
-    StreamInfo* sinfo = (*iter);
-    if (sinfo->video == video) {
-      SignalAddStream(sinfo->stream_id, video);
-      return true;
-    }
-  }
-  return false;
-}
-
-cricket::SessionDescription* WebRtcSession::CreateOffer() {
-  cricket::MediaSessionOptions options;
-  options.has_audio = false;  // disable default option
-  StreamMap::const_iterator iter;
-  for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
-    if ((*iter)->video) {
-      options.has_video = true;
-    } else {
-      options.has_audio = true;
-    }
-  }
-  // We didn't save the previous offer.
-  const cricket::SessionDescription* previous_offer = NULL;
-  return desc_factory_.CreateOffer(options, previous_offer);
-}
-
-cricket::SessionDescription* WebRtcSession::CreateAnswer(
-    const cricket::SessionDescription* offer,
+const cricket::SessionDescription* WebRtcSession::ProvideOffer(
     const cricket::MediaSessionOptions& options) {
-  // We didn't save the previous answer.
-  const cricket::SessionDescription* previous_answer = NULL;
-  return desc_factory_.CreateAnswer(offer, options, previous_answer);
+  // TODO - Sanity check for options.
+  cricket::SessionDescription* offer(
+      session_desc_factory_.CreateOffer(options, local_description()));
+  set_local_description(offer);
+  return offer;
 }
 
-void WebRtcSession::SetError(Error error) {
-  BaseSession::SetError(error);
+const cricket::SessionDescription* WebRtcSession::SetRemoteSessionDescription(
+    const cricket::SessionDescription* remote_offer,
+    const std::vector<cricket::Candidate>& remote_candidates) {
+  set_remote_description(
+      const_cast<cricket::SessionDescription*>(remote_offer));
+  SetRemoteCandidates(remote_candidates);
+  return remote_offer;
 }
 
-void WebRtcSession::OnCandidatesReady(
-    cricket::Transport* transport,
-    const std::vector<cricket::Candidate>& candidates) {
-  std::vector<cricket::Candidate>::const_iterator iter;
-  for (iter = candidates.begin(); iter != candidates.end(); ++iter) {
-    local_candidates_.push_back(*iter);
+const cricket::SessionDescription* WebRtcSession::ProvideAnswer(
+    const cricket::MediaSessionOptions& options) {
+  cricket::SessionDescription* answer(
+      session_desc_factory_.CreateAnswer(remote_description(), options,
+                                         local_description()));
+  set_local_description(answer);
+  return answer;
+}
+
+void WebRtcSession::NegotiationDone() {
+  // The session state is set after the session has received both the local
+  // and remote descriptions. The state transition happens only when the
+  // session is in the INIT state.
+  if (state() == STATE_INIT) {
+    SetState(STATE_SENTINITIATE);
+    SetState(STATE_RECEIVEDACCEPT);
+
+    // Enabling voice and video channel.
+    voice_channel_->Enable(true);
+    video_channel_->Enable(true);
   }
-  SignalLocalDescription(local_description(), candidates);
+
+  const cricket::ContentInfo* audio_info =
+      cricket::GetFirstAudioContent(local_description());
+  if (audio_info) {
+    const cricket::MediaContentDescription* audio_content =
+        static_cast<const cricket::MediaContentDescription*>(
+            audio_info->description);
+    // Since channels currently do not support multiple send streams,
+    // we can remove a stream from the session by muting the channel.
+    // TODO - This needs to change once support for multiple send
+    // streams is available.
+    voice_channel_->Mute(audio_content->streams().size() == 0);
+  }
+
+  const cricket::ContentInfo* video_info =
+      cricket::GetFirstVideoContent(local_description());
+  if (video_info) {
+    const cricket::MediaContentDescription* video_content =
+        static_cast<const cricket::MediaContentDescription*>(
+            video_info->description);
+    // Since channels currently do not support multiple send streams,
+    // we can remove a stream from the session by muting the channel.
+    // TODO - This needs to change once support for multiple send
+    // streams is available.
+    video_channel_->Mute(video_content->streams().size() == 0);
+  }
 }
-} /* namespace webrtc */
+
+}  // namespace webrtc
diff --git a/talk/app/webrtc/webrtcsession.h b/talk/app/webrtc/webrtcsession.h
index 8f6c6ad..07bfe25 100644
--- a/talk/app/webrtc/webrtcsession.h
+++ b/talk/app/webrtc/webrtcsession.h
@@ -1,6 +1,6 @@
 /*
  * libjingle
- * Copyright 2004--2011, Google Inc.
+ * Copyright 2011, Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
@@ -28,181 +28,101 @@
 #ifndef TALK_APP_WEBRTC_WEBRTCSESSION_H_
 #define TALK_APP_WEBRTC_WEBRTCSESSION_H_
 
-#include <map>
 #include <string>
 #include <vector>
 
-#include "talk/base/logging.h"
-#include "talk/base/messagehandler.h"
-#include "talk/p2p/base/candidate.h"
+#include "talk/app/webrtc/mediastreamprovider.h"
+#include "talk/app/webrtc/sessiondescriptionprovider.h"
+#include "talk/app/webrtc/webrtcsessionobserver.h"
+#include "talk/base/sigslot.h"
+#include "talk/base/thread.h"
 #include "talk/p2p/base/session.h"
-#include "talk/session/phone/channel.h"
-#include "talk/session/phone/mediachannel.h"
 #include "talk/session/phone/mediasession.h"
 
 namespace cricket {
+
 class ChannelManager;
 class Transport;
-class TransportChannel;
-class VoiceChannel;
+class VideoCapturer;
 class VideoChannel;
-struct ConnectionInfo;
-}
+class VoiceChannel;
 
-namespace Json {
-class Value;
-}
+}  // namespace cricket
 
 namespace webrtc {
 
-typedef std::vector<cricket::AudioCodec> AudioCodecs;
-typedef std::vector<cricket::VideoCodec> VideoCodecs;
-
-class WebRtcSession : public cricket::BaseSession {
+class WebRtcSession : public cricket::BaseSession,
+                      public MediaProviderInterface,
+                      public SessionDescriptionProvider {
  public:
-  WebRtcSession(const std::string& id,
-                    bool incoming,
-                    cricket::PortAllocator* allocator,
-                    cricket::ChannelManager* channelmgr,
-                    talk_base::Thread* signaling_thread);
-
+  WebRtcSession(cricket::ChannelManager* channel_manager,
+                talk_base::Thread* signaling_thread,
+                talk_base::Thread* worker_thread,
+                cricket::PortAllocator* port_allocator);
   ~WebRtcSession();
 
-  bool Initiate();
-  bool Connect();
-  bool OnRemoteDescription(cricket::SessionDescription* sdp,
-      const std::vector<cricket::Candidate>& candidates);
-  bool OnInitiateMessage(cricket::SessionDescription* sdp,
-      const std::vector<cricket::Candidate>& candidates);
-  bool CreateVoiceChannel(const std::string& stream_id);
-  bool CreateVideoChannel(const std::string& stream_id);
-  bool RemoveStream(const std::string& stream_id);
-  void RemoveAllStreams();
+  bool Initialize();
 
-  // Returns true if we have either a voice or video stream matching this label.
-  bool HasStream(const std::string& label) const;
-  bool HasChannel(bool video) const;
-
-  // Returns true if there's one or more audio channels in the session.
-  bool HasAudioChannel() const;
-
-  // Returns true if there's one or more video channels in the session.
-  bool HasVideoChannel() const;
-
-  bool SetVideoRenderer(const std::string& stream_id,
-                        cricket::VideoRenderer* renderer);
-
-  // This signal occurs when all the streams have been removed.
-  // It is triggered by a successful call to the RemoveAllStream or
-  // the OnRemoteDescription with stream deleted signaling message with the
-  // candidates port equal to 0.
-  sigslot::signal1<WebRtcSession*> SignalRemoveStreamMessage;
-
-  // This signal indicates a stream has been added properly.
-  // It is triggered by a successful call to the OnInitiateMessage or
-  // the OnRemoteDescription and if it's going to the STATE_RECEIVEDACCEPT.
-  sigslot::signal2<const std::string&, bool> SignalAddStream;
-
-  // This signal occurs when one stream is removed with the signaling
-  // message from the remote peer with the candidates port equal to 0.
-  sigslot::signal2<const std::string&, bool> SignalRemoveStream;
-
-  // This signal occurs when the local candidate is ready
-  sigslot::signal2<const cricket::SessionDescription*,
-      const std::vector<cricket::Candidate>&> SignalLocalDescription;
-
-  // This signal triggers when setting up or resuming a call has not been
-  // successful before a certain time out.
-  sigslot::signal0<> SignalFailedCall;
-
-  bool muted() const { return muted_; }
-  bool camera_muted() const { return camera_muted_; }
-  const std::vector<cricket::Candidate>& local_candidates() {
-    return local_candidates_;
+  void RegisterObserver(WebRtcSessionObserver* observer) {
+    observer_ = observer;
   }
-  void set_incoming(bool incoming) { incoming_ = incoming; }
-  bool incoming() const { return incoming_; }
-  cricket::PortAllocator* port_allocator() const { return port_allocator_; }
-  talk_base::Thread* signaling_thread() const { return signaling_thread_; }
 
-  static const int kDefaultVideoCodecWidth = 640;
-  static const int kDefaultVideoCodecHeight = 480;
+  const cricket::VoiceChannel* voice_channel() const {
+    return voice_channel_.get();
+  }
+  const cricket::VideoChannel* video_channel() const {
+    return video_channel_.get();
+  }
 
- protected:
-  // methods from cricket::BaseSession
-  virtual void SetError(cricket::BaseSession::Error error);
-  virtual cricket::TransportChannel* CreateChannel(
-      const std::string& content_name, const std::string& name);
-  virtual cricket::TransportChannel* GetChannel(
-      const std::string& content_name, const std::string& name);
-  virtual void DestroyChannel(
-      const std::string& content_name, const std::string& name);
+  // Generic error message callback from WebRtcSession.
+  // TODO - It may be necessary to supply error code as well.
+  sigslot::signal0<> SignalError;
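+
+  // Typical usage (a sketch; "observer" stands for any WebRtcSessionObserver
+  // implementation, and the threads, ChannelManager and PortAllocator are
+  // owned by the caller):
+  //   WebRtcSession session(channel_manager, signaling_thread,
+  //                         worker_thread, port_allocator);
+  //   session.RegisterObserver(observer);  // Reports OnCandidatesReady().
+  //   session.Initialize();                // Creates voice/video channels.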
 
  private:
-  struct StreamInfo {
-    explicit StreamInfo(const std::string stream_id)
-        : channel(NULL),
-          video(false),
-          stream_id(stream_id) {}
-
-    StreamInfo()
-        : channel(NULL),
-          video(false) {}
-    cricket::BaseChannel* channel;
-    bool video;
-    std::string stream_id;
-  };
-  // Not really a map (vector).
-  typedef std::vector<StreamInfo*> StreamMap;
-
-  // methods signaled by the transport
-  void OnRequestSignaling(cricket::Transport* transport);
-  void OnCandidatesReady(cricket::Transport* transport,
-                         const std::vector<cricket::Candidate>& candidates);
-  void OnWritableState(cricket::Transport* transport);
-  void OnTransportError(cricket::Transport* transport);
-  void OnChannelGone(cricket::Transport* transport);
-
-  bool CheckForStreamDeleteMessage(
-      const std::vector<cricket::Candidate>& candidates);
-  void ProcessTerminateAccept(cricket::SessionDescription* desc);
-
-  void UpdateTransportWritableState();
-  bool CheckAllTransportsWritable();
-  void StartTransportTimeout(int timeout);
-  void NotifyTransportState();
-
-  cricket::SessionDescription* CreateOffer();
-  cricket::SessionDescription* CreateAnswer(
-      const cricket::SessionDescription* answer,
+  // Implements SessionDescriptionProvider
+  virtual const cricket::SessionDescription* ProvideOffer(
       const cricket::MediaSessionOptions& options);
+  virtual const cricket::SessionDescription* SetRemoteSessionDescription(
+      const cricket::SessionDescription* remote_offer,
+      const std::vector<cricket::Candidate>& remote_candidates);
+  virtual const cricket::SessionDescription* ProvideAnswer(
+      const cricket::MediaSessionOptions& options);
+  virtual void NegotiationDone();
 
-  // from MessageHandler
-  virtual void OnMessage(talk_base::Message* message);
+  // Implements MediaProviderInterface.
+  virtual void SetCaptureDevice(const std::string& name,
+                                cricket::VideoCapturer* camera);
+  virtual void SetLocalRenderer(const std::string& name,
+                                cricket::VideoRenderer* renderer);
+  virtual void SetRemoteRenderer(const std::string& name,
+                                 cricket::VideoRenderer* renderer);
 
-  virtual cricket::Transport* CreateTransport();
-  cricket::Transport* GetTransport();
+  // Transport-related callbacks, overridden from cricket::BaseSession.
+  virtual void OnTransportRequestSignaling(cricket::Transport* transport);
+  virtual void OnTransportConnecting(cricket::Transport* transport);
+  virtual void OnTransportWritable(cricket::Transport* transport);
+  virtual void OnTransportCandidatesReady(
+      cricket::Transport* transport,
+      const cricket::Candidates& candidates);
+  virtual void OnTransportChannelGone(cricket::Transport* transport,
+                                      const std::string& name);
 
-  typedef std::map<std::string, cricket::TransportChannel*> TransportChannelMap;
+  // Creates channels for voice and video.
+  bool CreateChannels();
+  virtual void OnMessage(talk_base::Message* msg);
+  void InsertTransportCandidates(const cricket::Candidates& candidates);
+  void Terminate();
+  // Returns true if a candidate with the given name already exists in the
+  // local candidates list.
+  bool CheckCandidate(const std::string& name);
+  void SetRemoteCandidates(const cricket::Candidates& candidates);
 
-  bool SetVideoCapture(bool capture);
-  void EnableAllStreams();
-  bool SendSignalAddStream(bool video);
-
-  cricket::Transport* transport_;
+ private:
+  talk_base::scoped_ptr<cricket::VoiceChannel> voice_channel_;
+  talk_base::scoped_ptr<cricket::VideoChannel> video_channel_;
   cricket::ChannelManager* channel_manager_;
-  std::vector<StreamInfo*> streams_;
-  TransportChannelMap transport_channels_;
-  bool transports_writable_;
-  bool muted_;
-  bool camera_muted_;
-  int setup_timeout_;
-  std::vector<cricket::Candidate> local_candidates_;
-
-  talk_base::Thread* signaling_thread_;
-  bool incoming_;
-  cricket::PortAllocator* port_allocator_;
-  cricket::MediaSessionDescriptionFactory desc_factory_;
+  cricket::Candidates local_candidates_;
+  WebRtcSessionObserver* observer_;
+  cricket::MediaSessionDescriptionFactory session_desc_factory_;
 };
 
 }  // namespace webrtc
diff --git a/talk/app/webrtc/webrtcsession_unittest.cc b/talk/app/webrtc/webrtcsession_unittest.cc
index 85339c0..5e22044 100644
--- a/talk/app/webrtc/webrtcsession_unittest.cc
+++ b/talk/app/webrtc/webrtcsession_unittest.cc
@@ -1,6 +1,6 @@
 /*
  * libjingle
- * Copyright 2004--2011, Google Inc.
+ * Copyright 2011, Google Inc.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
@@ -25,383 +25,111 @@
  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
-#include <stdio.h>
-
-#include <list>
-
-#include "base/gunit.h"
-#include "base/helpers.h"
-#include "talk/app/webrtc/unittest_utilities.h"
 #include "talk/app/webrtc/webrtcsession.h"
-#include "talk/base/fakenetwork.h"
-#include "talk/base/scoped_ptr.h"
+#include "talk/app/webrtc/peerconnectionsignaling.h"
 #include "talk/base/thread.h"
-#include "talk/p2p/base/fakesession.h"
-#include "talk/p2p/base/portallocator.h"
-#include "talk/p2p/base/sessiondescription.h"
+#include "talk/base/gunit.h"
+#include "talk/session/phone/channelmanager.h"
 #include "talk/p2p/client/fakeportallocator.h"
-#include "talk/session/phone/mediasessionclient.h"
 
-class WebRtcSessionTest
-    : public sigslot::has_slots<>,
-      public testing::Test {
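+// Observer that records the ICE candidates reported through
+// OnCandidatesReady() so that tests can inspect them.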
+class MockWebRtcSessionObserver : public webrtc::WebRtcSessionObserver {
  public:
-  enum CallbackId {
-    kNone,
-    kOnAddStream,
-    kOnRemoveStream,
-    kOnLocalDescription,
-    kOnFailedCall,
-  };
-
-  WebRtcSessionTest()
-      : callback_ids_(),
-        last_stream_id_(""),
-        last_was_video_(false),
-        last_description_ptr_(NULL),
-        last_candidates_(),
-        session_(NULL),
-        id_(),
-        receiving_(false),
-        allocator_(NULL),
-        channel_manager_(NULL),
-        worker_thread_(NULL),
-        signaling_thread_(NULL) {
-  }
-
-  ~WebRtcSessionTest() {
-    session_.reset();
-  }
-
-  void OnAddStream(const std::string& stream_id, bool video) {
-    callback_ids_.push_back(kOnAddStream);
-    last_stream_id_ = stream_id;
-    last_was_video_ = video;
-  }
-  void OnRemoveStream(const std::string& stream_id, bool video) {
-    callback_ids_.push_back(kOnRemoveStream);
-    last_stream_id_ = stream_id;
-    last_was_video_ = video;
-  }
-  void OnLocalDescription(
-      const cricket::SessionDescription* desc,
+  virtual void OnCandidatesReady(
       const std::vector<cricket::Candidate>& candidates) {
-    callback_ids_.push_back(kOnLocalDescription);
-    last_description_ptr_.reset(CopySessionDescription(desc));
-    CopyCandidates(candidates, &last_candidates_);
-  }
-  cricket::SessionDescription* GetLocalDescription(
-      std::vector<cricket::Candidate>* candidates) {
-    if (last_candidates_.empty()) {
-      return NULL;
+    for (cricket::Candidates::const_iterator iter = candidates.begin();
+         iter != candidates.end(); ++iter) {
+      candidates_.push_back(*iter);
     }
-    if (!last_description_ptr_.get()) {
-      return NULL;
-    }
-    CopyCandidates(last_candidates_, candidates);
-    return CopySessionDescription(last_description_ptr_.get());
   }
-
-  void OnFailedCall() {
-    callback_ids_.push_back(kOnFailedCall);
-  }
-
-  CallbackId PopOldestCallback() {
-    if (callback_ids_.empty()) {
-      return kNone;
-    }
-    const CallbackId return_value = callback_ids_.front();
-    callback_ids_.pop_front();
-    return return_value;
-  }
-
-  CallbackId PeekOldestCallback() {
-    if (callback_ids_.empty()) {
-      return kNone;
-    }
-    const CallbackId return_value = callback_ids_.front();
-    return return_value;
-  }
-
-  void Reset() {
-    callback_ids_.clear();
-    last_stream_id_ = "";
-    last_was_video_ = false;
-    last_description_ptr_.reset();
-    last_candidates_.clear();
-  }
-
-  bool WaitForCallback(CallbackId id, int timeout_ms) {
-    bool success = false;
-    for (int ms = 0; ms < timeout_ms; ms++) {
-      const CallbackId peek_id = PeekOldestCallback();
-      if (peek_id == id) {
-        PopOldestCallback();
-        success = true;
-        break;
-      } else if (peek_id != kNone) {
-        success = false;
-        break;
-      }
-      talk_base::Thread::Current()->ProcessMessages(1);
-    }
-    return success;
-  }
-
-  bool Init(bool receiving) {
-    if (signaling_thread_ != NULL)
-        return false;
-    signaling_thread_ = talk_base::Thread::Current();
-    receiving_ = receiving;
-
-    if (worker_thread_!= NULL)
-        return false;
-    worker_thread_ = talk_base::Thread::Current();
-
-    cricket::FakePortAllocator* fake_port_allocator =
-        new cricket::FakePortAllocator(worker_thread_, NULL);
-
-    allocator_.reset(static_cast<cricket::PortAllocator*>(fake_port_allocator));
-
-    channel_manager_.reset(new cricket::ChannelManager(worker_thread_));
-    if (!channel_manager_->Init())
-      return false;
-
-    talk_base::CreateRandomString(8, &id_);
-
-    session_.reset(new webrtc::WebRtcSession(
-        id_, receiving_ , allocator_.get(),
-        channel_manager_.get(),
-        signaling_thread_));
-    session_->SignalAddStream.connect(this, &WebRtcSessionTest::OnAddStream);
-    session_->SignalRemoveStream.connect(this,
-        &WebRtcSessionTest::OnRemoveStream);
-    session_->SignalLocalDescription.connect(this,
-        &WebRtcSessionTest::OnLocalDescription);
-    session_->SignalFailedCall.connect(this, &WebRtcSessionTest::OnFailedCall);
-    return true;
-  }
-
-  // All session APIs must be called from the signaling thread.
-  bool CallInitiate() {
-    return session_->Initiate();
-  }
-
-  bool CallConnect() {
-    if (!session_->Connect())
-      return false;
-    // This callback does not happen with FakeTransport!
-    if (!WaitForCallback(kOnLocalDescription, 1000)) {
-      return false;
-    }
-    return true;
-  }
-
-  bool CallOnRemoteDescription(
-      cricket::SessionDescription* description,
-      std::vector<cricket::Candidate> candidates) {
-    if (!session_->OnRemoteDescription(description, candidates)) {
-      return false;
-    }
-    if (!WaitForCallback(kOnAddStream, 1000)) {
-      return false;
-    }
-    return true;
-  }
-
-  bool CallOnInitiateMessage(
-      cricket::SessionDescription* description,
-      const std::vector<cricket::Candidate>& candidates) {
-    if (!session_->OnInitiateMessage(description, candidates)) {
-      return false;
-    }
-    if (!WaitForCallback(kOnAddStream, 1000)) {
-      return false;
-    }
-    return true;
-  }
-
-  bool CallCreateVoiceChannel(const std::string& stream_id) {
-    if (!session_->CreateVoiceChannel(stream_id)) {
-      return false;
-    }
-    return true;
-  }
-
-  bool CallCreateVideoChannel(const std::string& stream_id) {
-    if (!session_->CreateVideoChannel(stream_id)) {
-      return false;
-    }
-    return true;
-  }
-
-  bool CallRemoveStream(const std::string& stream_id) {
-    return session_->RemoveStream(stream_id);
-  }
-
-  void CallRemoveAllStreams() {
-    session_->RemoveAllStreams();
-  }
-
-  bool CallHasChannel(const std::string& label) {
-    return session_->HasStream(label);
-  }
-
-  bool CallHasChannel(bool video) {
-    return session_->HasChannel(video);
-  }
-
-  bool CallHasAudioChannel() {
-    return session_->HasAudioChannel();
-  }
-
-  bool CallHasVideoChannel() {
-    return session_->HasVideoChannel();
-  }
-
-  bool CallSetVideoRenderer(const std::string& stream_id,
-                            cricket::VideoRenderer* renderer) {
-    return session_->SetVideoRenderer(stream_id, renderer);
-  }
-
-  const std::vector<cricket::Candidate>& CallLocalCandidates() {
-    return session_->local_candidates();
-  }
-
- private:
-  std::list<CallbackId> callback_ids_;
-
-  std::string last_stream_id_;
-  bool last_was_video_;
-  talk_base::scoped_ptr<cricket::SessionDescription> last_description_ptr_;
-  std::vector<cricket::Candidate> last_candidates_;
-
-  talk_base::scoped_ptr<webrtc::WebRtcSession> session_;
-  std::string id_;
-  bool receiving_;
-
-  talk_base::scoped_ptr<cricket::PortAllocator> allocator_;
-
-  talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
-
-  talk_base::Thread* worker_thread_;
-  talk_base::Thread* signaling_thread_;
+  std::vector<cricket::Candidate> candidates_;
 };
 
-bool CallbackReceived(WebRtcSessionTest* session, int timeout) {
-  EXPECT_EQ_WAIT(WebRtcSessionTest::kNone, session->PeekOldestCallback(),
-                 timeout);
-  const WebRtcSessionTest::CallbackId peek_id =
-      session->PeekOldestCallback();
-  return peek_id != WebRtcSessionTest::kNone;
-}
-
-TEST_F(WebRtcSessionTest, InitializationReceiveSanity) {
-  const bool kReceiving = true;
-  ASSERT_TRUE(Init(kReceiving));
-  ASSERT_TRUE(CallInitiate());
-
-  // Should return false because no stream has been set up yet.
-  EXPECT_FALSE(CallConnect());
-  const bool kVideo = true;
-  EXPECT_FALSE(CallHasChannel(kVideo));
-  EXPECT_FALSE(CallHasChannel(!kVideo));
-
-  EXPECT_EQ(kNone, PopOldestCallback());
-}
-
-TEST_F(WebRtcSessionTest, AudioSendCallSetUp) {
-  const bool kReceiving = false;
-  ASSERT_TRUE(Init(kReceiving));
-
-  ASSERT_TRUE(CallInitiate());
-
-  ASSERT_TRUE(CallCreateVoiceChannel("Audio"));
-  ASSERT_TRUE(CallConnect());
-
-  std::vector<cricket::Candidate> candidates;
-  cricket::SessionDescription* local_session = GetLocalDescription(
-      &candidates);
-  ASSERT_FALSE(candidates.empty());
-  ASSERT_FALSE(local_session == NULL);
-  if (!CallOnRemoteDescription(local_session, candidates)) {
-      delete local_session;
-      FAIL();
+class WebRtcSessionTest : public testing::Test {
+ protected:
+  virtual void SetUp() {
+    signaling_thread_ = talk_base::Thread::Current();
+    worker_thread_ = talk_base::Thread::Current();
+    channel_manager_.reset(new cricket::ChannelManager(worker_thread_));
+    port_allocator_.reset(
+        new cricket::FakePortAllocator(worker_thread_, NULL));
+    desc_factory_.reset(
+        new cricket::MediaSessionDescriptionFactory(channel_manager_.get()));
   }
 
-  // All callbacks should be caught. Assert it.
-  ASSERT_FALSE(CallbackReceived(this, 1000));
-  ASSERT_TRUE(CallHasAudioChannel() &&
-              !CallHasVideoChannel());
-}
-
-TEST_F(WebRtcSessionTest, VideoSendCallSetUp) {
-  const bool kReceiving = false;
-  ASSERT_TRUE(Init(kReceiving));
-
-  ASSERT_TRUE(CallInitiate());
-
-  ASSERT_TRUE(CallCreateVideoChannel("Video"));
-  ASSERT_TRUE(CallConnect());
-
-  std::vector<cricket::Candidate> candidates;
-  cricket::SessionDescription* local_session = GetLocalDescription(
-      &candidates);
-  ASSERT_FALSE(candidates.empty());
-  ASSERT_FALSE(local_session == NULL);
-
-  if (!CallOnRemoteDescription(local_session, candidates)) {
-      delete local_session;
-      FAIL();
+  bool InitializeSession() {
+    return session_->Initialize();
   }
 
-  // All callbacks should be caught. Assert it.
-  ASSERT_FALSE(CallbackReceived(this, 1000));
-  ASSERT_TRUE(!CallHasAudioChannel() &&
-              CallHasVideoChannel());
-}
-
-TEST_F(WebRtcSessionTest, AudioReceiveCallSetUp) {
-  const bool kReceiving = true;
-  const bool video = false;
-
-  ASSERT_TRUE(Init(kReceiving));
-
-  std::vector<cricket::Candidate> candidates;
-  cricket::SessionDescription* local_session =
-      GenerateFakeSession(video, &candidates);
-  ASSERT_FALSE(candidates.empty());
-  ASSERT_FALSE(local_session == NULL);
-  ASSERT_TRUE(CallInitiate());
-  if (!CallOnInitiateMessage(local_session, candidates)) {
-    delete local_session;
-    FAIL();
+  bool CheckChannels() {
+    return (session_->voice_channel() != NULL &&
+            session_->video_channel() != NULL);
   }
-  ASSERT_TRUE(CallConnect());
-  ASSERT_FALSE(CallbackReceived(this, 1000));
 
-  ASSERT_TRUE(CallHasAudioChannel() &&
-              !CallHasVideoChannel());
-}
-
-TEST_F(WebRtcSessionTest, VideoReceiveCallSetUp) {
-  const bool kReceiving = true;
-  const bool video = true;
-
-  ASSERT_TRUE(Init(kReceiving));
-
-  std::vector<cricket::Candidate> candidates;
-  cricket::SessionDescription* local_session =
-      GenerateFakeSession(video, &candidates);
-  ASSERT_FALSE(candidates.empty());
-  ASSERT_FALSE(local_session == NULL);
-  ASSERT_TRUE(CallInitiate());
-  if (!CallOnInitiateMessage(local_session, candidates)) {
-    delete local_session;
-    FAIL();
+  void CheckTransportChannels() {
+    EXPECT_TRUE(session_->GetChannel(cricket::CN_AUDIO, "rtp") != NULL);
+    EXPECT_TRUE(session_->GetChannel(cricket::CN_AUDIO, "rtcp") != NULL);
+    EXPECT_TRUE(session_->GetChannel(cricket::CN_VIDEO, "video_rtp") != NULL);
+    EXPECT_TRUE(session_->GetChannel(cricket::CN_VIDEO, "video_rtcp") != NULL);
   }
-  ASSERT_TRUE(CallConnect());
-  ASSERT_FALSE(CallbackReceived(this, 1000));
-  ASSERT_TRUE(!CallHasAudioChannel() &&
-              CallHasVideoChannel());
+
+  void Init() {
+    ASSERT_TRUE(channel_manager_.get() != NULL);
+    ASSERT_TRUE(session_.get() == NULL);
+    EXPECT_TRUE(channel_manager_->Init());
+    session_.reset(new webrtc::WebRtcSession(
+        channel_manager_.get(), worker_thread_, signaling_thread_,
+        port_allocator_.get()));
+    session_->RegisterObserver(&observer_);
+    desc_provider_ = session_.get();
+    EXPECT_TRUE(InitializeSession());
+  }
+
+  void CreateOffer(uint32 ssrc) {
+    cricket::MediaSessionOptions options;
+    // TODO: Add more test cases for the session.
+    local_desc_ = desc_provider_->ProvideOffer(options);
+    ASSERT_TRUE(local_desc_ != NULL);
+  }
+  void CreateAnswer(uint32 ssrc) {
+    cricket::MediaSessionOptions options;
+    // TODO: Add more test cases for the session.
+    remote_desc_ = desc_factory_->CreateAnswer(local_desc_, options, NULL);
+    ASSERT_TRUE(remote_desc_ != NULL);
+  }
+  void SetRemoteContents() {
+    desc_provider_->SetRemoteSessionDescription(
+        remote_desc_, observer_.candidates_);
+  }
+  void NegotiationDone() {
+    desc_provider_->NegotiationDone();
+  }
+
+  const cricket::SessionDescription* local_desc_;
+  const cricket::SessionDescription* remote_desc_;
+  talk_base::Thread* signaling_thread_;
+  talk_base::Thread* worker_thread_;
+  talk_base::scoped_ptr<cricket::PortAllocator> port_allocator_;
+  webrtc::SessionDescriptionProvider* desc_provider_;
+  talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
+  talk_base::scoped_ptr<cricket::MediaSessionDescriptionFactory> desc_factory_;
+  talk_base::scoped_ptr<webrtc::WebRtcSession> session_;
+  MockWebRtcSessionObserver observer_;
+};
+
+TEST_F(WebRtcSessionTest, TestInitialize) {
+  Init();
+  EXPECT_TRUE(CheckChannels());
+  CheckTransportChannels();
+  talk_base::Thread::Current()->ProcessMessages(1000);
+  EXPECT_EQ(4u, observer_.candidates_.size());
 }
+
+// TODO: Add more test cases for the session.
+TEST_F(WebRtcSessionTest, DISABLED_TestOfferAnswer) {
+  Init();
+  EXPECT_TRUE(CheckChannels());
+  CheckTransportChannels();
+  talk_base::Thread::Current()->ProcessMessages(1);
+}
+
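Note: a minimal sketch (not part of this patch) of how the disabled
offer/answer test might drive the fixture helpers above once it is enabled.
The test name and the kSsrc constant are illustrative placeholders; the
helpers currently ignore the ssrc argument.

TEST_F(WebRtcSessionTest, DISABLED_TestOfferAnswerSketch) {
  Init();
  EXPECT_TRUE(CheckChannels());
  CheckTransportChannels();
  // Let the fake port allocator deliver its candidates to the observer.
  talk_base::Thread::Current()->ProcessMessages(1000);

  const uint32 kSsrc = 1;  // Placeholder value.
  CreateOffer(kSsrc);      // Local offer via the SessionDescriptionProvider.
  CreateAnswer(kSsrc);     // Remote answer via MediaSessionDescriptionFactory.
  SetRemoteContents();     // Hand the answer and candidates to the session.
  NegotiationDone();       // Tell the provider that negotiation is complete.
}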
diff --git a/talk/app/webrtc/webrtcsessionobserver.h b/talk/app/webrtc/webrtcsessionobserver.h
new file mode 100644
index 0000000..c1653c9
--- /dev/null
+++ b/talk/app/webrtc/webrtcsessionobserver.h
@@ -0,0 +1,47 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_WEBRTC_WEBRTCSESSIONOBSERVER_H_
+#define TALK_APP_WEBRTC_WEBRTCSESSIONOBSERVER_H_
+
+#include <vector>
+
+#include "talk/p2p/base/candidate.h"
+
+namespace webrtc {
+
+class WebRtcSessionObserver {
+ public:
+  virtual void OnCandidatesReady(
+      const std::vector<cricket::Candidate>& candidates) = 0;
+ protected:
+  virtual ~WebRtcSessionObserver() {}
+};
+
+}  // namespace webrtc
+
+#endif  // TALK_APP_WEBRTC_WEBRTCSESSIONOBSERVER_H_
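Note: a minimal sketch of an observer implementation, along the lines of the
MockWebRtcSessionObserver used by webrtcsession_unittest.cc above. The class
name is illustrative and not part of this patch.

class CandidateCollector : public webrtc::WebRtcSessionObserver {
 public:
  // Invoked when the session reports gathered transport candidates; this
  // sketch simply accumulates them for later inspection.
  virtual void OnCandidatesReady(
      const std::vector<cricket::Candidate>& candidates) {
    candidates_.insert(candidates_.end(), candidates.begin(),
                       candidates.end());
  }
  std::vector<cricket::Candidate> candidates_;
};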
diff --git a/talk/base/refcount.h b/talk/base/refcount.h
new file mode 100644
index 0000000..ce49d9e
--- /dev/null
+++ b/talk/base/refcount.h
@@ -0,0 +1,90 @@
+/*
+ * libjingle
+ * Copyright 2011, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef TALK_APP_BASE_REFCOUNT_H_
+#define TALK_APP_BASE_REFCOUNT_H_
+
+#include <cstring>
+
+#include "talk/base/criticalsection.h"
+
+namespace talk_base {
+
+// Reference count interface.
+class RefCountInterface {
+ public:
+  virtual int AddRef() = 0;
+  virtual int Release() = 0;
+};
+
+template <class T>
+class RefCountedObject : public T {
+ public:
+  RefCountedObject() : ref_count_(0) {
+  }
+
+  template<typename P>
+  explicit RefCountedObject(P p) : T(p), ref_count_(0) {
+  }
+
+  template<typename P1, typename P2>
+  RefCountedObject(P1 p1, P2 p2) : T(p1, p2), ref_count_(0) {
+  }
+
+  template<typename P1, typename P2, typename P3>
+  RefCountedObject(P1 p1, P2 p2, P3 p3) : T(p1, p2, p3), ref_count_(0) {
+  }
+
+  template<typename P1, typename P2, typename P3, typename P4>
+  RefCountedObject(P1 p1, P2 p2, P3 p3, P4 p4)
+      : T(p1, p2, p3, p4), ref_count_(0) {
+  }
+
+  template<typename P1, typename P2, typename P3, typename P4, typename P5>
+  RefCountedObject(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5)
+      : T(p1, p2, p3, p4, p5), ref_count_(0) {
+  }
+
+  virtual int AddRef() {
+    return talk_base::AtomicOps::Increment(&ref_count_);
+  }
+
+  virtual int Release() {
+    int count = talk_base::AtomicOps::Decrement(&ref_count_);
+    if (!count) {
+      delete this;
+    }
+    return count;
+  }
+
+ protected:
+  int ref_count_;
+};
+
+}  // namespace talk_base
+
+#endif  // TALK_APP_BASE_REFCOUNT_H_
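Note: a minimal usage sketch of the new RefCountedObject template. Counter is
a hypothetical class, not part of this patch. The count starts at zero, so
the creator takes the first reference explicitly, and Release() deletes the
object once the count drops back to zero.

class Counter {
 public:
  explicit Counter(int start) : value_(start) {}
  int value() const { return value_; }
 private:
  int value_;
};

void Example() {
  // RefCountedObject<T> forwards up to five constructor arguments to T.
  talk_base::RefCountedObject<Counter>* counter =
      new talk_base::RefCountedObject<Counter>(5);
  counter->AddRef();   // ref_count_: 0 -> 1.
  counter->value();    // The wrapper is usable as a plain Counter.
  counter->Release();  // ref_count_: 1 -> 0, which deletes the object.
}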
diff --git a/talk/base/stream.cc b/talk/base/stream.cc
index d283e9d..dbe13bd 100644
--- a/talk/base/stream.cc
+++ b/talk/base/stream.cc
@@ -169,9 +169,8 @@
 ///////////////////////////////////////////////////////////////////////////////
 
 StreamTap::StreamTap(StreamInterface* stream, StreamInterface* tap)
-: StreamAdapterInterface(stream), tap_(NULL), tap_result_(SR_SUCCESS),
-  tap_error_(0)
-{
+    : StreamAdapterInterface(stream), tap_(NULL), tap_result_(SR_SUCCESS),
+      tap_error_(0) {
   AttachTap(tap);
 }
 
@@ -223,24 +222,21 @@
 ///////////////////////////////////////////////////////////////////////////////
 
 StreamSegment::StreamSegment(StreamInterface* stream)
-: StreamAdapterInterface(stream), start_(SIZE_UNKNOWN), pos_(0),
-  length_(SIZE_UNKNOWN)
-{
+    : StreamAdapterInterface(stream), start_(SIZE_UNKNOWN), pos_(0),
+      length_(SIZE_UNKNOWN) {
   // It's ok for this to fail, in which case start_ is left as SIZE_UNKNOWN.
   stream->GetPosition(&start_);
 }
 
 StreamSegment::StreamSegment(StreamInterface* stream, size_t length)
-: StreamAdapterInterface(stream), start_(SIZE_UNKNOWN), pos_(0),
-  length_(length)
-{
+    : StreamAdapterInterface(stream), start_(SIZE_UNKNOWN), pos_(0),
+      length_(length) {
   // It's ok for this to fail, in which case start_ is left as SIZE_UNKNOWN.
   stream->GetPosition(&start_);
 }
 
 StreamResult StreamSegment::Read(void* buffer, size_t buffer_len,
-                                 size_t* read, int* error)
-{
+                                 size_t* read, int* error) {
   if (SIZE_UNKNOWN != length_) {
     if (pos_ >= length_)
       return SR_EOS;
@@ -867,7 +863,7 @@
 
   const size_t write_position = (read_position_ + data_length_)
       % buffer_length_;
-  *size = (write_position >= read_position_) ?
+  *size = (write_position > read_position_ || data_length_ == 0) ?
       buffer_length_ - write_position : read_position_ - write_position;
   return &buffer_[write_position];
 }
@@ -945,8 +941,7 @@
 
 LoggingAdapter::LoggingAdapter(StreamInterface* stream, LoggingSeverity level,
                                const std::string& label, bool hex_mode)
-: StreamAdapterInterface(stream), level_(level), hex_mode_(hex_mode)
-{
+    : StreamAdapterInterface(stream), level_(level), hex_mode_(hex_mode) {
   set_label(label);
 }
 
@@ -969,7 +964,8 @@
 
 StreamResult LoggingAdapter::Write(const void* data, size_t data_len,
                                    size_t* written, int* error) {
-  size_t local_written; if (!written) written = &local_written;
+  size_t local_written;
+  if (!written) written = &local_written;
   StreamResult result = StreamAdapterInterface::Write(data, data_len, written,
                                                       error);
   if (result == SR_SUCCESS) {
@@ -1002,13 +998,11 @@
 ///////////////////////////////////////////////////////////////////////////////
 
 StringStream::StringStream(std::string& str)
-: str_(str), read_pos_(0), read_only_(false)
-{
+    : str_(str), read_pos_(0), read_only_(false) {
 }
 
 StringStream::StringStream(const std::string& str)
-: str_(const_cast<std::string&>(str)), read_pos_(0), read_only_(true)
-{
+    : str_(const_cast<std::string&>(str)), read_pos_(0), read_only_(true) {
 }
 
 StreamState StringStream::GetState() const {
@@ -1164,4 +1158,4 @@
 
 ///////////////////////////////////////////////////////////////////////////////
 
-} // namespace talk_base
+}  // namespace talk_base
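Note: a worked example of the GetWriteBuffer() condition change above, for a
FifoBuffer of length 10 that has been completely filled:

  // buffer_length_ = 10, read_position_ = 0, data_length_ = 10
  // write_position = (0 + 10) % 10 = 0
  // old: write_position >= read_position_  -> true  -> *size = 10 - 0 = 10
  //      (a full buffer was wrongly reported as having 10 free bytes)
  // new: write_position > read_position_ || data_length_ == 0
  //      -> false -> *size = 0 - 0 = 0 (buffer correctly reported full)

The FullBufferCheck test added to stream_unittest.cc below exercises exactly
this case.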
diff --git a/talk/base/stream_unittest.cc b/talk/base/stream_unittest.cc
index d0c5718..b96279b 100644
--- a/talk/base/stream_unittest.cc
+++ b/talk/base/stream_unittest.cc
@@ -35,14 +35,14 @@
 ///////////////////////////////////////////////////////////////////////////////
 
 class TestStream : public StreamInterface {
-public:
+ public:
   TestStream() : pos_(0) { }
 
   virtual StreamState GetState() const { return SS_OPEN; }
   virtual StreamResult Read(void* buffer, size_t buffer_len,
                             size_t* read, int* error) {
     unsigned char* uc_buffer = static_cast<unsigned char*>(buffer);
-    for (size_t i=0; i<buffer_len; ++i) {
+    for (size_t i = 0; i < buffer_len; ++i) {
       uc_buffer[i] = pos_++;
     }
     if (read)
@@ -71,14 +71,14 @@
     return false;
   }
 
-private:
+ private:
   unsigned char pos_;
 };
 
 bool VerifyTestBuffer(unsigned char* buffer, size_t len,
                       unsigned char value) {
   bool passed = true;
-  for (size_t i=0; i<len; ++i) {
+  for (size_t i = 0; i < len; ++i) {
     if (buffer[i] != value++) {
       passed = false;
       break;
@@ -393,6 +393,15 @@
   EXPECT_EQ(SR_EOS, stream->Read(out, kSize / 2, &bytes, NULL));
 }
 
+TEST(FifoBufferTest, FullBufferCheck) {
+  FifoBuffer buff(10);
+  buff.ConsumeWriteBuffer(10);
+
+  size_t free;
+  EXPECT_TRUE(buff.GetWriteBuffer(&free) != NULL);
+  EXPECT_EQ(0U, free);
+}
+
 TEST(FifoBufferTest, WriteOffsetAndReadOffset) {
   const size_t kSize = 16;
   const char in[kSize * 2 + 1] = "0123456789ABCDEFGHIJKLMNOPQRSTUV";
@@ -442,4 +451,4 @@
   EXPECT_EQ(SR_BLOCK, buf.ReadOffset(out, 10, 16, NULL));
 }
 
-} // namespace talk_base
+}  // namespace talk_base
diff --git a/talk/base/win32socketserver.cc b/talk/base/win32socketserver.cc
index 0915aaf..90bb010 100644
--- a/talk/base/win32socketserver.cc
+++ b/talk/base/win32socketserver.cc
@@ -168,7 +168,7 @@
 
   virtual bool OnMessage(UINT uMsg, WPARAM wParam, LPARAM lParam,
                          LRESULT& result);
-  virtual void OnFinalMessage(HWND hWnd);
+  virtual void OnNcDestroy();
 
  private:
   bool OnSocketNotify(UINT uMsg, WPARAM wParam, LPARAM lParam, LRESULT& result);
@@ -226,8 +226,13 @@
   return true;
 }
 
-void Win32Socket::EventSink::OnFinalMessage(HWND hWnd) {
-  delete this;
+void Win32Socket::EventSink::OnNcDestroy() {
+  if (parent_) {
+    LOG(LS_ERROR) << "EventSink hwnd is being destroyed, but the event sink"
+                     " hasn't yet been disposed.";
+  } else {
+    delete this;
+  }
 }
 
 /////////////////////////////////////////////////////////////////////////////
@@ -741,7 +746,10 @@
       // was a message for the dialog that it handled internally.
       // Otherwise, dispatch as usual via Translate/DispatchMessage.
       b = GetMessage(&msg, NULL, 0, 0);
-      if (b) {
+      if (b == -1) {
+        LOG_GLE(LS_ERROR) << "GetMessage failed.";
+        return false;
+      } else if (b) {
         if (!hdlg_ || !IsDialogMessage(hdlg_, &msg)) {
           TranslateMessage(&msg);
           DispatchMessage(&msg);
diff --git a/talk/base/win32window.cc b/talk/base/win32window.cc
index 0e7761f..5b6275e 100644
--- a/talk/base/win32window.cc
+++ b/talk/base/win32window.cc
@@ -103,12 +103,12 @@
 LRESULT Win32Window::WndProc(HWND hwnd, UINT uMsg,
                              WPARAM wParam, LPARAM lParam) {
   Win32Window* that = reinterpret_cast<Win32Window*>(
-      ::GetWindowLongPtr(hwnd, GWL_USERDATA));
+      ::GetWindowLongPtr(hwnd, GWLP_USERDATA));
   if (!that && (WM_CREATE == uMsg)) {
     CREATESTRUCT* cs = reinterpret_cast<CREATESTRUCT*>(lParam);
     that = static_cast<Win32Window*>(cs->lpCreateParams);
     that->wnd_ = hwnd;
-    ::SetWindowLongPtr(hwnd, GWL_USERDATA, reinterpret_cast<LONG_PTR>(that));
+    ::SetWindowLongPtr(hwnd, GWLP_USERDATA, reinterpret_cast<LONG_PTR>(that));
   }
   if (that) {
     LRESULT result;
@@ -120,9 +120,9 @@
       }
     }
     if (WM_NCDESTROY == uMsg) {
-      ::SetWindowLongPtr(hwnd, GWL_USERDATA, NULL);
+      ::SetWindowLongPtr(hwnd, GWLP_USERDATA, NULL);
       that->wnd_ = NULL;
-      that->OnDestroyed();
+      that->OnNcDestroy();
     }
     if (handled) {
       return result;
diff --git a/talk/base/win32window.h b/talk/base/win32window.h
index 66a56ce..992c8e7 100644
--- a/talk/base/win32window.h
+++ b/talk/base/win32window.h
@@ -2,26 +2,26 @@
  * libjingle
  * Copyright 2004--2005, Google Inc.
  *
- * Redistribution and use in source and binary forms, with or without 
+ * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
  *
- *  1. Redistributions of source code must retain the above copyright notice, 
+ *  1. Redistributions of source code must retain the above copyright notice,
  *     this list of conditions and the following disclaimer.
  *  2. Redistributions in binary form must reproduce the above copyright notice,
  *     this list of conditions and the following disclaimer in the documentation
  *     and/or other materials provided with the distribution.
- *  3. The name of the author may not be used to endorse or promote products 
+ *  3. The name of the author may not be used to endorse or promote products
  *     derived from this software without specific prior written permission.
  *
  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
- * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
  * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
- * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
- * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
- * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
@@ -59,7 +59,7 @@
                          LRESULT& result);
 
   virtual bool OnClose() { return true; }
-  virtual void OnDestroyed() { }
+  virtual void OnNcDestroy() { }
 
  private:
   static LRESULT CALLBACK WndProc(HWND hwnd, UINT uMsg, WPARAM wParam,
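Note: a minimal sketch (hypothetical class, not part of this patch) of a
subclass using the renamed hook, assuming the remaining Win32Window virtuals
keep their default implementations. OnNcDestroy() replaces OnDestroyed() and
is invoked from WndProc when the wrapped HWND receives WM_NCDESTROY,
mirroring the EventSink change in win32socketserver.cc above.

class SelfDeletingWindow : public talk_base::Win32Window {
 protected:
  virtual void OnNcDestroy() {
    // WndProc has already cleared wnd_ and the window user data at this
    // point, so it is safe to release the C++ wrapper.
    delete this;
  }
};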
diff --git a/talk/libjingle.scons b/talk/libjingle.scons
index 0ec1f59..8e40bbd 100644
--- a/talk/libjingle.scons
+++ b/talk/libjingle.scons
@@ -266,7 +266,6 @@
                "xmpp/mucroomlookuptask.cc",
                "xmpp/pubsubclient.cc",
                "xmpp/pubsubtasks.cc",
-               "xmpp/ratelimitmanager.cc",
                "xmpp/receivetask.cc",
                "xmpp/saslmechanism.cc",
                "xmpp/xmppclient.cc",
diff --git a/talk/session/phone/mediasession.h b/talk/session/phone/mediasession.h
index 49df26a..fa27e16 100644
--- a/talk/session/phone/mediasession.h
+++ b/talk/session/phone/mediasession.h
@@ -120,7 +120,8 @@
         bandwidth_(kAutoBandwidth),
         crypto_required_(false),
         rtp_header_extensions_set_(false),
-        multistream_(false) {
+        multistream_(false),
+        partial_(false) {
   }
 
   virtual MediaType type() const = 0;
@@ -191,6 +192,9 @@
     return streams_[0].has_ssrcs();
   }
 
+  void set_partial(bool partial) { partial_ = partial; }
+  bool partial() const { return partial_; }
+
  protected:
   bool rtcp_mux_;
   int bandwidth_;
@@ -200,6 +204,7 @@
   bool rtp_header_extensions_set_;
   bool multistream_;
   StreamParamsVec streams_;
+  bool partial_;
 };
 
 template <class C>
diff --git a/talk/session/phone/mediasession_unittest.cc b/talk/session/phone/mediasession_unittest.cc
index d2c1695..4dd88c9 100644
--- a/talk/session/phone/mediasession_unittest.cc
+++ b/talk/session/phone/mediasession_unittest.cc
@@ -300,6 +300,32 @@
   ASSERT_CRYPTO(vcd, false, 1U, CS_AES_CM_128_HMAC_SHA1_80);
 }
 
+TEST_F(MediaSessionDescriptionFactoryTest, TestPartial) {
+  MediaSessionOptions opts;
+  opts.has_video = true;
+  f1_.set_secure(SEC_ENABLED);
+  talk_base::scoped_ptr<SessionDescription>
+      offer(f1_.CreateOffer(opts, NULL));
+  ASSERT_TRUE(offer.get() != NULL);
+  const ContentInfo* ac = offer->GetContentByName("audio");
+  const ContentInfo* vc = offer->GetContentByName("video");
+  AudioContentDescription* acd = const_cast<AudioContentDescription*>(
+      static_cast<const AudioContentDescription*>(ac->description));
+  VideoContentDescription* vcd = const_cast<VideoContentDescription*>(
+      static_cast<const VideoContentDescription*>(vc->description));
+
+  EXPECT_FALSE(acd->partial());  // default is false.
+  acd->set_partial(true);
+  EXPECT_TRUE(acd->partial());
+  acd->set_partial(false);
+  EXPECT_FALSE(acd->partial());
+
+  EXPECT_FALSE(vcd->partial());  // default is false.
+  vcd->set_partial(true);
+  EXPECT_TRUE(vcd->partial());
+  vcd->set_partial(false);
+  EXPECT_FALSE(vcd->partial());
+}
 
 // Create a typical video answer, and ensure it matches what we expect.
 TEST_F(MediaSessionDescriptionFactoryTest, TestCreateVideoAnswerRtcpMux) {
diff --git a/talk/session/phone/webrtcvideoframe_unittest.cc b/talk/session/phone/webrtcvideoframe_unittest.cc
index c4526c0..5793ab8 100644
--- a/talk/session/phone/webrtcvideoframe_unittest.cc
+++ b/talk/session/phone/webrtcvideoframe_unittest.cc
@@ -53,6 +53,11 @@
 // TEST_WEBRTCVIDEOFRAME(ConstructCopy)
 // TEST_WEBRTCVIDEOFRAME(ConstructCopyIsRef)
 TEST_WEBRTCVIDEOFRAME(ConstructBlack)
+// TODO: Implement Jpeg
+// TEST_LMIVIDEOFRAME(ConstructMjpgI420)
+// TEST_LMIVIDEOFRAME(ConstructMjpgI422)
+// TEST_LMIVIDEOFRAME(ConstructMjpgI444)
+// TEST_LMIVIDEOFRAME(ConstructMjpgI400)
 // TODO: WebRtcVideoFrame does not support odd sizes.
 // Re-evaluate once WebRTC switches to libyuv
 // TEST_LMIVIDEOFRAME(ConstructYuy2AllSizes)
diff --git a/talk/xmpp/jid.cc b/talk/xmpp/jid.cc
index 67bcc0a..ae01e5d 100644
--- a/talk/xmpp/jid.cc
+++ b/talk/xmpp/jid.cc
@@ -38,106 +38,59 @@
 
 namespace buzz {
 
-Jid::Jid() : data_(NULL) {
+Jid::Jid() {
 }
 
-Jid::Jid(bool is_special, const std::string & special) {
-  data_ = is_special ? new Data(special, STR_EMPTY, STR_EMPTY) : NULL;
-}
-
-Jid::Jid(const std::string & jid_string) {
-  if (jid_string.empty()) {
-    data_ = NULL;
+Jid::Jid(const std::string& jid_string) {
+  if (jid_string.empty())
     return;
-  }
 
   // First find the slash and slice off that part
   size_t slash = jid_string.find('/');
-  std::string resource_name = (slash == std::string::npos ? STR_EMPTY :
+  resource_name_ = (slash == std::string::npos ? STR_EMPTY :
                     jid_string.substr(slash + 1));
 
   // Now look for the node
-  std::string node_name;
   size_t at = jid_string.find('@');
   size_t domain_begin;
   if (at < slash && at != std::string::npos) {
-    node_name = jid_string.substr(0, at);
+    node_name_ = jid_string.substr(0, at);
     domain_begin = at + 1;
   } else {
     domain_begin = 0;
   }
 
   // Now take what is left as the domain
-  size_t domain_length =
-    (  slash == std::string::npos
-     ? jid_string.length() - domain_begin
-     : slash - domain_begin);
+  size_t domain_length = (slash == std::string::npos) ?
+      (jid_string.length() - domain_begin) : (slash - domain_begin);
+  domain_name_ = jid_string.substr(domain_begin, domain_length);
 
-  // avoid allocating these constants repeatedly
-  std::string domain_name;
-
-  if (domain_length == 9  && jid_string.find("gmail.com", domain_begin) == domain_begin) {
-    domain_name = STR_GMAIL_COM;
-  }
-  else if (domain_length == 14 && jid_string.find("googlemail.com", domain_begin) == domain_begin) {
-    domain_name = STR_GOOGLEMAIL_COM;
-  }
-  else if (domain_length == 10 && jid_string.find("google.com", domain_begin) == domain_begin) {
-    domain_name = STR_GOOGLE_COM;
-  }
-  else {
-    domain_name = jid_string.substr(domain_begin, domain_length);
-  }
-
-  // If the domain is empty we have a non-valid jid and we should empty
-  // everything else out
-  if (domain_name.empty()) {
-    data_ = NULL;
-    return;
-  }
-
-  bool valid_node;
-  std::string validated_node = prepNode(node_name,
-      node_name.begin(), node_name.end(), &valid_node);
-  bool valid_domain;
-  std::string validated_domain = prepDomain(domain_name,
-      domain_name.begin(), domain_name.end(), &valid_domain);
-  bool valid_resource;
-  std::string validated_resource = prepResource(resource_name,
-      resource_name.begin(), resource_name.end(), &valid_resource);
-
-  if (!valid_node || !valid_domain || !valid_resource) {
-    data_ = NULL;
-    return;
-  }
-
-  data_ = new Data(validated_node, validated_domain, validated_resource);
+  ValidateOrReset();
 }
 
-Jid::Jid(const std::string & node_name,
-         const std::string & domain_name,
-         const std::string & resource_name) {
-  if (domain_name.empty()) {
-    data_ = NULL;
-    return;
-  }
+Jid::Jid(const std::string& node_name,
+         const std::string& domain_name,
+         const std::string& resource_name)
+    : node_name_(node_name),
+      domain_name_(domain_name),
+      resource_name_(resource_name) {
+  ValidateOrReset();
+}
 
+void Jid::ValidateOrReset() {
   bool valid_node;
-  std::string validated_node = prepNode(node_name,
-      node_name.begin(), node_name.end(), &valid_node);
   bool valid_domain;
-  std::string validated_domain = prepDomain(domain_name,
-      domain_name.begin(), domain_name.end(), &valid_domain);
   bool valid_resource;
-  std::string validated_resource = prepResource(resource_name,
-      resource_name.begin(), resource_name.end(), &valid_resource);
+
+  node_name_ = PrepNode(node_name_, &valid_node);
+  domain_name_ = PrepDomain(domain_name_, &valid_domain);
+  resource_name_ = PrepResource(resource_name_, &valid_resource);
 
   if (!valid_node || !valid_domain || !valid_resource) {
-    data_ = NULL;
-    return;
+    node_name_.clear();
+    domain_name_.clear();
+    resource_name_.clear();
   }
-
-  data_ = new Data(validated_node, validated_domain, validated_resource);
 }
 
 std::string Jid::Str() const {
@@ -146,150 +99,85 @@
 
   std::string ret;
 
-  if (!data_->node_name_.empty())
-    ret = data_->node_name_ + "@";
+  if (!node_name_.empty())
+    ret = node_name_ + "@";
 
-  ASSERT(data_->domain_name_ != STR_EMPTY);
-  ret += data_->domain_name_;
+  ASSERT(domain_name_ != STR_EMPTY);
+  ret += domain_name_;
 
-  if (!data_->resource_name_.empty())
-    ret += "/" + data_->resource_name_;
+  if (!resource_name_.empty())
+    ret += "/" + resource_name_;
 
   return ret;
 }
 
-bool
-Jid::IsEmpty() const {
-  return data_ == NULL ||
-      (data_->node_name_.empty() && data_->domain_name_.empty() &&
-       data_->resource_name_.empty());
+Jid::~Jid() {
 }
 
-bool
-Jid::IsValid() const {
-  return data_ != NULL && !data_->domain_name_.empty();
+bool Jid::IsEmpty() const {
+  return (node_name_.empty() && domain_name_.empty() &&
+          resource_name_.empty());
 }
 
-bool
-Jid::IsBare() const {
+bool Jid::IsValid() const {
+  return !domain_name_.empty();
+}
+
+bool Jid::IsBare() const {
   if (IsEmpty()) {
-    LOG(LS_VERBOSE) << "Warning: Calling IsBare() on the empty jid";
+    LOG(LS_VERBOSE) << "Warning: Calling IsBare() on the empty jid.";
     return true;
   }
-  return IsValid() &&
-         data_->resource_name_.empty();
+  return IsValid() && resource_name_.empty();
 }
 
-bool
-Jid::IsFull() const {
-  return IsValid() &&
-         !data_->resource_name_.empty();
+bool Jid::IsFull() const {
+  return IsValid() && !resource_name_.empty();
 }
 
-Jid
-Jid::BareJid() const {
+Jid Jid::BareJid() const {
   if (!IsValid())
     return Jid();
   if (!IsFull())
     return *this;
-  return Jid(data_->node_name_, data_->domain_name_, STR_EMPTY);
+  return Jid(node_name_, domain_name_, STR_EMPTY);
 }
 
-#if 0
-void
-Jid::set_node(const std::string & node_name) {
-    data_->node_name_ = node_name;
-}
-void
-Jid::set_domain(const std::string & domain_name) {
-    data_->domain_name_ = domain_name;
-}
-void
-Jid::set_resource(const std::string & res_name) {
-    data_->resource_name_ = res_name;
-}
-#endif
-
-bool
-Jid::BareEquals(const Jid & other) const {
-  return (other.data_ == data_ ||
-          (data_ != NULL &&
-          other.data_ != NULL &&
-          other.data_->node_name_ == data_->node_name_ &&
-          other.data_->domain_name_ == data_->domain_name_));
+bool Jid::BareEquals(const Jid& other) const {
+  return other.node_name_ == node_name_ &&
+      other.domain_name_ == domain_name_;
 }
 
-bool
-Jid::operator==(const Jid & other) const {
-  return (other.data_ == data_ ||
-          (data_ != NULL &&
-          other.data_ != NULL &&
-          other.data_->node_name_ == data_->node_name_ &&
-          other.data_->domain_name_ == data_->domain_name_ &&
-          other.data_->resource_name_ == data_->resource_name_));
+bool Jid::operator==(const Jid& other) const {
+  return other.node_name_ == node_name_ &&
+      other.domain_name_ == domain_name_ &&
+      other.resource_name_ == resource_name_;
 }
 
-int
-Jid::Compare(const Jid & other) const {
-  if (other.data_ == data_)
-    return 0;
-  if (data_ == NULL)
-    return -1;
-  if (other.data_ == NULL)
-    return 1;
-
+int Jid::Compare(const Jid& other) const {
   int compare_result;
-  compare_result = data_->node_name_.compare(other.data_->node_name_);
+  compare_result = node_name_.compare(other.node_name_);
   if (0 != compare_result)
     return compare_result;
-  compare_result = data_->domain_name_.compare(other.data_->domain_name_);
+  compare_result = domain_name_.compare(other.domain_name_);
   if (0 != compare_result)
     return compare_result;
-  compare_result = data_->resource_name_.compare(other.data_->resource_name_);
+  compare_result = resource_name_.compare(other.resource_name_);
   return compare_result;
 }
 
-uint32 Jid::ComputeLameHash() const {
-  uint32 hash = 0;
-  // Hash the node portion
-  {
-    const std::string &str = node();
-    for (int i = 0; i < static_cast<int>(str.size()); ++i) {
-      hash = ((hash << 2) + hash) + str[i];
-    }
-  }
-
-  // Hash the domain portion
-  {
-    const std::string &str = domain();
-    for (int i = 0; i < static_cast<int>(str.size()); ++i)
-      hash = ((hash << 2) + hash) + str[i];
-  }
-
-  // Hash the resource portion
-  {
-    const std::string &str = resource();
-    for (int i = 0; i < static_cast<int>(str.size()); ++i)
-      hash = ((hash << 2) + hash) + str[i];
-  }
-
-  return hash;
-}
-
 // --- JID parsing code: ---
 
 // Checks and normalizes the node part of a JID.
-std::string
-Jid::prepNode(const std::string str, std::string::const_iterator start,
-    std::string::const_iterator end, bool *valid) {
+std::string Jid::PrepNode(const std::string& node, bool* valid) {
   *valid = false;
   std::string result;
 
-  for (std::string::const_iterator i = start; i < end; i++) {
+  for (std::string::const_iterator i = node.begin(); i < node.end(); ++i) {
     bool char_valid = true;
     unsigned char ch = *i;
     if (ch <= 0x7F) {
-      result += prepNodeAscii(ch, &char_valid);
+      result += PrepNodeAscii(ch, &char_valid);
     }
     else {
       // TODO: implement the correct stringprep protocol for these
@@ -309,8 +197,7 @@
 
 
 // Returns the appropriate mapping for an ASCII character in a node.
-char
-Jid::prepNodeAscii(char ch, bool *valid) {
+char Jid::PrepNodeAscii(char ch, bool* valid) {
   *valid = true;
   switch (ch) {
     case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G':
@@ -336,17 +223,16 @@
 
 
 // Checks and normalizes the resource part of a JID.
-std::string
-Jid::prepResource(const std::string str, std::string::const_iterator start,
-    std::string::const_iterator end, bool *valid) {
+std::string Jid::PrepResource(const std::string& resource, bool* valid) {
   *valid = false;
   std::string result;
 
-  for (std::string::const_iterator i = start; i < end; i++) {
+  for (std::string::const_iterator i = resource.begin();
+       i < resource.end(); ++i) {
     bool char_valid = true;
     unsigned char ch = *i;
     if (ch <= 0x7F) {
-      result += prepResourceAscii(ch, &char_valid);
+      result += PrepResourceAscii(ch, &char_valid);
     }
     else {
       // TODO: implement the correct stringprep protocol for these
@@ -362,8 +248,7 @@
 }
 
 // Returns the appropriate mapping for an ASCII character in a resource.
-char
-Jid::prepResourceAscii(char ch, bool *valid) {
+char Jid::PrepResourceAscii(char ch, bool* valid) {
   *valid = true;
   switch (ch) {
     case 0x00: case 0x01: case 0x02: case 0x03: case 0x04: case 0x05:
@@ -380,15 +265,13 @@
 }
 
 // Checks and normalizes the domain part of a JID.
-std::string
-Jid::prepDomain(const std::string str, std::string::const_iterator start,
-    std::string::const_iterator end, bool *valid) {
+std::string Jid::PrepDomain(const std::string& domain, bool* valid) {
   *valid = false;
   std::string result;
 
   // TODO: if the domain contains a ':', then we should parse it
   // as an IPv6 address rather than giving an error about illegal domain.
-  prepDomain(str, start, end, &result, valid);
+  PrepDomain(domain, &result, valid);
   if (!*valid) {
     return STR_EMPTY;
   }
@@ -402,12 +285,10 @@
 
 
 // Checks and normalizes an IDNA domain.
-void
-Jid::prepDomain(const std::string str, std::string::const_iterator start,
-    std::string::const_iterator end, std::string *buf, bool *valid) {
+void Jid::PrepDomain(const std::string& domain, std::string* buf, bool* valid) {
   *valid = false;
-  std::string::const_iterator last = start;
-  for (std::string::const_iterator i = start; i < end; i++) {
+  std::string::const_iterator last = domain.begin();
+  for (std::string::const_iterator i = domain.begin(); i < domain.end(); ++i) {
     bool label_valid = true;
     char ch = *i;
     switch (ch) {
@@ -417,7 +298,7 @@
       case 0xFF0E:
       case 0xFF61:
 #endif
-        prepDomainLabel(str, last, i, buf, &label_valid);
+        PrepDomainLabel(last, i, buf, &label_valid);
         *buf += '.';
         last = i + 1;
         break;
@@ -426,21 +307,21 @@
       return;
     }
   }
-  prepDomainLabel(str, last, end, buf, valid);
+  PrepDomainLabel(last, domain.end(), buf, valid);
 }
 
 // Checks and normalizes a domain label.
-void
-Jid::prepDomainLabel(const std::string str, std::string::const_iterator start,
-    std::string::const_iterator end, std::string *buf, bool *valid) {
+void Jid::PrepDomainLabel(
+    std::string::const_iterator start, std::string::const_iterator end,
+    std::string* buf, bool* valid) {
   *valid = false;
 
-  int startLen = buf->length();
-  for (std::string::const_iterator i = start; i < end; i++) {
+  int start_len = buf->length();
+  for (std::string::const_iterator i = start; i < end; ++i) {
     bool char_valid = true;
     unsigned char ch = *i;
     if (ch <= 0x7F) {
-      *buf += prepDomainLabelAscii(ch, &char_valid);
+      *buf += PrepDomainLabelAscii(ch, &char_valid);
     }
     else {
       // TODO: implement ToASCII for these
@@ -451,7 +332,7 @@
     }
   }
 
-  int count = buf->length() - startLen;
+  int count = buf->length() - start_len;
   if (count == 0) {
     return;
   }
@@ -459,8 +340,8 @@
     return;
   }
 
-  // Is this check needed? See comment in prepDomainLabelAscii.
-  if ((*buf)[startLen] == '-') {
+  // Is this check needed? See comment in PrepDomainLabelAscii.
+  if ((*buf)[start_len] == '-') {
     return;
   }
   if ((*buf)[buf->length() - 1] == '-') {
@@ -471,8 +352,7 @@
 
 
 // Returns the appropriate mapping for an ASCII character in a domain label.
-char
-Jid::prepDomainLabelAscii(char ch, bool *valid) {
+char Jid::PrepDomainLabelAscii(char ch, bool* valid) {
   *valid = true;
   // TODO: A literal reading of the spec seems to say that we do
   // not need to check for these illegal characters (an "internationalized
@@ -507,4 +387,4 @@
   }
 }
 
-}
+}  // namespace buzz
diff --git a/talk/xmpp/jid.h b/talk/xmpp/jid.h
index dd10f97..9daadb0 100644
--- a/talk/xmpp/jid.h
+++ b/talk/xmpp/jid.h
@@ -34,52 +34,26 @@
 
 namespace buzz {
 
-//! The Jid class encapsulates and provides parsing help for Jids
-//! A Jid consists of three parts. The node, the domain and the resource.
-//!
-//! node@domain/resource
-//!
-//! The node and resource are both optional.  A valid jid is defined to have
-//! a domain.  A bare jid is defined to not have a resource and a full jid
-//! *does* have a resource.
+// The Jid class encapsulates and provides parsing help for Jids. A Jid
+// consists of three parts: the node, the domain and the resource, e.g.:
+//
+// node@domain/resource
+//
+// The node and resource are both optional. A valid jid is defined to have
+// a domain. A bare jid is defined to not have a resource and a full jid
+// *does* have a resource.
 class Jid {
 public:
   explicit Jid();
-  explicit Jid(const std::string & jid_string);
-  explicit Jid(const std::string & node_name,
-               const std::string & domain_name,
-               const std::string & resource_name);
-  explicit Jid(bool special, const std::string & special_string);
-  Jid(const Jid & jid) : data_(jid.data_) {
-    if (data_ != NULL) {
-      data_->AddRef();
-    }
-  }
-  Jid & operator=(const Jid & jid) {
-    if (jid.data_ != NULL) {
-      jid.data_->AddRef();
-    }
-    if (data_ != NULL) {
-      data_->Release();
-    }
-    data_ = jid.data_;
-    return *this;
-  }
-  ~Jid() {
-    if (data_ != NULL) {
-      data_->Release();
-    }
-  }
+  explicit Jid(const std::string& jid_string);
+  explicit Jid(const std::string& node_name,
+               const std::string& domain_name,
+               const std::string& resource_name);
+  ~Jid();
 
-  const std::string & node() const {
-    return !data_ ? EmptyStringRef() : data_->node_name_;
-  }
-  const std::string & domain() const {
-    return !data_ ? EmptyStringRef() : data_->domain_name_;
-  }
-  const std::string & resource() const {
-    return !data_ ? EmptyStringRef() : data_->resource_name_;
-  }
+  const std::string& node() const { return node_name_; }
+  const std::string& domain() const { return domain_name_; }
+  const std::string& resource() const { return resource_name_; }
 
   std::string Str() const;
   Jid BareJid() const;
@@ -89,64 +63,34 @@
   bool IsBare() const;
   bool IsFull() const;
 
-  bool BareEquals(const Jid & other) const;
+  bool BareEquals(const Jid& other) const;
 
-  bool operator==(const Jid & other) const;
-  bool operator!=(const Jid & other) const { return !operator==(other); }
+  bool operator==(const Jid& other) const;
+  bool operator!=(const Jid& other) const { return !operator==(other); }
 
-  bool operator<(const Jid & other) const { return Compare(other) < 0; };
-  bool operator>(const Jid & other) const { return Compare(other) > 0; };
+  bool operator<(const Jid& other) const { return Compare(other) < 0; }
+  bool operator>(const Jid& other) const { return Compare(other) > 0; }
 
   int Compare(const Jid & other) const;
 
-  // A quick and dirty hash.  Don't count on this producing a great
-  // distribution.
-  uint32 ComputeLameHash() const;
-
 private:
+  void ValidateOrReset();
 
-  static std::string prepNode(const std::string str,
+  static std::string PrepNode(const std::string& node, bool* valid);
+  static char PrepNodeAscii(char ch, bool* valid);
+  static std::string PrepResource(const std::string& resource, bool* valid);
+  static char PrepResourceAscii(char ch, bool* valid);
+  static std::string PrepDomain(const std::string& domain, bool* valid);
+  static void PrepDomain(const std::string& domain,
+                         std::string* buf, bool* valid);
+  static void PrepDomainLabel(
       std::string::const_iterator start, std::string::const_iterator end,
-      bool *valid);
-  static char prepNodeAscii(char ch, bool *valid);
-  static std::string prepResource(const std::string str,
-      std::string::const_iterator start, std::string::const_iterator end,
-      bool *valid);
-  static char prepResourceAscii(char ch, bool *valid);
-  static std::string prepDomain(const std::string str,
-      std::string::const_iterator start,  std::string::const_iterator end,
-      bool *valid);
-  static void prepDomain(const std::string str,
-      std::string::const_iterator start, std::string::const_iterator end,
-      std::string *buf, bool *valid);
-  static void prepDomainLabel(const std::string str,
-      std::string::const_iterator start, std::string::const_iterator end,
-      std::string *buf, bool *valid);
-  static char prepDomainLabelAscii(char ch, bool *valid);
+      std::string* buf, bool* valid);
+  static char PrepDomainLabelAscii(char ch, bool* valid);
 
-  class Data {
-  public:
-    Data() : refcount_(1) {}
-    Data(const std::string & node, const std::string &domain,
-         const std::string & resource)
-        : node_name_(node),
-          domain_name_(domain),
-          resource_name_(resource),
-          refcount_(1) {
-    }
-    const std::string node_name_;
-    const std::string domain_name_;
-    const std::string resource_name_;
-
-    // TODO: ref-counter is not thread-safe here. Make it
-    // thread-safe or remove this optimization.
-    void AddRef() { refcount_++; }
-    void Release() { if (!--refcount_) delete this; }
-  private:
-    int refcount_;
-  };
-
-  Data * data_;
+  std::string node_name_;
+  std::string domain_name_;
+  std::string resource_name_;
 };
 
 }
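Note: a minimal usage sketch of the refactored Jid, which now stores the
node, domain and resource by value instead of sharing a ref-counted Data
object. assert() is used purely for illustration.

#include <cassert>

void JidExample() {
  buzz::Jid full("node@example.com/resource");
  assert(full.IsValid());  // A valid jid has a non-empty domain.
  assert(full.IsFull());   // ...and this one also carries a resource.

  buzz::Jid bare = full.BareJid();
  assert(bare.IsBare());                     // The resource has been dropped.
  assert(bare.Str() == "node@example.com");  // Node and domain are kept.

  buzz::Jid invalid("/resource-only");  // No domain: ValidateOrReset() clears it.
  assert(!invalid.IsValid());
}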
diff --git a/talk/xmpp/xmppengineimpl.cc b/talk/xmpp/xmppengineimpl.cc
index eb539d1..9e21249 100644
--- a/talk/xmpp/xmppengineimpl.cc
+++ b/talk/xmpp/xmppengineimpl.cc
@@ -69,6 +69,7 @@
     stanza_handlers_[i].reset(new StanzaHandlerVector());
   }
 
+  // Add XMPP namespaces to the XML namespace stack.
   xmlns_stack_.AddXmlns("stream", "http://etherx.jabber.org/streams");
   xmlns_stack_.AddXmlns("", "jabber:client");
 }
@@ -124,10 +125,10 @@
   return XMPP_RETURN_OK;
 }
 
-XmppReturnStatus XmppEngineImpl::SetTls(TlsOptions useTls) {
+XmppReturnStatus XmppEngineImpl::SetTls(TlsOptions use_tls) {
   if (state_ != STATE_START)
     return XMPP_RETURN_BADSTATE;
-  tls_option_ = useTls;
+  tls_option_ = use_tls;
   return XMPP_RETURN_OK;
 }
 
diff --git a/talk/xmpp/xmppengineimpl.h b/talk/xmpp/xmppengineimpl.h
index 1fdb2a0..e292e75 100644
--- a/talk/xmpp/xmppengineimpl.h
+++ b/talk/xmpp/xmppengineimpl.h
@@ -77,7 +77,7 @@
   virtual XmppReturnStatus SetSaslHandler(SaslHandler* sasl_handler);
 
   //! Sets whether TLS will be used within the connection (default true).
-  virtual XmppReturnStatus SetTls(TlsOptions useTls);
+  virtual XmppReturnStatus SetTls(TlsOptions use_tls);
 
   //! Sets an alternate domain from which we allows TLS certificates.
   //! This is for use in the case where a we want to allow a proxy to
diff --git a/talk/xmpp/xmpptask.cc b/talk/xmpp/xmpptask.cc
index be32e55..046f7a1 100644
--- a/talk/xmpp/xmpptask.cc
+++ b/talk/xmpp/xmpptask.cc
@@ -29,12 +29,9 @@
 #include "talk/xmpp/xmppclient.h"
 #include "talk/xmpp/xmppengine.h"
 #include "talk/xmpp/constants.h"
-#include "talk/xmpp/ratelimitmanager.h"
 
 namespace buzz {
 
-RateLimitManager task_rate_manager;
-
 XmppClientInterface::XmppClientInterface() {
 }
 
@@ -175,10 +172,4 @@
   return true;
 }
 
-bool XmppTask::VerifyTaskRateLimit(const std::string task_name, int max_count, 
-                                   int per_x_seconds) {
-  return task_rate_manager.VerifyRateLimit(task_name, max_count, 
-                                           per_x_seconds);
-}
-
 }