Bug 1393119 - Add webrtc.org trunk/build/ files to support gn build; r?jesup draft
authorDan Minor <dminor@mozilla.com>
Thu, 27 Jul 2017 12:42:30 -0400
changeset 712784 a4120c6fc5c3a186ec32bae7080cf7b22a208b70
parent 712783 7960104f37668e3448c89cbafaaa700182f6e4b4
child 712785 1d6f9aa8c7ac646ea3332b0604e70d382392b8d9
child 712797 7c35e39d278949c8047c43cfbaf5ae3f6634cc94
child 713525 85a8ba74099bb1cb962ce15d007ef03c7da92754
child 713535 3727e8201580818d368335c017649720411997d2
push id93435
push userbmo:dminor@mozilla.com
push dateMon, 18 Dec 2017 19:30:06 +0000
reviewersjesup
bugs1393119
milestone59.0a1
Bug 1393119 - Add webrtc.org trunk/build/ files to support gn build; r?jesup MozReview-Commit-ID: KaupPBeJucK
media/webrtc/trunk/.gn
media/webrtc/trunk/BUILD.gn
media/webrtc/trunk/build/OWNERS
media/webrtc/trunk/build/OWNERS.status
media/webrtc/trunk/build/args/OWNERS
media/webrtc/trunk/build/args/README.txt
media/webrtc/trunk/build/args/headless.gn
media/webrtc/trunk/build/build-ctags.sh
media/webrtc/trunk/build/buildflag.h
media/webrtc/trunk/build/buildflag_header.gni
media/webrtc/trunk/build/check_gn_headers.py
media/webrtc/trunk/build/check_gn_headers_unittest.py
media/webrtc/trunk/build/check_return_value.py
media/webrtc/trunk/build/clobber.py
media/webrtc/trunk/build/common.croc
media/webrtc/trunk/build/compiled_action.gni
media/webrtc/trunk/build/compiler_version.py
media/webrtc/trunk/build/config/BUILD.gn
media/webrtc/trunk/build/config/BUILDCONFIG.gn
media/webrtc/trunk/build/config/OWNERS
media/webrtc/trunk/build/config/aix/BUILD.gn
media/webrtc/trunk/build/config/allocator.gni
media/webrtc/trunk/build/config/android/BUILD.gn
media/webrtc/trunk/build/config/android/OWNERS
media/webrtc/trunk/build/config/android/config.gni
media/webrtc/trunk/build/config/android/internal_rules.gni
media/webrtc/trunk/build/config/android/rules.gni
media/webrtc/trunk/build/config/arm.gni
media/webrtc/trunk/build/config/chrome_build.gni
media/webrtc/trunk/build/config/chromecast/BUILD.gn
media/webrtc/trunk/build/config/chromecast_build.gni
media/webrtc/trunk/build/config/clang/BUILD.gn
media/webrtc/trunk/build/config/clang/clang.gni
media/webrtc/trunk/build/config/compiler/BUILD.gn
media/webrtc/trunk/build/config/compiler/compiler.gni
media/webrtc/trunk/build/config/compiler/pgo/BUILD.gn
media/webrtc/trunk/build/config/compiler/pgo/pgo.gni
media/webrtc/trunk/build/config/crypto.gni
media/webrtc/trunk/build/config/dcheck_always_on.gni
media/webrtc/trunk/build/config/features.gni
media/webrtc/trunk/build/config/freetype/BUILD.gn
media/webrtc/trunk/build/config/freetype/OWNERS
media/webrtc/trunk/build/config/freetype/freetype.gni
media/webrtc/trunk/build/config/fuchsia/BUILD.gn
media/webrtc/trunk/build/config/fuchsia/OWNERS
media/webrtc/trunk/build/config/fuchsia/config.gni
media/webrtc/trunk/build/config/gcc/BUILD.gn
media/webrtc/trunk/build/config/get_host_byteorder.py
media/webrtc/trunk/build/config/host_byteorder.gni
media/webrtc/trunk/build/config/ios/BUILD.gn
media/webrtc/trunk/build/config/ios/BuildInfo.plist
media/webrtc/trunk/build/config/ios/Host-Info.plist
media/webrtc/trunk/build/config/ios/Module-Info.plist
media/webrtc/trunk/build/config/ios/OWNERS
media/webrtc/trunk/build/config/ios/codesign.py
media/webrtc/trunk/build/config/ios/entitlements.plist
media/webrtc/trunk/build/config/ios/find_signing_identity.py
media/webrtc/trunk/build/config/ios/ios_sdk.gni
media/webrtc/trunk/build/config/ios/rules.gni
media/webrtc/trunk/build/config/ios/write_framework_hmap.py
media/webrtc/trunk/build/config/ios/write_framework_modulemap.py
media/webrtc/trunk/build/config/ios/xctest_shell.mm
media/webrtc/trunk/build/config/linux/BUILD.gn
media/webrtc/trunk/build/config/linux/atk/BUILD.gn
media/webrtc/trunk/build/config/linux/dbus/BUILD.gn
media/webrtc/trunk/build/config/linux/gconf/BUILD.gn
media/webrtc/trunk/build/config/linux/gtk/BUILD.gn
media/webrtc/trunk/build/config/linux/gtk/gtk.gni
media/webrtc/trunk/build/config/linux/gtk2/BUILD.gn
media/webrtc/trunk/build/config/linux/gtk3/BUILD.gn
media/webrtc/trunk/build/config/linux/libffi/BUILD.gn
media/webrtc/trunk/build/config/linux/pangocairo/BUILD.gn
media/webrtc/trunk/build/config/linux/pkg-config.py
media/webrtc/trunk/build/config/linux/pkg_config.gni
media/webrtc/trunk/build/config/locales.gni
media/webrtc/trunk/build/config/mac/BUILD.gn
media/webrtc/trunk/build/config/mac/BuildInfo.plist
media/webrtc/trunk/build/config/mac/OWNERS
media/webrtc/trunk/build/config/mac/base_rules.gni
media/webrtc/trunk/build/config/mac/compile_ib_files.py
media/webrtc/trunk/build/config/mac/mac_sdk.gni
media/webrtc/trunk/build/config/mac/package_framework.py
media/webrtc/trunk/build/config/mac/plist_util.py
media/webrtc/trunk/build/config/mac/prepare_framework_version.py
media/webrtc/trunk/build/config/mac/rules.gni
media/webrtc/trunk/build/config/mac/sdk_info.py
media/webrtc/trunk/build/config/mac/symbols.gni
media/webrtc/trunk/build/config/mac/write_pkg_info.py
media/webrtc/trunk/build/config/mac/xcrun.py
media/webrtc/trunk/build/config/mips.gni
media/webrtc/trunk/build/config/nacl/BUILD.gn
media/webrtc/trunk/build/config/nacl/config.gni
media/webrtc/trunk/build/config/nacl/rules.gni
media/webrtc/trunk/build/config/pch.gni
media/webrtc/trunk/build/config/posix/BUILD.gn
media/webrtc/trunk/build/config/posix/sysroot_ld_path.py
media/webrtc/trunk/build/config/sanitizers/BUILD.gn
media/webrtc/trunk/build/config/sanitizers/OWNERS
media/webrtc/trunk/build/config/sanitizers/sanitizers.gni
media/webrtc/trunk/build/config/sysroot.gni
media/webrtc/trunk/build/config/ui.gni
media/webrtc/trunk/build/config/v8_target_cpu.gni
media/webrtc/trunk/build/config/win/BUILD.gn
media/webrtc/trunk/build/config/win/console_app.gni
media/webrtc/trunk/build/config/win/manifest.gni
media/webrtc/trunk/build/config/win/visual_studio_version.gni
media/webrtc/trunk/build/config/zip.gni
media/webrtc/trunk/build/copy_test_data_ios.py
media/webrtc/trunk/build/cp.py
media/webrtc/trunk/build/detect_host_arch.py
media/webrtc/trunk/build/dir_exists.py
media/webrtc/trunk/build/docs/mac_hermetic_toolchain.md
media/webrtc/trunk/build/dotfile_settings.gni
media/webrtc/trunk/build/download_gold_plugin.py
media/webrtc/trunk/build/download_nacl_toolchains.py
media/webrtc/trunk/build/download_translation_unit_tool.py
media/webrtc/trunk/build/env_dump.py
media/webrtc/trunk/build/experimental/install-build-deps.py
media/webrtc/trunk/build/extract_from_cab.py
media/webrtc/trunk/build/find_depot_tools.py
media/webrtc/trunk/build/find_isolated_tests.py
media/webrtc/trunk/build/fix_gn_headers.py
media/webrtc/trunk/build/fuchsia/update_sdk.py
media/webrtc/trunk/build/gdb-add-index
media/webrtc/trunk/build/get_landmines.py
media/webrtc/trunk/build/get_syzygy_binaries.py
media/webrtc/trunk/build/git-hooks/OWNERS
media/webrtc/trunk/build/git-hooks/pre-commit
media/webrtc/trunk/build/gn_helpers.py
media/webrtc/trunk/build/gn_helpers_unittest.py
media/webrtc/trunk/build/gn_run_binary.py
media/webrtc/trunk/build/gyp_chromium
media/webrtc/trunk/build/gyp_chromium.py
media/webrtc/trunk/build/gyp_environment.py
media/webrtc/trunk/build/gyp_helper.py
media/webrtc/trunk/build/gypi_to_gn.py
media/webrtc/trunk/build/install-build-deps-android.sh
media/webrtc/trunk/build/install-build-deps.sh
media/webrtc/trunk/build/install-chroot.sh
media/webrtc/trunk/build/inverse_depth.py
media/webrtc/trunk/build/ios/OWNERS
media/webrtc/trunk/build/ios/chrome_ios.croc
media/webrtc/trunk/build/landmine_utils.py
media/webrtc/trunk/build/landmines.py
media/webrtc/trunk/build/linux/chrome_linux.croc
media/webrtc/trunk/build/linux/dump_app_syms
media/webrtc/trunk/build/linux/pkg-config-wrapper
media/webrtc/trunk/build/linux/python_arch.sh
media/webrtc/trunk/build/linux/rewrite_dirs.py
media/webrtc/trunk/build/linux/system.gyp
media/webrtc/trunk/build/mac/OWNERS
media/webrtc/trunk/build/mac/edit_xibs.sh
media/webrtc/trunk/build/mac/find_sdk.py
media/webrtc/trunk/build/mac/should_use_hermetic_xcode.py
media/webrtc/trunk/build/mac/tweak_info_plist.gni
media/webrtc/trunk/build/mac/tweak_info_plist.py
media/webrtc/trunk/build/mac_toolchain.py
media/webrtc/trunk/build/nocompile.gni
media/webrtc/trunk/build/package_mac_toolchain.py
media/webrtc/trunk/build/precompile.h
media/webrtc/trunk/build/print_python_deps.py
media/webrtc/trunk/build/protoc_java.py
media/webrtc/trunk/build/redirect_stdout.py
media/webrtc/trunk/build/rm.py
media/webrtc/trunk/build/sample_arg_file.gn
media/webrtc/trunk/build/sanitize-mac-build-log.sed
media/webrtc/trunk/build/sanitize-mac-build-log.sh
media/webrtc/trunk/build/sanitize-win-build-log.sed
media/webrtc/trunk/build/sanitize-win-build-log.sh
media/webrtc/trunk/build/sanitizers/OWNERS
media/webrtc/trunk/build/sanitizers/asan_suppressions.cc
media/webrtc/trunk/build/sanitizers/lsan_suppressions.cc
media/webrtc/trunk/build/sanitizers/sanitizer_options.cc
media/webrtc/trunk/build/sanitizers/tsan_suppressions.cc
media/webrtc/trunk/build/secondary/testing/gmock/BUILD.gn
media/webrtc/trunk/build/secondary/testing/gtest/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/android_platform/development/scripts/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/android_platform/development/scripts/stack.pydeps
media/webrtc/trunk/build/secondary/third_party/android_tools/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/android_tools/apk_proguard.flags
media/webrtc/trunk/build/secondary/third_party/crashpad/OWNERS
media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/client/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/compat/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/handler/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/minidump/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/snapshot/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/third_party/apple_cctools/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/third_party/getopt/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/third_party/zlib/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/tools/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/util/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/libjpeg_turbo/BUILD.gn
media/webrtc/trunk/build/secondary/third_party/nss/BUILD.gn
media/webrtc/trunk/build/shim_headers.gni
media/webrtc/trunk/build/slave/OWNERS
media/webrtc/trunk/build/slave/README
media/webrtc/trunk/build/split_static_library.gni
media/webrtc/trunk/build/symlink.gni
media/webrtc/trunk/build/symlink.py
media/webrtc/trunk/build/toolchain/BUILD.gn
media/webrtc/trunk/build/toolchain/OWNERS
media/webrtc/trunk/build/toolchain/aix/BUILD.gn
media/webrtc/trunk/build/toolchain/android/BUILD.gn
media/webrtc/trunk/build/toolchain/cc_wrapper.gni
media/webrtc/trunk/build/toolchain/clang_static_analyzer.gni
media/webrtc/trunk/build/toolchain/clang_static_analyzer_wrapper.py
media/webrtc/trunk/build/toolchain/concurrent_links.gni
media/webrtc/trunk/build/toolchain/cros/BUILD.gn
media/webrtc/trunk/build/toolchain/cros_toolchain.gni
media/webrtc/trunk/build/toolchain/fuchsia/BUILD.gn
media/webrtc/trunk/build/toolchain/fuchsia/OWNERS
media/webrtc/trunk/build/toolchain/gcc_ar_wrapper.py
media/webrtc/trunk/build/toolchain/gcc_compile_wrapper.py
media/webrtc/trunk/build/toolchain/gcc_link_wrapper.py
media/webrtc/trunk/build/toolchain/gcc_solink_wrapper.py
media/webrtc/trunk/build/toolchain/gcc_toolchain.gni
media/webrtc/trunk/build/toolchain/get_concurrent_links.py
media/webrtc/trunk/build/toolchain/goma.gni
media/webrtc/trunk/build/toolchain/linux/BUILD.gn
media/webrtc/trunk/build/toolchain/mac/BUILD.gn
media/webrtc/trunk/build/toolchain/mac/compile_xcassets.py
media/webrtc/trunk/build/toolchain/mac/filter_libtool.py
media/webrtc/trunk/build/toolchain/mac/get_tool_mtime.py
media/webrtc/trunk/build/toolchain/mac/linker_driver.py
media/webrtc/trunk/build/toolchain/nacl/BUILD.gn
media/webrtc/trunk/build/toolchain/nacl_toolchain.gni
media/webrtc/trunk/build/toolchain/toolchain.gni
media/webrtc/trunk/build/toolchain/win/BUILD.gn
media/webrtc/trunk/build/toolchain/win/midl.gni
media/webrtc/trunk/build/toolchain/win/setup_toolchain.py
media/webrtc/trunk/build/toolchain/win/tool_wrapper.py
media/webrtc/trunk/build/toolchain/wrapper_utils.py
media/webrtc/trunk/build/tree_truth.sh
media/webrtc/trunk/build/update-linux-sandbox.sh
media/webrtc/trunk/build/util/BUILD.gn
media/webrtc/trunk/build/util/LASTCHANGE
media/webrtc/trunk/build/util/branding.gni
media/webrtc/trunk/build/util/java_action.gni
media/webrtc/trunk/build/util/java_action.py
media/webrtc/trunk/build/util/lastchange.py
media/webrtc/trunk/build/util/lib/common/PRESUBMIT.py
media/webrtc/trunk/build/util/lib/common/__init__.py
media/webrtc/trunk/build/util/lib/common/perf_result_data_type.py
media/webrtc/trunk/build/util/lib/common/perf_tests_results_helper.py
media/webrtc/trunk/build/util/lib/common/unittest_util.py
media/webrtc/trunk/build/util/lib/common/unittest_util_test.py
media/webrtc/trunk/build/util/lib/common/util.py
media/webrtc/trunk/build/util/process_version.gni
media/webrtc/trunk/build/util/version.gni
media/webrtc/trunk/build/util/version.py
media/webrtc/trunk/build/util/webkit_version.h.in
media/webrtc/trunk/build/vs_toolchain.py
media/webrtc/trunk/build/whitespace_file.txt
media/webrtc/trunk/build/win/BUILD.gn
media/webrtc/trunk/build/win/as_invoker.manifest
media/webrtc/trunk/build/win/common_controls.manifest
media/webrtc/trunk/build/win/compatibility.manifest
media/webrtc/trunk/build/win/copy_cdb_to_output.py
media/webrtc/trunk/build/win/merge_pgc_files.py
media/webrtc/trunk/build/win/message_compiler.gni
media/webrtc/trunk/build/win/message_compiler.py
media/webrtc/trunk/build/win/reorder-imports.py
media/webrtc/trunk/build/win/require_administrator.manifest
media/webrtc/trunk/build/win/run_pgo_profiling_benchmarks.py
media/webrtc/trunk/build/win/syzygy/BUILD.gn
media/webrtc/trunk/build/win/syzygy/OWNERS
media/webrtc/trunk/build/win/syzygy/instrument.py
media/webrtc/trunk/build/win/syzygy/reorder.py
media/webrtc/trunk/build/win/syzygy/syzyasan-allocation-filter.txt
media/webrtc/trunk/build/win/syzygy/syzyasan-instrumentation-filter.txt
media/webrtc/trunk/build/win/syzygy/syzygy.gni
media/webrtc/trunk/build/win/use_ansi_codes.py
media/webrtc/trunk/build/win_is_xtree_patched.py
media/webrtc/trunk/build/write_build_date_header.py
media/webrtc/trunk/build/write_buildflag_header.py
media/webrtc/trunk/build_overrides/OWNERS
media/webrtc/trunk/build_overrides/README.md
media/webrtc/trunk/build_overrides/build.gni
media/webrtc/trunk/build_overrides/gtest.gni
media/webrtc/trunk/webrtc/api/BUILD.gn
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/.gn
@@ -0,0 +1,54 @@
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("//build/dotfile_settings.gni")
+
+# The location of the build configuration file.
+buildconfig = "//build/config/BUILDCONFIG.gn"
+
+# The secondary source root is a parallel directory tree where
+# GN build files are placed when they can not be placed directly
+# in the source tree, e.g. for third party source trees.
+secondary_source = "//build/secondary/"
+
+# These are the targets to check headers for by default. The files in targets
+# matching these patterns (see "gn help label_pattern" for format) will have
+# their includes checked for proper dependencies when you run either
+# "gn check" or "gn gen --check".
+# TODO(kjellander): Keep adding paths to this list as work in webrtc:5589 is done.
+check_targets = [
+  "//webrtc/api/*",
+  "//webrtc/audio/*",
+  "//webrtc/modules/audio_coding/*",
+  "//webrtc/modules/audio_conference_mixer/*",
+  "//webrtc/modules/audio_device/*",
+  "//webrtc/modules/audio_mixer/*",
+  "//webrtc/modules/audio_processing/*",
+  "//webrtc/modules/bitrate_controller/*",
+  "//webrtc/modules/congestion_controller/*",
+  "//webrtc/modules/desktop_capture/*",
+  "//webrtc/modules/media_file/*",
+  "//webrtc/modules/pacing/*",
+  "//webrtc/modules/rtp_rtcp/*",
+  "//webrtc/modules/utility/*",
+  "//webrtc/modules/video_capture/*",
+  "//webrtc/modules/video_coding/*",
+  "//webrtc/modules/video_processing/*",
+  "//webrtc/modules/remote_bitrate_estimator/*",
+  "//webrtc/stats:rtc_stats",
+  "//webrtc/voice_engine",
+  "//webrtc/voice_engine:audio_coder",
+  "//webrtc/voice_engine:file_player",
+  "//webrtc/voice_engine:file_recorder",
+  "//webrtc/voice_engine:level_indicator",
+]
+
+# These are the list of GN files that run exec_script. This whitelist exists
+# to force additional review for new uses of exec_script, which is strongly
+# discouraged except for gypi_to_gn calls.
+exec_script_whitelist = build_dotfile_settings.exec_script_whitelist
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/BUILD.gn
@@ -0,0 +1,21 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("webrtc/build/webrtc.gni")
+
+group("default") {
+  testonly = true
+  deps = [
+    "//webrtc",
+    "//webrtc/examples",
+    "//webrtc/tools",
+  ]
+  if (rtc_include_tests) {
+    deps += [ "//webrtc:webrtc_tests" ]
+  }
+}
--- a/media/webrtc/trunk/build/OWNERS
+++ b/media/webrtc/trunk/build/OWNERS
@@ -1,1 +1,22 @@
-*
+agrieve@chromium.org
+dpranke@chromium.org
+jbudorick@chromium.org
+jochen@chromium.org
+scottmg@chromium.org
+thakis@chromium.org
+brucedawson@chromium.org
+
+per-file .gitignore=*
+per-file mac_toolchain.py=erikchen@chromium.org
+per-file mac_toolchain.py=justincohen@chromium.org
+per-file package_mac_toolchain.py=erikchen@chromium.org
+per-file package_mac_toolchain.py=justincohen@chromium.org
+per-file whitespace_file.txt=*
+per-file OWNERS.status=*
+
+# gn-dev is probably a better team here, but the tooling won't let us
+# have more than one team per component, and infra-dev is a catch-all
+# for other build-related lists.
+#
+# TEAM: infra-dev@chromium.org
+# COMPONENT: Build
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/OWNERS.status
@@ -0,0 +1,12 @@
+# Use this file to set a global status message that should be shown whenever
+# git cl owners proposes to add you as a reviewer.
+#
+# The status messages should be somewhat stable, so please don't use this for
+# short term, or frequently changing updates.
+#
+# The format of the file is
+#
+#  you@chromium.org: Single line status message.
+#
+
+jochen@chromium.org: EMEA based reviewer.
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/args/OWNERS
@@ -0,0 +1,1 @@
+per-file headless.gn=file://headless/OWNERS
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/args/README.txt
@@ -0,0 +1,31 @@
+This directory is here to hold .gni files that contain sets of GN build
+arguments for given configurations.
+
+(Currently this directory is empty because we removed the only thing here, but
+this has come up several times so I'm confident we'll need this again. If this
+directory is still empty by 2017, feel free to delete it. --Brett)
+
+Some projects or bots may have build configurations with specific combinations
+of flags. Rather than making a new global flag for your specific project and
+adding it all over the build to each arg it should affect, you can add a .gni
+file here with the variables.
+
+For example, for project foo you may put in build/args/foo.gni:
+
+  target_os = "android"
+  use_pulseaudio = false
+  use_ozone = true
+  system_libdir = "foo"
+
+Users wanting to build this configuration would run:
+
+  $ gn args out/mybuild
+
+And add the following line to their args for that build directory:
+
+  import("//build/args/foo.gni")
+  # You can set any other args here like normal.
+  is_component_build = false
+
+This way everybody can agree on a set of flags for a project, and their builds
+stay in sync as the flags in foo.gni are modified.
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/args/headless.gn
@@ -0,0 +1,44 @@
+# GN args template for the Headless Chrome library
+#
+# Add import to args.gn in out directory and run gn gen on the directory to use.
+# E.g. for out directory out/foo:
+# echo 'import("//build/args/headless.gn")' > out/foo/args.gn
+# gn gen out/foo
+#
+# Use gn args to add your own build preference args.
+
+use_ozone = true
+ozone_auto_platforms = false
+ozone_platform = "headless"
+ozone_platform_headless = true
+
+# Embed resource.pak into binary to simplify deployment.
+headless_use_embedded_resources = true
+
+# Expose headless bindings for freetype library bundled with Chromium.
+headless_fontconfig_utils = true
+
+# Remove a dependency on a system fontconfig library.
+use_bundled_fontconfig = true
+
+# In order to simplify deployment we build ICU data file
+# into binary.
+icu_use_data_file = false
+
+# Use embedded data instead external files for headless in order
+# to simplify deployment.
+v8_use_external_startup_data = false
+
+enable_nacl = false
+enable_print_preview = false
+enable_remoting = false
+use_alsa = false
+use_ash = false
+use_cups = false
+use_dbus = false
+use_gconf = false
+use_gio = false
+use_kerberos = false
+use_libpci = false
+use_pulseaudio = false
+use_udev = false
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/build-ctags.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [[ a"`ctags --version | head -1 | grep \"^Exuberant Ctags\"`" == "a" ]]; then
+  cat <<EOF
+  You must be using Exuberant Ctags, not just standard GNU ctags. If you are on
+  Debian or a related flavor of Linux, you may want to try running
+  apt-get install exuberant-ctags.
+EOF
+  exit
+fi
+
+CHROME_SRC_DIR="$PWD"
+
+fail() {
+  echo "Failed to create ctags for $1"
+  exit 1
+}
+
+ctags_cmd() {
+  echo "ctags --languages=C++ $1 --exclude=.git -R -f .tmp_tags"
+}
+
+build_dir() {
+  local extraexcludes=""
+  if [[ a"$1" == "a--extra-excludes" ]]; then
+    extraexcludes="--exclude=third_party --exclude=build --exclude=out"
+    shift
+  fi
+
+  cd "$CHROME_SRC_DIR/$1" || fail $1
+  # Redirect error messages so they aren't seen because they are almost always
+  # errors about components that you just happen to have not built (NaCl, for
+  # example).
+  $(ctags_cmd "$extraexcludes") 2> /dev/null || fail $1
+  mv -f .tmp_tags tags
+}
+
+# We always build the top level but leave all submodules as optional.
+build_dir --extra-excludes "" "top level"
+
+# Build any other directories that are listed on the command line.
+for dir in $@; do
+  build_dir "$1"
+  shift
+done
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/buildflag.h
@@ -0,0 +1,47 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef BUILD_BUILDFLAG_H_
+#define BUILD_BUILDFLAG_H_
+
+// These macros un-mangle the names of the build flags in a way that looks
+// natural, and gives errors if the flag is not defined. Normally in the
+// preprocessor it's easy to make mistakes that interpret "you haven't done
+// the setup to know what the flag is" as "flag is off". Normally you would
+// include the generated header rather than include this file directly.
+//
+// This is for use with generated headers. See build/buildflag_header.gni.
+
+// This dance of two macros does a concatenation of two preprocessor args using
+// ## doubly indirectly because using ## directly prevents macros in that
+// parameter from being expanded.
+#define BUILDFLAG_CAT_INDIRECT(a, b) a ## b
+#define BUILDFLAG_CAT(a, b) BUILDFLAG_CAT_INDIRECT(a, b)
+
+// Accessor for build flags.
+//
+// To test for a value, if the build file specifies:
+//
+//   ENABLE_FOO=true
+//
+// Then you would check at build-time in source code with:
+//
+//   #include "foo_flags.h"  // The header the build file specified.
+//
+//   #if BUILDFLAG(ENABLE_FOO)
+//     ...
+//   #endif
+//
+// There will be no #define called ENABLE_FOO so if you accidentally test for
+// whether that is defined, it will always be negative. You can also use
+// the value in expressions:
+//
+//   const char kSpamServerName[] = BUILDFLAG(SPAM_SERVER_NAME);
+//
+// Because the flag is accessed as a preprocessor macro with (), an error
+// will be thrown if the proper header defining the internal flag value has
+// not been included.
+#define BUILDFLAG(flag) (BUILDFLAG_CAT(BUILDFLAG_INTERNAL_, flag)())
+
+#endif  // BUILD_BUILDFLAG_H_
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/buildflag_header.gni
@@ -0,0 +1,137 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Generates a header with preprocessor defines specified by the build file.
+#
+# The flags are converted to function-style defines with mangled names and
+# code uses an accessor macro to access the values. This is to try to
+# minimize bugs where code checks whether something is defined or not, and
+# the proper header isn't included, meaning the answer will always be silently
+# false or might vary across the code base.
+#
+# In the GN template, specify build flags in the template as a list
+# of strings that encode key/value pairs like this:
+#
+#   flags = [ "ENABLE_FOO=1", "ENABLE_BAR=$enable_bar" ]
+#
+# The GN values "true" and "false" will be mapped to 0 and 1 for boolean
+# #if flags to be expressed naturally. This means you can't directly make a
+# define that generates C++ value of true or false for use in code. If you
+# REALLY need this, you can also use the string "(true)" and "(false)" to
+# prevent the rewriting.
+
+# To check the value of the flag in C code:
+#
+#   #include "path/to/here/header_file.h"
+#
+#   #if BUILDFLAG(ENABLE_FOO)
+#   ...
+#   #endif
+#
+#   const char kSpamServerUrl[] = BUILDFLAG(SPAM_SERVER_URL);
+#
+# There will be no #define called ENABLE_FOO so if you accidentally test for that
+# in an ifdef it will always be negative.
+#
+#
+# Template parameters
+#
+#   flags [required, list of strings]
+#       Flag values as described above.
+#
+#   header [required, string]
+#       File name for generated header. By default, this will go in the
+#       generated file directory for this target, and you would include it
+#       with:
+#         #include "<path_to_this_BUILD_file>/<header>"
+#
+#   header_dir [optional, string]
+#       Override the default location of the generated header. The string will
+#       be treated as a subdirectory of the root_gen_dir. For example:
+#         header_dir = "foo/bar"
+#       Then you can include the header as:
+#         #include "foo/bar/baz.h"
+#
+#   deps, public_deps, testonly, visibility
+#       Normal meaning.
+#
+#
+# Grit defines
+#
+# If one .grd file uses a flag, just add to the grit target:
+#
+#   defines = [
+#     "enable_doom_melon=$enable_doom_melon",
+#   ]
+#
+# If multiple .grd files use it, you'll want to put the defines in a .gni file
+# so it can be shared. Generally this .gni file should include all grit defines
+# for a given module (for some definition of "module"). Then do:
+#
+#   defines = ui_grit_defines
+#
+# If you forget to do this, the flag will be implicitly false in the .grd file
+# and those resources won't be compiled. You'll know because the resource
+# #define won't be generated and any code that uses it won't compile. If you
+# see a missing IDS_* string, this is probably the reason.
+#
+#
+# Example
+#
+#   buildflag_header("foo_features") {
+#     header = "foo_features.h"
+#
+#     flags = [
+#       # This uses the GN build flag enable_doom_melon as the definition.
+#       "ENABLE_DOOM_MELON=$enable_doom_melon",
+#
+#       # This force-enables the flag.
+#       "ENABLE_SPACE_LASER=true",
+#
+#       # This will expand to the quoted C string when used in source code.
+#       "SPAM_SERVER_URL=\"http://www.example.com/\"",
+#     ]
+#   }
+template("buildflag_header") {
+  action(target_name) {
+    script = "//build/write_buildflag_header.py"
+
+    if (defined(invoker.header_dir)) {
+      header_file = "${invoker.header_dir}/${invoker.header}"
+    } else {
+      # Compute the path from the root to this file.
+      header_file = rebase_path(".", "//") + "/${invoker.header}"
+    }
+
+    outputs = [
+      "$root_gen_dir/$header_file",
+    ]
+
+    # Always write --flags to the file so it's not empty. Empty will confuse GN
+    # into thinking the response file isn't used.
+    response_file_contents = [ "--flags" ]
+    if (defined(invoker.flags)) {
+      response_file_contents += invoker.flags
+    }
+
+    args = [
+      "--output",
+      header_file,  # Not rebased, Python script puts it inside gen-dir.
+      "--rulename",
+      get_label_info(":$target_name", "label_no_toolchain"),
+      "--gen-dir",
+      rebase_path(root_gen_dir, root_build_dir),
+      "--definitions",
+      "{{response_file_name}}",
+    ]
+
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                             "visibility",
+                           ])
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/check_gn_headers.py
@@ -0,0 +1,234 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Find header files missing in GN.
+
+This script gets all the header files from ninja_deps, which reflects the true
+dependencies generated by the compiler, and reports any that don't exist in GN.
+"""
+
+import argparse
+import json
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+from multiprocessing import Process, Queue
+
+
+def GetHeadersFromNinja(out_dir, q):
+  """Return all the header files from ninja_deps"""
+
+  def NinjaSource():
+    cmd = ['ninja', '-C', out_dir, '-t', 'deps']
+    # A negative bufsize means to use the system default, which usually
+    # means fully buffered.
+    popen = subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=-1)
+    for line in iter(popen.stdout.readline, ''):
+      yield line.rstrip()
+
+    popen.stdout.close()
+    return_code = popen.wait()
+    if return_code:
+      raise subprocess.CalledProcessError(return_code, cmd)
+
+  ans, err = set(), None
+  try:
+    ans = ParseNinjaDepsOutput(NinjaSource())
+  except Exception as e:
+    err = str(e)
+  q.put((ans, err))
+
+
+def ParseNinjaDepsOutput(ninja_out):
+  """Parse ninja output and get the header files"""
+  all_headers = set()
+
+  prefix = '..' + os.sep + '..' + os.sep
+
+  is_valid = False
+  for line in ninja_out:
+    if line.startswith('    '):
+      if not is_valid:
+        continue
+      if line.endswith('.h') or line.endswith('.hh'):
+        f = line.strip()
+        if f.startswith(prefix):
+          f = f[6:]  # Remove the '../../' prefix
+          # build/ only contains build-specific files like build_config.h
+          # and buildflag.h, and system header files, so they should be
+          # skipped.
+          if not f.startswith('build'):
+            all_headers.add(f)
+    else:
+      is_valid = line.endswith('(VALID)')
+
+  return all_headers
+
+
+def GetHeadersFromGN(out_dir, q):
+  """Return all the header files from GN"""
+
+  tmp = None
+  ans, err = set(), None
+  try:
+    tmp = tempfile.mkdtemp()
+    shutil.copy2(os.path.join(out_dir, 'args.gn'),
+                 os.path.join(tmp, 'args.gn'))
+    # Do "gn gen" in a temp dir to prevent dirtying |out_dir|.
+    subprocess.check_call(['gn', 'gen', tmp, '--ide=json', '-q'])
+    gn_json = json.load(open(os.path.join(tmp, 'project.json')))
+    ans = ParseGNProjectJSON(gn_json, out_dir, tmp)
+  except Exception as e:
+    err = str(e)
+  finally:
+    if tmp:
+      shutil.rmtree(tmp)
+  q.put((ans, err))
+
+
+def ParseGNProjectJSON(gn, out_dir, tmp_out):
+  """Parse GN output and get the header files"""
+  all_headers = set()
+
+  for _target, properties in gn['targets'].iteritems():
+    sources = properties.get('sources', [])
+    public = properties.get('public', [])
+    # Exclude '"public": "*"'.
+    if type(public) is list:
+      sources += public
+    for f in sources:
+      if f.endswith('.h') or f.endswith('.hh'):
+        if f.startswith('//'):
+          f = f[2:]  # Strip the '//' prefix.
+          if f.startswith(tmp_out):
+            f = out_dir + f[len(tmp_out):]
+          all_headers.add(f)
+
+  return all_headers
+
+
+def GetDepsPrefixes(q):
+  """Return all the folders controlled by DEPS file"""
+  prefixes, err = set(), None
+  try:
+    gclient_out = subprocess.check_output(
+        ['gclient', 'recurse', '--no-progress', '-j1',
+         'python', '-c', 'import os;print os.environ["GCLIENT_DEP_PATH"]'])
+    for i in gclient_out.split('\n'):
+      if i.startswith('src/'):
+        i = i[4:]
+        prefixes.add(i)
+  except Exception as e:
+    err = str(e)
+  q.put((prefixes, err))
+
+
+def ParseWhiteList(whitelist):
+  out = set()
+  for line in whitelist.split('\n'):
+    line = re.sub(r'#.*', '', line).strip()
+    if line:
+      out.add(line)
+  return out
+
+
+def FilterOutDepsedRepo(files, deps):
+  return {f for f in files if not any(f.startswith(d) for d in deps)}
+
+
+def GetNonExistingFiles(lst):
+  out = set()
+  for f in lst:
+    if not os.path.isfile(f):
+      out.add(f)
+  return out
+
+
+def main():
+  parser = argparse.ArgumentParser(description='''
+      NOTE: Use ninja to build all targets in OUT_DIR before running
+      this script.''')
+  parser.add_argument('--out-dir', metavar='OUT_DIR', default='out/Release',
+                      help='output directory of the build')
+  parser.add_argument('--json',
+                      help='JSON output filename for missing headers')
+  parser.add_argument('--whitelist', help='file containing whitelist')
+
+  args, _extras = parser.parse_known_args()
+
+  if not os.path.isdir(args.out_dir):
+    parser.error('OUT_DIR "%s" does not exist.' % args.out_dir)
+
+  d_q = Queue()
+  d_p = Process(target=GetHeadersFromNinja, args=(args.out_dir, d_q,))
+  d_p.start()
+
+  gn_q = Queue()
+  gn_p = Process(target=GetHeadersFromGN, args=(args.out_dir, gn_q,))
+  gn_p.start()
+
+  deps_q = Queue()
+  deps_p = Process(target=GetDepsPrefixes, args=(deps_q,))
+  deps_p.start()
+
+  d, d_err = d_q.get()
+  gn, gn_err = gn_q.get()
+  missing = d - gn
+  nonexisting = GetNonExistingFiles(gn)
+
+  deps, deps_err = deps_q.get()
+  missing = FilterOutDepsedRepo(missing, deps)
+  nonexisting = FilterOutDepsedRepo(nonexisting, deps)
+
+  d_p.join()
+  gn_p.join()
+  deps_p.join()
+
+  if d_err:
+    parser.error(d_err)
+  if gn_err:
+    parser.error(gn_err)
+  if deps_err:
+    parser.error(deps_err)
+  if len(GetNonExistingFiles(d)) > 0:
+    parser.error('''Found non-existing files in ninja deps. You should
+        build all in OUT_DIR.''')
+  if len(d) == 0:
+    parser.error('OUT_DIR looks empty. You should build all there.')
+  if any((('/gen/' in i) for i in nonexisting)):
+    parser.error('OUT_DIR looks wrong. You should build all there.')
+
+  if args.whitelist:
+    whitelist = ParseWhiteList(open(args.whitelist).read())
+    missing -= whitelist
+
+  missing = sorted(missing)
+  nonexisting = sorted(nonexisting)
+
+  if args.json:
+    with open(args.json, 'w') as f:
+      json.dump(missing, f)
+
+  if len(missing) == 0 and len(nonexisting) == 0:
+    return 0
+
+  if len(missing) > 0:
+    print '\nThe following files should be included in gn files:'
+    for i in missing:
+      print i
+
+  if len(nonexisting) > 0:
+    print '\nThe following non-existing files should be removed from gn files:'
+    for i in nonexisting:
+      print i
+
+  return 1
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/check_gn_headers_unittest.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import json
+import os
+import unittest
+import check_gn_headers
+
+
+ninja_input = r'''
+obj/a.o: #deps 1, deps mtime 123 (VALID)
+    ../../a.cc
+    ../../dir/path/b.h
+    ../../c.hh
+
+obj/b.o: #deps 1, deps mtime 123 (STALE)
+    ../../b.cc
+    ../../dir2/path/b.h
+    ../../c2.hh
+
+obj/c.o: #deps 1, deps mtime 123 (VALID)
+    ../../c.cc
+    ../../build/a.h
+    gen/b.h
+    ../../dir3/path/b.h
+    ../../c3.hh
+'''
+ninja_input_win = ninja_input.replace('/', '\\')
+
+
+gn_input = json.loads(r'''
+{
+   "others": [],
+   "targets": {
+      "//:All": {
+      },
+      "//:base": {
+         "public": [ "//base/p.h" ],
+         "sources": [ "//base/a.cc", "//base/a.h", "//base/b.hh" ],
+         "visibility": [ "*" ]
+      },
+      "//:star_public": {
+         "public": "*",
+         "sources": [ "//base/c.h", "//tmp/gen/a.h" ],
+         "visibility": [ "*" ]
+      }
+    }
+}
+''')
+
+
+whitelist = r'''
+   white-front.c
+a/b/c/white-end.c # comment
+ dir/white-both.c  #more comment
+
+# empty line above
+a/b/c
+'''
+
+
+class CheckGnHeadersTest(unittest.TestCase):
+  def testNinja(self):
+    headers = check_gn_headers.ParseNinjaDepsOutput(ninja_input.split('\n'))
+    expected = set([
+        'dir/path/b.h',
+        'c.hh',
+        'dir3/path/b.h',
+        'c3.hh',
+    ])
+    self.assertEquals(headers, expected)
+
+  def testNinjaWin(self):
+    old_sep = os.sep
+    os.sep = '\\'
+
+    headers = check_gn_headers.ParseNinjaDepsOutput(
+        ninja_input_win.split('\n'))
+    expected = set([
+        'dir\\path\\b.h',
+        'c.hh',
+        'dir3\\path\\b.h',
+        'c3.hh',
+    ])
+    self.assertEquals(headers, expected)
+
+    os.sep = old_sep
+
+  def testGn(self):
+    headers = check_gn_headers.ParseGNProjectJSON(gn_input,
+                                                  'out/Release', 'tmp')
+    expected = set([
+        'base/a.h',
+        'base/b.hh',
+        'base/c.h',
+        'base/p.h',
+        'out/Release/gen/a.h',
+    ])
+    self.assertEquals(headers, expected)
+
+  def testWhitelist(self):
+    output = check_gn_headers.ParseWhiteList(whitelist)
+    expected = set([
+        'white-front.c',
+        'a/b/c/white-end.c',
+        'dir/white-both.c',
+        'a/b/c',
+    ])
+    self.assertEquals(output, expected)
+
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/check_return_value.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This program wraps an arbitrary command and prints "1" if the command ran
+successfully."""
+
+import os
+import subprocess
+import sys
+
+devnull = open(os.devnull, 'wb')
+if not subprocess.call(sys.argv[1:], stdout=devnull, stderr=devnull):
+  print 1
+else:
+  print 0
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/clobber.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script provides methods for clobbering build directories."""
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+
+
+def extract_gn_build_commands(build_ninja_file):
+  """Extracts from a build.ninja the commands to run GN.
+
+  The commands to run GN are the gn rule and build.ninja build step at the
+  top of the build.ninja file. We want to keep these when deleting GN builds
+  since we want to preserve the command-line flags to GN.
+
+  On error, returns the empty string."""
+  result = ""
+  with open(build_ninja_file, 'r') as f:
+    # Read until the second blank line. The first thing GN writes to the file
+    # is the "rule gn" and the second is the section for "build build.ninja",
+    # separated by blank lines.
+    num_blank_lines = 0
+    while num_blank_lines < 2:
+      line = f.readline()
+      if len(line) == 0:
+        return ''  # Unexpected EOF.
+      result += line
+      if line[0] == '\n':
+        num_blank_lines = num_blank_lines + 1
+  return result
+
+
+def delete_dir(build_dir):
+  if os.path.islink(build_dir):
+    return
+  # For unknown reasons (anti-virus?) rmtree of Chromium build directories
+  # often fails on Windows.
+  if sys.platform.startswith('win'):
+    subprocess.check_call(['rmdir', '/s', '/q', build_dir], shell=True)
+  else:
+    shutil.rmtree(build_dir)
+
+
+def delete_build_dir(build_dir):
+  # GN writes a build.ninja.d file. Note that not all GN builds have args.gn.
+  build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d')
+  if not os.path.exists(build_ninja_d_file):
+    delete_dir(build_dir)
+    return
+
+  # GN builds aren't automatically regenerated when you sync. To avoid
+  # messing with the GN workflow, erase everything but the args file, and
+  # write a dummy build.ninja file that will automatically rerun GN the next
+  # time Ninja is run.
+  build_ninja_file = os.path.join(build_dir, 'build.ninja')
+  build_commands = extract_gn_build_commands(build_ninja_file)
+
+  try:
+    gn_args_file = os.path.join(build_dir, 'args.gn')
+    with open(gn_args_file, 'r') as f:
+      args_contents = f.read()
+  except IOError:
+    args_contents = ''
+
+  e = None
+  try:
+    # delete_dir and os.mkdir() may fail, such as when chrome.exe is running,
+    # and we still want to restore args.gn/build.ninja/build.ninja.d, so catch
+    # the exception and rethrow it later.
+    delete_dir(build_dir)
+    os.mkdir(build_dir)
+  except Exception as e:
+    pass
+
+  # Put back the args file (if any).
+  if args_contents != '':
+    with open(gn_args_file, 'w') as f:
+      f.write(args_contents)
+
+  # Write the build.ninja file sufficiently to regenerate itself.
+  with open(os.path.join(build_dir, 'build.ninja'), 'w') as f:
+    if build_commands != '':
+      f.write(build_commands)
+    else:
+      # Couldn't parse the build.ninja file, write a default thing.
+      f.write('''rule gn
+command = gn -q gen //out/%s/
+description = Regenerating ninja files
+
+build build.ninja: gn
+generator = 1
+depfile = build.ninja.d
+''' % (os.path.split(build_dir)[1]))
+
+  # Write a .d file for the build which references a nonexistent file. This
+  # will make Ninja always mark the build as dirty.
+  with open(build_ninja_d_file, 'w') as f:
+    f.write('build.ninja: nonexistant_file.gn\n')
+
+  if e:
+    # Rethrow the exception we caught earlier.
+    raise e
+
+def clobber(out_dir):
+  """Clobber contents of build directory.
+
+  Don't delete the directory itself: some checkouts have the build directory
+  mounted."""
+  for f in os.listdir(out_dir):
+    path = os.path.join(out_dir, f)
+    if os.path.isfile(path):
+      os.unlink(path)
+    elif os.path.isdir(path):
+      delete_build_dir(path)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('out_dir', help='The output directory to clobber')
+  args = parser.parse_args()
+  clobber(args.out_dir)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
--- a/media/webrtc/trunk/build/common.croc
+++ b/media/webrtc/trunk/build/common.croc
@@ -52,17 +52,17 @@
     },
     # Don't include subversion or mercurial SCM dirs
     {
       'regexp' : '.*/(\\.svn|\\.hg)/',
       'include' : 0,
     },
     # Don't include output dirs
     {
-      'regexp' : '.*/(Debug|Release|sconsbuild|out|xcodebuild)/',
+      'regexp' : '.*/(Debug|Release|out|xcodebuild)/',
       'include' : 0,
     },
     # Don't include third-party source
     {
       'regexp' : '.*/third_party/',
       'include' : 0,
     },
     # We don't run the V8 test suite, so we don't care about V8 coverage.
@@ -84,17 +84,17 @@
       'format' : '*RESULT FilesKnown: files_executable= %d files',
     },
     {
       'stat' : 'files_instrumented',
       'format' : '*RESULT FilesInstrumented: files_instrumented= %d files',
     },
     {
       'stat' : '100.0 * files_instrumented / files_executable',
-      'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g',
+      'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g percent',
     },
     {
       'stat' : 'lines_executable',
       'format' : '*RESULT LinesKnown: lines_known= %d lines',
     },
     {
       'stat' : 'lines_instrumented',
       'format' : '*RESULT LinesInstrumented: lines_instrumented= %d lines',
@@ -106,22 +106,22 @@
     },
     {
       'stat' : 'lines_covered',
       'format' : '*RESULT LinesCoveredTest: lines_covered_test= %d lines',
       'group' : 'test',
     },
     {
       'stat' : '100.0 * lines_covered / lines_executable',
-      'format' : '*RESULT PercentCovered: percent_covered= %g',
+      'format' : '*RESULT PercentCovered: percent_covered= %g percent',
     },
     {
       'stat' : '100.0 * lines_covered / lines_executable',
-      'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g',
+      'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g percent',
       'group' : 'source',
     },
     {
       'stat' : '100.0 * lines_covered / lines_executable',
-      'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g',
+      'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g percent',
       'group' : 'test',
     },
   ],
 }
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/compiled_action.gni
@@ -0,0 +1,170 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file introduces two related templates that act like action and
+# action_foreach but instead of running a Python script, it will compile a
+# given tool in the host toolchain and run that (either once or over the list
+# of inputs, depending on the variant).
+#
+# Parameters
+#
+#   tool (required)
+#       [label] Label of the tool to run. This should be an executable, and
+#       this label should not include a toolchain (anything in parens). The
+#       host compile of this tool will be used.
+#
+#   outputs (required)
+#       [list of files] Like the outputs of action (if using "compiled_action",
+#       this would be just the list of outputs), or action_foreach (if using
+#       "compiled_action_foreach", this would contain source expansions mapping
+#       input to output files).
+#
+#   args (required)
+#       [list of strings] Same meaning as action/action_foreach.
+#
+#   inputs (optional)
+#       Files the binary takes as input. The step will be re-run whenever any
+#       of these change. If inputs is empty, the step will run only when the
+#       binary itself changes.
+#
+#   visibility
+#   deps
+#   args   (all optional)
+#       Same meaning as action/action_foreach.
+#
+#
+# Example of usage:
+#
+#   compiled_action("run_my_tool") {
+#     tool = "//tools/something:mytool"
+#     outputs = [
+#       "$target_gen_dir/mysource.cc",
+#       "$target_gen_dir/mysource.h",
+#     ]
+#
+#     # The tool takes this input.
+#     inputs = [ "my_input_file.idl" ]
+#
+#     # In this case, the tool takes as arguments the input file and the output
+#     # build dir (both relative to the "cd" that the script will be run in)
+#     # and will produce the output files listed above.
+#     args = [
+#       rebase_path("my_input_file.idl", root_build_dir),
+#       "--output-dir", rebase_path(target_gen_dir, root_build_dir),
+#     ]
+#   }
+#
+# You would typically declare your tool like this:
+#   if (host_toolchain == current_toolchain) {
+#     executable("mytool") {
+#       ...
+#     }
+#   }
+# The if statement around the executable is optional. That says "I only care
+# about this target in the host toolchain". Usually this is what you want, and
+# saves unnecessarily compiling your tool for the target platform. But if you
+# need a target build of your tool as well, just leave off the if statement.
+
+if (host_os == "win") {
+  _host_executable_suffix = ".exe"
+} else {
+  _host_executable_suffix = ""
+}
+
+template("compiled_action") {
+  assert(defined(invoker.tool), "tool must be defined for $target_name")
+  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+  assert(defined(invoker.args), "args must be defined for $target_name")
+
+  assert(!defined(invoker.sources),
+         "compiled_action doesn't take a sources arg. Use inputs instead.")
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "inputs",
+                             "outputs",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    if (!defined(inputs)) {
+      inputs = []
+    }
+
+    script = "//build/gn_run_binary.py"
+
+    # Construct the host toolchain version of the tool.
+    host_tool = invoker.tool + "($host_toolchain)"
+
+    # Get the path to the executable. Currently, this assumes that the tool
+    # does not specify output_name so that the target name is the name to use.
+    # If that's not the case, we'll need another argument to the script to
+    # specify this, since we can't know what the output name is (it might be in
+    # another file not processed yet).
+    host_executable =
+        get_label_info(host_tool, "root_out_dir") + "/" +
+        get_label_info(host_tool, "name") + _host_executable_suffix
+
+    # Add the executable itself as an input.
+    inputs += [ host_executable ]
+
+    deps += [ host_tool ]
+
+    # The script takes as arguments the binary to run, and then the arguments
+    # to pass it.
+    args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
+  }
+}
+
+template("compiled_action_foreach") {
+  assert(defined(invoker.sources), "sources must be defined for $target_name")
+  assert(defined(invoker.tool), "tool must be defined for $target_name")
+  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+  assert(defined(invoker.args), "args must be defined for $target_name")
+
+  action_foreach(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "inputs",
+                             "outputs",
+                             "sources",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    if (!defined(inputs)) {
+      inputs = []
+    }
+
+    script = "//build/gn_run_binary.py"
+
+    # Construct the host toolchain version of the tool.
+    host_tool = invoker.tool + "($host_toolchain)"
+
+    # Get the path to the executable. Currently, this assumes that the tool
+    # does not specify output_name so that the target name is the name to use.
+    # If that's not the case, we'll need another argument to the script to
+    # specify this, since we can't know what the output name is (it might be in
+    # another file not processed yet).
+    host_executable =
+        get_label_info(host_tool, "root_out_dir") + "/" +
+        get_label_info(host_tool, "name") + _host_executable_suffix
+
+    # Add the executable itself as an input.
+    inputs += [ host_executable ]
+
+    deps += [ host_tool ]
+
+    # The script takes as arguments the binary to run, and then the arguments
+    # to pass it.
+    args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
+  }
+}
--- a/media/webrtc/trunk/build/compiler_version.py
+++ b/media/webrtc/trunk/build/compiler_version.py
@@ -9,68 +9,120 @@ Print gcc version as XY if you are runni
 This is used to tweak build flags for gcc 4.4.
 """
 
 import os
 import re
 import subprocess
 import sys
 
-def GetVersion(compiler):
+
+compiler_version_cache = {}  # Map from (compiler, tool) -> version.
+
+
+def Usage(program_name):
+  print '%s MODE TOOL' % os.path.basename(program_name)
+  print 'MODE: host or target.'
+  print 'TOOL: assembler or compiler or linker.'
+  return 1
+
+
+def ParseArgs(args):
+  if len(args) != 2:
+    raise Exception('Invalid number of arguments')
+  mode = args[0]
+  tool = args[1]
+  if mode not in ('host', 'target'):
+    raise Exception('Invalid mode: %s' % mode)
+  if tool not in ('assembler',):
+    raise Exception('Invalid tool: %s' % tool)
+  return mode, tool
+
+
+def GetEnvironFallback(var_list, default):
+  """Look up an environment variable from a possible list of variable names."""
+  for var in var_list:
+    if var in os.environ:
+      return os.environ[var]
+  return default
+
+
+def GetVersion(compiler, tool):
+  tool_output = tool_error = None
+  cache_key = (compiler, tool)
+  cached_version = compiler_version_cache.get(cache_key)
+  if cached_version:
+    return cached_version
   try:
     # Note that compiler could be something tricky like "distcc g++".
-    compiler = compiler + " -dumpversion"
-    pipe = subprocess.Popen(compiler, shell=True,
+    if tool == "assembler":
+      compiler = compiler + " -Xassembler --version -x assembler -c /dev/null"
+      # Unmodified: GNU assembler (GNU Binutils) 2.24
+      # Ubuntu: GNU assembler (GNU Binutils for Ubuntu) 2.22
+      # Fedora: GNU assembler version 2.23.2
+      version_re = re.compile(r"^GNU [^ ]+ .* (\d+).(\d+).*?$", re.M)
+    else:
+      raise Exception("Unknown tool %s" % tool)
+
+    # Force the locale to C otherwise the version string could be localized
+    # making regex matching fail.
+    env = os.environ.copy()
+    env["LC_ALL"] = "C"
+    pipe = subprocess.Popen(compiler, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    gcc_output, gcc_error = pipe.communicate()
+    tool_output, tool_error = pipe.communicate()
     if pipe.returncode:
       raise subprocess.CalledProcessError(pipe.returncode, compiler)
 
-    result = re.match(r"(\d+)\.(\d+)", gcc_output)
-    return result.group(1) + result.group(2)
+    parsed_output = version_re.match(tool_output)
+    result = parsed_output.group(1) + parsed_output.group(2)
+    compiler_version_cache[cache_key] = result
+    return result
   except Exception, e:
-    if gcc_error:
-      sys.stderr.write(gcc_error)
+    if tool_error:
+      sys.stderr.write(tool_error)
     print >> sys.stderr, "compiler_version.py failed to execute:", compiler
     print >> sys.stderr, e
     return ""
 
-def GetVersionFromEnvironment(compiler_env):
-  """ Returns the version of compiler
 
-  If the compiler was set by the given environment variable and exists,
-  return its version, otherwise None is returned.
-  """
-  cxx = os.getenv(compiler_env, None)
-  if cxx:
-    cxx_version = GetVersion(cxx)
-    if cxx_version != "":
-      return cxx_version
-  return None
+def main(args):
+  try:
+    (mode, tool) = ParseArgs(args[1:])
+  except Exception, e:
+    sys.stderr.write(e.message + '\n\n')
+    return Usage(args[0])
+
+  ret_code, result = ExtractVersion(mode, tool)
+  if ret_code == 0:
+    print result
+  return ret_code
+
 
-def main():
-  # Check if CXX_target or CXX environment variable exists an if it does use
-  # that compiler.
-  # TODO: Fix ninja (see http://crbug.com/140900) instead and remove this code
-  # In ninja's cross compile mode, the CXX_target is target compiler, while
-  # the CXX is host. The CXX_target needs be checked first, though the target
-  # and host compiler have different version, there seems no issue to use the
-  # target compiler's version number as gcc_version in Android.
-  cxx_version = GetVersionFromEnvironment("CXX_target")
-  if cxx_version:
-    print cxx_version
-    return 0
+def DoMain(args):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  (mode, tool) = ParseArgs(args)
+  ret_code, result = ExtractVersion(mode, tool)
+  if ret_code == 0:
+    return result
+  raise Exception("Failed to extract compiler version for args: %s" % args)
+
 
-  cxx_version = GetVersionFromEnvironment("CXX")
-  if cxx_version:
-    print cxx_version
-    return 0
+def ExtractVersion(mode, tool):
+  # Check if various CXX environment variables exist and use them if they
+  # exist. The preferences and fallback order is a close approximation of
+  # GenerateOutputForConfig() in GYP's ninja generator.
+  # The main difference being not supporting GYP's make_global_settings.
+  environments = ['CXX_target', 'CXX']
+  if mode == 'host':
+    environments = ['CXX_host'] + environments;
+  compiler = GetEnvironFallback(environments, 'c++')
 
-  # Otherwise we check the g++ version.
-  gccversion = GetVersion("g++")
-  if gccversion != "":
-    print gccversion
-    return 0
+  if compiler:
+    compiler_version = GetVersion(compiler, tool)
+    if compiler_version != "":
+      return (0, compiler_version)
+  return (1, None)
 
-  return 1
 
 if __name__ == "__main__":
-  sys.exit(main())
+  sys.exit(main(sys.argv))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/BUILD.gn
@@ -0,0 +1,383 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/allocator.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/crypto.gni")
+import("//build/config/dcheck_always_on.gni")
+import("//build/config/features.gni")
+import("//build/config/pch.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/ui.gni")
+import("//build/toolchain/goma.gni")
+
+declare_args() {
+  # When set (the default) enables C++ iterator debugging in debug builds.
+  # Iterator debugging is always off in release builds (technically, this flag
+  # affects the "debug" config, which is always available but applied by
+  # default only in debug builds).
+  #
+  # Iterator debugging is generally useful for catching bugs. But it can
+  # introduce extra locking to check the state of an iterator against the state
+  # of the current object. For iterator- and thread-heavy code, this can
+  # significantly slow execution.
+  enable_iterator_debugging = true
+}
+
+# ==============================================
+#   PLEASE DO NOT ADD MORE THINGS TO THIS LIST
+# ==============================================
+#
+# Legacy feature defines applied to all targets.
+#
+# These are applied to every single compile in the build and most of them are
+# only relevant to a few files. This bloats command lines and causes
+# unnecessary recompiles when flags are flipped.
+#
+# To pass defines to source code from the build, use the buildflag system which
+# will write headers containing the defines you need. This isolates the define
+# and means its definition can participate in the build graph, only recompiling
+# things when it actually changes.
+#
+# See //build/buildflag_header.gni for instructions on generating headers.
+#
+# This will also allow you to scope your build flag to a BUILD.gn file (or a
+# .gni file if you need it from more than one place) rather than making global
+# flags. See //build/config/BUILDCONFIG.gn for advice on where to define
+# build flags.
+config("feature_flags") {
+  # Don't use deprecated V8 APIs anywhere.
+  defines = [ "V8_DEPRECATION_WARNINGS" ]
+  if (dcheck_always_on) {
+    defines += [ "DCHECK_ALWAYS_ON=1" ]
+  }
+  if (use_udev) {
+    # TODO(brettw) should probably be "=1".
+    defines += [ "USE_UDEV" ]
+  }
+  if (use_ash) {
+    defines += [ "USE_ASH=1" ]
+  }
+  if (use_aura) {
+    defines += [ "USE_AURA=1" ]
+  }
+  if (use_pango) {
+    defines += [ "USE_PANGO=1" ]
+  }
+  if (use_cairo) {
+    defines += [ "USE_CAIRO=1" ]
+  }
+  if (use_glib) {
+    defines += [ "USE_GLIB=1" ]
+  }
+  if (use_openssl_certs) {
+    defines += [ "USE_OPENSSL_CERTS=1" ]
+  }
+  if (use_nss_certs) {
+    defines += [ "USE_NSS_CERTS=1" ]
+  }
+  if (use_ozone) {
+    defines += [ "USE_OZONE=1" ]
+  }
+  if (use_x11) {
+    defines += [ "USE_X11=1" ]
+  }
+  if (use_allocator != "tcmalloc") {
+    defines += [ "NO_TCMALLOC" ]
+  }
+  if (is_asan || is_lsan || is_tsan || is_msan) {
+    defines += [
+      "MEMORY_TOOL_REPLACES_ALLOCATOR",
+      "MEMORY_SANITIZER_INITIAL_SIZE",
+    ]
+  }
+  if (is_asan) {
+    defines += [ "ADDRESS_SANITIZER" ]
+  }
+  if (is_lsan) {
+    defines += [ "LEAK_SANITIZER" ]
+  }
+  if (is_tsan) {
+    defines += [
+      "THREAD_SANITIZER",
+      "DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1",
+      "WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1",
+    ]
+  }
+  if (is_msan) {
+    defines += [ "MEMORY_SANITIZER" ]
+  }
+  if (is_ubsan || is_ubsan_null || is_ubsan_vptr || is_ubsan_security) {
+    defines += [ "UNDEFINED_SANITIZER" ]
+  }
+  if (!enable_nacl) {
+    defines += [ "DISABLE_NACL" ]
+  }
+  if (safe_browsing_mode == 1) {
+    defines += [ "FULL_SAFE_BROWSING" ]
+    defines += [ "SAFE_BROWSING_CSD" ]
+    defines += [ "SAFE_BROWSING_DB_LOCAL" ]
+  } else if (safe_browsing_mode == 2) {
+    defines += [ "SAFE_BROWSING_DB_REMOTE" ]
+  }
+  if (is_official_build) {
+    defines += [ "OFFICIAL_BUILD" ]
+  }
+  if (is_chrome_branded) {
+    defines += [ "GOOGLE_CHROME_BUILD" ]
+  } else {
+    defines += [ "CHROMIUM_BUILD" ]
+  }
+  if (is_syzyasan) {
+    defines += [
+      "SYZYASAN",
+      "MEMORY_SANITIZER_INITIAL_SIZE",
+    ]
+  }
+  if (!fieldtrial_testing_like_official_build && !is_chrome_branded) {
+    defines += [ "FIELDTRIAL_TESTING_ENABLED" ]
+  }
+
+  # ==============================================
+  #   PLEASE DO NOT ADD MORE THINGS TO THIS LIST
+  # ==============================================
+  #
+  # See the comment at the top.
+}
+
+# Debug/release ----------------------------------------------------------------
+
+config("debug") {
+  defines = [
+    "_DEBUG",
+    "DYNAMIC_ANNOTATIONS_ENABLED=1",
+    "WTF_USE_DYNAMIC_ANNOTATIONS=1",
+  ]
+
+  if (is_nacl) {
+    defines += [ "DYNAMIC_ANNOTATIONS_PREFIX=NACL_" ]
+  }
+
+  if (is_win) {
+    if (!enable_iterator_debugging) {
+      # Iterator debugging is enabled by default by the compiler on debug
+      # builds, and we have to tell it to turn it off.
+      defines += [ "_HAS_ITERATOR_DEBUGGING=0" ]
+    }
+  } else if (is_linux && current_cpu == "x64" && enable_iterator_debugging) {
+    # Enable libstdc++ debugging facilities to help catch problems early, see
+    # http://crbug.com/65151 .
+    # TODO(phajdan.jr): Should we enable this for all of POSIX?
+    defines += [ "_GLIBCXX_DEBUG=1" ]
+  }
+}
+
+config("release") {
+  defines = [ "NDEBUG" ]
+
+  # Sanitizers.
+  if (is_tsan) {
+    defines += [
+      "DYNAMIC_ANNOTATIONS_ENABLED=1",
+      "WTF_USE_DYNAMIC_ANNOTATIONS=1",
+    ]
+  } else {
+    defines += [ "NVALGRIND" ]
+    if (!is_nacl) {
+      # NaCl always enables dynamic annotations. Currently this value is set to
+      # 1 for all .nexes.
+      defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=0" ]
+    }
+  }
+
+  if (is_ios) {
+    # Disable NSAssert and GTMDevAssert (from Google Toolbox for Mac). This
+    # follows XCode's default behavior for Release builds.
+    defines += [ "NS_BLOCK_ASSERTIONS=1" ]
+  }
+}
+
+# Default libraries ------------------------------------------------------------
+
+# This config defines the default libraries applied to all targets.
+config("default_libs") {
+  if (is_win) {
+    # TODO(brettw) this list of defaults should probably be smaller, and
+    # instead the targets that use the less common ones (e.g. wininet or
+    # winspool) should include those explicitly.
+    libs = [
+      "advapi32.lib",
+      "comdlg32.lib",
+      "dbghelp.lib",
+      "delayimp.lib",
+      "dnsapi.lib",
+      "gdi32.lib",
+      "kernel32.lib",
+      "msimg32.lib",
+      "odbc32.lib",
+      "odbccp32.lib",
+      "ole32.lib",
+      "oleaut32.lib",
+      "psapi.lib",
+      "shell32.lib",
+      "shlwapi.lib",
+      "user32.lib",
+      "usp10.lib",
+      "uuid.lib",
+      "version.lib",
+      "wininet.lib",
+      "winmm.lib",
+      "winspool.lib",
+      "ws2_32.lib",
+
+      # Please don't add more stuff here. We should actually be making this
+      # list smaller, since all common things should be covered. If you need
+      # some extra libraries, please just add a libs = [ "foo.lib" ] to your
+      # target that needs it.
+    ]
+  } else if (is_android) {
+    libs = [
+      "dl",
+      "m",
+    ]
+  } else if (is_mac) {
+    # Targets should choose to explicitly link frameworks they require. Since
+    # linking can have run-time side effects, nothing should be listed here.
+    libs = []
+  } else if (is_ios) {
+    # The libraries listed here will be specified for both the target and the
+    # host. Only the common ones should be listed here.
+    libs = [
+      "CoreFoundation.framework",
+      "CoreGraphics.framework",
+      "CoreText.framework",
+      "Foundation.framework",
+    ]
+  } else if (is_linux) {
+    libs = [
+      "dl",
+      "rt",
+    ]
+  }
+}
+
+# Dependencies that all executables and shared libraries should have.
+group("exe_and_shlib_deps") {
+  public_deps = []
+  if (using_sanitizer) {
+    public_deps += [ "//build/config/sanitizers:deps" ]
+  }
+  if (use_custom_libcxx) {
+    public_deps += [ "//buildtools/third_party/libc++:libcxx_proxy" ]
+  }
+  if (use_afl) {
+    public_deps += [ "//third_party/afl" ]
+  }
+}
+
+# Executable configs -----------------------------------------------------------
+
+# Windows linker setup for EXEs and DLLs.
+if (is_win) {
+  _windows_linker_configs = [
+    "//build/config/win:sdk_link",
+    "//build/config/win:common_linker_setup",
+  ]
+}
+
+# This config defines the configs applied to all executables.
+config("executable_config") {
+  configs = []
+
+  if (is_win) {
+    configs += _windows_linker_configs
+
+    # Currently only turn on linker CFI for executables.
+    configs += [ "//build/config/win:cfi_linker" ]
+  } else if (is_mac) {
+    configs += [
+      "//build/config/mac:mac_dynamic_flags",
+      "//build/config/mac:mac_executable_flags",
+    ]
+  } else if (is_ios) {
+    configs += [
+      "//build/config/ios:ios_dynamic_flags",
+      "//build/config/ios:ios_executable_flags",
+    ]
+  } else if (is_linux || is_android || current_os == "aix") {
+    configs += [ "//build/config/gcc:executable_ldconfig" ]
+    if (is_android) {
+      configs += [ "//build/config/android:executable_config" ]
+    } else if (is_chromecast) {
+      configs += [ "//build/config/chromecast:executable_config" ]
+    }
+  }
+
+  # If we're using the prebuilt instrumented libraries with the sanitizers, we
+  # need to add ldflags to every binary to make sure they are picked up.
+  if (prebuilt_instrumented_libraries_available) {
+    configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ]
+  }
+  if (use_locally_built_instrumented_libraries) {
+    configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ]
+  }
+  configs += [ "//build/config/sanitizers:link_executable" ]
+}
+
+# Shared library configs -------------------------------------------------------
+
+# This config defines the configs applied to all shared libraries.
+config("shared_library_config") {
+  configs = []
+
+  if (is_win) {
+    configs += _windows_linker_configs
+  } else if (is_mac) {
+    configs += [ "//build/config/mac:mac_dynamic_flags" ]
+  } else if (is_ios) {
+    configs += [ "//build/config/ios:ios_dynamic_flags" ]
+  } else if (is_chromecast) {
+    configs += [ "//build/config/chromecast:shared_library_config" ]
+  }
+
+  # If we're using the prebuilt instrumented libraries with the sanitizers, we
+  # need to add ldflags to every binary to make sure they are picked up.
+  if (prebuilt_instrumented_libraries_available) {
+    configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ]
+  }
+  if (use_locally_built_instrumented_libraries) {
+    configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ]
+  }
+  configs += [ "//build/config/sanitizers:link_shared_library" ]
+}
+
+# Add this config to your target to enable precompiled headers.
+#
+# Precompiled headers are done on a per-target basis. If you have just a couple
+# of files, the time it takes to precompile (~2 seconds) can actually be longer
+# than the time saved. On a Z620, a 100 file target compiles about 2 seconds
+# faster with precompiled headers, with greater savings for larger targets.
+#
+# Recommend precompiled headers for targets with more than 50 .cc files.
+config("precompiled_headers") {
+  if (enable_precompiled_headers) {
+    if (is_win) {
+      # This is a string rather than a file GN knows about. It has to match
+      # exactly what's in the /FI flag below, and what might appear in the
+      # source code in quotes for an #include directive.
+      precompiled_header = "build/precompile.h"
+
+      # This is a file that GN will compile with the above header. It will be
+      # implicitly added to the sources (potentially multiple times, with one
+      # variant for each language used in the target).
+      precompiled_source = "//build/precompile.cc"
+
+      # Force include the header.
+      cflags = [ "/FI$precompiled_header" ]
+    } else if (is_mac) {
+      precompiled_source = "//build/precompile.h"
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/BUILDCONFIG.gn
@@ -0,0 +1,699 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================================================
+# WHAT IS THIS FILE?
+# =============================================================================
+#
+# This is the master GN build configuration. This file is loaded after the
+# build args (args.gn) for the build directory and after the toplevel ".gn"
+# file (which points to this file as the build configuration).
+#
+# This file will be executed and the resulting context will be used to execute
+# every other file in the build. So variables declared here (that don't start
+# with an underscore) will be implicitly global.
+
+# =============================================================================
+# PLATFORM SELECTION
+# =============================================================================
+#
+# There are two main things to set: "os" and "cpu". The "toolchain" is the name
+# of the GN thing that encodes combinations of these things.
+#
+# Users typically only set the variables "target_os" and "target_cpu" in "gn
+# args", the rest are set up by our build and internal to GN.
+#
+# There are three different types of each of these things: The "host"
+# represents the computer doing the compile and never changes. The "target"
+# represents the main thing we're trying to build. The "current" represents
+# which configuration is currently being defined, which can be either the
+# host, the target, or something completely different (like nacl). GN will
+# run the same build file multiple times for the different required
+# configuration in the same build.
+#
+# This gives the following variables:
+#  - host_os, host_cpu, host_toolchain
+#  - target_os, target_cpu, default_toolchain
+#  - current_os, current_cpu, current_toolchain.
+#
+# Note the default_toolchain isn't symmetrical (you would expect
+# target_toolchain). This is because the "default" toolchain is a GN built-in
+# concept, and "target" is something our build sets up that's symmetrical with
+# its GYP counterpart. Potentially the built-in default_toolchain variable
+# could be renamed in the future.
+#
+# When writing build files, to do something only for the host:
+#   if (current_toolchain == host_toolchain) { ...
+
+if (target_os == "") {
+  target_os = host_os
+}
+
+if (target_cpu == "") {
+  if (target_os == "android") {
+    # If we're building for Android, we should assume that we want to
+    # build for ARM by default, not the host_cpu (which is likely x64).
+    # This allows us to not have to specify both target_os and target_cpu
+    # on the command line.
+    target_cpu = "arm"
+  } else {
+    target_cpu = host_cpu
+  }
+}
+
+if (current_cpu == "") {
+  current_cpu = target_cpu
+}
+if (current_os == "") {
+  current_os = target_os
+}
+
+# =============================================================================
+# BUILD FLAGS
+# =============================================================================
+#
+# This block lists input arguments to the build, along with their default
+# values.
+#
+# If a value is specified on the command line, it will overwrite the defaults
+# given in a declare_args block, otherwise the default will be used.
+#
+# YOU SHOULD ALMOST NEVER NEED TO ADD FLAGS TO THIS FILE. GN allows any file in
+# the build to declare build flags. If you need a flag for a single component,
+# you can just declare it in the corresponding BUILD.gn file.
+#
+# - If your feature is a single target, say //components/foo, you can put
+#   a declare_args() block in //components/foo/BUILD.gn and use it there.
+#   Nobody else in the build needs to see the flag.
+#
+# - Defines based on build variables should be implemented via the generated
+#   build flag header system. See //build/buildflag_header.gni. You can put
+#   the buildflag_header target in the same file as the build flag itself. You
+#   should almost never set "defines" directly.
+#
+# - If your flag toggles a target on and off or toggles between different
+#   versions of similar things, write a "group" target that forwards to the
+#   right target (or no target) depending on the value of the build flag. This
+#   group can be in the same BUILD.gn file as the build flag, and targets can
+#   depend unconditionally on the group rather than duplicating flag checks
+#   across many targets.
+#
+# - If a semi-random set of build files REALLY needs to know about a define and
+#   the above pattern for isolating the build logic in a forwarding group
+#   doesn't work, you can put the argument in a .gni file. This should be put
+#   in the lowest level of the build that knows about this feature (which should
+#   almost always be outside of the //build directory!).
+#
+# Other flag advice:
+#
+# - Use boolean values when possible. If you need a default value that expands
+#   to some complex thing in the default case (like the location of the
+#   compiler which would be computed by a script), use a default value of -1 or
+#   the empty string. Outside of the declare_args block, conditionally expand
+#   the default value as necessary.
+#
+# - Use a name like "use_foo" or "is_foo" (whatever is more appropriate for
+#   your feature) rather than just "foo".
+#
+# - Write good comments directly above the declaration with no blank line.
+#   These comments will appear as documentation in "gn args --list".
+#
+# - Don't call exec_script inside declare_args. This will execute the script
+#   even if the value is overridden, which is wasteful. See first bullet.
+
+declare_args() {
+  # Set to enable the official build level of optimization. This has nothing
+  # to do with branding, but enables an additional level of optimization above
+  # release (!is_debug). This might be better expressed as a tri-state
+  # (debug, release, official) but for historical reasons there are two
+  # separate flags.
+  is_official_build = false
+
+  # Whether we're a traditional desktop unix.
+  is_desktop_linux = current_os == "linux"
+
+  # Set to true when compiling with the Clang compiler. Typically this is used
+  # to configure warnings.
+  is_clang =
+      current_os == "mac" || current_os == "ios" || current_os == "chromeos" ||
+      current_os == "fuchsia" ||
+      (current_os == "linux" && current_cpu != "s390x" &&
+       current_cpu != "s390" && current_cpu != "ppc64" && current_cpu != "ppc")
+
+  # Allows the path to a custom target toolchain to be injected as a single
+  # argument, and set as the default toolchain.
+  custom_toolchain = ""
+
+  # This should not normally be set as a build argument.  It's here so that
+  # every toolchain can pass through the "global" value via toolchain_args().
+  host_toolchain = ""
+
+  # DON'T ADD MORE FLAGS HERE. Read the comment above.
+}
+
+declare_args() {
+  # Debug build. Enabling official builds automatically sets is_debug to false.
+  is_debug = !is_official_build
+}
+
+declare_args() {
+  # Component build. Setting to true compiles targets declared as "components"
+  # as shared libraries loaded dynamically. This speeds up development time.
+  # When false, components will be linked statically.
+  #
+  # For more information see
+  # https://chromium.googlesource.com/chromium/src/+/master/docs/component_build.md
+  is_component_build =
+      is_debug && current_os != "ios" && current_os != "fuchsia"
+}
+
+assert(!(is_debug && is_official_build), "Can't do official debug builds")
+
+# ==============================================================================
+# TOOLCHAIN SETUP
+# ==============================================================================
+#
+# Here we set the default toolchain, as well as the variable host_toolchain
+# which will identify the toolchain corresponding to the local system when
+# doing cross-compiles. When not cross-compiling, this will be the same as the
+# default toolchain.
+#
+# We do this before anything else to make sure we complain about any
+# unsupported os/cpu combinations as early as possible.
+
+if (host_toolchain == "") {
+  # This should only happen in the top-level context.
+  # In a specific toolchain context, the toolchain_args()
+  # block should have propagated a value down.
+  # TODO(dpranke): Add some sort of assert here that verifies that
+  # no toolchain omitted host_toolchain from its toolchain_args().
+
+  if (host_os == "linux") {
+    if (target_os != "linux") {
+      # TODO(dpranke) - is_clang normally applies only to the target
+      # build, and there is no way to indicate that you want to override
+      # it for both the target build *and* the host build. Do we need to
+      # support this?
+      host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+    } else if (is_clang) {
+      host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+    } else {
+      host_toolchain = "//build/toolchain/linux:$host_cpu"
+    }
+  } else if (host_os == "mac") {
+    host_toolchain = "//build/toolchain/mac:clang_$host_cpu"
+  } else if (host_os == "win") {
+    # On Windows always use the target CPU for host builds. On the
+    # configurations we support this will always work and it saves build steps.
+    if (is_clang) {
+      host_toolchain = "//build/toolchain/win:clang_$target_cpu"
+    } else {
+      host_toolchain = "//build/toolchain/win:$target_cpu"
+    }
+  } else if (host_os == "aix") {
+    host_toolchain = "//build/toolchain/aix:$host_cpu"
+  } else {
+    assert(false, "Unsupported host_os: $host_os")
+  }
+}
+
+_default_toolchain = ""
+
+if (target_os == "android") {
+  assert(host_os == "linux" || host_os == "mac",
+         "Android builds are only supported on Linux and Mac hosts.")
+  if (is_clang) {
+    _default_toolchain = "//build/toolchain/android:android_clang_$target_cpu"
+  } else {
+    _default_toolchain = "//build/toolchain/android:android_$target_cpu"
+  }
+} else if (target_os == "chromeos" || target_os == "linux") {
+  # See comments in build/toolchain/cros/BUILD.gn about board compiles.
+  if (is_clang) {
+    _default_toolchain = "//build/toolchain/linux:clang_$target_cpu"
+  } else {
+    _default_toolchain = "//build/toolchain/linux:$target_cpu"
+  }
+} else if (target_os == "fuchsia") {
+  _default_toolchain = "//build/toolchain/fuchsia:$target_cpu"
+} else if (target_os == "ios") {
+  _default_toolchain = "//build/toolchain/mac:ios_clang_$target_cpu"
+} else if (target_os == "mac") {
+  assert(host_os == "mac", "Mac cross-compiles are unsupported.")
+  _default_toolchain = host_toolchain
+} else if (target_os == "win") {
+  # On Windows we use the same toolchain for host and target by default.
+  assert(target_os == host_os, "Win cross-compiles only work on win hosts.")
+  if (is_clang) {
+    _default_toolchain = "//build/toolchain/win:clang_$target_cpu"
+  } else {
+    _default_toolchain = "//build/toolchain/win:$target_cpu"
+  }
+} else if (target_os == "aix") {
+  _default_toolchain = "//build/toolchain/aix:$target_cpu"
+} else if (target_os == "winrt_81" || target_os == "winrt_81_phone" ||
+           target_os == "winrt_10") {
+  _default_toolchain = "//build/toolchain/win:winrt_$target_cpu"
+} else {
+  assert(false, "Unsupported target_os: $target_os")
+}
+
+# If a custom toolchain has been set in the args, set it as default. Otherwise,
+# set the default toolchain for the platform (if any).
+if (custom_toolchain != "") {
+  set_default_toolchain(custom_toolchain)
+} else if (_default_toolchain != "") {
+  set_default_toolchain(_default_toolchain)
+}
+
+# =============================================================================
+# OS DEFINITIONS
+# =============================================================================
+#
+# We set these various is_FOO booleans for convenience in writing OS-based
+# conditions.
+#
+# - is_android, is_chromeos, is_ios, and is_win should be obvious.
+# - is_mac is set only for desktop Mac. It is not set on iOS.
+# - is_posix is true for mac and any Unix-like system (basically everything
+#   except Windows).
+# - is_linux is true for desktop Linux and ChromeOS, but not Android (which is
+#   generally too different despite being based on the Linux kernel).
+#
+# Do not add more is_* variants here for random lesser-used Unix systems like
+# aix or one of the BSDs. If you need to check these, just check the
+# current_os value directly.
+
+if (current_os == "win" || current_os == "winrt_81" ||
+    current_os == "winrt_81_phone" || current_os == "winrt_10") {
+  is_android = false
+  is_chromeos = false
+  is_fuchsia = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = false
+  is_win = true
+} else if (current_os == "mac") {
+  is_android = false
+  is_chromeos = false
+  is_fuchsia = false
+  is_ios = false
+  is_linux = false
+  is_mac = true
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "android") {
+  is_android = true
+  is_chromeos = false
+  is_fuchsia = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "chromeos") {
+  is_android = false
+  is_chromeos = true
+  is_fuchsia = false
+  is_ios = false
+  is_linux = true
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "nacl") {
+  # current_os == "nacl" will be passed by the nacl toolchain definition.
+  # It is not set by default or on the command line. We treat it as a
+  # Posix variant.
+  is_android = false
+  is_chromeos = false
+  is_fuchsia = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = true
+  is_posix = true
+  is_win = false
+} else if (current_os == "fuchsia") {
+  is_android = false
+  is_chromeos = false
+  is_fuchsia = true
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "ios") {
+  is_android = false
+  is_chromeos = false
+  is_fuchsia = false
+  is_ios = true
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "linux") {
+  is_android = false
+  is_chromeos = false
+  is_fuchsia = false
+  is_ios = false
+  is_linux = true
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "aix") {
+  is_android = false
+  is_chromeos = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+}
+
+# =============================================================================
+# SOURCES FILTERS
+# =============================================================================
+#
+# These patterns filter out platform-specific files when assigning to the
+# sources variable. The magic variable |sources_assignment_filter| is applied
+# to each assignment or appending to the sources variable and matches are
+# automatically removed.
+#
+# Note that the patterns are NOT regular expressions. Only "*" and "\b" (path
+# boundary = end of string or slash) are supported, and the entire string
+# must match the pattern (so you need "*.cc" to match all .cc files, for
+# example).
+
+# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
+# below.
+sources_assignment_filter = []
+if (!is_posix) {
+  sources_assignment_filter += [
+    "*_posix.h",
+    "*_posix.cc",
+    "*_posix_unittest.h",
+    "*_posix_unittest.cc",
+    "*\bposix/*",
+  ]
+}
+
+if (!is_win) {
+  sources_assignment_filter += [
+    "*_win.cc",
+    "*_win.h",
+    "*_win_unittest.cc",
+    "*\bwin/*",
+    "*.def",
+    "*.rc",
+  ]
+}
+if (!is_mac) {
+  sources_assignment_filter += [
+    "*_mac.h",
+    "*_mac.cc",
+    "*_mac.mm",
+    "*_mac_unittest.h",
+    "*_mac_unittest.cc",
+    "*_mac_unittest.mm",
+    "*\bmac/*",
+    "*_cocoa.h",
+    "*_cocoa.cc",
+    "*_cocoa.mm",
+    "*_cocoa_unittest.h",
+    "*_cocoa_unittest.cc",
+    "*_cocoa_unittest.mm",
+    "*\bcocoa/*",
+  ]
+}
+if (!is_ios) {
+  sources_assignment_filter += [
+    "*_ios.h",
+    "*_ios.cc",
+    "*_ios.mm",
+    "*_ios_unittest.h",
+    "*_ios_unittest.cc",
+    "*_ios_unittest.mm",
+    "*\bios/*",
+  ]
+}
+if (!is_mac && !is_ios) {
+  sources_assignment_filter += [ "*.mm" ]
+}
+if (!is_linux) {
+  sources_assignment_filter += [
+    "*_linux.h",
+    "*_linux.cc",
+    "*_linux_unittest.h",
+    "*_linux_unittest.cc",
+    "*\blinux/*",
+  ]
+}
+if (!is_android) {
+  sources_assignment_filter += [
+    "*_android.h",
+    "*_android.cc",
+    "*_android_unittest.h",
+    "*_android_unittest.cc",
+    "*\bandroid/*",
+  ]
+}
+if (!is_chromeos) {
+  sources_assignment_filter += [
+    "*_chromeos.h",
+    "*_chromeos.cc",
+    "*_chromeos_unittest.h",
+    "*_chromeos_unittest.cc",
+    "*\bchromeos/*",
+  ]
+}
+
+# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
+# below.
+
+# Actually save this list.
+#
+# These patterns are executed for every file in the source tree of every run.
+# Therefore, adding more patterns slows down the build for everybody. We should
+# only add automatic patterns for configurations affecting hundreds of files
+# across many projects in the tree.
+#
+# Therefore, we only add rules to this list corresponding to platforms on the
+# Chromium waterfall.  This is not for non-officially-supported platforms
+# (FreeBSD, etc.) toolkits, (X11, GTK, etc.), or features. For these cases,
+# write a conditional in the target to remove the file(s) from the list when
+# your platform/toolkit/feature doesn't apply.
+set_sources_assignment_filter(sources_assignment_filter)
+
+# =============================================================================
+# TARGET DEFAULTS
+# =============================================================================
+#
+# Set up the default configuration for every build target of the given type.
+# The values configured here will be automatically set on the scope of the
+# corresponding target. Target definitions can add or remove to the settings
+# here as needed.
+
+# Holds all configs used for running the compiler.
+default_compiler_configs = [
+  "//build/config:feature_flags",
+  "//build/config/compiler:afdo",
+  "//build/config/compiler:compiler",
+  "//build/config/compiler:pthread",
+  "//build/config/compiler:clang_stackrealign",
+  "//build/config/compiler:compiler_arm_fpu",
+  "//build/config/compiler:compiler_arm_thumb",
+  "//build/config/compiler:chromium_code",
+  "//build/config/compiler:default_include_dirs",
+  "//build/config/compiler:default_optimization",
+  "//build/config/compiler:default_stack_frames",
+  "//build/config/compiler:default_symbols",
+  "//build/config/compiler:no_rtti",
+  "//build/config/compiler:runtime_library",
+  "//build/config/sanitizers:default_sanitizer_flags",
+]
+if (is_win) {
+  default_compiler_configs += [
+    "//build/config/win:default_crt",
+    "//build/config/win:lean_and_mean",
+    "//build/config/win:nominmax",
+    "//build/config/win:unicode",
+    "//build/config/win:winver",
+    "//build/config/win:vs_code_analysis",
+  ]
+}
+if (current_os == "winrt_81" || current_os == "winrt_81_phone" ||
+    current_os == "winrt_10") {
+  default_compiler_configs += [ "//build/config/win:target_winrt" ]
+}
+
+if (is_posix) {
+  default_compiler_configs += [ "//build/config/gcc:no_exceptions" ]
+  if (current_os != "aix") {
+    default_compiler_configs +=
+        [ "//build/config/gcc:symbol_visibility_hidden" ]
+  }
+}
+
+if (is_android) {
+  default_compiler_configs +=
+      [ "//build/config/android:default_cygprofile_instrumentation" ]
+}
+
+if (is_clang && !is_nacl) {
+  default_compiler_configs += [
+    "//build/config/clang:find_bad_constructs",
+    "//build/config/clang:extra_warnings",
+  ]
+}
+
+# Debug/release-related defines.
+if (is_debug) {
+  default_compiler_configs += [ "//build/config:debug" ]
+} else {
+  default_compiler_configs += [ "//build/config:release" ]
+}
+
+# Static libraries and source sets use only the compiler ones.
+set_defaults("static_library") {
+  configs = default_compiler_configs
+}
+set_defaults("source_set") {
+  configs = default_compiler_configs
+}
+
+# Compute the set of configs common to all linked targets (shared libraries,
+# loadable modules, executables) to avoid duplication below.
+if (is_win) {
+  # Many targets remove these configs, so they are not contained within
+  # //build/config:executable_config for easy removal.
+  _linker_configs = [
+    "//build/config/win:default_incremental_linking",
+
+    # Default to console-mode apps. Most of our targets are tests and such
+    # that shouldn't use the windows subsystem.
+    "//build/config/win:console",
+  ]
+} else if (is_mac) {
+  _linker_configs = [ "//build/config/mac:strip_all" ]
+} else {
+  _linker_configs = []
+}
+
+# Executable defaults.
+default_executable_configs = default_compiler_configs + [
+                               "//build/config:default_libs",
+                               "//build/config:executable_config",
+                             ] + _linker_configs
+set_defaults("executable") {
+  configs = default_executable_configs
+}
+
+# Shared library and loadable module defaults (also for components in component
+# mode).
+default_shared_library_configs = default_compiler_configs + [
+                                   "//build/config:default_libs",
+                                   "//build/config:shared_library_config",
+                                 ] + _linker_configs
+if (is_android) {
+  # Strip native JNI exports from shared libraries by default. Binaries that
+  # want this can remove this config.
+  default_shared_library_configs +=
+      [ "//build/config/android:hide_all_but_jni_onload" ]
+}
+set_defaults("shared_library") {
+  configs = default_shared_library_configs
+}
+set_defaults("loadable_module") {
+  configs = default_shared_library_configs
+
+  # loadable_modules are generally used by other libs, not just via JNI.
+  if (is_android) {
+    configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
+  }
+}
+
+# ==============================================================================
+# COMPONENT SETUP
+# ==============================================================================
+
+# Defines a component, which equates to a shared_library when
+# is_component_build == true and a static_library otherwise.
+#
+# Use static libraries for the static build rather than source sets because
+# many of our test binaries link many large dependencies but often don't
+# use large portions of them. The static libraries are much more efficient to
+# link in this situation since only the necessary object files are linked.
+#
+# The invoker can override the type of the target in the non-component-build
+# case by setting static_component_type to either "source_set" or
+# "static_library". If unset, the default will be used.
+template("component") {
+  if (is_component_build) {
+    _component_mode = "shared_library"
+  } else if (defined(invoker.static_component_type)) {
+    assert(invoker.static_component_type == "static_library" ||
+           invoker.static_component_type == "source_set")
+    _component_mode = invoker.static_component_type
+  } else if (is_android || !defined(invoker.sources)) {
+    # When there are no sources defined, use a source set to avoid creating
+    # an empty static library (which generally doesn't work).
+    #
+    # When we changed components to default from source sets to static
+    # libraries, an Android benchmark regressed slightly
+    # (https://crbug.com/619593). We don't have a good theory on why this might
+    # be since theoretically it should be the same. It could be something as
+    # silly as random code locality luck.
+    #
+    # There seems to be no build-time performance hit to using source sets on
+    # Android (the normal reason for defaulting to static libraries), so we
+    # make the default on Android to be source set.
+    #
+    # If it's been a long time since this was added and you're skeptical,
+    # please feel free to remove the Android exception and see if any
+    # benchmarks obviously regress. If not, it would be great to standardize
+    # with the rest of the platforms.
+    _component_mode = "source_set"
+  } else {
+    _component_mode = "static_library"
+  }
+  target(_component_mode, target_name) {
+    # Explicitly forward visibility, implicitly forward everything else.
+    # Forwarding "*" doesn't recurse into nested scopes (to avoid copying all
+    # globals into each template invocation), so won't pick up file-scoped
+    # variables. Normally this isn't too bad, but visibility is commonly
+    # defined at the file scope. Explicitly forwarding visibility and then
+    # excluding it from the "*" set works around this problem.
+    # See http://crbug.com/594610
+    forward_variables_from(invoker, [ "visibility" ])
+    forward_variables_from(invoker, "*", [ "visibility" ])
+
+    # All shared libraries must have the sanitizer deps to properly link in
+    # asan mode (this target will be empty in other cases).
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ "//build/config:exe_and_shlib_deps" ]
+  }
+}
+
+# Component defaults
+set_defaults("component") {
+  if (is_component_build) {
+    configs = default_shared_library_configs
+    if (is_android) {
+      configs -= [ "//build/config/android:hide_all_but_jni_onload" ]
+    }
+  } else {
+    configs = default_compiler_configs
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/OWNERS
@@ -0,0 +1,6 @@
+brettw@chromium.org
+dpranke@chromium.org
+scottmg@chromium.org
+
+per-file BUILDCONFIG.gn=brettw@chromium.org
+per-file BUILDCONFIG.gn=set noparent
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/aix/BUILD.gn
@@ -0,0 +1,50 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/toolchain.gni")
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic.
+
+config("compiler") {
+  # These flags are shared between the C compiler and linker.
+  defines = [
+    "_LINUX_SOURCE_COMPAT=1",
+    "__STDC_FORMAT_MACROS",
+    "_ALL_SOURCE=1",
+  ]
+
+  cflags = [
+    "-Wall",
+    "-Wno-unused-parameter",
+    "-pthread",
+    "-Wmissing-field-initializers",
+    "-Wno-uninitialized",
+    "-mcpu=power5+",
+    "-mfprnd",
+    "-mno-popcntb",
+    "-maix64",
+    "-fdata-sections",
+    "-ffunction-sections",
+    "-O3",
+
+    # "-Werror"
+    # We need to find a way to fix the TOC warnings if we want to enable this.
+  ]
+
+  cflags_cc = [
+    "-std=gnu++11",
+    "-fno-rtti",
+    "-fno-exceptions",
+    "-Wno-narrowing",
+    "-Wnon-virtual-dtor",
+  ]
+
+  ldflags = [
+    "-pthread",
+    "-maix64",
+    "-Wl,-bbigtoc",
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/allocator.gni
@@ -0,0 +1,53 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+# Temporarily disable tcmalloc on arm64 linux to get rid of compilation errors.
+if (is_android || current_cpu == "mipsel" || is_mac || is_ios || is_asan ||
+    is_lsan || is_tsan || is_msan || is_win || is_syzyasan || is_fuchsia ||
+    (is_linux && target_cpu == "arm64")) {
+  _default_allocator = "none"
+} else {
+  _default_allocator = "tcmalloc"
+}
+
+# The debug CRT on Windows has some debug features that are incompatible with
+# the shim. NaCl in particular does seem to link some binaries statically
+# against the debug CRT with "is_nacl=false".
+if ((is_linux || is_android || is_mac ||
+     (is_win && !is_component_build && !is_debug)) && !is_asan && !is_lsan &&
+    !is_tsan && !is_msan) {
+  _default_use_allocator_shim = true
+} else {
+  _default_use_allocator_shim = false
+}
+
+declare_args() {
+  # Memory allocator to use. Set to "none" to use default allocator.
+  use_allocator = _default_allocator
+
+  # Causes all the allocations to be routed via allocator_shim.cc.
+  use_allocator_shim = _default_use_allocator_shim
+}
+
+if (is_nacl) {
+  # Turn off the build flag for NaCl builds to minimize confusion, as NaCl
+  # doesn't support the heap shim.
+  use_allocator_shim = false
+}
+
+assert(use_allocator == "none" || use_allocator == "tcmalloc")
+
+assert(!is_win || use_allocator == "none", "Tcmalloc doesn't work on Windows.")
+assert(!is_mac || use_allocator == "none", "Tcmalloc doesn't work on macOS.")
+
+assert(
+    !use_allocator_shim || is_linux || is_android || is_win || is_mac,
+    "use_allocator_shim is supported only on Linux, Android, Windows and macOS targets")
+
+if (is_win && use_allocator_shim) {
+  assert(!is_component_build,
+         "The allocator shim doesn't work for the component build on Windows.")
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/android/BUILD.gn
@@ -0,0 +1,237 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+assert(is_android)
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# Android-only.
+config("compiler") {
+  cflags = [
+    "-ffunction-sections",
+    "-fno-short-enums",
+  ]
+  defines = [
+    "ANDROID",
+
+    # The NDK has these things, but doesn't define the constants to say that it
+    # does. Define them here instead.
+    "HAVE_SYS_UIO_H",
+
+    # Forces full rebuilds on NDK rolls.
+    "ANDROID_NDK_VERSION=${android_ndk_version}",
+  ]
+
+  if (is_clang) {
+    if (current_cpu == "mips64el") {
+      cflags += [
+        # Have to force IAS for mips64.
+        "-fintegrated-as",
+      ]
+    }
+  } else {
+    # Clang doesn't support these flags.
+    cflags += [ "-finline-limit=64" ]
+  }
+
+  ldflags = [
+    "-Wl,--no-undefined",
+
+    # Don't allow visible symbols from libgcc or libc++ to be
+    # re-exported.
+    "-Wl,--exclude-libs=libgcc.a",
+    "-Wl,--exclude-libs=libc++_static.a",
+
+    # Don't allow visible symbols from libraries that contain
+    # assembly code with symbols that aren't hidden properly.
+    # http://crbug.com/448386
+    "-Wl,--exclude-libs=libvpx_assembly_arm.a",
+  ]
+
+  if (is_clang) {
+    if (current_cpu == "arm") {
+      abi_target = "arm-linux-androideabi"
+    } else if (current_cpu == "x86") {
+      abi_target = "i686-linux-androideabi"
+    } else if (current_cpu == "arm64") {
+      abi_target = "aarch64-linux-android"
+    } else if (current_cpu == "x64") {
+      # Place holder for x64 support, not tested.
+      # TODO: Enable clang support for Android x64. http://crbug.com/539781
+      abi_target = "x86_64-linux-androideabi"
+    } else if (current_cpu == "mipsel") {
+      abi_target = "mipsel-linux-android"
+    } else if (current_cpu == "mips64el") {
+      # Place holder for mips64 support, not tested.
+      abi_target = "mips64el-linux-androideabi"
+    } else {
+      assert(false, "Architecture not supported")
+    }
+    cflags += [ "--target=$abi_target" ]
+    ldflags += [ "--target=$abi_target" ]
+  }
+
+  # Assign any flags set for the C compiler to asmflags so that they are sent
+  # to the assembler.
+  asmflags = cflags
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Android-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  # NOTE: The libc++ header include paths below are specified in cflags_cc
+  # rather than include_dirs because they need to come after include_dirs.
+  # Think of them like system headers, but don't use '-isystem' because the
+  # arm-linux-androideabi-4.4.3 toolchain (circa Gingerbread) will exhibit
+  # strange errors. The include ordering here is important; change with
+  # caution.
+  cflags_cc = []
+  if (android_ndk_major_version >= 13) {
+    libcxx_include_path =
+        rebase_path("$android_libcpp_root/include", root_build_dir)
+    libcxxabi_include_path =
+        rebase_path("$android_ndk_root/sources/cxx-stl/llvm-libc++abi/include",
+                    root_build_dir)
+
+    if (!is_clang) {
+      # Per the release notes, GCC is not supported in the NDK starting with
+      # r13. It's still present, though, and has conflicting declarations of
+      # float abs(float).
+      cflags_cc += [ "-Wno-attributes" ]
+    }
+  } else {
+    libcxx_include_path =
+        rebase_path("$android_libcpp_root/libcxx/include", root_build_dir)
+    libcxxabi_include_path = rebase_path(
+            "$android_ndk_root/sources/cxx-stl/llvm-libc++abi/libcxxabi/include",
+            root_build_dir)
+  }
+  cflags_cc += [
+    "-isystem" + libcxx_include_path,
+    "-isystem" + libcxxabi_include_path,
+    "-isystem" +
+        rebase_path("$android_ndk_root/sources/android/support/include",
+                    root_build_dir),
+  ]
+
+  defines = [ "__GNU_SOURCE=1" ]  # Necessary for clone().
+  ldflags = [ "-nostdlib" ]
+  lib_dirs = [ android_libcpp_lib_dir ]
+
+  # The libc++ runtime library (must come first).
+  # ASan needs to dynamically link to libc++ even in static builds so
+  # that it can interpose operator new.
+  if (is_component_build || is_asan) {
+    libs = [ "c++_shared" ]
+  } else {
+    libs = [ "c++_static" ]
+  }
+  libs += [
+    "c++abi",
+    "android_support",
+  ]
+
+  # arm builds of libc++ starting in NDK r12 depend on unwind.
+  if (current_cpu == "arm") {
+    libs += [ "unwind" ]
+  }
+
+  # Manually link the libgcc.a that the cross compiler uses. This is
+  # absolute because the linker will look inside the sysroot if it's not.
+  libs += [
+    rebase_path(android_libgcc_file),
+    "c",
+  ]
+
+  # Clang with libc++ does not require an explicit atomic library reference.
+  if (!is_clang) {
+    libs += [ "atomic" ]
+  }
+
+  if (is_clang) {
+    # Work around incompatibilities between bionic and clang headers.
+    defines += [
+      "__compiler_offsetof=__builtin_offsetof",
+      "nan=__builtin_nan",
+    ]
+
+    if (current_cpu == "x64" || current_cpu == "arm64" ||
+        current_cpu == "mips64el") {
+      # 64-bit targets build with NDK 21, 32-bit targets with NDK 16
+      # (see ./config.gni).  When using clang, NDK 21 defines snprintf to
+      # something for a kind of _FORTIFY_SOURCE support, see
+      # third_party/android_tools/ndk/platforms/android-21/arch-x86_64/usr/include/stdio.h
+      # Making snprintf a macro breaks base/strings/string_utils.h which
+      # defines base::snprintf().  So define snprintf to itself to force the
+      # NDK to not redefine it.  This disables _chk for snprintf, but since
+      # 32-bit versions use NDK 16 which doesn't have any fortify support, that
+      # seems ok.  b/32067310 tracks better fortify support with clang.
+      # TODO(thakis): Remove this once b/32067310 is fixed.
+      defines += [ "snprintf=snprintf" ]
+    }
+  }
+
+  # TODO(jdduke) Re-enable on mips after resolving linking
+  # issues with libc++ (crbug.com/456380).
+  if (current_cpu != "mipsel" && current_cpu != "mips64el") {
+    ldflags += [ "-Wl,--warn-shared-textrel" ]
+  }
+}
+
+config("executable_config") {
+  cflags = [ "-fPIE" ]
+  asmflags = [ "-fPIE" ]
+  ldflags = [ "-pie" ]
+}
+
+config("hide_all_but_jni_onload") {
+  ldflags = [ "-Wl,--version-script=" + rebase_path(
+                  "//build/android/android_only_explicit_jni_exports.lst") ]
+}
+
+config("hide_all_but_jni") {
+  ldflags = [ "-Wl,--version-script=" +
+              rebase_path("//build/android/android_only_jni_exports.lst") ]
+}
+
+# Instrumentation -------------------------------------------------------------
+#
+# The BUILDCONFIG file sets the "default_cygprofile_instrumentation" config on
+# targets by default. You can override whether the cygprofile instrumentation is
+# used on a per-target basis:
+#
+# configs -= [ "//build/config/android:default_cygprofile_instrumentation" ]
+# configs += [ "//build/config/android:no_cygprofile_instrumentation" ]
+
+config("default_cygprofile_instrumentation") {
+  if (use_order_profiling) {
+    configs = [ ":cygprofile_instrumentation" ]
+  } else {
+    configs = [ ":no_cygprofile_instrumentation" ]
+  }
+}
+
+config("cygprofile_instrumentation") {
+  defines = [ "CYGPROFILE_INSTRUMENTATION=1" ]
+  cflags = [ "-finstrument-functions" ]
+
+  if (!is_clang) {
+    cflags += [
+      # Allow mmx intrinsics to inline, so that the compiler can expand the intrinsics.
+      "-finstrument-functions-exclude-file-list=mmintrin.h",
+
+      # Avoid errors with current NDK:
+      # "third_party/android_tools/ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/../lib/gcc/arm-linux-androideabi/4.6/include/arm_neon.h:3426:3: error: argument must be a constant"
+      "-finstrument-functions-exclude-file-list=arm_neon.h",
+    ]
+  }
+}
+
+config("no_cygprofile_instrumentation") {
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/android/OWNERS
@@ -0,0 +1,3 @@
+agrieve@chromium.org
+
+# COMPONENT: Build
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/android/config.gni
@@ -0,0 +1,374 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains common system config stuff for the Android build.
+
+if (is_android) {
+  import("//build_overrides/build.gni")
+
+  has_chrome_android_internal =
+      exec_script("//build/dir_exists.py",
+                  [ rebase_path("//clank", root_build_dir) ],
+                  "string") == "True"
+
+  # We are using a separate declare_args block for only this argument so that
+  # we can decide if we have to pull in definitions from the internal config
+  # early.
+  declare_args() {
+    # Enables using the internal Chrome for Android repository. The default
+    # value depends on whether the repository is available, and if it's not but
+    # this argument is manually set to True, the generation will fail.
+    # The main purpose of this argument is to avoid having to maintain 2
+    # repositories to support both public only and internal builds.
+    enable_chrome_android_internal = has_chrome_android_internal
+  }
+
+  if (enable_chrome_android_internal) {
+    import("//clank/config.gni")
+  }
+
+  if (!defined(extra_chrome_shared_library_configs)) {
+    extra_chrome_shared_library_configs = []
+  }
+
+  if (!defined(default_android_ndk_root)) {
+    default_android_ndk_root = "//third_party/android_tools/ndk"
+    default_android_ndk_version = "r12b"
+    default_android_ndk_major_version = 12
+  } else {
+    assert(defined(default_android_ndk_version))
+    assert(defined(default_android_ndk_major_version))
+  }
+
+  if (!defined(default_android_sdk_root)) {
+    default_android_sdk_root = "//third_party/android_tools/sdk"
+    default_android_sdk_version = "25"
+    default_android_sdk_build_tools_version = "25.0.2"
+  }
+
+  if (!defined(default_lint_android_sdk_root)) {
+    # Purposefully repeated so that downstream can change
+    # default_android_sdk_root without changing lint version.
+    default_lint_android_sdk_root = "//third_party/android_tools/sdk"
+    default_lint_android_sdk_version = "25"
+  }
+
+  if (!defined(default_extras_android_sdk_root)) {
+    # Purposefully repeated so that downstream can change
+    # default_android_sdk_root without changing where we load the SDK extras
+    # from. (Google Play services, etc.)
+    default_extras_android_sdk_root = "//third_party/android_tools/sdk"
+  }
+
+  if (!defined(default_android_keystore_path)) {
+    default_android_keystore_path =
+        "//build/android/ant/chromium-debug.keystore"
+    default_android_keystore_name = "chromiumdebugkey"
+    default_android_keystore_password = "chromium"
+  }
+
+  # TODO(paulmiller): Remove; superseded by google_play_services_package.
+  if (!defined(google_play_services_library)) {
+    google_play_services_library =
+        "//third_party/android_tools:google_play_services_default_java"
+  }
+
+  # TODO(paulmiller): Remove; superseded by google_play_services_package.
+  if (!defined(google_play_services_resources)) {
+    google_play_services_resources =
+        "//third_party/android_tools:google_play_services_default_resources"
+  }
+
+  # google_play_services_package contains the path where individual client
+  # targets (e.g. google_play_services_base_java) are located.
+  if (!defined(google_play_services_package)) {
+    google_play_services_package = "//third_party/android_tools"
+  }
+
+  webview_public_framework_jar =
+      "//third_party/android_platform/webview/frameworks_7.1.1_r28.jar"
+  if (!defined(webview_framework_jar)) {
+    webview_framework_jar = webview_public_framework_jar
+  }
+
+  declare_args() {
+    android_ndk_root = default_android_ndk_root
+    android_ndk_version = default_android_ndk_version
+    android_ndk_major_version = default_android_ndk_major_version
+
+    android_sdk_root = default_android_sdk_root
+    android_sdk_version = default_android_sdk_version
+    android_sdk_build_tools_version = default_android_sdk_build_tools_version
+
+    lint_android_sdk_root = default_lint_android_sdk_root
+    lint_android_sdk_version = default_lint_android_sdk_version
+
+    # Libc++ library directory. Override to use a custom libc++ binary.
+    android_libcpp_lib_dir = ""
+
+    # Android versionCode for android_apk()s that don't explicitly set one.
+    android_default_version_code = "1"
+
+    # Android versionName for android_apk()s that don't explicitly set one.
+    android_default_version_name = "Developer Build"
+
+    # The path to the keystore to use for signing builds.
+    android_keystore_path = default_android_keystore_path
+
+    # The name of the keystore to use for signing builds.
+    android_keystore_name = default_android_keystore_name
+
+    # The password for the keystore to use for signing builds.
+    android_keystore_password = default_android_keystore_password
+
+    # Set to true to run findbugs on JAR targets.
+    run_findbugs = false
+
+    # Set to true to enable verbose findbugs logging. This does nothing if
+    # run_findbugs is false.
+    findbugs_verbose = false
+
+    # Enables verbose proguard output (summaries and unfiltered output).
+    proguard_verbose = false
+
+    # Java debug on Android. Having this on enables multidexing, and turning it
+    # off will enable proguard.
+    is_java_debug = is_debug
+
+    # Set to true to enable the Errorprone compiler
+    use_errorprone_java_compiler = false
+
+    # Enables EMMA Java code coverage. Instruments classes during build to
+    # produce .ec files during runtime
+    emma_coverage = false
+
+    # EMMA filter string consisting of a list of inclusion/exclusion patterns
+    # separated with whitespace and/or comma. Only has effect if
+    # emma_coverage==true
+    emma_filter = ""
+
+    # Disables process isolation when building _incremental targets.
+    # Required for Android M+ due to SELinux policies (stronger sandboxing).
+    disable_incremental_isolated_processes = false
+
+    # Speeds up incremental compiles by compiling only changed files.
+    enable_incremental_javac = false
+
+    # Adds instrumentation to each function. Writes a file with the order that
+    # functions are called at startup.
+    use_order_profiling = false
+
+    # Builds secondary abi for APKs, supporting building 32-bit arch as secondary
+    # abi in 64-bit Monochrome and WebView.
+    build_apk_secondary_abi = true
+
+    # Enables java8 language features (via retrolambda).
+    # work-in-progress (http://crbug.com/642600)
+    use_java8 = false
+
+    # Build incremental targets whenever possible.
+    # Ex. with this arg set to true, the chrome_public_apk target result in
+    # chrome_public_apk_incremental being built.
+    incremental_apk_by_default = false
+  }
+
+  # We need a second declare_args block to make sure we are using the overridden
+  # value of the arguments set above.
+  declare_args() {
+    # Speed up dexing using dx --incremental.
+    enable_incremental_dx = is_java_debug
+  }
+
+  # Neither of these should ever be used for release builds since they are
+  # somewhat experimental and dx --incremental is known to not produce
+  # byte-for-byte identical output.
+  assert(!(enable_incremental_dx && !is_java_debug))
+  assert(!(enable_incremental_javac && !is_java_debug))
+
+  # Host stuff -----------------------------------------------------------------
+
+  # Defines the name the Android build gives to the current host CPU
+  # architecture, which is different than the names GN uses.
+  if (host_cpu == "x64") {
+    android_host_arch = "x86_64"
+  } else if (host_cpu == "x86") {
+    android_host_arch = "x86"
+  } else {
+    assert(false, "Need Android toolchain support for your build CPU arch.")
+  }
+
+  # Defines the name the Android build gives to the current host CPU
+  # architecture, which is different than the names GN uses.
+  if (host_os == "linux") {
+    android_host_os = "linux"
+  } else if (host_os == "mac") {
+    android_host_os = "darwin"
+  } else {
+    assert(false, "Need Android toolchain support for your build OS.")
+  }
+
+  # Directories and files ------------------------------------------------------
+  #
+  # We define many of the dir strings here for each output architecture (rather
+  # than just the current one) since these are needed by the Android toolchain
+  # file to define toolchains for all possible targets in one pass.
+
+  android_sdk = "${android_sdk_root}/platforms/android-${android_sdk_version}"
+
+  # Path to the Android NDK and SDK.
+  android_ndk_include_dir = "$android_ndk_root/usr/include"
+
+  android_sdk_tools = "${android_sdk_root}/tools"
+  android_sdk_build_tools =
+      "${android_sdk_root}/build-tools/$android_sdk_build_tools_version"
+
+  # Path to the SDK's android.jar
+  android_sdk_jar = "$android_sdk/android.jar"
+
+  zipalign_path = "$android_sdk_build_tools/zipalign"
+
+  # Subdirectories inside android_ndk_root that contain the sysroot for the
+  # associated platform.
+  # If you raise this, reevaluate the snprintf=snprintf in ./BUILD.gn.
+  _android_api_level = 16
+  x86_android_sysroot_subdir =
+      "platforms/android-${_android_api_level}/arch-x86"
+  arm_android_sysroot_subdir =
+      "platforms/android-${_android_api_level}/arch-arm"
+  mips_android_sysroot_subdir =
+      "platforms/android-${_android_api_level}/arch-mips"
+
+  # If you raise this, reevaluate the snprintf=snprintf in ./BUILD.gn.
+  _android64_api_level = 21
+  x86_64_android_sysroot_subdir =
+      "platforms/android-${_android64_api_level}/arch-x86_64"
+  arm64_android_sysroot_subdir =
+      "platforms/android-${_android64_api_level}/arch-arm64"
+  mips64_android_sysroot_subdir =
+      "platforms/android-${_android64_api_level}/arch-mips64"
+
+  # Toolchain root directory for each build. The actual binaries are inside
+  # a "bin" directory inside of these.
+  _android_toolchain_version = "4.9"
+  _android_toolchain_detailed_version = "4.9.x"
+  x86_android_toolchain_root = "$android_ndk_root/toolchains/x86-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  arm_android_toolchain_root = "$android_ndk_root/toolchains/arm-linux-androideabi-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  mips_android_toolchain_root = "$android_ndk_root/toolchains/mipsel-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  x86_64_android_toolchain_root = "$android_ndk_root/toolchains/x86_64-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  arm64_android_toolchain_root = "$android_ndk_root/toolchains/aarch64-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  mips64_android_toolchain_root = "$android_ndk_root/toolchains/mips64el-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+
+  # Location of libgcc. This is only needed for the current GN toolchain, so we
+  # only need to define the current one, rather than one for every platform
+  # like the toolchain roots.
+  if (current_cpu == "x86") {
+    android_prebuilt_arch = "android-x86"
+    _binary_prefix = "i686-linux-android"
+    android_toolchain_root = "$x86_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/i686-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
+  } else if (current_cpu == "arm") {
+    android_prebuilt_arch = "android-arm"
+    _binary_prefix = "arm-linux-androideabi"
+    android_toolchain_root = "$arm_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/arm-linux-androideabi/${_android_toolchain_detailed_version}/libgcc.a"
+  } else if (current_cpu == "mipsel") {
+    android_prebuilt_arch = "android-mips"
+    _binary_prefix = "mipsel-linux-android"
+    android_toolchain_root = "$mips_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/mipsel-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
+  } else if (current_cpu == "x64") {
+    android_prebuilt_arch = "android-x86_64"
+    _binary_prefix = "x86_64-linux-android"
+    android_toolchain_root = "$x86_64_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/x86_64-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
+  } else if (current_cpu == "arm64") {
+    android_prebuilt_arch = "android-arm64"
+    _binary_prefix = "aarch64-linux-android"
+    android_toolchain_root = "$arm64_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/aarch64-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
+  } else if (current_cpu == "mips64el") {
+    android_prebuilt_arch = "android-mips64"
+    _binary_prefix = "mips64el-linux-android"
+    android_toolchain_root = "$mips64_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/mips64el-linux-android/${_android_toolchain_detailed_version}/libgcc.a"
+  } else {
+    assert(false, "Need android libgcc support for your target arch.")
+  }
+
+  android_tool_prefix = "$android_toolchain_root/bin/$_binary_prefix-"
+  android_readelf = "${android_tool_prefix}readelf"
+  android_objcopy = "${android_tool_prefix}objcopy"
+  android_gdbserver =
+      "$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver"
+
+  # Toolchain stuff ------------------------------------------------------------
+
+  android_libcpp_root = "$android_ndk_root/sources/cxx-stl/llvm-libc++"
+
+  # ABI ------------------------------------------------------------------------
+
+  if (current_cpu == "x86") {
+    android_app_abi = "x86"
+  } else if (current_cpu == "arm") {
+    import("//build/config/arm.gni")
+    if (arm_version < 7) {
+      android_app_abi = "armeabi"
+    } else {
+      android_app_abi = "armeabi-v7a"
+    }
+  } else if (current_cpu == "mipsel") {
+    android_app_abi = "mips"
+  } else if (current_cpu == "x64") {
+    android_app_abi = "x86_64"
+  } else if (current_cpu == "arm64") {
+    android_app_abi = "arm64-v8a"
+  } else if (current_cpu == "mips64el") {
+    android_app_abi = "mips64"
+  } else {
+    assert(false, "Unknown Android ABI: " + current_cpu)
+  }
+
+  if (android_libcpp_lib_dir == "") {
+    android_libcpp_lib_dir = "${android_libcpp_root}/libs/${android_app_abi}"
+  }
+
+  # Secondary ABI -------------------------------------------------------------
+  if (target_cpu == "arm64" || target_cpu == "x64" || target_cpu == "mips64el") {
+    android_64bit_target_cpu = true
+  } else if (target_cpu == "arm" || target_cpu == "x86" ||
+             target_cpu == "mipsel") {
+    android_64bit_target_cpu = false
+  } else {
+    assert(false, "Unknown target CPU: $target_cpu")
+  }
+
+  # Intentionally do not define android_secondary_abi_cpu and
+  # android_app_secondary_abi for 32-bit target_cpu, since they are not used.
+  if (target_cpu == "arm64") {
+    android_secondary_abi_cpu = "arm"
+    android_app_secondary_abi = "armeabi-v7a"
+  } else if (target_cpu == "x64") {
+    android_secondary_abi_cpu = "x86"
+    android_app_secondary_abi = "x86"
+  } else if (target_cpu == "mips64el") {
+    android_secondary_abi_cpu = "mipsel"
+    android_app_secondary_abi = "mips"
+  }
+
+  if (defined(android_secondary_abi_cpu)) {
+    if (is_clang) {
+      android_secondary_abi_toolchain =
+          "//build/toolchain/android:android_clang_${android_secondary_abi_cpu}"
+    } else {
+      android_secondary_abi_toolchain =
+          "//build/toolchain/android:android_${android_secondary_abi_cpu}"
+    }
+  }
+}
+
+declare_args() {
+  # Enables used resource whitelist generation. Set for official builds only
+  # as a large amount of build output is generated.
+  enable_resource_whitelist_generation = is_android && is_official_build
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/android/internal_rules.gni
@@ -0,0 +1,2894 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Do not add any imports to non-//build directories here.
+# Some projects (e.g. V8) do not have non-build directories DEPS'ed in.
+import("//build_overrides/build.gni")
+import("//build/config/android/config.gni")
+import("//build/config/dcheck_always_on.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+assert(is_android)
+
+# These identify targets that have .build_config files (except for android_apk,
+# java_binary, resource_rewriter, since we never need to depend on these).
+# These patterns are used with set_sources_assignment_filter() below to
+# pattern-match target labels (GN has no direct string pattern matching).
+_java_target_whitelist = [
+  "*:*_java",
+  "*:*_javalib",
+  "*:*_java_*",  # e.g. java_test_support
+  "*:java",
+  "*:junit",
+  "*:junit_*",
+  "*:*_junit_*",
+  "*:*javatests",
+  "*:*_assets",
+  "*android*:assets",
+  "*:*_apk_*resources",
+  "*android*:resources",
+  "*:*_resources",
+  "*:*_grd",
+  "*:*locale_paks",
+
+  # TODO(agrieve): Rename targets below to match above patterns.
+  "*android_webview/glue:glue",
+  "//chrome/test/android/cast_emulator:cast_emulator",
+]
+
+# Targets that match the whitelist but are not actually java targets.
+_java_target_blacklist = [
+  "//chrome:packed_resources",
+  "*:*_unpack_aar",
+]
+
+# Default ProGuard jar, used by the proguard template below unless the caller
+# supplies proguard_jar_path.
+_default_proguard_jar_path = "//third_party/proguard/lib/proguard.jar"
+
+# Write the target's .build_config file. This is a json file that contains a
+# dictionary of information about how to build this target (things that
+# require knowledge about this target's dependencies and cannot be calculated
+# at gn-time). There is a special syntax to add a value in that dictionary to
+# an action/action_foreachs args:
+#   --python-arg=@FileArg($rebased_build_config_path:key0:key1)
+# At runtime, such an arg will be replaced by the value in the build_config.
+# See build/android/gyp/write_build_config.py and
+# build/android/gyp/util/build_utils.py:ExpandFileArgs
+template("write_build_config") {
+  type = invoker.type
+  _is_prebuilt_binary =
+      defined(invoker.is_prebuilt_binary) && invoker.is_prebuilt_binary
+
+  # Don't need to enforce naming scheme for these targets since we never
+  # consider them in dependency chains.
+  if (!_is_prebuilt_binary && type != "android_apk" && type != "java_binary" &&
+      type != "resource_rewriter" && type != "dist_jar") {
+    # GN has no string pattern matching, so this naming check (ab)uses
+    # set_sources_assignment_filter(): assigning a label to |sources| drops it
+    # again iff it matches the active filter. A label that survives the
+    # whitelist filter (sources != []) did not match the java naming scheme;
+    # if it also survives the blacklist filter, it is flagged as invalid.
+    set_sources_assignment_filter(_java_target_whitelist)
+    _parent_invoker = invoker.invoker
+    _target_label =
+        get_label_info(":${_parent_invoker.target_name}", "label_no_toolchain")
+    sources = [
+      _target_label,
+    ]
+    if (sources != []) {
+      set_sources_assignment_filter(_java_target_blacklist)
+      sources = []
+      sources = [
+        _target_label,
+      ]
+      if (sources != []) {
+        assert(false, "Invalid java target name: $_target_label")
+      }
+    }
+    sources = []
+  }
+
+  action(target_name) {
+    set_sources_assignment_filter([])
+    build_config = invoker.build_config
+
+    assert(type == "android_apk" || type == "java_library" ||
+           type == "android_resources" || type == "deps_dex" ||
+           type == "dist_jar" || type == "android_assets" ||
+           type == "resource_rewriter" || type == "java_binary" ||
+           type == "group" || type == "java_prebuilt" || type == "junit_binary")
+
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "testonly",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+
+    script = "//build/android/gyp/write_build_config.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = []
+
+    _deps_configs = []
+    if (defined(invoker.possible_config_deps)) {
+      foreach(_possible_dep, invoker.possible_config_deps) {
+        set_sources_assignment_filter(_java_target_whitelist)
+        _target_label = get_label_info(_possible_dep, "label_no_toolchain")
+        sources = [
+          _target_label,
+        ]
+        # Keep only deps whose labels matched the java whitelist (filtered out
+        # of |sources|) and were not blacklisted; depend on their
+        # __build_config targets and record their .build_config paths.
+        if (sources == []) {
+          set_sources_assignment_filter(_java_target_blacklist)
+          sources = []
+          sources = [
+            _target_label,
+          ]
+          if (sources != []) {
+            deps += [ "${_target_label}__build_config" ]
+            _dep_gen_dir = get_label_info(_possible_dep, "target_gen_dir")
+            _dep_name = get_label_info(_possible_dep, "name")
+            _deps_configs += [ "$_dep_gen_dir/$_dep_name.build_config" ]
+          }
+        }
+        sources = []
+      }
+      set_sources_assignment_filter([])
+    }
+    _rebased_deps_configs = rebase_path(_deps_configs, root_build_dir)
+
+    outputs = [
+      build_config,
+    ]
+
+    args = [
+      "--type",
+      type,
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--deps-configs=$_rebased_deps_configs",
+      "--build-config",
+      rebase_path(build_config, root_build_dir),
+    ]
+
+    is_java = type == "java_library" || type == "java_binary" ||
+              type == "java_prebuilt"
+    is_apk = type == "android_apk"
+    is_android_assets = type == "android_assets"
+    is_android_resources = type == "android_resources"
+    is_deps_dex = type == "deps_dex"
+    is_group = type == "group"
+
+    supports_android = is_apk || is_android_assets || is_android_resources ||
+                       is_deps_dex || is_group ||
+                       (is_java && defined(invoker.supports_android) &&
+                        invoker.supports_android)
+    requires_android =
+        is_apk || is_android_assets || is_android_resources || is_deps_dex ||
+        (is_java && defined(invoker.requires_android) &&
+         invoker.requires_android)
+
+    assert(!requires_android || supports_android,
+           "requires_android requires" + " supports_android")
+
+    # Mark these variables as used.
+    assert(is_java || true)
+    assert(is_apk || true)
+    assert(is_android_resources || true)
+    assert(is_deps_dex || true)
+    assert(is_group || true)
+
+    if (is_java || is_apk) {
+      args += [
+        "--jar-path",
+        rebase_path(invoker.jar_path, root_build_dir),
+      ]
+    }
+
+    if (is_java && defined(invoker.java_resources_jar)) {
+      args += [
+        "--java-resources-jar-path",
+        rebase_path(invoker.java_resources_jar, root_build_dir),
+      ]
+    }
+    if (is_apk || is_deps_dex || (is_java && supports_android)) {
+      args += [
+        "--dex-path",
+        rebase_path(invoker.dex_path, root_build_dir),
+      ]
+    }
+    if (supports_android) {
+      args += [ "--supports-android" ]
+    }
+    if (requires_android) {
+      args += [ "--requires-android" ]
+    }
+    if (defined(invoker.bypass_platform_checks) &&
+        invoker.bypass_platform_checks) {
+      args += [ "--bypass-platform-checks" ]
+    }
+
+    if (defined(invoker.apk_under_test)) {
+      deps += [ "${invoker.apk_under_test}__build_config" ]
+      apk_under_test_gen_dir =
+          get_label_info(invoker.apk_under_test, "target_gen_dir")
+      apk_under_test_name = get_label_info(invoker.apk_under_test, "name")
+      apk_under_test_config =
+          "$apk_under_test_gen_dir/$apk_under_test_name.build_config"
+      args += [
+        "--tested-apk-config",
+        rebase_path(apk_under_test_config, root_build_dir),
+      ]
+    }
+
+    if (is_android_assets) {
+      if (defined(invoker.asset_sources)) {
+        _rebased_asset_sources =
+            rebase_path(invoker.asset_sources, root_build_dir)
+        args += [ "--asset-sources=$_rebased_asset_sources" ]
+      }
+      if (defined(invoker.asset_renaming_sources)) {
+        _rebased_asset_renaming_sources =
+            rebase_path(invoker.asset_renaming_sources, root_build_dir)
+        args += [ "--asset-renaming-sources=$_rebased_asset_renaming_sources" ]
+
+        # These are zip paths, so no need to rebase.
+        args += [ "--asset-renaming-destinations=${invoker.asset_renaming_destinations}" ]
+      }
+      if (defined(invoker.disable_compression) && invoker.disable_compression) {
+        args += [ "--disable-asset-compression" ]
+      }
+    }
+
+    if (is_android_resources || is_apk) {
+      assert(defined(invoker.resources_zip))
+      args += [
+        "--resources-zip",
+        rebase_path(invoker.resources_zip, root_build_dir),
+      ]
+      if (defined(invoker.android_manifest)) {
+        inputs += [ invoker.android_manifest ]
+        args += [
+          "--android-manifest",
+          rebase_path(invoker.android_manifest, root_build_dir),
+        ]
+      } else {
+        assert(!is_apk, "apk build configs require an android_manifest")
+      }
+      if (defined(invoker.custom_package)) {
+        args += [
+          "--package-name",
+          invoker.custom_package,
+        ]
+      }
+      if (defined(invoker.r_text)) {
+        args += [
+          "--r-text",
+          rebase_path(invoker.r_text, root_build_dir),
+        ]
+      }
+    }
+
+    if (is_android_resources && defined(invoker.resource_dirs)) {
+      resource_dirs = rebase_path(invoker.resource_dirs, root_build_dir)
+      args += [ "--resource-dirs=$resource_dirs" ]
+    }
+
+    if (is_apk) {
+      if (defined(invoker.shared_libraries_runtime_deps_file)) {
+        # Don't list shared_libraries_runtime_deps_file as an input in order to
+        # avoid having to depend on the runtime_deps target. See comment in
+        # rules.gni for why we do this.
+        args += [
+          "--shared-libraries-runtime-deps",
+          rebase_path(invoker.shared_libraries_runtime_deps_file,
+                      root_build_dir),
+        ]
+      }
+
+      if (defined(invoker.secondary_abi_shared_libraries_runtime_deps_file)) {
+        # Don't list secondary_abi_shared_libraries_runtime_deps_file as an
+        # input in order to avoid having to depend on the runtime_deps target.
+        # See comment in rules.gni for why we do this.
+        args += [
+          "--secondary-abi-shared-libraries-runtime-deps",
+          rebase_path(invoker.secondary_abi_shared_libraries_runtime_deps_file,
+                      root_build_dir),
+        ]
+      }
+
+      if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) {
+        args += [
+          "--proguard-enabled",
+          "--proguard-info",
+          rebase_path(invoker.proguard_info, root_build_dir),
+        ]
+      }
+
+      if (defined(invoker.apk_path)) {
+        _rebased_apk_path = rebase_path(invoker.apk_path, root_build_dir)
+        _rebased_incremental_apk_path =
+            rebase_path(invoker.incremental_apk_path, root_build_dir)
+        _rebased_incremental_install_script_path =
+            rebase_path(invoker.incremental_install_script_path, root_build_dir)
+        _incremental_allowed =
+            defined(invoker.incremental_allowed) && invoker.incremental_allowed
+        args += [ "--apk-path=$_rebased_apk_path" ]
+        args += [ "--incremental-install-script-path=$_rebased_incremental_install_script_path" ]
+
+        assert(_rebased_incremental_apk_path != "")  # Mark as used.
+        if (_incremental_allowed) {
+          args += [ "--incremental-apk-path=$_rebased_incremental_apk_path" ]
+        }
+      }
+    }
+
+    if (defined(invoker.java_sources_file)) {
+      args += [
+        "--java-sources-file",
+        rebase_path(invoker.java_sources_file, root_build_dir),
+      ]
+    }
+    if (defined(invoker.srcjar)) {
+      args += [
+        "--srcjar",
+        rebase_path(invoker.srcjar, root_build_dir),
+      ]
+    }
+    if (defined(invoker.bundled_srcjars)) {
+      _rebased_bundled_srcjars =
+          rebase_path(invoker.bundled_srcjars, root_build_dir)
+      args += [ "--bundled-srcjars=$_rebased_bundled_srcjars" ]
+    }
+    if (defined(invoker.input_jars_paths)) {
+      _rebased_input_jars_paths =
+          rebase_path(invoker.input_jars_paths, root_build_dir)
+      args += [ "--extra-classpath-jars=$_rebased_input_jars_paths" ]
+    }
+    if (defined(invoker.proguard_configs)) {
+      _rebased_proguard_configs =
+          rebase_path(invoker.proguard_configs, root_build_dir)
+      args += [ "--proguard-configs=$_rebased_proguard_configs" ]
+    }
+    if (defined(invoker.gradle_treat_as_prebuilt) &&
+        invoker.gradle_treat_as_prebuilt) {
+      args += [ "--gradle-treat-as-prebuilt" ]
+    }
+    if (defined(invoker.main_class)) {
+      args += [
+        "--main-class",
+        invoker.main_class,
+      ]
+    }
+    if (defined(invoker.alternative_android_sdk_ijar)) {
+      args += [
+        "--bootclasspath",
+        rebase_path(invoker.alternative_android_sdk_ijar, root_build_dir),
+      ]
+    }
+    if (current_toolchain != default_toolchain) {
+      # This has to be a build-time error rather than a GN assert because many
+      # packages have a mix of java and non-java targets. For example, the
+      # following would fail even though nothing depends on :bar(//baz):
+      #
+      # shared_library("foo") {
+      # }
+      #
+      # android_library("bar") {
+      #   deps = [ ":foo(//baz)" ]
+      #   assert(current_toolchain == default_toolchain)
+      # }
+      _msg = [
+        "Tried to build an Android target in a non-default toolchain.",
+        "target: " + get_label_info(":$target_name", "label_with_toolchain"),
+        "default_toolchain: $default_toolchain",
+      ]
+      args += [ "--fail=$_msg" ]
+    }
+  }
+}
+
+# Like copy(), but runs build/android/gyp/copy_ex.py, which additionally
+# supports renaming files on the way (renaming_sources/renaming_destinations),
+# clearing the destination directory first (clear_dir), and passing extra
+# script arguments (args). Writes a stamp file on completion.
+template("copy_ex") {
+  set_sources_assignment_filter([])
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data",
+                             "deps",
+                             "inputs",
+                             "sources",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(sources)) {
+      sources = []
+    }
+    script = "//build/android/gyp/copy_ex.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    _stamp_file = "$target_gen_dir/$target_name.stamp"
+    outputs = [
+      _stamp_file,
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--stamp",
+      rebase_path(_stamp_file, root_build_dir),
+      "--dest",
+      rebase_path(invoker.dest, root_build_dir),
+    ]
+    rebased_sources = rebase_path(sources, root_build_dir)
+    args += [ "--files=$rebased_sources" ]
+
+    if (defined(invoker.clear_dir) && invoker.clear_dir) {
+      args += [ "--clear" ]
+    }
+
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+
+    if (defined(invoker.renaming_sources) &&
+        defined(invoker.renaming_destinations)) {
+      # Renamed files are added to |sources| so they are tracked as inputs too.
+      sources += invoker.renaming_sources
+      rebased_renaming_sources =
+          rebase_path(invoker.renaming_sources, root_build_dir)
+      args += [ "--renaming-sources=$rebased_renaming_sources" ]
+
+      renaming_destinations = invoker.renaming_destinations
+      args += [ "--renaming-destinations=$renaming_destinations" ]
+    }
+  }
+}
+
+# Generates a script in the build bin directory which runs the test
+# target using the test runner script in build/android/test_runner.py.
+# Supported test_type values visible below: "gtest", "instrumentation",
+# "junit" and "linker"; anything else is rejected with an assert.
+template("test_runner_script") {
+  testonly = true
+  _test_name = invoker.test_name
+  _test_type = invoker.test_type
+  _incremental_install =
+      defined(invoker.incremental_install) && invoker.incremental_install
+
+  _runtime_deps =
+      !defined(invoker.ignore_all_data_deps) || !invoker.ignore_all_data_deps
+
+  if (_runtime_deps) {
+    # This runtime_deps file is used at runtime and thus cannot go in
+    # target_gen_dir.
+    _target_dir_name = get_label_info(":$target_name", "dir")
+    _runtime_deps_file =
+        "$root_out_dir/gen.runtime/$_target_dir_name/$target_name.runtime_deps"
+    _runtime_deps_target = "${target_name}__write_deps"
+    # Setting write_runtime_deps on this group makes GN write the transitive
+    # runtime deps of the forwarded deps/data_deps to the file at gen time.
+    group(_runtime_deps_target) {
+      forward_variables_from(invoker,
+                             [
+                               "data",
+                               "data_deps",
+                               "deps",
+                               "public_deps",
+                             ])
+      write_runtime_deps = _runtime_deps_file
+    }
+  }
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    if (!defined(data_deps)) {
+      data_deps = []
+    }
+
+    script = "//build/android/gyp/create_test_runner_script.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    data_deps += [
+      "//build/android:test_runner_py",
+      "//build/android:logdog_wrapper_py",
+    ]
+
+    data = []
+
+    # Arguments baked into the generated wrapper script (passed through to
+    # test_runner.py), as opposed to |args|, which configures the generator.
+    test_runner_args = [
+      _test_type,
+      "--output-directory",
+      rebase_path(root_build_dir, root_build_dir),
+    ]
+
+    if (_runtime_deps) {
+      deps += [ ":$_runtime_deps_target" ]
+      data += [ _runtime_deps_file ]
+      test_runner_args += [
+        "--runtime-deps-path",
+        rebase_path(_runtime_deps_file, root_build_dir),
+      ]
+    }
+
+    # apk_target is not used for native executable tests
+    # (e.g. breakpad_unittests).
+    if (defined(invoker.apk_target)) {
+      assert(!defined(invoker.executable_dist_dir))
+      deps += [ "${invoker.apk_target}__build_config" ]
+      _apk_build_config =
+          get_label_info(invoker.apk_target, "target_gen_dir") + "/" +
+          get_label_info(invoker.apk_target, "name") + ".build_config"
+      _rebased_apk_build_config = rebase_path(_apk_build_config, root_build_dir)
+      assert(_rebased_apk_build_config != "")  # Mark as used.
+    } else if (_test_type == "gtest") {
+      assert(
+          defined(invoker.executable_dist_dir),
+          "Must define either apk_target or executable_dist_dir for test_runner_script()")
+      test_runner_args += [
+        "--executable-dist-dir",
+        rebase_path(invoker.executable_dist_dir, root_build_dir),
+      ]
+    }
+
+    # True for test types that run on a device; only "junit" runs host-side.
+    _device_test = true
+    if (_test_type == "gtest") {
+      assert(defined(invoker.test_suite))
+      test_runner_args += [
+        "--suite",
+        invoker.test_suite,
+      ]
+    } else if (_test_type == "instrumentation") {
+      _test_apk = "@FileArg($_rebased_apk_build_config:deps_info:apk_path)"
+      if (_incremental_install) {
+        _test_apk = "@FileArg($_rebased_apk_build_config:deps_info:incremental_apk_path)"
+      }
+      test_runner_args += [
+        "--test-apk=$_test_apk",
+        "--test-jar",
+        rebase_path(invoker.test_jar, root_build_dir),
+      ]
+      if (defined(invoker.apk_under_test)) {
+        deps += [ "${invoker.apk_under_test}__build_config" ]
+        _apk_under_test_build_config =
+            get_label_info(invoker.apk_under_test, "target_gen_dir") + "/" +
+            get_label_info(invoker.apk_under_test, "name") + ".build_config"
+        _rebased_apk_under_test_build_config =
+            rebase_path(_apk_under_test_build_config, root_build_dir)
+        _apk_under_test =
+            "@FileArg($_rebased_apk_under_test_build_config:deps_info:apk_path)"
+        if (_incremental_install) {
+          _apk_under_test = "@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_apk_path)"
+        }
+        test_runner_args += [ "--apk-under-test=$_apk_under_test" ]
+      }
+      if (emma_coverage) {
+        # Set a default coverage output directory (can be overridden by user
+        # passing the same flag).
+        test_runner_args += [
+          "--coverage-dir",
+          rebase_path("$root_out_dir/coverage", root_build_dir),
+        ]
+      }
+    } else if (_test_type == "junit") {
+      assert(defined(invoker.test_suite))
+      _device_test = false
+      test_runner_args += [
+        "--test-suite",
+        invoker.test_suite,
+      ]
+      if (defined(invoker.android_manifest_path)) {
+        test_runner_args += [
+          "--android-manifest-path",
+          rebase_path(invoker.android_manifest_path, root_build_dir),
+        ]
+      }
+
+      if (defined(invoker.package_name)) {
+        test_runner_args += [
+          "--package-name",
+          invoker.package_name,
+        ]
+
+        deps += [ ":${invoker.test_suite}__build_config" ]
+        _junit_binary_build_config =
+            "${target_gen_dir}/${invoker.test_suite}.build_config"
+        _rebased_build_config =
+            rebase_path("$_junit_binary_build_config", root_build_dir)
+        test_runner_args += [
+          "--resource-zips",
+          "@FileArg($_rebased_build_config:resources:dependency_zips)",
+        ]
+      }
+
+      test_runner_args += [
+        "--robolectric-runtime-deps-dir",
+        rebase_path("$root_build_dir/lib.java/third_party/robolectric",
+                    root_build_dir),
+      ]
+    } else if (_test_type == "linker") {
+      test_runner_args += [
+        "--test-apk",
+        "@FileArg($_rebased_apk_build_config:deps_info:apk_path)",
+      ]
+    } else {
+      assert(false, "Invalid test type: $_test_type.")
+    }
+
+    if (defined(invoker.additional_apks)) {
+      foreach(additional_apk, invoker.additional_apks) {
+        deps += [ "${additional_apk}__build_config" ]
+        _build_config = get_label_info(additional_apk, "target_gen_dir") + "/" +
+                        get_label_info(additional_apk, "name") + ".build_config"
+        _rebased_build_config = rebase_path(_build_config, root_build_dir)
+        test_runner_args += [
+          "--additional-apk",
+          "@FileArg($_rebased_build_config:deps_info:apk_path)",
+          "--additional-apk-incremental",
+          "@FileArg($_rebased_build_config:deps_info:incremental_apk_path)",
+        ]
+      }
+    }
+    if (defined(invoker.shard_timeout)) {
+      test_runner_args += [ "--shard-timeout=${invoker.shard_timeout}" ]
+    }
+    if (_incremental_install) {
+      test_runner_args += [
+        "--test-apk-incremental-install-script",
+        "@FileArg($_rebased_apk_build_config:deps_info:incremental_install_script_path)",
+      ]
+      if (defined(invoker.apk_under_test)) {
+        test_runner_args += [
+          "--apk-under-test-incremental-install-script",
+          "@FileArg($_rebased_apk_under_test_build_config:deps_info:incremental_install_script_path)",
+        ]
+      }
+      test_runner_args += [ "--fast-local-dev" ]
+    }
+    if (_device_test && is_asan) {
+      test_runner_args += [ "--tool=asan" ]
+    }
+
+    if (defined(invoker.generated_script)) {
+      assert(_test_name != "" || true)  # Mark _test_name as used.
+      generated_script = invoker.generated_script
+    } else {
+      generated_script = "$root_build_dir/bin/run_${_test_name}"
+    }
+    outputs = [
+      generated_script,
+    ]
+    data += [ generated_script ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--script-output-path",
+      rebase_path(generated_script, root_build_dir),
+    ]
+    if (defined(android_test_runner_script)) {
+      args += [
+        "--test-runner-path",
+        android_test_runner_script,
+      ]
+    }
+
+    args += test_runner_args
+  }
+}
+
+# Generates a wrapper script in the build bin directory
+# (bin/stack_<stack_target_name>) that runs the Android stack symbolization
+# tool from //third_party/android_platform against this build's output
+# directory, for the current target_cpu.
+template("stack_script") {
+  forward_variables_from(invoker, [ "testonly" ])
+
+  _stack_target_name = invoker.stack_target_name
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    if (!defined(data_deps)) {
+      data_deps = []
+    }
+
+    data_deps +=
+        [ "//third_party/android_platform/development/scripts:stack_py" ]
+
+    script = "//build/android/gyp/create_stack_script.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    _stack_script = "//third_party/android_platform/development/scripts/stack"
+
+    _generated_script = "$root_build_dir/bin/stack_${_stack_target_name}"
+
+    outputs = [
+      _generated_script,
+    ]
+    data = [
+      _generated_script,
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--output-directory",
+      rebase_path(root_build_dir, root_build_dir),
+      "--script-path",
+      rebase_path(_stack_script, root_build_dir),
+      "--script-output-path",
+      rebase_path(_generated_script, root_build_dir),
+      "--arch=$target_cpu",
+    ]
+    if (defined(invoker.packed_libraries)) {
+      args += [
+        "--packed-libs",
+        invoker.packed_libraries,
+      ]
+    }
+  }
+}
+
+if (enable_java_templates) {
+  import("//build/config/zip.gni")
+  import("//third_party/ijar/ijar.gni")
+  import("//third_party/android_platform/config.gni")
+
+  # Common SDK paths used throughout the java templates below, rebased to be
+  # relative to the output directory (the working directory of build actions).
+  rebased_android_sdk = rebase_path(android_sdk, root_build_dir)
+  rebased_android_sdk_build_tools =
+      rebase_path(android_sdk_build_tools, root_build_dir)
+
+  android_sdk_jar = "$android_sdk/android.jar"
+  rebased_android_sdk_jar = rebase_path(android_sdk_jar, root_build_dir)
+  android_default_aapt_path = "$rebased_android_sdk_build_tools/aapt"
+
+  # Human-readable configuration name, derived from is_debug.
+  android_configuration_name = "Release"
+  if (is_debug) {
+    android_configuration_name = "Debug"
+  }
+
+  # Runs Android lint (via build/android/gyp/lint.py) over the invoker's
+  # java_files / jar_path. With create_cache = true it instead just warms the
+  # shared lint cache under $root_build_dir/android_lint_cache.
+  template("android_lint") {
+    action(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "data_deps",
+                               "public_deps",
+                               "testonly",
+                             ])
+      if (!defined(deps)) {
+        deps = []
+      }
+
+      # lint_suppressions_file is presumably a globally-settable build arg;
+      # fall back to the checked-in default when it is not set.
+      if (!defined(lint_suppressions_file)) {
+        lint_suppressions_file = "//build/android/lint/suppressions.xml"
+      }
+
+      _cache_dir = "$root_build_dir/android_lint_cache"
+      _result_path = "$target_gen_dir/$target_name/result.xml"
+      _config_path = "$target_gen_dir/$target_name/config.xml"
+      _suppressions_file = lint_suppressions_file
+      _platform_xml_path =
+          "${android_sdk_root}/platform-tools/api/api-versions.xml"
+      _rebased_lint_android_sdk_root =
+          rebase_path(lint_android_sdk_root, root_build_dir)
+
+      script = "//build/android/gyp/lint.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      inputs = [
+        _platform_xml_path,
+        _suppressions_file,
+        invoker.android_manifest,
+      ]
+
+      outputs = [
+        _result_path,
+        _config_path,
+      ]
+
+      args = [
+        "--lint-path=$_rebased_lint_android_sdk_root/tools-lint/bin/lint",
+        "--cache-dir",
+        rebase_path(_cache_dir, root_build_dir),
+        "--platform-xml-path",
+        rebase_path(_platform_xml_path, root_build_dir),
+        "--android-sdk-version=${lint_android_sdk_version}",
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--config-path",
+        rebase_path(_suppressions_file, root_build_dir),
+        "--manifest-path",
+        rebase_path(invoker.android_manifest, root_build_dir),
+        "--product-dir=.",
+        "--processed-config-path",
+        rebase_path(_config_path, root_build_dir),
+        "--result-path",
+        rebase_path(_result_path, root_build_dir),
+      ]
+
+      if (defined(invoker.disable)) {
+        args += [ "--disable=${invoker.disable}" ]
+      }
+
+      if (defined(invoker.create_cache) && invoker.create_cache) {
+        args += [
+          "--create-cache",
+          "--silent",
+        ]
+      } else {
+        # Normal lint run: feed in the sources, jar and build_config, and
+        # depend on the shared cache having been prepared first.
+        inputs += invoker.java_files
+        inputs += [
+          invoker.jar_path,
+          invoker.build_config,
+        ]
+        if (invoker.java_files != []) {
+          inputs += [ invoker.java_sources_file ]
+          _rebased_java_sources_file =
+              rebase_path(invoker.java_sources_file, root_build_dir)
+          args += [ "--java-sources-file=$_rebased_java_sources_file" ]
+        }
+        deps += [ "//build/android:prepare_android_lint_cache" ]
+
+        _rebased_build_config =
+            rebase_path(invoker.build_config, root_build_dir)
+        args += [
+          "--jar-path",
+          rebase_path(invoker.jar_path, root_build_dir),
+          "--classpath=@FileArg($_rebased_build_config:javac:interface_classpath)",
+          "--resource-sources=@FileArg($_rebased_build_config:deps_info:owned_resources_dirs)",
+          "--resource-sources=@FileArg($_rebased_build_config:deps_info:owned_resources_zips)",
+          "--can-fail-build",
+        ]
+      }
+    }
+  }
+
+  # Runs ProGuard (via build/android/gyp/proguard.py) to produce
+  # invoker.output_jar_path plus its .flags/.mapping/.seeds/.usage side files.
+  # Uses _default_proguard_jar_path unless invoker.proguard_jar_path is given.
+  template("proguard") {
+    action(target_name) {
+      set_sources_assignment_filter([])
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "data_deps",
+                               "public_deps",
+                               "testonly",
+                             ])
+      script = "//build/android/gyp/proguard.py"
+      if (defined(invoker.proguard_jar_path)) {
+        _proguard_jar_path = invoker.proguard_jar_path
+      } else {
+        _proguard_jar_path = _default_proguard_jar_path
+      }
+      _output_jar_path = invoker.output_jar_path
+      inputs = [
+        _proguard_jar_path,
+      ]
+      # The classpath is the Android SDK jar; an alternative SDK jar may be
+      # substituted by the invoker.
+      if (defined(invoker.alternative_android_sdk_jar)) {
+        inputs += [ invoker.alternative_android_sdk_jar ]
+        _rebased_android_sdk_jar =
+            rebase_path(invoker.alternative_android_sdk_jar)
+      } else {
+        inputs += [ android_sdk_jar ]
+        _rebased_android_sdk_jar = rebased_android_sdk_jar
+      }
+      if (defined(invoker.inputs)) {
+        inputs += invoker.inputs
+      }
+      depfile = "${target_gen_dir}/${target_name}.d"
+      outputs = [
+        _output_jar_path,
+        "$_output_jar_path.flags",
+        "$_output_jar_path.mapping",
+        "$_output_jar_path.seeds",
+        "$_output_jar_path.usage",
+      ]
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--proguard-path",
+        rebase_path(_proguard_jar_path, root_build_dir),
+        "--output-path",
+        rebase_path(_output_jar_path, root_build_dir),
+        "--classpath",
+        _rebased_android_sdk_jar,
+      ]
+      if (proguard_verbose) {
+        args += [ "--verbose" ]
+      }
+      if (defined(invoker.args)) {
+        args += invoker.args
+      }
+      if (defined(invoker.proguard_jar_path)) {
+        # We assume that if we are using a different ProGuard, this new version
+        # can handle the 'dangerous' optimizations.
+        args += [ "--enable-dangerous-optimizations" ]
+      }
+    }
+  }
+
+  # Runs FindBugs (via build/android/findbugs_diff.py) over invoker.jar_path,
+  # using the classpath recorded in invoker.build_config and the checked-in
+  # exclusions file. Writes the XML report to $target_gen_dir/<name>/result.xml.
+  template("findbugs") {
+    action(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "testonly",
+                             ])
+      script = "//build/android/findbugs_diff.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      _result_path = "$target_gen_dir/$target_name/result.xml"
+      _exclusions_file = "//build/android/findbugs_filter/findbugs_exclude.xml"
+
+      _rebased_build_config = rebase_path(invoker.build_config, root_build_dir)
+
+      inputs = [
+        "//build/android/pylib/utils/findbugs.py",
+        _exclusions_file,
+        invoker.jar_path,
+        invoker.build_config,
+      ]
+
+      outputs = [
+        _result_path,
+      ]
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--exclude",
+        rebase_path(_exclusions_file, root_build_dir),
+        "--auxclasspath-gyp",
+        "@FileArg($_rebased_build_config:javac:classpath)",
+        "--output-file",
+        rebase_path(_result_path, root_build_dir),
+        rebase_path(invoker.jar_path, root_build_dir),
+      ]
+
+      if (findbugs_verbose) {
+        args += [ "-vv" ]
+      }
+    }
+  }
+
+  # Generates a script in the build bin directory to run a java binary.
+  #
+  # Variables
+  #   main_class: The class containing the program entry point.
+  #   jar_path: The path to the jar to run.
+  #   script_name: Name of the script to generate.
+  #   build_config: Path to .build_config for the jar (contains classpath).
+  #   wrapper_script_args: List of extra arguments to pass to the executable.
+  #
+  template("java_binary_script") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    # Required invoker variables (see the comment block above the template).
+    _main_class = invoker.main_class
+    _build_config = invoker.build_config
+    _jar_path = invoker.jar_path
+    _script_name = invoker.script_name
+
+    action(target_name) {
+      script = "//build/android/gyp/create_java_binary_script.py"
+      depfile = "$target_gen_dir/$_script_name.d"
+      # The generated wrapper script lives in the build's bin/ directory.
+      java_script = "$root_build_dir/bin/$_script_name"
+      inputs = [
+        _build_config,
+      ]
+      outputs = [
+        java_script,
+      ]
+      forward_variables_from(invoker, [ "deps" ])
+      _rebased_build_config = rebase_path(_build_config, root_build_dir)
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--output",
+        rebase_path(java_script, root_build_dir),
+        "--classpath=@FileArg($_rebased_build_config:deps_info:java:full_classpath)",
+        "--jar-path",
+        rebase_path(_jar_path, root_build_dir),
+        "--main-class",
+        _main_class,
+      ]
+      if (emma_coverage) {
+        # Coverage builds additionally need emma.jar on the classpath and pass
+        # --noverify (presumably to relax bytecode verification for
+        # instrumented classes — confirm against create_java_binary_script.py).
+        args += [
+          "--classpath",
+          rebase_path("//third_party/android_tools/sdk/tools/lib/emma.jar",
+                      root_build_dir),
+        ]
+        args += [ "--noverify" ]
+      }
+      if (defined(invoker.wrapper_script_args)) {
+        # Everything after "--" is forwarded verbatim to the java binary.
+        args += [ "--" ] + invoker.wrapper_script_args
+      }
+      if (defined(invoker.bootclasspath)) {
+        args += [
+          "--bootclasspath",
+          rebase_path(invoker.bootclasspath, root_build_dir),
+        ]
+      }
+    }
+  }
+
+  # Converts compiled java bytecode into a .dex file by running
+  # //build/android/gyp/dex.py.
+  #
+  # Variables
+  #   output: Path of the .dex file to generate (required).
+  #   enable_multidex: If true, first generates a main dex list (via an
+  #       auxiliary __main_dex_list action) and passes --multi-dex.
+  #   proguard_jar_path: Optional ProGuard jar used by main dex list creation.
+  #   sources/inputs/args: Optional extra sources, inputs and arguments
+  #       forwarded to the dex step.
+  template("dex") {
+    set_sources_assignment_filter([])
+
+    _enable_multidex =
+        defined(invoker.enable_multidex) && invoker.enable_multidex
+
+    if (_enable_multidex) {
+      # Multidex needs a "main dex list" naming the classes that must land in
+      # the primary classes.dex; it is computed by a separate action.
+      _main_dex_list_path = invoker.output + ".main_dex_list"
+      _main_dex_list_target_name = "${target_name}__main_dex_list"
+      action(_main_dex_list_target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "deps",
+                                 "inputs",
+                                 "sources",
+                                 "testonly",
+                               ])
+
+        script = "//build/android/gyp/main_dex_list.py"
+        depfile = "$target_gen_dir/$target_name.d"
+
+        main_dex_rules = "//build/android/main_dex_classes.flags"
+
+        if (defined(invoker.proguard_jar_path)) {
+          _proguard_jar_path = invoker.proguard_jar_path
+        } else {
+          _proguard_jar_path = _default_proguard_jar_path
+        }
+
+        # "inputs" may or may not have been forwarded from the invoker above.
+        if (!defined(inputs)) {
+          inputs = []
+        }
+        inputs += [
+          main_dex_rules,
+          _proguard_jar_path,
+        ]
+
+        outputs = [
+          _main_dex_list_path,
+        ]
+
+        args = [
+          "--depfile",
+          rebase_path(depfile, root_build_dir),
+          "--android-sdk-tools",
+          rebased_android_sdk_build_tools,
+          "--main-dex-list-path",
+          rebase_path(_main_dex_list_path, root_build_dir),
+          "--main-dex-rules-path",
+          rebase_path(main_dex_rules, root_build_dir),
+          "--proguard-path",
+          rebase_path(_proguard_jar_path, root_build_dir),
+        ]
+
+        if (defined(invoker.extra_main_dex_proguard_config)) {
+          # --main-dex-rules-path may be passed multiple times; the extra
+          # config is appended after the default rules file.
+          inputs += [ invoker.extra_main_dex_proguard_config ]
+          args += [
+            "--main-dex-rules-path",
+            rebase_path(invoker.extra_main_dex_proguard_config, root_build_dir),
+          ]
+        }
+
+        if (defined(invoker.args)) {
+          args += invoker.args
+        }
+
+        if (defined(invoker.sources)) {
+          args += rebase_path(invoker.sources, root_build_dir)
+        }
+      }
+    }
+
+    assert(defined(invoker.output))
+    action(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "inputs",
+                               "sources",
+                               "testonly",
+                             ])
+      script = "//build/android/gyp/dex.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      outputs = [
+        invoker.output,
+      ]
+
+      rebased_output = rebase_path(invoker.output, root_build_dir)
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--android-sdk-tools",
+        rebased_android_sdk_build_tools,
+        "--dex-path",
+        rebased_output,
+      ]
+
+      if (enable_incremental_dx) {
+        args += [ "--incremental" ]
+      }
+
+      # EMMA requires --no-locals.
+      if (emma_coverage) {
+        args += [ "--no-locals=1" ]
+      }
+
+      if (_enable_multidex) {
+        # Wire in the main dex list computed by the auxiliary action above.
+        args += [
+          "--multi-dex",
+          "--main-dex-list-path",
+          rebase_path(_main_dex_list_path, root_build_dir),
+        ]
+        deps += [ ":${_main_dex_list_target_name}" ]
+        inputs += [ _main_dex_list_path ]
+      }
+
+      if (defined(invoker.args)) {
+        args += invoker.args
+      }
+
+      if (defined(invoker.sources)) {
+        args += rebase_path(invoker.sources, root_build_dir)
+      }
+    }
+  }
+
+  # Post-processes a prebuilt jar through an optional chain of steps —
+  # class filtering, java-assert enabling, and Retrolambda — then copies the
+  # result of the last enabled step to output_jar_path. Each step consumes
+  # the previous step's output (_previous_output_jar) so disabled steps are
+  # simply skipped.
+  #
+  # Variables
+  #   build_config: Path to the .build_config for this jar (required).
+  #   input_jar_path / output_jar_path: The jar to process and where to put
+  #       the processed result.
+  #   jar_excluded_patterns: Optional class patterns to strip from the jar.
+  #   strip_resource_classes: If true, strips R.class-style resource classes.
+  #   supports_android: Enables the assert and Retrolambda steps (the latter
+  #       only when use_java8 is set; asserts only in debug/dcheck builds).
+  template("process_java_prebuilt") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    assert(invoker.build_config != "")
+    _build_config = invoker.build_config
+    _rebased_build_config = rebase_path(_build_config, root_build_dir)
+    assert(_rebased_build_config != "" || true)  # Mark used.
+
+    _input_jar_path = invoker.input_jar_path
+    _output_jar_path = invoker.output_jar_path
+
+    _jar_excluded_patterns = []
+    if (defined(invoker.jar_excluded_patterns)) {
+      _jar_excluded_patterns = invoker.jar_excluded_patterns
+    }
+    _strip_resource_classes = defined(invoker.strip_resource_classes) &&
+                              invoker.strip_resource_classes
+    _filter_jar = _jar_excluded_patterns != [] || _strip_resource_classes
+
+    _enable_assert =
+        defined(invoker.supports_android) && invoker.supports_android &&
+        (is_java_debug || dcheck_always_on)
+
+    _retrolambda = defined(invoker.supports_android) &&
+                   invoker.supports_android && use_java8
+
+    # _deps carries the previous pipeline stage's target; _previous_output_jar
+    # carries its output jar. Both start at the raw input.
+    _deps = []
+    _previous_output_jar = _input_jar_path
+
+    if (_filter_jar) {
+      # Stage 1: strip excluded and/or resource classes out of the jar.
+      _filter_target = "${target_name}__filter"
+      _filter_input_jar = _previous_output_jar
+      _filter_output_jar = "$target_out_dir/$target_name-filtered.jar"
+
+      action(_filter_target) {
+        script = "//build/android/gyp/jar.py"
+        deps = _deps
+        if (defined(invoker.deps)) {
+          deps += invoker.deps
+        }
+        if (defined(invoker.public_deps)) {
+          public_deps = invoker.public_deps
+        }
+        inputs = [
+          _build_config,
+          _filter_input_jar,
+        ]
+        outputs = [
+          _filter_output_jar,
+        ]
+        args = [
+          "--input-jar",
+          rebase_path(_filter_input_jar, root_build_dir),
+          "--jar-path",
+          rebase_path(_filter_output_jar, root_build_dir),
+          "--excluded-classes=$_jar_excluded_patterns",
+        ]
+        if (_strip_resource_classes) {
+          args += [ "--strip-resource-classes-for=@FileArg($_rebased_build_config:javac:resource_packages)" ]
+        }
+      }
+
+      # GN requires clearing a non-empty list before assigning a new value.
+      _deps = []
+      _deps = [ ":$_filter_target" ]
+      _previous_output_jar = _filter_output_jar
+    }
+
+    if (_enable_assert) {
+      # Stage 2: rewrite the jar so java asserts are enabled at runtime.
+      _assert_target = "${target_name}__assert"
+      _assert_input_jar = _previous_output_jar
+      _assert_output_jar = "$target_out_dir/$target_name-asserted.jar"
+
+      action(_assert_target) {
+        # The enabler is itself a built binary, hence the explicit dep on its
+        # default-toolchain target.
+        script = "$root_build_dir/bin/helper/java_assertion_enabler"
+        deps = [
+          "//build/android/java_assertion_enabler($default_toolchain)",
+        ]
+        deps += _deps
+        if (defined(invoker.deps)) {
+          deps += invoker.deps
+        }
+        if (defined(invoker.public_deps)) {
+          public_deps = invoker.public_deps
+        }
+        inputs = [
+          _assert_input_jar,
+        ]
+        outputs = [
+          _assert_output_jar,
+        ]
+        args = [
+          rebase_path(_assert_input_jar, root_build_dir),
+          rebase_path(_assert_output_jar, root_build_dir),
+        ]
+      }
+
+      _deps = []
+      _deps = [ ":$_assert_target" ]
+      _previous_output_jar = _assert_output_jar
+    }
+
+    if (_retrolambda) {
+      # Stage 3: run Retrolambda to backport Java 8 lambdas/method refs.
+      _retrolambda_target = "${target_name}__retrolambda"
+      _retrolambda_input_jar = _previous_output_jar
+      _retrolambda_output_jar = "$target_out_dir/$target_name-retrolambda.jar"
+
+      android_sdk_jar = "$android_sdk/android.jar"
+      action(_retrolambda_target) {
+        script = "//build/android/gyp/retrolambda.py"
+        deps = _deps
+        if (defined(invoker.deps)) {
+          deps += invoker.deps
+        }
+        if (defined(invoker.public_deps)) {
+          public_deps = invoker.public_deps
+        }
+        inputs = [
+          _build_config,
+          _retrolambda_input_jar,
+        ]
+        outputs = [
+          _retrolambda_output_jar,
+        ]
+        args = [
+          "--input-jar",
+          rebase_path(_retrolambda_input_jar, root_build_dir),
+          "--output-jar",
+          rebase_path(_retrolambda_output_jar, root_build_dir),
+          "--classpath=@FileArg($_rebased_build_config:javac:classpath)",
+          "--android-sdk-jar",
+          rebase_path(android_sdk_jar, root_build_dir),
+        ]
+      }
+
+      _deps = []
+      _deps = [ ":$_retrolambda_target" ]
+      _previous_output_jar = _retrolambda_output_jar
+    }
+
+    # Final stage: copy whatever the last enabled step produced (or the raw
+    # input jar if no step ran) to the requested output path.
+    _output_jar_target = "${target_name}__copy"
+    copy(_output_jar_target) {
+      deps = _deps
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+      sources = [
+        _previous_output_jar,
+      ]
+      outputs = [
+        _output_jar_path,
+      ]
+    }
+
+    group(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "data_deps",
+                               "visibility",
+                             ])
+      public_deps = [
+        ":$_output_jar_target",
+      ]
+    }
+  }
+
+  # Instruments a jar for EMMA code coverage by running
+  # //build/android/gyp/emma_instr.py, producing the instrumented jar plus a
+  # coverage metadata (.em) file and a listing of source directories.
+  #
+  # Variables
+  #   input_jar_path / output_jar_path: Jar to instrument and where to write
+  #       the instrumented jar.
+  #   java_files: Java sources of the jar (declared as inputs).
+  #   java_sources_file: File listing the java sources, passed to the script.
+  template("emma_instr") {
+    action(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "testonly",
+                             ])
+
+      _coverage_file = "$target_out_dir/${target_name}.em"
+      _source_dirs_listing_file = "$target_out_dir/${target_name}_sources.txt"
+      _emma_jar = "${android_sdk_root}/tools/lib/emma.jar"
+
+      script = "//build/android/gyp/emma_instr.py"
+      depfile = "${target_gen_dir}/${target_name}.d"
+      inputs = invoker.java_files + [
+                 _emma_jar,
+                 invoker.input_jar_path,
+               ]
+      outputs = [
+        _coverage_file,
+        _source_dirs_listing_file,
+        invoker.output_jar_path,
+      ]
+      args = [
+        "instrument_jar",
+        "--input-path",
+        rebase_path(invoker.input_jar_path, root_build_dir),
+        "--output-path",
+        rebase_path(invoker.output_jar_path, root_build_dir),
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--coverage-file",
+        rebase_path(_coverage_file, root_build_dir),
+        "--sources-list-file",
+        rebase_path(_source_dirs_listing_file, root_build_dir),
+        "--src-root",
+        rebase_path("//", root_build_dir),
+        "--emma-jar",
+        rebase_path(_emma_jar, root_build_dir),
+      ]
+      _rebased_java_sources_file =
+          rebase_path(invoker.java_sources_file, root_build_dir)
+      args += [ "--java-sources-file=$_rebased_java_sources_file" ]
+
+      # emma_filter is a build arg restricting which classes get instrumented.
+      if (emma_filter != "") {
+        args += [
+          "--filter-string",
+          emma_filter,
+        ]
+      }
+    }
+  }
+
+  # Creates an unsigned .apk.
+  #
+  # Variables
+  #   assets_build_config: Path to android_apk .build_config containing merged
+  #       asset information.
+  #   deps: Specifies the dependencies of this target.
+  #   dex_path: Path to classes.dex file to include (optional).
+  #   resource_packaged_apk_path: Path to .ap_ to use.
+  #   output_apk_path: Output path for the generated .apk.
+  #   native_lib_placeholders: List of placeholder filenames to add to the apk
+  #     (optional).
+  #   native_libs: List of native libraries.
+  #   native_libs_filearg: @FileArg() of additional native libraries.
+  #   write_asset_list: Adds an extra file to the assets, which contains a list of
+  #     all other asset files.
+  template("package_apk") {
+    action(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "public_deps",
+                               "testonly",
+                             ])
+      _native_lib_placeholders = []
+      if (defined(invoker.native_lib_placeholders)) {
+        _native_lib_placeholders = invoker.native_lib_placeholders
+      }
+
+      script = "//build/android/gyp/apkbuilder.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      data_deps = [
+        "//tools/android/md5sum",
+      ]  # Used when deploying APKs
+
+      inputs = invoker.native_libs + [ invoker.resource_packaged_apk_path ]
+      if (defined(invoker.dex_path)) {
+        inputs += [ invoker.dex_path ]
+      }
+
+      outputs = [
+        invoker.output_apk_path,
+      ]
+
+      _rebased_resource_packaged_apk_path =
+          rebase_path(invoker.resource_packaged_apk_path, root_build_dir)
+      _rebased_packaged_apk_path =
+          rebase_path(invoker.output_apk_path, root_build_dir)
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--resource-apk=$_rebased_resource_packaged_apk_path",
+        "--output-apk=$_rebased_packaged_apk_path",
+      ]
+      if (defined(invoker.assets_build_config)) {
+        # Asset lists are read out of the APK's .build_config at runtime.
+        inputs += [ invoker.assets_build_config ]
+        _rebased_build_config =
+            rebase_path(invoker.assets_build_config, root_build_dir)
+        args += [
+          "--assets=@FileArg($_rebased_build_config:assets)",
+          "--uncompressed-assets=@FileArg($_rebased_build_config:uncompressed_assets)",
+        ]
+
+        # TODO(mlopatkin) We are relying on the fact that assets_build_config is
+        # an APK build_config.
+        args += [ "--java-resources=@FileArg($_rebased_build_config:java_resources_jars)" ]
+      }
+      if (defined(invoker.write_asset_list) && invoker.write_asset_list) {
+        args += [ "--write-asset-list" ]
+      }
+      if (defined(invoker.dex_path)) {
+        _rebased_dex_path = rebase_path(invoker.dex_path, root_build_dir)
+        args += [ "--dex-file=$_rebased_dex_path" ]
+      }
+      # The ABI only matters when some form of native code (real libs, a
+      # filearg of libs, or placeholders) is being packaged.
+      if (invoker.native_libs != [] || defined(invoker.native_libs_filearg) ||
+          _native_lib_placeholders != []) {
+        args += [ "--android-abi=$android_app_abi" ]
+      }
+      if (invoker.native_libs != []) {
+        _rebased_native_libs = rebase_path(invoker.native_libs, root_build_dir)
+        args += [ "--native-libs=$_rebased_native_libs" ]
+      }
+      if (defined(invoker.native_libs_filearg)) {
+        args += [ "--native-libs=${invoker.native_libs_filearg}" ]
+      }
+      if (_native_lib_placeholders != []) {
+        args += [ "--native-lib-placeholders=$_native_lib_placeholders" ]
+      }
+
+      # TODO (michaelbai): Remove the secondary_native_libs variable.
+      if (defined(invoker.secondary_abi_native_libs_filearg)) {
+        assert(defined(android_app_secondary_abi))
+        args += [
+          "--secondary-native-libs=${invoker.secondary_abi_native_libs_filearg}",
+          "--secondary-android-abi=$android_app_secondary_abi",
+        ]
+      } else if (defined(invoker.secondary_native_libs) &&
+                 invoker.secondary_native_libs != []) {
+        assert(defined(android_app_secondary_abi))
+        inputs += invoker.secondary_native_libs
+        _secondary_native_libs = rebase_path(invoker.secondary_native_libs)
+        args += [
+          "--secondary-native-libs=$_secondary_native_libs",
+          "--secondary-android-abi=$android_app_secondary_abi",
+        ]
+      }
+
+      if (defined(invoker.uncompress_shared_libraries) &&
+          invoker.uncompress_shared_libraries) {
+        args += [ "--uncompress-shared-libraries" ]
+      }
+    }
+  }
+
+  # Signs & zipaligns an apk.
+  #
+  # Variables
+  #   input_apk_path: Path of the .apk to be finalized.
+  #   output_apk_path: Output path for the generated .apk.
+  #   keystore_path: Path to keystore to use for signing.
+  #   keystore_name: Key alias to use.
+  #   keystore_password: Keystore password.
+  template("finalize_apk") {
+    action(target_name) {
+      # NOTE(review): deps is initialized here and then also listed in
+      # forward_variables_from below; GN errors on clobbering a defined
+      # variable, so this relies on callers never passing both — confirm.
+      deps = []
+      script = "//build/android/gyp/finalize_apk.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "data_deps",
+                               "public_deps",
+                               "testonly",
+                             ])
+
+      sources = [
+        invoker.input_apk_path,
+      ]
+      inputs = [
+        invoker.keystore_path,
+      ]
+      outputs = [
+        invoker.output_apk_path,
+      ]
+      # The finished apk is runtime data (needed for deployment/testing).
+      data = [
+        invoker.output_apk_path,
+      ]
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--zipalign-path",
+        rebase_path(zipalign_path, root_build_dir),
+        "--unsigned-apk-path",
+        rebase_path(invoker.input_apk_path, root_build_dir),
+        "--final-apk-path",
+        rebase_path(invoker.output_apk_path, root_build_dir),
+        "--key-path",
+        rebase_path(invoker.keystore_path, root_build_dir),
+        "--key-name",
+        invoker.keystore_name,
+        "--key-passwd",
+        invoker.keystore_password,
+      ]
+    }
+  }
+
+  # Packages resources, assets, dex, and native libraries into an apk. Signs and
+  # zipaligns the apk.
+  # Orchestrates the full apk pipeline: package resources -> package apk
+  # (resources + dex + native libs) -> sign/zipalign, and in parallel builds
+  # an "<target>_incremental" variant (regenerated manifest, bootstrap dex,
+  # no bundled native libs) plus optional per-density and per-language split
+  # apks. The public targets are group(target_name) and
+  # group("${target_name}_incremental").
+  template("create_apk") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    _android_manifest = invoker.android_manifest
+    _base_path = invoker.base_path
+    _final_apk_path = invoker.apk_path
+    # Derive the incremental apk name from the final one
+    # (foo.apk -> foo_incremental.apk).
+    _incremental_final_apk_path_helper =
+        process_file_template(
+            [ _final_apk_path ],
+            "{{source_dir}}/{{source_name_part}}_incremental.apk")
+    _incremental_final_apk_path = _incremental_final_apk_path_helper[0]
+
+    if (defined(invoker.resources_zip)) {
+      _resources_zip = invoker.resources_zip
+      assert(_resources_zip != "")  # Mark as used.
+    }
+    if (defined(invoker.dex_path)) {
+      _dex_path = invoker.dex_path
+    }
+    _load_library_from_apk = invoker.load_library_from_apk
+
+    _deps = []
+    if (defined(invoker.deps)) {
+      _deps = invoker.deps
+    }
+    _incremental_deps = []
+    if (defined(invoker.incremental_deps)) {
+      _incremental_deps = invoker.incremental_deps
+    }
+    _native_libs = []
+    if (defined(invoker.native_libs)) {
+      _native_libs = invoker.native_libs
+    }
+    # Libraries that must be packaged even in the incremental apk (normally
+    # the incremental apk omits native libs).
+    _native_libs_even_when_incremental = []
+    if (defined(invoker.native_libs_even_when_incremental)) {
+      _native_libs_even_when_incremental =
+          invoker.native_libs_even_when_incremental
+    }
+
+    _version_code = invoker.version_code
+    _version_name = invoker.version_name
+    assert(_version_code != -1)  # Mark as used.
+    assert(_version_name != "")  # Mark as used.
+
+    # Intermediate file naming scheme shared by both pipelines.
+    _base_apk_path = _base_path + ".apk_intermediates"
+
+    _resource_packaged_apk_path = _base_apk_path + ".ap_"
+    _incremental_resource_packaged_apk_path =
+        _base_apk_path + "_incremental.ap_"
+    _packaged_apk_path = _base_apk_path + ".unfinished.apk"
+    _incremental_packaged_apk_path =
+        _base_apk_path + "_incremental.unfinished.apk"
+    _shared_resources =
+        defined(invoker.shared_resources) && invoker.shared_resources
+    assert(_shared_resources || true)  # Mark as used.
+    _app_as_shared_lib =
+        defined(invoker.app_as_shared_lib) && invoker.app_as_shared_lib
+    assert(_app_as_shared_lib || true)  # Mark as used.
+    # The two modes are mutually exclusive.
+    assert(!(_shared_resources && _app_as_shared_lib))
+
+    _keystore_path = invoker.keystore_path
+    _keystore_name = invoker.keystore_name
+    _keystore_password = invoker.keystore_password
+
+    _split_densities = []
+    if (defined(invoker.create_density_splits) &&
+        invoker.create_density_splits) {
+      _split_densities = [
+        "hdpi",
+        "xhdpi",
+        "xxhdpi",
+        "xxxhdpi",
+        "tvdpi",
+      ]
+    }
+
+    _split_languages = []
+    if (defined(invoker.language_splits)) {
+      _split_languages = invoker.language_splits
+    }
+
+    # Shared by the normal and incremental resource-packaging steps; the
+    # invoker supplies the manifest and output .ap_ path.
+    template("package_resources_helper") {
+      action(target_name) {
+        deps = invoker.deps
+
+        script = "//build/android/gyp/package_resources.py"
+        depfile = "${target_gen_dir}/${target_name}.d"
+        inputs = [
+          invoker.android_manifest,
+        ]
+        if (defined(_resources_zip)) {
+          inputs += [ _resources_zip ]
+        }
+        outputs = [
+          invoker.resource_packaged_apk_path,
+        ]
+
+        if (defined(invoker.android_aapt_path)) {
+          _android_aapt_path = invoker.android_aapt_path
+        } else {
+          _android_aapt_path = android_default_aapt_path
+        }
+
+        if (defined(invoker.alternative_android_sdk_jar)) {
+          _rebased_android_sdk_jar =
+              rebase_path(invoker.alternative_android_sdk_jar)
+        } else {
+          _rebased_android_sdk_jar = rebased_android_sdk_jar
+        }
+
+        args = [
+          "--depfile",
+          rebase_path(depfile, root_build_dir),
+          "--android-sdk-jar",
+          _rebased_android_sdk_jar,
+          "--aapt-path",
+          _android_aapt_path,
+          "--configuration-name=$android_configuration_name",
+          "--android-manifest",
+          rebase_path(invoker.android_manifest, root_build_dir),
+          "--version-code",
+          _version_code,
+          "--version-name",
+          _version_name,
+          "--apk-path",
+          rebase_path(invoker.resource_packaged_apk_path, root_build_dir),
+        ]
+
+        if (defined(_resources_zip)) {
+          args += [
+            "--resource-zips",
+            rebase_path(_resources_zip, root_build_dir),
+          ]
+        }
+        if (_shared_resources) {
+          args += [ "--shared-resources" ]
+        }
+        if (_app_as_shared_lib) {
+          args += [ "--app-as-shared-lib" ]
+        }
+        if (_split_densities != []) {
+          # One extra .ap_ per density, named "<ap_>_<density>".
+          args += [ "--create-density-splits" ]
+          foreach(_density, _split_densities) {
+            outputs += [ "${invoker.resource_packaged_apk_path}_${_density}" ]
+          }
+        }
+        if (_split_languages != []) {
+          args += [ "--language-splits=$_split_languages" ]
+          foreach(_language, _split_languages) {
+            outputs += [ "${invoker.resource_packaged_apk_path}_${_language}" ]
+          }
+        }
+        if (defined(invoker.aapt_locale_whitelist)) {
+          args += [ "--locale-whitelist=${invoker.aapt_locale_whitelist}" ]
+        }
+        if (defined(invoker.extensions_to_not_compress)) {
+          args += [
+            "--no-compress",
+            invoker.extensions_to_not_compress,
+          ]
+        }
+      }
+    }
+
+    _package_resources_target_name = "${target_name}__package_resources"
+    package_resources_helper(_package_resources_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "aapt_locale_whitelist",
+                               "alternative_android_sdk_jar",
+                               "android_aapt_path",
+                               "extensions_to_not_compress",
+                             ])
+      deps = _deps
+      android_manifest = _android_manifest
+      resource_packaged_apk_path = _resource_packaged_apk_path
+    }
+
+    # The incremental apk uses a regenerated manifest (e.g. with isolated
+    # processes disabled) produced by this action.
+    _generate_incremental_manifest_target_name =
+        "${target_name}_incremental_generate_manifest"
+    _incremental_android_manifest =
+        get_label_info(_generate_incremental_manifest_target_name,
+                       "target_gen_dir") + "/AndroidManifest.xml"
+    action(_generate_incremental_manifest_target_name) {
+      deps = _incremental_deps
+      script =
+          "//build/android/incremental_install/generate_android_manifest.py"
+      depfile = "${target_gen_dir}/${target_name}.d"
+      inputs = [
+        _android_manifest,
+      ]
+      outputs = [
+        _incremental_android_manifest,
+      ]
+
+      _rebased_src_manifest = rebase_path(_android_manifest, root_build_dir)
+      _rebased_incremental_manifest =
+          rebase_path(_incremental_android_manifest, root_build_dir)
+      _rebased_depfile = rebase_path(depfile, root_build_dir)
+      args = [
+        "--src-manifest=$_rebased_src_manifest",
+        "--out-manifest=$_rebased_incremental_manifest",
+        "--depfile=$_rebased_depfile",
+      ]
+      if (disable_incremental_isolated_processes) {
+        args += [ "--disable-isolated-processes" ]
+      }
+    }
+
+    _incremental_package_resources_target_name =
+        "${target_name}_incremental__package_resources"
+
+    # TODO(agrieve): See if we can speed up this step by swapping the manifest
+    # from the result of the main package_resources step.
+    package_resources_helper(_incremental_package_resources_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "aapt_locale_whitelist",
+                               "alternative_android_sdk_jar",
+                               "android_aapt_path",
+                               "extensions_to_not_compress",
+                             ])
+      deps =
+          _incremental_deps + [ ":$_generate_incremental_manifest_target_name" ]
+      android_manifest = _incremental_android_manifest
+      resource_packaged_apk_path = _incremental_resource_packaged_apk_path
+    }
+
+    package_target = "${target_name}__package"
+    package_apk(package_target) {
+      forward_variables_from(invoker,
+                             [
+                               "assets_build_config",
+                               "native_lib_placeholders",
+                               "native_libs_filearg",
+                               "secondary_abi_native_libs_filearg",
+                               "secondary_native_libs",
+                               "uncompress_shared_libraries",
+                               "write_asset_list",
+                             ])
+      if (!defined(uncompress_shared_libraries)) {
+        uncompress_shared_libraries = _load_library_from_apk
+      }
+      deps = _deps + [ ":${_package_resources_target_name}" ]
+      native_libs = _native_libs + _native_libs_even_when_incremental
+
+      if (defined(_dex_path)) {
+        dex_path = _dex_path
+      }
+
+      output_apk_path = _packaged_apk_path
+      resource_packaged_apk_path = _resource_packaged_apk_path
+    }
+
+    _incremental_package_target = "${target_name}_incremental__package"
+    package_apk(_incremental_package_target) {
+      forward_variables_from(invoker,
+                             [
+                               "assets_build_config",
+                               "secondary_native_libs",
+                               "uncompress_shared_libraries",
+                             ])
+      if (!defined(uncompress_shared_libraries)) {
+        uncompress_shared_libraries = _load_library_from_apk
+      }
+      # The incremental apk ships only the bootstrap dex; the real dex files
+      # are side-loaded by the incremental-install machinery.
+      _dex_target = "//build/android/incremental_install:bootstrap_java__dex"
+      deps = _incremental_deps + [
+               ":${_incremental_package_resources_target_name}",
+               _dex_target,
+             ]
+
+      if (defined(_dex_path)) {
+        dex_path =
+            get_label_info(_dex_target, "target_gen_dir") + "/bootstrap.dex"
+      }
+
+      native_libs = _native_libs_even_when_incremental
+
+      # http://crbug.com/384638
+      _has_native_libs =
+          defined(invoker.native_libs_filearg) || _native_libs != []
+      if (_has_native_libs && _native_libs_even_when_incremental == []) {
+        native_lib_placeholders = [ "libfix.crbug.384638.so" ]
+      }
+
+      output_apk_path = _incremental_packaged_apk_path
+      resource_packaged_apk_path = _incremental_resource_packaged_apk_path
+    }
+
+    _finalize_apk_rule_name = "${target_name}__finalize"
+    finalize_apk(_finalize_apk_rule_name) {
+      input_apk_path = _packaged_apk_path
+      output_apk_path = _final_apk_path
+      keystore_path = _keystore_path
+      keystore_name = _keystore_name
+      keystore_password = _keystore_password
+
+      public_deps = [
+        # Generator of the _packaged_apk_path this target takes as input.
+        ":$package_target",
+      ]
+    }
+
+    _incremental_finalize_apk_rule_name = "${target_name}_incremental__finalize"
+    finalize_apk(_incremental_finalize_apk_rule_name) {
+      input_apk_path = _incremental_packaged_apk_path
+      output_apk_path = _incremental_final_apk_path
+      keystore_path = _keystore_path
+      keystore_name = _keystore_name
+      keystore_password = _keystore_password
+
+      public_deps = [
+        ":$_incremental_package_target",
+      ]
+    }
+
+    _split_deps = []
+
+    # Signs/zipaligns one split .ap_ produced by package_resources; the split
+    # apk's name embeds the split type ("density"/"lang") and config value.
+    template("finalize_split") {
+      finalize_apk(target_name) {
+        _config = invoker.split_config
+        _type = invoker.split_type
+        input_apk_path = "${_resource_packaged_apk_path}_${_config}"
+        _output_paths = process_file_template(
+                [ _final_apk_path ],
+                "{{source_dir}}/{{source_name_part}}-${_type}-${_config}.apk")
+        output_apk_path = _output_paths[0]
+        keystore_path = _keystore_path
+        keystore_name = _keystore_name
+        keystore_password = _keystore_password
+        deps = [
+          ":${_package_resources_target_name}",
+        ]
+      }
+    }
+
+    foreach(_split, _split_densities) {
+      _split_rule = "${target_name}__finalize_${_split}_split"
+      finalize_split(_split_rule) {
+        split_type = "density"
+        split_config = _split
+      }
+      _split_deps += [ ":$_split_rule" ]
+    }
+    foreach(_split, _split_languages) {
+      _split_rule = "${target_name}__finalize_${_split}_split"
+      finalize_split(_split_rule) {
+        split_type = "lang"
+        split_config = _split
+      }
+      _split_deps += [ ":$_split_rule" ]
+    }
+
+    # Public entry points: the finalized apk (plus splits) and its
+    # incremental counterpart.
+    group(target_name) {
+      public_deps = [ ":${_finalize_apk_rule_name}" ] + _split_deps
+    }
+    group("${target_name}_incremental") {
+      public_deps = [ ":${_incremental_finalize_apk_rule_name}" ] + _split_deps
+    }
+  }
+
+  # Wraps a prebuilt .jar as a java library target.
+  #
+  # Produces a processed copy of the jar (filtered per jar_excluded_patterns /
+  # strip_resource_classes), an interface (.interface.jar) jar, and, when
+  # invoker.supports_android is true, a .dex.jar. If invoker.main_class is
+  # defined, a wrapper-script target for running the jar is also generated.
+  #
+  # Variables (from invoker):
+  #   jar_path: Path to the prebuilt jar (required).
+  #   output_name: Base name for the output jars; defaults to the name part
+  #     of jar_path.
+  #   supports_android / requires_android: Platform flags recorded in the
+  #     generated .build_config.
+  template("java_prebuilt_impl") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+    _supports_android =
+        defined(invoker.supports_android) && invoker.supports_android
+
+    assert(defined(invoker.jar_path))
+    if (defined(invoker.output_name)) {
+      _output_name = invoker.output_name
+    } else {
+      _output_name = get_path_info(invoker.jar_path, "name")
+    }
+    _base_path = "${target_gen_dir}/$target_name"
+
+    # Jar files can be needed at runtime (by Robolectric tests or java binaries),
+    # so do not put them under gen/.
+    _target_dir_name = get_label_info(":$target_name", "dir")
+    _jar_path = "$root_out_dir/lib.java$_target_dir_name/$_output_name.jar"
+    _ijar_path =
+        "$root_out_dir/lib.java$_target_dir_name/$_output_name.interface.jar"
+    _build_config = _base_path + ".build_config"
+
+    if (_supports_android) {
+      _dex_path = _base_path + ".dex.jar"
+    }
+    _deps = []
+    if (defined(invoker.deps)) {
+      _deps = invoker.deps
+    }
+    _jar_deps = []
+    if (defined(invoker.jar_dep)) {
+      _jar_deps = [ invoker.jar_dep ]
+    }
+
+    _template_name = target_name
+
+    _build_config_target_name = "${_template_name}__build_config"
+    _process_jar_target_name = "${_template_name}__process_jar"
+    _ijar_target_name = "${_template_name}__ijar"
+    if (_supports_android) {
+      _dex_target_name = "${_template_name}__dex"
+    }
+
+    # Record this prebuilt's metadata for consumption by dependent targets.
+    write_build_config(_build_config_target_name) {
+      type = "java_prebuilt"
+      is_prebuilt_binary = defined(invoker.main_class)
+      forward_variables_from(invoker,
+                             [
+                               "input_jars_paths",
+                               "proguard_configs",
+                             ])
+      supports_android = _supports_android
+      requires_android =
+          defined(invoker.requires_android) && invoker.requires_android
+
+      if (defined(invoker.deps)) {
+        possible_config_deps = _deps
+      }
+      build_config = _build_config
+      jar_path = _jar_path
+      if (_supports_android) {
+        dex_path = _dex_path
+      }
+      if (defined(invoker.include_java_resources) &&
+          invoker.include_java_resources) {
+        # Use original jar_path because _jar_path points to a library without
+        # resources.
+        java_resources_jar = invoker.jar_path
+      }
+    }
+
+    # Filter/copy the prebuilt jar into its final lib.java location.
+    process_java_prebuilt(_process_jar_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "jar_excluded_patterns",
+                               "strip_resource_classes",
+                             ])
+
+      visibility = [
+        ":$_ijar_target_name",
+        ":$_template_name",
+      ]
+      if (_supports_android) {
+        visibility += [ ":$_dex_target_name" ]
+      }
+
+      supports_android = _supports_android
+      build_config = _build_config
+      input_jar_path = invoker.jar_path
+      output_jar_path = _jar_path
+
+      deps = [ ":$_build_config_target_name" ] + _deps + _jar_deps
+    }
+
+    generate_interface_jar(_ijar_target_name) {
+      # Always use the unfiltered .jar to create the interface jar so that
+      # other targets will resolve filtered classes when depending on
+      # BuildConfig, NativeLibraries, etc.
+      input_jar = invoker.jar_path
+      deps = _deps + _jar_deps
+      output_jar = _ijar_path
+    }
+
+    if (_supports_android) {
+      # Dex the processed (filtered) jar, not the original.
+      dex(_dex_target_name) {
+        sources = [
+          _jar_path,
+        ]
+        output = _dex_path
+        deps = [ ":$_process_jar_target_name" ] + _deps + _jar_deps
+      }
+    }
+
+    if (defined(invoker.main_class)) {
+      _binary_script_target_name = "${_template_name}__java_binary_script"
+      java_binary_script(_binary_script_target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "bootclasspath",
+                                 "deps",
+                                 "main_class",
+                                 "wrapper_script_args",
+                               ])
+        if (!defined(deps)) {
+          deps = []
+        }
+        build_config = _build_config
+        jar_path = _jar_path
+        script_name = _template_name
+        if (defined(invoker.wrapper_script_name)) {
+          script_name = invoker.wrapper_script_name
+        }
+        deps += [ ":$_build_config_target_name" ]
+      }
+    }
+
+    # Top-level group that callers actually depend on.
+    group(target_name) {
+      forward_variables_from(invoker, [ "data_deps" ])
+      public_deps = [
+        ":$_ijar_target_name",
+        ":$_process_jar_target_name",
+      ]
+      if (_supports_android) {
+        public_deps += [ ":$_dex_target_name" ]
+      }
+      if (defined(invoker.main_class)) {
+        # Some targets use the generated script while building, so make it a dep
+        # rather than a data_dep.
+        public_deps += [ ":$_binary_script_target_name" ]
+      }
+    }
+  }
+
+  # Compiles and jars a set of java files.
+  #
+  # Outputs:
+  #  $jar_path.jar
+  #  $jar_path.interface.jar
+  #
+  # Variables
+  #   java_files: List of .java files to compile (same as exists in java_sources_file)
+  #   java_sources_file: Path to file containing list of files to compile.
+  #   chromium_code: If true, enable extra warnings.
+  #   srcjar_deps: List of srcjar dependencies. The .java files contained in the
+  #     dependencies srcjar outputs will be compiled and added to the output jar.
+  #   jar_path: Use this to explicitly set the output jar path. Defaults to
+  #     "${target_gen_dir}/${target_name}.jar".
+  template("compile_java") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    assert(defined(invoker.build_config))
+    assert(defined(invoker.jar_path))
+
+    _build_config = invoker.build_config
+
+    _chromium_code = false
+    if (defined(invoker.chromium_code)) {
+      _chromium_code = invoker.chromium_code
+    }
+
+    _supports_android = true
+    if (defined(invoker.supports_android)) {
+      _supports_android = invoker.supports_android
+    }
+
+    _requires_android =
+        defined(invoker.requires_android) && invoker.requires_android
+
+    # Errorprone is only ever enabled for chromium_code; an explicit
+    # invoker.enable_errorprone can further override the build arg.
+    _enable_errorprone = use_errorprone_java_compiler
+    if (!_chromium_code) {
+      _enable_errorprone = false
+    } else if (defined(invoker.enable_errorprone)) {
+      _enable_errorprone = invoker.enable_errorprone
+    }
+
+    _provider_configurations = []
+    if (defined(invoker.provider_configurations)) {
+      _provider_configurations = invoker.provider_configurations
+    }
+
+    # Annotation processors disable interface-jar classpaths for javac.
+    _processors = []
+    _enable_interface_jars_javac = true
+    if (defined(invoker.processors_javac)) {
+      _processors = invoker.processors_javac
+      _enable_interface_jars_javac = _processors == []
+    }
+
+    _processor_args = []
+    if (defined(invoker.processor_args_javac)) {
+      _processor_args = invoker.processor_args_javac
+    }
+
+    _additional_jar_files = []
+    if (defined(invoker.additional_jar_files)) {
+      _additional_jar_files = invoker.additional_jar_files
+    }
+
+    if (defined(invoker.enable_incremental_javac_override)) {
+      # Use invoker-specified override.
+      _enable_incremental_javac = invoker.enable_incremental_javac_override
+    } else {
+      # Default to build arg if not overridden.
+      _enable_incremental_javac = enable_incremental_javac
+    }
+
+    _manifest_entries = []
+    if (defined(invoker.manifest_entries)) {
+      _manifest_entries = invoker.manifest_entries
+    }
+
+    _srcjar_deps = []
+    if (defined(invoker.srcjar_deps)) {
+      _srcjar_deps += invoker.srcjar_deps
+    }
+
+    # Each srcjar dep is assumed to produce "$gen_dir/$name.srcjar".
+    _java_srcjars = []
+    if (defined(invoker.srcjars)) {
+      _java_srcjars = invoker.srcjars
+    }
+    foreach(dep, _srcjar_deps) {
+      _dep_gen_dir = get_label_info(dep, "target_gen_dir")
+      _dep_name = get_label_info(dep, "name")
+      _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
+    }
+
+    # Mark srcjar_deps as used.
+    assert(_srcjar_deps == [] || true)
+
+    _javac_target_name = "${target_name}__javac"
+    _process_prebuilt_target_name = "${target_name}__process_prebuilt"
+    _ijar_target_name = "${target_name}__ijar"
+    _final_target_name = target_name
+
+    _final_jar_path = invoker.jar_path
+    _javac_jar_path = "$target_gen_dir/$target_name.javac.jar"
+    _process_prebuilt_jar_path = _final_jar_path
+    _final_ijar_path = get_path_info(_final_jar_path, "dir") + "/" +
+                       get_path_info(_final_jar_path, "name") + ".interface.jar"
+
+    # With emma instrumentation the processed jar becomes an intermediate
+    # and the instrumented jar takes the final path instead.
+    _emma_instrument = defined(invoker.emma_instrument) &&
+                       invoker.emma_instrument && invoker.java_files != []
+    if (_emma_instrument) {
+      _emma_instr_target_name = "${target_name}__emma_instr"
+      _process_prebuilt_jar_path =
+          "$target_gen_dir/$target_name.process_prebuilt.jar"
+    }
+
+    _rebased_build_config = rebase_path(_build_config, root_build_dir)
+    _rebased_jar_path = rebase_path(_javac_jar_path, root_build_dir)
+
+    # Run javac.py over the .java files and srcjars to produce the javac jar.
+    action(_javac_target_name) {
+      script = "//build/android/gyp/javac.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      deps = _srcjar_deps
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+
+      outputs = [
+        _javac_jar_path,
+        _javac_jar_path + ".md5.stamp",
+      ]
+      sources = invoker.java_files + _java_srcjars
+      inputs = [
+        _build_config,
+      ]
+      if (invoker.java_files != []) {
+        inputs += [ invoker.java_sources_file ]
+      }
+
+      _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir)
+      _rebased_depfile = rebase_path(depfile, root_build_dir)
+      args = [
+        "--depfile=$_rebased_depfile",
+        "--jar-path=$_rebased_jar_path",
+        "--java-srcjars=$_rebased_java_srcjars",
+        "--java-srcjars=@FileArg($_rebased_build_config:javac:srcjars)",
+      ]
+      if (_enable_interface_jars_javac) {
+        args += [ "--classpath=@FileArg($_rebased_build_config:javac:interface_classpath)" ]
+      } else {
+        args +=
+            [ "--classpath=@FileArg($_rebased_build_config:javac:classpath)" ]
+      }
+      if (_enable_incremental_javac) {
+        args += [ "--incremental" ]
+        deps += [ "//third_party/jmake($default_toolchain)" ]
+        inputs += [ "$root_build_dir/bin/jmake" ]
+        outputs += [ "${_javac_jar_path}.pdb" ]
+      }
+      if (_requires_android) {
+        if (defined(invoker.alternative_android_sdk_ijar)) {
+          deps += [ invoker.alternative_android_sdk_ijar_dep ]
+          _android_sdk_ijar = invoker.alternative_android_sdk_ijar
+        } else {
+          deps += [ "//build/android:android_ijar" ]
+          _android_sdk_ijar = "$root_out_dir/lib.java/android.interface.jar"
+        }
+        inputs += [ _android_sdk_ijar ]
+        _rebased_android_sdk_ijar =
+            rebase_path(_android_sdk_ijar, root_build_dir)
+        args += [ "--bootclasspath=$_rebased_android_sdk_ijar" ]
+      }
+      if (use_java8) {
+        args += [ "--java-version=1.8" ]
+      } else if (_supports_android) {
+        args += [ "--java-version=1.7" ]
+      }
+      foreach(e, _manifest_entries) {
+        args += [ "--manifest-entry=" + e ]
+      }
+      if (_chromium_code) {
+        args += [ "--chromium-code=1" ]
+      }
+      if (_enable_errorprone) {
+        deps += [ "//third_party/errorprone:chromium_errorprone" ]
+        args += [
+          "--use-errorprone-path",
+          "bin/chromium_errorprone",
+        ]
+      }
+      foreach(e, _provider_configurations) {
+        args += [ "--provider-configuration=" + rebase_path(e, root_build_dir) ]
+      }
+      foreach(e, _processors) {
+        args += [ "--processor=" + e ]
+      }
+      foreach(e, _processor_args) {
+        args += [ "--processor-arg=" + e ]
+      }
+      foreach(file_tuple, _additional_jar_files) {
+        # Each element is of length two, [ path_to_file, path_to_put_in_jar ]
+        inputs += [ file_tuple[0] ]
+        args +=
+            [ "--additional-jar-file=" + file_tuple[0] + ":" + file_tuple[1] ]
+      }
+      if (invoker.java_files != []) {
+        args += [ "@" + rebase_path(invoker.java_sources_file, root_build_dir) ]
+      }
+    }
+
+    # Filter the javac output into the (possibly intermediate) processed jar.
+    process_java_prebuilt(_process_prebuilt_target_name) {
+      forward_variables_from(invoker, [ "jar_excluded_patterns" ])
+      supports_android = _supports_android
+      build_config = _build_config
+      input_jar_path = _javac_jar_path
+      output_jar_path = _process_prebuilt_jar_path
+
+      deps = [
+        ":$_javac_target_name",
+      ]
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+    }
+
+    if (_emma_instrument) {
+      # Instrument the processed jar for coverage; writes the final jar.
+      emma_instr(_emma_instr_target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "deps",
+                                 "java_files",
+                                 "java_sources_file",
+                               ])
+
+        input_jar_path = _process_prebuilt_jar_path
+        output_jar_path = _final_jar_path
+
+        if (!defined(deps)) {
+          deps = []
+        }
+        deps += [ ":$_process_prebuilt_target_name" ]
+      }
+    }
+
+    generate_interface_jar(_ijar_target_name) {
+      # Always use the unfiltered .jar to create the interface jar so that
+      # other targets will resolve filtered classes when depending on
+      # BuildConfig, NativeLibraries, etc.
+      input_jar = _javac_jar_path
+      deps = [
+        ":$_javac_target_name",
+      ]
+      output_jar = _final_ijar_path
+    }
+
+    group(_final_target_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      public_deps = [
+        ":$_ijar_target_name",
+        ":$_javac_target_name",
+      ]
+      if (_emma_instrument) {
+        public_deps += [ ":$_emma_instr_target_name" ]
+      } else {
+        public_deps += [ ":$_process_prebuilt_target_name" ]
+      }
+    }
+  }
+
+  # Shared implementation behind java_library-style targets.
+  #
+  # Writes the target's .build_config, compiles its sources via compile_java,
+  # and, for Android-supporting targets, adds dex plus (for chromium_code)
+  # lint and optional findbugs analysis steps.
+  template("java_library_impl") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+    _accumulated_deps = []
+    if (defined(invoker.deps)) {
+      _accumulated_deps = invoker.deps
+    }
+
+    # Caller overriding build config must have valid java sources file if it has
+    # java files.
+    assert(!defined(invoker.override_build_config) ||
+           !defined(invoker.java_files) || defined(invoker.java_sources_file))
+
+    assert(defined(invoker.java_files) || defined(invoker.srcjars) ||
+           defined(invoker.srcjar_deps))
+    _base_path = "$target_gen_dir/$target_name"
+
+    if (defined(invoker.output_name)) {
+      _output_name = invoker.output_name
+    } else {
+      _output_name = target_name
+    }
+
+    # Jar files can be needed at runtime (by Robolectric tests or java binaries),
+    # so do not put them under gen/.
+    target_dir_name = get_label_info(":$target_name", "dir")
+    _jar_path = "$root_out_dir/lib.java$target_dir_name/$_output_name.jar"
+    if (defined(invoker.jar_path)) {
+      _jar_path = invoker.jar_path
+    }
+    _template_name = target_name
+
+    _final_deps = []
+
+    _supports_android =
+        defined(invoker.supports_android) && invoker.supports_android
+    _requires_android =
+        defined(invoker.requires_android) && invoker.requires_android
+    assert(_requires_android || true)  # Mark as used.
+    _android_manifest = "//build/android/AndroidManifest.xml"
+    if (defined(invoker.android_manifest)) {
+      _android_manifest = invoker.android_manifest
+    }
+    assert(_android_manifest != "")  # Mark as used.
+
+    if (defined(invoker.run_findbugs_override)) {
+      _run_findbugs = invoker.run_findbugs_override
+    } else {
+      _run_findbugs = run_findbugs  # Default to build arg if not overridden.
+    }
+    assert(_run_findbugs || true)  # Mark as used.
+
+    # Don't enable coverage, lint, findbugs unless the target has some
+    # non-generated files.
+    if (defined(invoker.chromium_code)) {
+      _chromium_code = invoker.chromium_code
+    } else {
+      _chromium_code = defined(invoker.java_files) && invoker.java_files != []
+      if (_chromium_code) {
+        # Make chromium_code = false be the default for targets within
+        # third_party which contain no chromium-namespaced java files.
+        # This (ab)uses the sources assignment filter purely as a pattern
+        # matcher; sources is reset to [] afterwards.
+        set_sources_assignment_filter([ "*\bthird_party\b*" ])
+        sources = [
+          get_label_info(":$target_name", "dir"),
+        ]
+        if (sources == []) {
+          set_sources_assignment_filter([ "*\bchromium\b*" ])
+          sources = invoker.java_files
+          _chromium_code = invoker.java_files != sources
+        }
+        set_sources_assignment_filter([])
+        sources = []
+      }
+    }
+
+    _emma_never_instrument = !_chromium_code
+    if (defined(invoker.emma_never_instrument)) {
+      _emma_never_instrument = invoker.emma_never_instrument
+    }
+    assert(_emma_never_instrument || true)  # Mark as used
+    _emma_instrument = emma_coverage && !_emma_never_instrument
+
+    if (_supports_android) {
+      _dex_path = _base_path + ".dex.jar"
+      if (defined(invoker.dex_path)) {
+        _dex_path = invoker.dex_path
+      }
+    }
+
+    _java_files = []
+    if (defined(invoker.java_files)) {
+      _java_files += invoker.java_files
+    }
+    if (_java_files != []) {
+      if (defined(invoker.java_sources_file)) {
+        _java_sources_file = invoker.java_sources_file
+      } else {
+        _java_sources_file = "$_base_path.sources"
+      }
+      write_file(_java_sources_file, rebase_path(_java_files, root_build_dir))
+    }
+
+    # Define build_config_deps which will be a list of targets required to
+    # build the _build_config.
+    if (defined(invoker.override_build_config)) {
+      _build_config = invoker.override_build_config
+    } else {
+      _build_config = _base_path + ".build_config"
+      build_config_target_name = "${_template_name}__build_config"
+
+      write_build_config(build_config_target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "alternative_android_sdk_ijar",
+                                 "gradle_treat_as_prebuilt",
+                                 "input_jars_paths",
+                                 "main_class",
+                                 "proguard_configs",
+                               ])
+        if (defined(invoker.is_java_binary) && invoker.is_java_binary) {
+          type = "java_binary"
+        } else {
+          type = "java_library"
+        }
+        if (defined(invoker.deps)) {
+          possible_config_deps = invoker.deps
+        }
+        supports_android = _supports_android
+        requires_android = _requires_android
+        bypass_platform_checks = defined(invoker.bypass_platform_checks) &&
+                                 invoker.bypass_platform_checks
+
+        build_config = _build_config
+        jar_path = _jar_path
+        if (_supports_android) {
+          dex_path = _dex_path
+        }
+        if (_java_files != []) {
+          java_sources_file = _java_sources_file
+        }
+
+        if (defined(invoker.srcjar_deps)) {
+          bundled_srcjars = []
+          foreach(d, invoker.srcjar_deps) {
+            _dep_gen_dir = get_label_info(d, "target_gen_dir")
+            _dep_name = get_label_info(d, "name")
+            bundled_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
+          }
+        }
+      }
+      _accumulated_deps += [ ":$build_config_target_name" ]
+    }
+
+    _srcjar_deps = []
+    if (defined(invoker.srcjar_deps)) {
+      _srcjar_deps = invoker.srcjar_deps
+    }
+
+    _srcjars = []
+    if (defined(invoker.srcjars)) {
+      _srcjars = invoker.srcjars
+    }
+
+    assert(_java_files != [] || _srcjar_deps != [] || _srcjars != [])
+
+    _compile_java_target = "${_template_name}__compile_java"
+    _final_deps += [ ":$_compile_java_target" ]
+    compile_java(_compile_java_target) {
+      forward_variables_from(invoker,
+                             [
+                               "additional_jar_files",
+                               "alternative_android_sdk_ijar",
+                               "alternative_android_sdk_ijar_dep",
+                               "dist_jar_path",
+                               "enable_errorprone",
+                               "enable_incremental_javac_override",
+                               "jar_excluded_patterns",
+                               "manifest_entries",
+                               "processors_javac",
+                               "processor_args_javac",
+                               "provider_configurations",
+                             ])
+      jar_path = _jar_path
+      build_config = _build_config
+      java_files = _java_files
+      if (_java_files != []) {
+        java_sources_file = _java_sources_file
+      }
+      srcjar_deps = _srcjar_deps
+      srcjars = _srcjars
+      chromium_code = _chromium_code
+      supports_android = _supports_android
+      requires_android = _requires_android
+      emma_instrument = _emma_instrument
+      deps = _accumulated_deps
+    }
+    _accumulated_deps += [ ":$_compile_java_target" ]
+    assert(_accumulated_deps != [])  # Mark used.
+
+    if (defined(invoker.main_class)) {
+      # Targets might use the generated script while building, so make it a dep
+      # rather than a data_dep.
+      _final_deps += [ ":${_template_name}__java_binary_script" ]
+      java_binary_script("${_template_name}__java_binary_script") {
+        forward_variables_from(invoker,
+                               [
+                                 "bootclasspath",
+                                 "main_class",
+                                 "wrapper_script_args",
+                               ])
+        build_config = _build_config
+        jar_path = _jar_path
+        script_name = _template_name
+        if (defined(invoker.wrapper_script_name)) {
+          script_name = invoker.wrapper_script_name
+        }
+        deps = _accumulated_deps
+      }
+    }
+
+    _has_lint_target = false
+    if (_supports_android) {
+      if (_chromium_code) {
+        _has_lint_target = true
+        android_lint("${_template_name}__lint") {
+          android_manifest = _android_manifest
+          build_config = _build_config
+
+          # Run lint on javac output.
+          jar_path = "$target_gen_dir/$_compile_java_target.javac.jar"
+
+          java_files = _java_files
+          if (_java_files != []) {
+            java_sources_file = _java_sources_file
+          }
+          deps = _accumulated_deps
+          if (_emma_instrument) {
+            # Disable the NewApi lint warning when building with coverage
+            # enabled. Coverage seems to mess with how the linter detects
+            # the usages of a new API within a conditional. See
+            # crbug.com/677320 for more.
+            disable = [ "NewApi" ]
+          }
+        }
+
+        if (_run_findbugs) {
+          findbugs("${_template_name}__findbugs") {
+            build_config = _build_config
+            jar_path = _jar_path
+            deps = _accumulated_deps
+          }
+        }
+
+        # Use an intermediate group() rather than as the data_deps target in
+        # order to avoid lint artifacts showing up as runtime_deps (while still
+        # having lint run in parallel to other targets).
+        group("${_template_name}__analysis") {
+          public_deps = [
+            ":${_template_name}__lint",
+          ]
+          if (_run_findbugs) {
+            public_deps += [ ":${_template_name}__findbugs" ]
+          }
+        }
+      }
+
+      _final_deps += [ ":${_template_name}__dex" ]
+      dex("${_template_name}__dex") {
+        sources = [
+          _jar_path,
+        ]
+        output = _dex_path
+        deps = [
+          ":$_compile_java_target",
+        ]
+      }
+    }
+
+    group(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "data",
+                               "data_deps",
+                               "visibility",
+                             ])
+      if (!defined(data_deps)) {
+        data_deps = []
+      }
+      public_deps = _final_deps
+      if (_has_lint_target) {
+        data_deps += [ ":${_template_name}__analysis" ]
+      }
+    }
+  }
+
+  # Runs process_resources.py to generate an R.java srcjar, a resources zip
+  # and an R.txt file from the invoker's resource directories (plus any
+  # generated_resource_dirs) and AndroidManifest.xml.
+  template("process_resources") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    zip_path = invoker.zip_path
+    srcjar_path = invoker.srcjar_path
+    r_text_out_path = invoker.r_text_out_path
+    build_config = invoker.build_config
+    android_manifest = invoker.android_manifest
+
+    non_constant_id = true
+    if (defined(invoker.generate_constant_ids) &&
+        invoker.generate_constant_ids) {
+      non_constant_id = false
+    }
+
+    action(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "visibility",
+                             ])
+      script = "//build/android/gyp/process_resources.py"
+
+      depfile = "$target_gen_dir/$target_name.d"
+      outputs = [
+        zip_path,
+        srcjar_path,
+        r_text_out_path,
+      ]
+
+      _all_resource_dirs = []
+      sources = []
+
+      if (defined(invoker.resource_dirs)) {
+        _all_resource_dirs += invoker.resource_dirs
+
+        # Speed up "gn gen" by short-circuiting the empty directory.
+        if (invoker.resource_dirs != [ "//build/android/ant/empty/res" ] &&
+            invoker.resource_dirs != []) {
+          _sources_build_rel =
+              exec_script("//build/android/gyp/find.py",
+                          rebase_path(invoker.resource_dirs, root_build_dir),
+                          "list lines")
+          sources += rebase_path(_sources_build_rel, ".", root_build_dir)
+        }
+      }
+
+      if (defined(invoker.generated_resource_dirs)) {
+        assert(defined(invoker.generated_resource_files))
+        _all_resource_dirs += invoker.generated_resource_dirs
+        sources += invoker.generated_resource_files
+      }
+
+      inputs = [
+        build_config,
+        android_manifest,
+      ]
+
+      _rebased_all_resource_dirs =
+          rebase_path(_all_resource_dirs, root_build_dir)
+      rebase_build_config = rebase_path(build_config, root_build_dir)
+
+      if (defined(invoker.android_aapt_path)) {
+        _android_aapt_path = invoker.android_aapt_path
+      } else {
+        _android_aapt_path = android_default_aapt_path
+      }
+
+      if (defined(invoker.alternative_android_sdk_jar)) {
+        _rebased_android_sdk_jar =
+            rebase_path(invoker.alternative_android_sdk_jar)
+      } else {
+        _rebased_android_sdk_jar = rebased_android_sdk_jar
+      }
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--android-sdk-jar",
+        _rebased_android_sdk_jar,
+        "--aapt-path",
+        _android_aapt_path,
+        "--android-manifest",
+        rebase_path(android_manifest, root_build_dir),
+        "--resource-dirs=$_rebased_all_resource_dirs",
+        "--srcjar-out",
+        rebase_path(srcjar_path, root_build_dir),
+        "--resource-zip-out",
+        rebase_path(zip_path, root_build_dir),
+        "--r-text-out",
+        rebase_path(r_text_out_path, root_build_dir),
+        "--dependencies-res-zips=@FileArg($rebase_build_config:resources:dependency_zips)",
+        "--extra-res-packages=@FileArg($rebase_build_config:resources:extra_package_names)",
+        "--extra-r-text-files=@FileArg($rebase_build_config:resources:extra_r_text_files)",
+      ]
+
+      if (defined(invoker.r_text_in_path)) {
+        _r_text_in_path = invoker.r_text_in_path
+        inputs += [ _r_text_in_path ]
+        args += [
+          "--r-text-in",
+          rebase_path(_r_text_in_path, root_build_dir),
+        ]
+      }
+
+      if (non_constant_id) {
+        args += [ "--non-constant-id" ]
+      }
+
+      if (defined(invoker.custom_package)) {
+        args += [
+          "--custom-package",
+          invoker.custom_package,
+        ]
+      }
+
+      if (defined(invoker.v14_skip) && invoker.v14_skip) {
+        args += [ "--v14-skip" ]
+      }
+
+      if (defined(invoker.shared_resources) && invoker.shared_resources) {
+        args += [ "--shared-resources" ]
+      }
+
+      if (defined(invoker.app_as_shared_lib) && invoker.app_as_shared_lib) {
+        args += [ "--app-as-shared-lib" ]
+      }
+
+      if (defined(invoker.include_all_resources) &&
+          invoker.include_all_resources) {
+        args += [ "--include-all-resources" ]
+      }
+
+      if (defined(invoker.all_resources_zip_path)) {
+        all_resources_zip = invoker.all_resources_zip_path
+        outputs += [ all_resources_zip ]
+        args += [
+          "--all-resources-zip-out",
+          rebase_path(all_resources_zip, root_build_dir),
+        ]
+      }
+
+      if (defined(invoker.proguard_file)) {
+        outputs += [ invoker.proguard_file ]
+        args += [
+          "--proguard-file",
+          rebase_path(invoker.proguard_file, root_build_dir),
+        ]
+      }
+
+      if (defined(invoker.proguard_file_main_dex)) {
+        outputs += [ invoker.proguard_file_main_dex ]
+        args += [
+          "--proguard-file-main-dex",
+          rebase_path(invoker.proguard_file_main_dex, root_build_dir),
+        ]
+      }
+
+      if (defined(invoker.args)) {
+        args += invoker.args
+      }
+    }
+  }
+
+  # Produces a single .dex.jar out of a set of Java dependencies.
+  #
+  # Variables (from invoker):
+  #   dex_path: Output path for the combined .dex.jar (required).
+  #   deps: Java targets whose dex files are merged.
+  #   excluded_jars: Jars to exclude from the merge (optional).
+  template("deps_dex") {
+    set_sources_assignment_filter([])
+    build_config = "$target_gen_dir/${target_name}.build_config"
+    build_config_target_name = "${target_name}__build_config"
+
+    # The build config collects the dependency dex files to merge.
+    write_build_config(build_config_target_name) {
+      forward_variables_from(invoker, [ "dex_path" ])
+      if (defined(invoker.deps)) {
+        possible_config_deps = invoker.deps
+      }
+      type = "deps_dex"
+      build_config = build_config
+    }
+
+    rebased_build_config = rebase_path(build_config, root_build_dir)
+    dex(target_name) {
+      inputs = [
+        build_config,
+      ]
+      output = invoker.dex_path
+      dex_arg_key = "${rebased_build_config}:final_dex:dependency_dex_files"
+      args = [ "--inputs=@FileArg($dex_arg_key)" ]
+      if (defined(invoker.excluded_jars)) {
+        excluded_jars = rebase_path(invoker.excluded_jars, root_build_dir)
+        args += [ "--excluded-paths=${excluded_jars}" ]
+      }
+      deps = [
+        ":$build_config_target_name",
+      ]
+    }
+  }
+
+  # Creates an AndroidManifest.xml for an APK split.
+  #
+  # Variables (from invoker):
+  #   main_manifest: Manifest of the main APK (required).
+  #   out_manifest: Output path for the split's manifest (required).
+  #   split_name: Name of the split (required).
+  #   version_code / version_name / has_code: Optional values forwarded to
+  #     the generator script.
+  template("generate_split_manifest") {
+    assert(defined(invoker.main_manifest))
+    assert(defined(invoker.out_manifest))
+    assert(defined(invoker.split_name))
+
+    action(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "testonly",
+                             ])
+      depfile = "$target_gen_dir/$target_name.d"
+      args = [
+        "--main-manifest",
+        rebase_path(invoker.main_manifest, root_build_dir),
+        "--out-manifest",
+        rebase_path(invoker.out_manifest, root_build_dir),
+        "--split",
+        invoker.split_name,
+      ]
+      if (defined(invoker.version_code)) {
+        args += [
+          "--version-code",
+          invoker.version_code,
+        ]
+      }
+      if (defined(invoker.version_name)) {
+        args += [
+          "--version-name",
+          invoker.version_name,
+        ]
+      }
+      if (defined(invoker.has_code)) {
+        args += [
+          "--has-code",
+          invoker.has_code,
+        ]
+      }
+      args += [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+      ]
+
+      script = "//build/android/gyp/generate_split_manifest.py"
+      outputs = [
+        invoker.out_manifest,
+      ]
+      inputs = [
+        invoker.main_manifest,
+      ]
+    }
+  }
+
+  # Runs the Android relocation packer over a set of native libraries,
+  # writing packed copies into a per-target packed-libs directory and
+  # recording the results in file_list_json.
+  #
+  # Variables (from invoker):
+  #   file_list_json: Output JSON file listing the packed libraries (required).
+  #   libraries_filearg: @FileArg expression naming the input libraries
+  #     (required).
+  template("pack_relocation_section") {
+    assert(defined(invoker.file_list_json))
+    assert(defined(invoker.libraries_filearg))
+    action(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "public_deps",
+                               "inputs",
+                               "testonly",
+                             ])
+      script = "//build/android/gyp/pack_relocations.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      _packed_libraries_dir = "$target_gen_dir/$target_name/packed-libs"
+      outputs = [
+        invoker.file_list_json,
+      ]
+      deps += [ relocation_packer_target ]
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--enable-packing=1",
+        "--android-pack-relocations",
+        rebase_path(relocation_packer_exe, root_build_dir),
+        "--stripped-libraries-dir",
+        rebase_path(root_build_dir, root_build_dir),
+        "--packed-libraries-dir",
+        rebase_path(_packed_libraries_dir, root_build_dir),
+        "--libraries=${invoker.libraries_filearg}",
+        "--filelistjson",
+        rebase_path(invoker.file_list_json, root_build_dir),
+      ]
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/android/rules.gni
@@ -0,0 +1,2965 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Do not add any imports to non-//build directories here.
+# Some projects (e.g. V8) do not have non-build directories DEPS'ed in.
+import("//build/config/android/config.gni")
+import("//build/config/android/internal_rules.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/dcheck_always_on.gni")
+import("//build/toolchain/toolchain.gni")
+
+assert(is_android)
+
+# Creates a dist directory for a native executable.
+#
+# Running a native executable on a device requires all the shared library
+# dependencies of that executable. To make it easier to install and run such an
+# executable, this will create a directory containing the native exe and all
+# its library dependencies.
+#
+# Note: It's usually better to package things as an APK than as a native
+# executable.
+#
+# Variables
+#   dist_dir: Directory for the exe and libraries. Everything in this directory
+#     will be deleted before copying in the exe and libraries.
+#   binary: Path to (stripped) executable.
+#   extra_files: List of extra files to copy in (optional).
+#
+# Example
+#   create_native_executable_dist("foo_dist") {
+#     dist_dir = "$root_build_dir/foo_dist"
+#     binary = "$root_build_dir/foo"
+#     deps = [ ":the_thing_that_makes_foo" ]
+#   }
+template("create_native_executable_dist") {
+  forward_variables_from(invoker, [ "testonly" ])
+
+  _libraries_list = "${target_gen_dir}/${target_name}_library_dependencies.list"
+
+  _find_deps_target_name = "${target_name}__find_library_dependencies"
+
+  # TODO(agrieve): Extract dependent libs from GN rather than readelf.
+  action(_find_deps_target_name) {
+    forward_variables_from(invoker, [ "deps" ])
+
+    script = "//build/android/gyp/write_ordered_libraries.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = [
+      invoker.binary,
+      android_readelf,
+    ]
+    outputs = [
+      _libraries_list,
+    ]
+    rebased_binaries = rebase_path([ invoker.binary ], root_build_dir)
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--input-libraries=$rebased_binaries",
+      "--libraries-dir",
+      rebase_path(root_shlib_dir, root_build_dir),
+      "--output",
+      rebase_path(_libraries_list, root_build_dir),
+      "--readelf",
+      rebase_path(android_readelf, root_build_dir),
+    ]
+  }
+
+  copy_ex(target_name) {
+    clear_dir = true
+
+    inputs = [
+      _libraries_list,
+      invoker.binary,
+    ]
+
+    dest = invoker.dist_dir
+    data = [
+      "${invoker.dist_dir}/",
+    ]
+
+    _rebased_libraries_list = rebase_path(_libraries_list, root_build_dir)
+    _rebased_binaries_list = rebase_path([ invoker.binary ], root_build_dir)
+    args = [
+      "--files=@FileArg($_rebased_libraries_list:lib_paths)",
+      "--files=$_rebased_binaries_list",
+    ]
+    if (defined(invoker.extra_files)) {
+      _rebased_extra_files = rebase_path(invoker.extra_files, root_build_dir)
+      args += [ "--files=$_rebased_extra_files" ]
+    }
+
+    deps = [
+      ":$_find_deps_target_name",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+}
+
+# Writes a script to root_out_dir/bin that passes --output-directory to the
+# wrapped script, in addition to forwarding arguments. Most / all of these
+# wrappers should be made deps of //tools/android:android_tools.
+#
+# Variables
+#   target: Script to wrap.
+#   flag_name: Default is "--output-directory"
+#
+# Example
+#   wrapper_script("foo_wrapper") {
+#     target = "//pkg/foo.py"
+#   }
+template("wrapper_script") {
+  action(target_name) {
+    _name = get_path_info(invoker.target, "name")
+    _output = "$root_out_dir/bin/$_name"
+
+    script = "//build/android/gyp/create_tool_wrapper.py"
+    outputs = [
+      _output,
+    ]
+
+    # The target isn't actually used by the script, but it's nice to have GN
+    # check that it exists.
+    inputs = [
+      invoker.target,
+    ]
+    args = [
+      "--output",
+      rebase_path(_output, root_build_dir),
+      "--target",
+      rebase_path(invoker.target, root_build_dir),
+      "--output-directory",
+      rebase_path(root_out_dir, root_build_dir),
+    ]
+    if (defined(invoker.flag_name)) {
+      args += [ "--flag-name=${invoker.flag_name}" ]
+    }
+  }
+}
+
+if (enable_java_templates) {
+  import("//build/config/sanitizers/sanitizers.gni")
+  import("//tools/grit/grit_rule.gni")
+
+  # Declare a jni target
+  #
+  # This target generates the native jni bindings for a set of .java files.
+  #
+  # See base/android/jni_generator/jni_generator.py for more info about the
+  # format of generating JNI bindings.
+  #
+  # Variables
+  #   sources: list of .java files to generate jni for
+  #   jni_package: subdirectory path for generated bindings
+  #
+  # Example
+  #   generate_jni("foo_jni") {
+  #     sources = [
+  #       "android/java/src/org/chromium/foo/Foo.java",
+  #       "android/java/src/org/chromium/foo/FooUtil.java",
+  #     ]
+  #     jni_package = "foo"
+  #   }
+  template("generate_jni") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    assert(defined(invoker.sources))
+    assert(defined(invoker.jni_package))
+    jni_package = invoker.jni_package
+    base_output_dir = "${target_gen_dir}/${target_name}"
+    package_output_dir = "${base_output_dir}/${jni_package}"
+    jni_output_dir = "${package_output_dir}/jni"
+
+    jni_generator_include =
+        "//base/android/jni_generator/jni_generator_helper.h"
+
+    foreach_target_name = "${target_name}__jni_gen"
+    action_foreach(foreach_target_name) {
+      script = "//base/android/jni_generator/jni_generator.py"
+      depfile = "$target_gen_dir/$target_name.{{source_name_part}}.d"
+      sources = invoker.sources
+      outputs = [
+        "${jni_output_dir}/{{source_name_part}}_jni.h",
+      ]
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--input_file={{source}}",
+        "--optimize_generation=1",
+        "--ptr_type=long",
+        "--output_dir",
+        rebase_path(jni_output_dir, root_build_dir),
+        "--includes",
+        rebase_path(jni_generator_include, jni_output_dir),
+        "--native_exports_optional",
+      ]
+
+      if (enable_profiling) {
+        args += [ "--enable_profiling" ]
+      }
+    }
+
+    config("jni_includes_${target_name}") {
+      # TODO(cjhopman): #includes should probably all be relative to
+      # base_output_dir. Remove that from this config once the includes are
+      # updated.
+      include_dirs = [
+        base_output_dir,
+        package_output_dir,
+      ]
+    }
+
+    group(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "public_deps",
+                               "visibility",
+                             ])
+      if (!defined(public_deps)) {
+        public_deps = []
+      }
+      public_deps += [ ":$foreach_target_name" ]
+      public_configs = [ ":jni_includes_${target_name}" ]
+    }
+  }
+
+  # Declare a jni target for a prebuilt jar
+  #
+  # This target generates the native jni bindings for a set of classes in a .jar.
+  #
+  # See base/android/jni_generator/jni_generator.py for more info about the
+  # format of generating JNI bindings.
+  #
+  # Variables
+  #   classes: list of .class files in the jar to generate jni for. These should
+  #     include the full path to the .class file.
+  #   jni_package: subdirectory path for generated bindings
+  #   jar_file: the path to the .jar. If not provided, will default to the sdk's
+  #     android.jar
+  #
+  #   deps, public_deps: As normal
+  #
+  # Example
+  #   generate_jar_jni("foo_jni") {
+  #     classes = [
+  #       "android/view/Foo.class",
+  #     ]
+  #     jni_package = "foo"
+  #   }
+  template("generate_jar_jni") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    assert(defined(invoker.classes))
+    assert(defined(invoker.jni_package))
+
+    if (defined(invoker.jar_file)) {
+      jar_file = invoker.jar_file
+    } else {
+      jar_file = android_sdk_jar
+    }
+
+    jni_package = invoker.jni_package
+    base_output_dir = "${root_gen_dir}/${target_name}/${jni_package}"
+    jni_output_dir = "${base_output_dir}/jni"
+
+    jni_generator_include =
+        "//base/android/jni_generator/jni_generator_helper.h"
+
+    # TODO(cjhopman): make jni_generator.py support generating jni for multiple
+    # .class files from a .jar.
+    jni_actions = []
+    foreach(class, invoker.classes) {
+      _classname_list = []
+      _classname_list = process_file_template([ class ], "{{source_name_part}}")
+      classname = _classname_list[0]
+      jni_target_name = "${target_name}__jni_${classname}"
+      jni_actions += [ ":$jni_target_name" ]
+      action(jni_target_name) {
+        # The sources aren't compiled so don't check their dependencies.
+        check_includes = false
+        depfile = "$target_gen_dir/$target_name.d"
+        script = "//base/android/jni_generator/jni_generator.py"
+        sources = [
+          jar_file,
+        ]
+        outputs = [
+          "${jni_output_dir}/${classname}_jni.h",
+        ]
+
+        args = [
+          "--depfile",
+          rebase_path(depfile, root_build_dir),
+          "--jar_file",
+          rebase_path(jar_file, root_build_dir),
+          "--input_file",
+          class,
+          "--optimize_generation=1",
+          "--ptr_type=long",
+          "--output_dir",
+          rebase_path(jni_output_dir, root_build_dir),
+          "--includes",
+          rebase_path(jni_generator_include, jni_output_dir),
+          "--native_exports_optional",
+        ]
+
+        if (enable_profiling) {
+          args += [ "--enable_profiling" ]
+        }
+      }
+    }
+
+    config("jni_includes_${target_name}") {
+      include_dirs = [ base_output_dir ]
+    }
+
+    group(target_name) {
+      public_deps = []
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "public_deps",
+                               "visibility",
+                             ])
+      public_deps += jni_actions
+      public_configs = [ ":jni_includes_${target_name}" ]
+    }
+  }
+
+  # Declare a target for c-preprocessor-generated java files
+  #
+  # NOTE: For generating Java counterparts to enums prefer using the java_cpp_enum
+  #       rule instead.
+  #
+  # This target generates java files using the host C pre-processor. Each file in
+  # sources will be compiled using the C pre-processor. If include_path is
+  # specified, it will be passed (with --I) to the pre-processor.
+  #
+  # This target will create a single .srcjar. Adding this target to an
+  # android_library target's srcjar_deps will make the generated java files be
+  # included in that library's final outputs.
+  #
+  # Variables
+  #   sources: list of files to be processed by the C pre-processor. For each
+  #     file in sources, there will be one .java file in the final .srcjar. For a
+  #     file named FooBar.template, a java file will be created with name
+  #     FooBar.java.
+  #   inputs: additional compile-time dependencies. Any files
+  #     `#include`-ed in the templates should be listed here.
+  #   package_path: this will be the subdirectory for each .java file in the
+  #     .srcjar.
+  #
+  # Example
+  #   java_cpp_template("foo_generated_enum") {
+  #     sources = [
+  #       "android/java/templates/Foo.template",
+  #     ]
+  #     inputs = [
+  #       "android/java/templates/native_foo_header.h",
+  #     ]
+  #
+  #     package_path = "org/chromium/base/library_loader"
+  #     include_path = "android/java/templates"
+  #   }
+  template("java_cpp_template") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    _include_path = "//"
+    if (defined(invoker.include_path)) {
+      _include_path = invoker.include_path
+    }
+
+    _apply_gcc_target_name = "${target_name}__apply_gcc"
+    _base_gen_dir = "${target_gen_dir}/${target_name}/java_cpp_template"
+
+    if (defined(invoker.package_path)) {
+      package_path = invoker.package_path
+    } else {
+      # TODO(jbudorick): Back this out once all clients have been switched to
+      # package_path.
+      assert(defined(invoker.package_name))
+      package_path = invoker.package_name
+    }
+
+    action_foreach(_apply_gcc_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "public_deps",
+                               "data_deps",
+                             ])
+      script = "//build/android/gyp/gcc_preprocess.py"
+      if (defined(invoker.inputs)) {
+        inputs = invoker.inputs + []
+      }
+      depfile =
+          "${target_gen_dir}/${invoker.target_name}_{{source_name_part}}.d"
+
+      sources = invoker.sources
+
+      outputs = [
+        "$_base_gen_dir/${package_path}/{{source_name_part}}.java",
+      ]
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--include-path",
+        rebase_path(_include_path, root_build_dir),
+        "--output",
+        rebase_path(outputs[0], root_build_dir),
+        "--template={{source}}",
+      ]
+
+      if (defined(invoker.defines)) {
+        foreach(def, invoker.defines) {
+          args += [
+            "--defines",
+            def,
+          ]
+        }
+      }
+    }
+
+    # Filter out .d files.
+    set_sources_assignment_filter([ "*.d" ])
+    sources = get_target_outputs(":$_apply_gcc_target_name")
+
+    zip(target_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      inputs = sources
+      output = "${target_gen_dir}/${target_name}.srcjar"
+      base_dir = _base_gen_dir
+      deps = [
+        ":$_apply_gcc_target_name",
+      ]
+    }
+  }
+
+  # Declare a target for generating Java classes from C++ enums.
+  #
+  # This target generates Java files from C++ enums using a script.
+  #
+  # This target will create a single .srcjar. Adding this target to an
+  # android_library target's srcjar_deps will make the generated java files be
+  # included in that library's final outputs.
+  #
+  # Variables
+  #   sources: list of files to be processed by the script. For each annotated
+  #     enum contained in the sources files the script will generate a .java
+  #     file with the same name as the name of the enum.
+  #
+  # Example
+  #   java_cpp_enum("foo_generated_enum") {
+  #     sources = [
+  #       "src/native_foo_header.h",
+  #     ]
+  #   }
+  template("java_cpp_enum") {
+    action(target_name) {
+      # The sources aren't compiled so don't check their dependencies.
+      check_includes = false
+      set_sources_assignment_filter([])
+
+      assert(defined(invoker.sources))
+      forward_variables_from(invoker,
+                             [
+                               "sources",
+                               "testonly",
+                               "visibility",
+                             ])
+
+      script = "//build/android/gyp/java_cpp_enum.py"
+      depfile = "$target_gen_dir/$target_name.d"
+
+      _srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+      _rebased_srcjar_path = rebase_path(_srcjar_path, root_build_dir)
+      _rebased_sources = rebase_path(invoker.sources, root_build_dir)
+
+      args = [
+               "--depfile",
+               rebase_path(depfile, root_build_dir),
+               "--srcjar=$_rebased_srcjar_path",
+             ] + _rebased_sources
+      outputs = [
+        _srcjar_path,
+      ]
+    }
+  }
+
+  # Declare a target for processing a Jinja template.
+  #
+  # Variables
+  #   input: The template file to be processed.
+  #   output: Where to save the result.
+  #   variables: (Optional) A list of variables to make available to the template
+  #     processing environment, e.g. ["name=foo", "color=red"].
+  #
+  # Example
+  #   jinja_template("chrome_public_manifest") {
+  #     input = "java/AndroidManifest.xml"
+  #     output = "$target_gen_dir/AndroidManifest.xml"
+  #   }
+  template("jinja_template") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    assert(defined(invoker.input))
+    assert(defined(invoker.output))
+
+    action(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "visibility",
+                               "deps",
+                             ])
+
+      sources = [
+        invoker.input,
+      ]
+      script = "//build/android/gyp/jinja_template.py"
+      depfile = "$target_gen_dir/$target_name.d"
+
+      outputs = [
+        invoker.output,
+      ]
+
+      args = [
+        "--loader-base-dir",
+        rebase_path("//", root_build_dir),
+        "--inputs",
+        rebase_path(invoker.input, root_build_dir),
+        "--output",
+        rebase_path(invoker.output, root_build_dir),
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+      ]
+      if (defined(invoker.variables)) {
+        variables = invoker.variables
+        args += [ "--variables=${variables}" ]
+      }
+    }
+  }
+
+  # Declare a target for processing Android resources as Jinja templates.
+  #
+  # This takes an Android resource directory where each resource is a Jinja
+  # template, processes each template, then packages the results in a zip file
+  # which can be consumed by an android resources, library, or apk target.
+  #
+  # If this target is included in the deps of an android resources/library/apk,
+  # the resources will be included with that target.
+  #
+  # Variables
+  #   resources: The list of resources files to process.
+  #   res_dir: The resource directory containing the resources.
+  #   variables: (Optional) A list of variables to make available to the template
+  #     processing environment, e.g. ["name=foo", "color=red"].
+  #
+  # Example
+  #   jinja_template_resources("chrome_public_template_resources") {
+  #     res_dir = "res_template"
+  #     resources = ["res_template/xml/syncable.xml"]
+  #     variables = ["color=red"]
+  #   }
+  template("jinja_template_resources") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    assert(defined(invoker.resources))
+    assert(defined(invoker.res_dir))
+
+    _base_path = "$target_gen_dir/$target_name"
+
+    # JUnit tests use resource zip files. These must not be put in gen/
+    # directory or they will not be available to tester bots.
+    _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
+    _resources_zip = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
+    _build_config = _base_path + ".build_config"
+
+    write_build_config("${target_name}__build_config") {
+      build_config = _build_config
+      resources_zip = _resources_zip
+      type = "android_resources"
+      if (defined(invoker.deps)) {
+        possible_config_deps = invoker.deps
+      }
+    }
+
+    action("${target_name}__template") {
+      forward_variables_from(invoker, [ "deps" ])
+      sources = invoker.resources
+      script = "//build/android/gyp/jinja_template.py"
+      depfile = "$target_gen_dir/$target_name.d"
+
+      outputs = [
+        _resources_zip,
+      ]
+
+      rebased_resources = rebase_path(invoker.resources, root_build_dir)
+      args = [
+        "--inputs=${rebased_resources}",
+        "--inputs-base-dir",
+        rebase_path(invoker.res_dir, root_build_dir),
+        "--outputs-zip",
+        rebase_path(_resources_zip, root_build_dir),
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+      ]
+      if (defined(invoker.variables)) {
+        variables = invoker.variables
+        args += [ "--variables=${variables}" ]
+      }
+    }
+
+    group(target_name) {
+      public_deps = [
+        ":${target_name}__build_config",
+        ":${target_name}__template",
+      ]
+    }
+  }
+
+  # Declare an Android resources target
+  #
+  # This creates a resources zip file that will be used when building an Android
+  # library or apk and included into a final apk.
+  #
+  # To include these resources in a library/apk, this target should be listed in
+  # the library's deps. A library/apk will also include any resources used by its
+  # own dependencies.
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. Any Android resources
+  #     listed in deps will be included by libraries/apks that depend on this
+  #     target.
+  #   resource_dirs: List of directories containing resources for this target.
+  #   generated_resource_dirs: List of directories containing resources for this
+  #     target which are *generated* by a dependency. |generated_resource_files|
+  #     must be specified if |generated_resource_dirs| is specified.
+  #   generated_resource_files: List of all files in |generated_resource_dirs|.
+  #     |generated_resource_dirs| must be specified if |generated_resource_files|
+  #     is specified.
+  #   android_manifest: AndroidManifest.xml for this target. Defaults to
+  #     //build/android/AndroidManifest.xml.
+  #   android_manifest_dep: Target that generates AndroidManifest (if applicable)
+  #   custom_package: java package for generated .java files.
+  #   v14_skip: If true, don't run v14 resource generator on this. Defaults to
+  #     false. (see build/android/gyp/generate_v14_compatible_resources.py)
+  #   shared_resources: If true make a resource package that can be loaded by a
+  #     different application at runtime to access the package's resources.
+  #   app_as_shared_lib: If true make a resource package that can be loaded as
+  #     both shared_resources and normal application.
+  #   r_text_file: (optional) path to pre-generated R.txt to be used when
+  #     generating R.java instead of resource-based aapt-generated one.
+
+  # Example:
+  #   android_resources("foo_resources") {
+  #     deps = [":foo_strings_grd"]
+  #     resource_dirs = ["res"]
+  #     custom_package = "org.chromium.foo"
+  #   }
+  #
+  #   android_resources("foo_resources_overrides") {
+  #     deps = [":foo_resources"]
+  #     resource_dirs = ["res_overrides"]
+  #   }
+  template("android_resources") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    assert(defined(invoker.resource_dirs))
+
+    base_path = "$target_gen_dir/$target_name"
+
+    # JUnit tests use resource zip files. These must not be put in gen/
+    # directory or they will not be available to tester bots.
+    _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
+    zip_path = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
+    srcjar_path = base_path + ".srcjar"
+    r_text_out_path = base_path + "_R.txt"
+    build_config = base_path + ".build_config"
+
+    build_config_target_name = "${target_name}__build_config"
+    process_resources_target_name = "${target_name}__process_resources"
+    final_target_name = target_name
+
+    write_build_config(build_config_target_name) {
+      type = "android_resources"
+      forward_variables_from(invoker,
+                             [
+                               "android_manifest",
+                               "custom_package",
+                             ])
+      resource_dirs = []
+      if (defined(invoker.resource_dirs)) {
+        resource_dirs += invoker.resource_dirs
+      }
+      if (defined(invoker.generated_resource_dirs)) {
+        resource_dirs += invoker.generated_resource_dirs
+      }
+
+      if (defined(invoker.deps)) {
+        possible_config_deps = invoker.deps
+      }
+      if (defined(invoker.android_manifest_dep)) {
+        deps = [
+          invoker.android_manifest_dep,
+        ]
+      }
+
+      # No package means resources override their deps.
+      if (defined(custom_package) || defined(android_manifest)) {
+        r_text = r_text_out_path
+      } else {
+        assert(defined(invoker.deps),
+               "Must specify deps when custom_package is omitted.")
+      }
+
+      resources_zip = zip_path
+      srcjar = srcjar_path
+    }
+
+    process_resources(process_resources_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "app_as_shared_lib",
+                               "android_manifest",
+                               "custom_package",
+                               "deps",
+                               "generated_resource_dirs",
+                               "generated_resource_files",
+                               "resource_dirs",
+                               "shared_resources",
+                               "v14_skip",
+                             ])
+      if (!defined(deps)) {
+        deps = []
+      }
+      deps += [ ":$build_config_target_name" ]
+      if (defined(invoker.android_manifest_dep)) {
+        deps += [ invoker.android_manifest_dep ]
+      }
+
+      if (defined(invoker.r_text_file)) {
+        r_text_in_path = invoker.r_text_file
+      }
+
+      # Always generate R.onResourcesLoaded() method, it is required for
+      # compiling ResourceRewriter, there is no side effect because the
+      # generated R.class isn't used in final apk.
+      shared_resources = true
+      if (!defined(android_manifest)) {
+        android_manifest = "//build/android/AndroidManifest.xml"
+      }
+    }
+
+    group(final_target_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      public_deps = [
+        ":${target_name}__process_resources",
+      ]
+    }
+  }
+
+  # Declare an Android assets target.
+  #
+  # Defines a set of files to include as assets in a dependent apk.
+  #
+  # To include these assets in an apk, this target should be listed in
+  # the apk's deps, or in the deps of a library target used by an apk.
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. Any Android assets
+  #     listed in deps will be included by libraries/apks that depend on this
+  #     target.
+  #   sources: List of files to include as assets.
+  #   renaming_sources: List of files to include as assets and be renamed.
+  #   renaming_destinations: List of asset paths for files in renaming_sources.
+  #   disable_compression: Whether to disable compression for files that are
+  #     known to be compressible (default: false).
+  #
+  # Example:
+  # android_assets("content_shell_assets") {
+  #   deps = [
+  #     ":generates_foo",
+  #     ":other_assets",
+  #     ]
+  #   sources = [
+  #     "//path/asset1.png",
+  #     "//path/asset2.png",
+  #     "$target_gen_dir/foo.dat",
+  #   ]
+  # }
+  #
+  # android_assets("overriding_content_shell_assets") {
+  #   deps = [ ":content_shell_assets" ]
+  #   # Override foo.dat from content_shell_assets.
+  #   sources = [ "//custom/foo.dat" ]
+  #   renaming_sources = [ "//path/asset2.png" ]
+  #   renaming_destinations = [ "renamed/asset2.png" ]
+  # }
+  template("android_assets") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    _build_config = "$target_gen_dir/$target_name.build_config"
+    _build_config_target_name = "${target_name}__build_config"
+
+    write_build_config(_build_config_target_name) {
+      type = "android_assets"
+      build_config = _build_config
+
+      forward_variables_from(invoker, [ "disable_compression" ])
+
+      if (defined(invoker.deps)) {
+        possible_config_deps = invoker.deps
+      }
+
+      if (defined(invoker.sources)) {
+        asset_sources = invoker.sources
+      }
+      if (defined(invoker.renaming_sources)) {
+        assert(defined(invoker.renaming_destinations))
+        _source_count = 0
+        foreach(_, invoker.renaming_sources) {
+          _source_count += 1
+        }
+        _dest_count = 0
+        foreach(_, invoker.renaming_destinations) {
+          _dest_count += 1
+        }
+        assert(
+            _source_count == _dest_count,
+            "android_assets() renaming_sources.length != renaming_destinations.length")
+        asset_renaming_sources = invoker.renaming_sources
+        asset_renaming_destinations = invoker.renaming_destinations
+      }
+    }
+
+    group(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "visibility",
+                             ])
+      public_deps = [
+        ":$_build_config_target_name",
+      ]
+    }
+  }
+
+  # Declare a group() that supports forwarding java dependency information.
+  #
+  # Example
+  #  java_group("conditional_deps") {
+  #    if (enable_foo) {
+  #      deps = [":foo_java"]
+  #    }
+  #  }
+  template("java_group") {
+    forward_variables_from(invoker, [ "testonly" ])
+    write_build_config("${target_name}__build_config") {
+      type = "group"
+      build_config = "$target_gen_dir/${invoker.target_name}.build_config"
+      if (defined(invoker.deps)) {
+        possible_config_deps = invoker.deps
+      }
+    }
+    group(target_name) {
+      forward_variables_from(invoker, "*")
+      if (!defined(deps)) {
+        deps = []
+      }
+      deps += [ ":${target_name}__build_config" ]
+    }
+  }
+
+  # Declare a target that generates localized strings.xml from a .grd file.
+  #
+  # If this target is included in the deps of an android resources/library/apk,
+  # the strings.xml will be included with that target.
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target.
+  #   grd_file: Path to the .grd file to generate strings.xml from.
+  #   outputs: Expected grit outputs (see grit rule).
+  #
+  # Example
+  #  java_strings_grd("foo_strings_grd") {
+  #    grd_file = "foo_strings.grd"
+  #  }
+  template("java_strings_grd") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    base_path = "$target_gen_dir/$target_name"
+
+    # JUnit tests use resource zip files. These must not be put in gen/
+    # directory or they will not be available to tester bots.
+    _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
+    resources_zip = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
+    build_config = base_path + ".build_config"
+
+    # NOTE(review): resources_zip and build_config are set in the template
+    # scope (no target-local assignment below) so the nested
+    # write_build_config() can pick them up through GN scoping -- confirm
+    # against write_build_config's definition before renaming either one.
+    write_build_config("${target_name}__build_config") {
+      type = "android_resources"
+    }
+
+    # Put grit files into this subdirectory of target_gen_dir.
+    extra_output_path = target_name + "_grit_output"
+
+    grit_target_name = "${target_name}__grit"
+    grit_output_dir = "$target_gen_dir/$extra_output_path"
+
+    # Run grit over the invoker's .grd file to produce Android strings.xml.
+    grit(grit_target_name) {
+      forward_variables_from(invoker, [ "deps" ])
+      grit_flags = [
+        "-E",
+        "ANDROID_JAVA_TAGGED_ONLY=false",
+      ]
+      output_dir = grit_output_dir
+      # Empty resource_ids presumably opts this grd out of the shared
+      # resource-id mapping file -- verify against the grit template.
+      resource_ids = ""
+      source = invoker.grd_file
+      outputs = invoker.outputs
+    }
+
+    # This needs to get outputs from grit's internal target, not the final
+    # source_set.
+    generate_strings_outputs = get_target_outputs(":${grit_target_name}_grit")
+
+    # Package the grit outputs into the resources zip declared above.
+    zip("${target_name}__zip") {
+      base_dir = grit_output_dir
+      inputs = generate_strings_outputs
+      output = resources_zip
+      deps = [
+        ":$grit_target_name",
+      ]
+    }
+
+    # Public entry point: depending on this target pulls in both the
+    # generated .build_config and the resources zip.
+    group(target_name) {
+      public_deps = [
+        ":${target_name}__build_config",
+        ":${target_name}__zip",
+      ]
+    }
+  }
+
+  # Declare a target that packages pre-generated strings.xml files (e.g. the
+  # output of a grit rule that was run elsewhere); unlike java_strings_grd,
+  # this template does not invoke grit itself.
+  #
+  # If this target is included in the deps of an android resources/library/apk,
+  # the strings.xml will be included with that target.
+  #
+  # Variables
+  #  grit_output_dir: directory containing grit-generated files.
+  #  generated_files: list of android resource files to package.
+  #
+  # Example
+  #  java_strings_grd_prebuilt("foo_strings_grd") {
+  #    grit_output_dir = "$root_gen_dir/foo/grit"
+  #    generated_files = [
+  #      "values/strings.xml"
+  #    ]
+  #  }
+  template("java_strings_grd_prebuilt") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    base_path = "$target_gen_dir/$target_name"
+
+    # JUnit tests use resource zip files. These must not be put in gen/
+    # directory or they will not be available to tester bots.
+    _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
+    resources_zip = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
+    build_config = base_path + ".build_config"
+
+    build_config_target_name = "${target_name}__build_config"
+    zip_target_name = "${target_name}__zip"
+    final_target_name = target_name
+
+    # NOTE(review): build_config and resources_zip above appear to be consumed
+    # by write_build_config()/zip() through GN's enclosing-scope lookup --
+    # confirm before renaming them.
+    write_build_config(build_config_target_name) {
+      type = "android_resources"
+    }
+
+    # Zip the already-generated resource files; paths in generated_files are
+    # interpreted relative to the invoker-supplied grit_output_dir.
+    zip(zip_target_name) {
+      visibility = [ ":$final_target_name" ]
+
+      base_dir = invoker.grit_output_dir
+      inputs = rebase_path(invoker.generated_files, ".", base_dir)
+      output = resources_zip
+      deps = [
+        ":$build_config_target_name",
+      ]
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+    }
+
+    # Public entry point; only forwards visibility, everything else is fixed.
+    group(final_target_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      public_deps = [
+        ":$zip_target_name",
+      ]
+    }
+  }
+
+  # Declare a Java executable target
+  #
+  # This target creates an executable from java code and libraries. The executable
+  # will be in the output folder's /bin/ directory.
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. Java targets in this list
+  #     will be included in the executable (and the javac classpath).
+  #   java_files: List of .java files included in this library.
+  #   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+  #     will be added to java_files and be included in this library.
+  #   srcjars: List of srcjars to be included in this library, together with the
+  #     ones obtained from srcjar_deps.
+  #   bypass_platform_checks: Disables checks about cross-platform (Java/Android)
+  #     dependencies for this target. This will allow depending on an
+  #     android_library target, for example.
+  #   chromium_code: If true, extra analysis warning/errors will be enabled.
+  #   enable_errorprone: If true, enables the errorprone compiler.
+  #   enable_incremental_javac_override: Overrides the
+  #     global enable_incremental_javac.
+  #   main_class: When specified, a wrapper script is created within
+  #     $root_build_dir/bin to launch the binary with the given class as the
+  #     entrypoint.
+  #   wrapper_script_args: List of additional arguments for the wrapper script.
+  #
+  #   data_deps, testonly
+  #
+  # Example
+  #   java_binary("foo") {
+  #     java_files = [ "org/chromium/foo/FooMain.java" ]
+  #     deps = [ ":bar_java" ]
+  #     main_class = "org.chromium.foo.FooMain"
+  #   }
+  template("java_binary") {
+    set_sources_assignment_filter([])
+
+    # Thin wrapper over the shared java_library_impl, pinned to host-Java
+    # settings.  NOTE(review): main_class is also covered by the "*"
+    # forwarding; the explicit assignment additionally makes GN fail with a
+    # clear error here if the invoker omitted main_class.
+    java_library_impl(target_name) {
+      forward_variables_from(invoker, "*")
+      supports_android = false
+      main_class = invoker.main_class
+      is_java_binary = true
+    }
+  }
+
+  # Declare a Junit executable target
+  #
+  # This target creates an executable from java code for running as a junit test
+  # suite. The executable will be in the output folder's /bin/ directory.
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. Java targets in this list
+  #     will be included in the executable (and the javac classpath).
+  #
+  #   java_files: List of .java files included in this library.
+  #   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+  #     will be added to java_files and be included in this library.
+  #   srcjars: List of srcjars to be included in this library, together with the
+  #     ones obtained from srcjar_deps.
+  #
+  #   chromium_code: If true, extra analysis warning/errors will be enabled.
+  #
+  # Example
+  #   junit_binary("foo") {
+  #     java_files = [ "org/chromium/foo/FooTest.java" ]
+  #     deps = [ ":bar_java" ]
+  #   }
+  template("junit_binary") {
+    set_sources_assignment_filter([])
+    # Set at template scope so every target declared below is test-only.
+    testonly = true
+
+    _java_binary_target_name = "${target_name}__java_binary"
+    _test_runner_target_name = "${target_name}__test_runner_script"
+
+    # Generated metadata describing this suite for the downstream tooling.
+    _build_config = "$target_gen_dir/$target_name.build_config"
+    _build_config_target_name = "${target_name}__build_config"
+    write_build_config(_build_config_target_name) {
+      type = "junit_binary"
+      build_config = _build_config
+      if (defined(invoker.deps)) {
+        possible_config_deps = invoker.deps
+      }
+    }
+
+    # Wrapper script used by bots/developers to launch the suite.
+    test_runner_script(_test_runner_target_name) {
+      test_name = invoker.target_name
+      test_suite = invoker.target_name
+      test_type = "junit"
+      ignore_all_data_deps = true
+      forward_variables_from(invoker,
+                             [
+                               "android_manifest_path",
+                               "package_name",
+                             ])
+    }
+
+    # The actual executable jar with Robolectric/JUnit support bundled in.
+    java_binary(_java_binary_target_name) {
+      deps = []
+      output_name = invoker.target_name
+      # NOTE(review): with "*" forwarding GN skips variables already defined
+      # in this scope, so invoker.deps is NOT merged into the deps list below
+      # (only possible_config_deps above sees it) -- confirm this is the
+      # intended classpath mechanism.
+      forward_variables_from(invoker, "*")
+      testonly = true
+      bypass_platform_checks = true
+      main_class = "org.chromium.testing.local.JunitTestMain"
+      wrapper_script_name = "helper/$target_name"
+      deps += [
+        "//testing/android/junit:junit_test_support",
+        "//third_party/junit",
+        "//third_party/mockito:mockito_java",
+        "//third_party/robolectric:robolectric_all_java",
+      ]
+    }
+    # Public entry point tying the three pieces together.
+    group(target_name) {
+      public_deps = [
+        ":$_build_config_target_name",
+        ":$_java_binary_target_name",
+        ":$_test_runner_target_name",
+      ]
+    }
+  }
+
+  # Declare a java library target
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. Java targets in this list
+  #     will be added to the javac classpath.
+  #
+  #   java_files: List of .java files included in this library.
+  #   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+  #     will be added to java_files and be included in this library.
+  #   srcjars: List of srcjars to be included in this library, together with the
+  #     ones obtained from srcjar_deps.
+  #
+  #   input_jars_paths: A list of paths to the jars that should be included
+  #     in the classpath. These are in addition to library .jars that
+  #     appear in deps.
+  #
+  #   chromium_code: If true, extra analysis warning/errors will be enabled.
+  #   enable_errorprone: If true, enables the errorprone compiler.
+  #   enable_incremental_javac_override: Overrides the global
+  #     enable_incremental_javac.
+  #
+  #   jar_excluded_patterns: List of patterns of .class files to exclude from the
+  #     final jar.
+  #
+  #   output_name: File name for the output .jar (not including extension).
+  #     Defaults to the input .jar file name.
+  #
+  #   proguard_configs: List of proguard configs to use in final apk step for
+  #     any apk that depends on this library.
+  #
+  #   supports_android: If true, Android targets (android_library, android_apk)
+  #     may depend on this target. Note: if true, this target must only use the
+  #     subset of Java available on Android.
+  #   bypass_platform_checks: Disables checks about cross-platform (Java/Android)
+  #     dependencies for this target. This will allow depending on an
+  #     android_library target, for example.
+  #
+  #   additional_jar_files: Use to package additional files into the output jar.
+  #     Pass a list of length-2 lists with format
+  #     [ [ path_to_file, path_to_put_in_jar ] ]
+  #
+  #
+  #   data_deps, testonly
+  #
+  # Example
+  #   java_library("foo_java") {
+  #     java_files = [
+  #       "org/chromium/foo/Foo.java",
+  #       "org/chromium/foo/FooInterface.java",
+  #       "org/chromium/foo/FooService.java",
+  #     ]
+  #     deps = [
+  #       ":bar_java"
+  #     ]
+  #     srcjar_deps = [
+  #       ":foo_generated_enum"
+  #     ]
+  #     jar_excluded_patterns = [
+  #       "*/FooService.class", "*/FooService\$*.class"
+  #     ]
+  #   }
+  template("java_library") {
+    set_sources_assignment_filter([])
+    # Thin wrapper: forwards every invoker variable straight through to the
+    # shared java_library_impl (see android_library for the variant that
+    # enables supports_android/requires_android).
+    java_library_impl(target_name) {
+      forward_variables_from(invoker, "*")
+    }
+  }
+
+  # Declare a java library target for a prebuilt jar
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. Java targets in this list
+  #     will be added to the javac classpath.
+  #   jar_path: Path to the prebuilt jar.
+  #   jar_dep: Target that builds jar_path (optional).
+  #   main_class: When specified, a wrapper script is created within
+  #     $root_build_dir/bin to launch the binary with the given class as the
+  #     entrypoint.
+  #   output_name: File name for the output .jar (not including extension).
+  #     Defaults to the input .jar file name.
+  #   proguard_configs: List of proguard configs to use in final apk step for
+  #     any apk that depends on this library.
+  #   supports_android: If true, Android targets (android_library, android_apk)
+  #     may depend on this target. Note: if true, this target must only use the
+  #     subset of Java available on Android.
+  #
+  # Example
+  #   java_prebuilt("foo_java") {
+  #     jar_path = "foo.jar"
+  #     deps = [
+  #       ":foo_resources",
+  #       ":bar_java"
+  #     ]
+  #   }
+  template("java_prebuilt") {
+    set_sources_assignment_filter([])
+    # Thin wrapper: forwards every invoker variable to java_prebuilt_impl
+    # (see android_java_prebuilt for the Android-enabled variant).
+    java_prebuilt_impl(target_name) {
+      forward_variables_from(invoker, "*")
+    }
+  }
+
+  # Combines all dependent .jar files into a single .jar file.
+  #
+  # Variables:
+  #   output: Path to the output jar.
+  #   override_build_config: Use a pre-existing .build_config. Must be of type
+  #     "apk".
+  #   use_interface_jars: Use all dependent interface .jars rather than
+  #     implementation .jars.
+  #   direct_deps_only: Do not recurse on deps.
+  #   data, deps, testonly, visibility: Usual meaning.
+  #
+  # Example
+  #   dist_jar("lib_fatjar") {
+  #     deps = [ ":my_java_lib" ]
+  #   }
+  template("dist_jar") {
+    # Use the caller-supplied .build_config when one is given; otherwise
+    # generate a "dist_jar"-typed one for this target.
+    if (defined(invoker.override_build_config)) {
+      _build_config = invoker.override_build_config
+    } else {
+      _build_config = "$target_gen_dir/$target_name.build_config"
+      _build_config_target_name = "${target_name}__build_config"
+
+      write_build_config(_build_config_target_name) {
+        forward_variables_from(invoker, [ "testonly" ])
+        type = "dist_jar"
+        if (defined(invoker.deps)) {
+          possible_config_deps = invoker.deps
+        }
+        build_config = _build_config
+      }
+    }
+
+    # Merge every dependent .jar into a single output jar via the
+    # create_dist_jar.py script; the exact input set is selected below from
+    # the .build_config based on direct_deps_only/use_interface_jars.
+    action(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "data",
+                               "deps",
+                               "testonly",
+                               "visibility",
+                             ])
+      script = "//build/android/gyp/create_dist_jar.py"
+      depfile = "$target_gen_dir/$target_name.d"
+
+      inputs = [
+        _build_config,
+      ]
+
+      outputs = [
+        invoker.output,
+      ]
+
+      if (defined(_build_config_target_name)) {
+        # forward_variables_from only defines |deps| when the invoker did, so
+        # guard against appending to an undefined variable (GN errors on "+="
+        # of an undefined identifier when no deps were passed).
+        if (!defined(deps)) {
+          deps = []
+        }
+        deps += [ ":$_build_config_target_name" ]
+      }
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--output",
+        rebase_path(invoker.output, root_build_dir),
+      ]
+
+      _rebased_build_config = rebase_path(_build_config, root_build_dir)
+      if (defined(invoker.direct_deps_only) && invoker.direct_deps_only) {
+        if (defined(invoker.use_interface_jars) && invoker.use_interface_jars) {
+          args += [ "--inputs=@FileArg($_rebased_build_config:javac:interface_classpath)" ]
+        } else {
+          args +=
+              [ "--inputs=@FileArg($_rebased_build_config:javac:classpath)" ]
+        }
+      } else {
+        if (defined(invoker.use_interface_jars) && invoker.use_interface_jars) {
+          args += [ "--inputs=@FileArg($_rebased_build_config:dist_jar:all_interface_jars)" ]
+        } else {
+          args += [ "--inputs=@FileArg($_rebased_build_config:dist_jar:dependency_jars)" ]
+        }
+      }
+    }
+  }
+
+  # Declare an Android library target
+  #
+  # This target creates an Android library containing java code and Android
+  # resources.
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. Java targets in this list
+  #     will be added to the javac classpath. Android resources in dependencies
+  #     will be used when building this library.
+  #
+  #   java_files: List of .java files included in this library.
+  #   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+  #     will be added to java_files and be included in this library.
+  #   srcjars: List of srcjars to be included in this library, together with the
+  #     ones obtained from srcjar_deps.
+  #
+  #   input_jars_paths: A list of paths to the jars that should be included
+  #     in the classpath. These are in addition to library .jars that
+  #     appear in deps.
+  #
+  #   chromium_code: If true, extra analysis warning/errors will be enabled.
+  #   enable_errorprone: If true, enables the errorprone compiler.
+  #   enable_incremental_javac_override: Overrides the global
+  #     enable_incremental_javac.
+  #
+  #   jar_excluded_patterns: List of patterns of .class files to exclude from the
+  #     final jar.
+  #
+  #   proguard_configs: List of proguard configs to use in final apk step for
+  #     any apk that depends on this library.
+  #
+  #   output_name: File name for the output .jar (not including extension).
+  #     Defaults to the input .jar file name.
+  #   dex_path: If set, the resulting .dex.jar file will be placed under this
+  #     path.
+  #
+  #   alternative_android_sdk_ijar: if set, the given android_sdk_ijar file
+  #     replaces the default android_sdk_ijar.
+  #
+  #   alternative_android_sdk_ijar_dep: the target that generates
+  #      alternative_android_sdk_ijar, must be set if alternative_android_sdk_ijar
+  #      is used.
+  #
+  #   emma_never_instrument: Disables EMMA Java code coverage for this target.
+  #
+  # Example
+  #   android_library("foo_java") {
+  #     java_files = [
+  #       "android/org/chromium/foo/Foo.java",
+  #       "android/org/chromium/foo/FooInterface.java",
+  #       "android/org/chromium/foo/FooService.java",
+  #     ]
+  #     deps = [
+  #       ":bar_java"
+  #     ]
+  #     srcjar_deps = [
+  #       ":foo_generated_enum"
+  #     ]
+  #     jar_excluded_patterns = [
+  #       "*/FooService.class", "*/FooService\$*.class"
+  #     ]
+  #   }
+  template("android_library") {
+    set_sources_assignment_filter([])
+    # The output jar path is an implementation detail of java_library_impl;
+    # callers may not override it for Android libraries.
+    assert(!defined(invoker.jar_path),
+           "android_library does not support a custom jar path")
+
+    # An alternative SDK ijar is only usable if the target producing it is
+    # also supplied, so the build can depend on it.
+    if (defined(invoker.alternative_android_sdk_ijar)) {
+      assert(defined(invoker.alternative_android_sdk_ijar_dep))
+    }
+
+    java_library_impl(target_name) {
+      forward_variables_from(invoker, "*")
+
+      supports_android = true
+      requires_android = true
+
+      if (!defined(jar_excluded_patterns)) {
+        jar_excluded_patterns = []
+      }
+      # Strip R and Manifest classes from the library jar -- presumably
+      # because they are regenerated per-APK at package time; keeping them
+      # here would duplicate/conflict. TODO(review): confirm.
+      jar_excluded_patterns += [
+        "*/R.class",
+        "*/R\$*.class",
+        "*/Manifest.class",
+        "*/Manifest\$*.class",
+      ]
+    }
+  }
+
+  # Declare a target that packages a set of Java dependencies into a standalone
+  # .dex.jar.
+  #
+  # Variables
+  #   deps: specifies the dependencies of this target. Android libraries in deps
+  #     will be packaged into the resulting .dex.jar file.
+  #   dex_path: location at which the output file will be put
+  template("android_standalone_library") {
+    set_sources_assignment_filter([])
+    # Delegates entirely to deps_dex, which packages the dexed closure of
+    # |deps| (minus |excluded_jars|) into the file named by |dex_path|.
+    deps_dex(target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "deps",
+                               "dex_path",
+                               "excluded_jars",
+                             ])
+    }
+  }
+
+  # Declare an Android library target for a prebuilt jar
+  #
+  # This target creates an Android library containing java code and Android
+  # resources.
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. Java targets in this list
+  #     will be added to the javac classpath. Android resources in dependencies
+  #     will be used when building this library.
+  #   jar_path: Path to the prebuilt jar.
+  #   output_name: File name for the output .jar (not including extension).
+  #     Defaults to the input .jar file name.
+  #   proguard_configs: List of proguard configs to use in final apk step for
+  #     any apk that depends on this library.
+  #
+  # Example
+  #   android_java_prebuilt("foo_java") {
+  #     jar_path = "foo.jar"
+  #     deps = [
+  #       ":foo_resources",
+  #       ":bar_java"
+  #     ]
+  #   }
+  template("android_java_prebuilt") {
+    set_sources_assignment_filter([])
+    # Android-enabled variant of java_prebuilt: marks the jar as both usable
+    # on and requiring Android, and strips resource (R) classes from the
+    # prebuilt so they do not clash with the per-APK generated ones.
+    java_prebuilt_impl(target_name) {
+      forward_variables_from(invoker, "*")
+      supports_android = true
+      requires_android = true
+      strip_resource_classes = true
+    }
+  }
+
+  # Declare an Android apk target
+  #
+  # This target creates an Android APK containing java code, resources, assets,
+  # and (possibly) native libraries.
+  #
+  # Variables
+  #   alternative_android_sdk_jar: The alternative android sdk jar used in
+  #     proguard.
+  #   android_aapt_path: Android aapt tool to replace default one to build
+  #     resource.
+  #   android_manifest: Path to AndroidManifest.xml.
+  #   android_manifest_dep: Target that generates AndroidManifest (if applicable)
+  #   chromium_code: If true, extra analysis warning/errors will be enabled.
+  #   dist_ijar_path: Path to create "${target_name}_dist_ijar" target
+  #     (used by instrumentation_test_apk).
+  #   data_deps: List of dependencies needed at runtime. These will be built but
+  #     won't change the generated .apk in any way (in fact they may be built
+  #     after the .apk is).
+  #   deps: List of dependencies. All Android java resources and libraries in the
+  #     "transitive closure" of these dependencies will be included in the apk.
+  #     Note: this "transitive closure" actually only includes such targets if
+  #     they are depended on through android_library or android_resources targets
+  #     (and so not through builtin targets like 'action', 'group', etc).
+  #   install_script_name: Name of wrapper script (default=target_name).
+  #   java_files: List of .java files to include in the apk.
+  #   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+  #      will be added to java_files and be included in this apk.
+  #   apk_name: Name for final apk.
+  #   final_apk_path: Path to final built apk. Default is
+  #     $root_out_dir/apks/$apk_name.apk. Setting this will override apk_name.
+  #   loadable_modules: List of paths to native libraries to include. Different
+  #     from |shared_libraries| in that:
+  #       * dependencies of this .so are not automatically included
+  #       * ".cr.so" is never added
+  #       * they are not side-loaded for _incremental targets.
+  #       * load_library_from_apk, use_chromium_linker,
+  #         and enable_relocation_packing do not apply
+  #     Use this instead of shared_libraries when you are going to load the library
+  #     conditionally, and only when shared_libraries doesn't work for you.
+  #   shared_libraries: List shared_library targets to bundle. If these
+  #     libraries depend on other shared_library targets, those dependencies will
+  #     also be included in the apk (e.g. for is_component_build).
+  #   secondary_abi_shared_libraries: secondary abi shared_library targets to
+  #     bundle. If these libraries depend on other shared_library targets, those
+  #     dependencies will also be included in the apk (e.g. for is_component_build).
+  #   native_lib_placeholders: List of placeholder filenames to add to the apk
+  #     (optional).
+  #   apk_under_test: For an instrumentation test apk, this is the target of the
+  #     tested apk.
+  #   include_all_resources - If true include all resource IDs in all generated
+  #     R.java files.
+  #   testonly: Marks this target as "test-only".
+  #   write_asset_list: Adds an extra file to the assets, which contains a list of
+  #     all other asset files.
+  #   generate_buildconfig_java: If defined and false, skip generating the
+  #     BuildConfig java class describing the build configuration. The default
+  #     is true for non-test APKs.
+  #   requires_sdk_api_level_23: If defined and true, the apk is intended for
+  #     installation only on Android M or later. In these releases the system
+  #     linker does relocation unpacking, so we can enable it unconditionally.
+  #   secondary_native_libs (deprecated): The path of native libraries for secondary
+  #     app abi.
+  #   run_findbugs_override: Forces run_findbugs on or off. If undefined, the
+  #     default will use the build arg run_findbugs.
+  #   proguard_jar_path: The path to proguard.jar you wish to use. If undefined,
+  #     the proguard used will be the checked in one in //third_party/proguard.
+  #   never_incremental: If true, |incremental_apk_by_default| will be ignored.
+  #   aapt_locale_whitelist: If set, all locales not in this list will be
+  #     stripped from resources.arsc.
+  #
+  # Example
+  #   android_apk("foo_apk") {
+  #     android_manifest = "AndroidManifest.xml"
+  #     java_files = [
+  #       "android/org/chromium/foo/FooApplication.java",
+  #       "android/org/chromium/foo/FooActivity.java",
+  #     ]
+  #     deps = [
+  #       ":foo_support_java"
+  #       ":foo_resources"
+  #     ]
+  #     srcjar_deps = [
+  #       ":foo_generated_enum"
+  #     ]
+  #     shared_libraries = [
+  #       ":my_shared_lib",
+  #     ]
+  #   }
+  template("android_apk") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    assert(defined(invoker.final_apk_path) || defined(invoker.apk_name))
+    assert(defined(invoker.android_manifest))
+    gen_dir = "$target_gen_dir/$target_name"
+    base_path = "$gen_dir/$target_name"
+    _build_config = "$target_gen_dir/$target_name.build_config"
+
+    # JUnit tests use resource zip files. These must not be put in gen/
+    # directory or they will not be available to tester bots.
+    _resources_zip_rebased_path = rebase_path(target_gen_dir, root_gen_dir)
+    resources_zip_path = "${root_out_dir}/resource_zips/${_resources_zip_rebased_path}/${target_name}.resources.zip"
+    _all_resources_zip_path = "$base_path.resources.all.zip"
+    _jar_path = "$base_path.jar"
+    _lib_dex_path = "$base_path.dex.jar"
+    _rebased_lib_dex_path = rebase_path(_lib_dex_path, root_build_dir)
+    _template_name = target_name
+    if (defined(invoker.java_files)) {
+      _java_sources_file = "$base_path.sources"
+    }
+
+    enable_multidex =
+        defined(invoker.enable_multidex) && invoker.enable_multidex
+    if (enable_multidex) {
+      final_dex_path = "$gen_dir/classes.dex.zip"
+    } else {
+      final_dex_path = "$gen_dir/classes.dex"
+    }
+    final_dex_target_name = "${_template_name}__final_dex"
+
+    _final_apk_path = ""
+    if (defined(invoker.final_apk_path)) {
+      _final_apk_path = invoker.final_apk_path
+    } else if (defined(invoker.apk_name)) {
+      _final_apk_path = "$root_build_dir/apks/" + invoker.apk_name + ".apk"
+    }
+    _final_apk_path_no_ext_list =
+        process_file_template([ _final_apk_path ],
+                              "{{source_dir}}/{{source_name_part}}")
+    _final_apk_path_no_ext = _final_apk_path_no_ext_list[0]
+    assert(_final_apk_path_no_ext != "")  # Mark as used.
+
+    _install_script_name = "install_$_template_name"
+    if (defined(invoker.install_script_name)) {
+      _install_script_name = invoker.install_script_name
+    }
+    _incremental_install_script_path =
+        "${root_out_dir}/bin/${_install_script_name}"
+    if (!incremental_apk_by_default) {
+      _incremental_install_script_path =
+          "${_incremental_install_script_path}_incremental"
+    }
+
+    _version_code = android_default_version_code
+    if (defined(invoker.version_code)) {
+      _version_code = invoker.version_code
+    }
+
+    _version_name = android_default_version_name
+    if (defined(invoker.version_name)) {
+      _version_name = invoker.version_name
+    }
+    _keystore_path = android_keystore_path
+    _keystore_name = android_keystore_name
+    _keystore_password = android_keystore_password
+
+    if (defined(invoker.keystore_path)) {
+      _keystore_path = invoker.keystore_path
+      _keystore_name = invoker.keystore_name
+      _keystore_password = invoker.keystore_password
+    }
+
+    _srcjar_deps = []
+    if (defined(invoker.srcjar_deps)) {
+      _srcjar_deps += invoker.srcjar_deps
+    }
+
+    _use_chromium_linker =
+        defined(invoker.use_chromium_linker) && invoker.use_chromium_linker
+    _enable_relocation_packing = defined(invoker.enable_relocation_packing) &&
+                                 invoker.enable_relocation_packing
+    _load_library_from_apk =
+        defined(invoker.load_library_from_apk) && invoker.load_library_from_apk
+    _requires_sdk_api_level_23 = defined(invoker.requires_sdk_api_level_23) &&
+                                 invoker.requires_sdk_api_level_23
+
+    assert(_use_chromium_linker || true)  # Mark as used.
+    assert(_requires_sdk_api_level_23 || true)
+    if (_enable_relocation_packing) {
+      assert(_use_chromium_linker || _requires_sdk_api_level_23,
+             "enable_relocation_packing requires either use_chromium_linker " +
+                 "or requires_sdk_api_level_23")
+    }
+    if (_load_library_from_apk) {
+      assert(_use_chromium_linker || _requires_sdk_api_level_23,
+             "load_library_from_apk requires use_chromium_linker " +
+                 "or requires_sdk_api_level_23")
+    }
+
+    # The dependency that makes the chromium linker, if any is needed.
+    _native_libs_deps = []
+    _shared_libraries_is_valid =
+        defined(invoker.shared_libraries) && invoker.shared_libraries != []
+    _secondary_abi_native_libs_deps = []
+    assert(_secondary_abi_native_libs_deps == [])  # mark as used.
+    _secondary_abi_shared_libraries_is_valid =
+        defined(invoker.secondary_abi_shared_libraries) &&
+        invoker.secondary_abi_shared_libraries != []
+
+    if (is_component_build || is_asan) {
+      if (_shared_libraries_is_valid) {
+        _native_libs_deps += [ "//build/android:cpplib_stripped" ]
+      }
+      if (_secondary_abi_shared_libraries_is_valid) {
+        _secondary_abi_native_libs_deps += [ "//build/android:cpplib_stripped($android_secondary_abi_toolchain)" ]
+      }
+    }
+
+    if (_shared_libraries_is_valid) {
+      _native_libs_deps += invoker.shared_libraries
+
+      # To determine the filenames of all dependent shared libraries, write the
+      # runtime deps of |shared_libraries| to a file during "gn gen".
+      # write_build_config.py will then grep this file for *.so to obtain the
+      # complete list.
+      _runtime_deps_file =
+          "$target_gen_dir/${_template_name}.native.runtimedeps"
+      group("${_template_name}__runtime_deps") {
+        deps = _native_libs_deps
+        write_runtime_deps = _runtime_deps_file
+      }
+
+      _native_lib_version_rule = ""
+      if (defined(invoker.native_lib_version_rule)) {
+        _native_lib_version_rule = invoker.native_lib_version_rule
+      }
+      _native_lib_version_arg = "\"\""
+      if (defined(invoker.native_lib_version_arg)) {
+        _native_lib_version_arg = invoker.native_lib_version_arg
+      }
+    }
+
+    if (_secondary_abi_shared_libraries_is_valid) {
+      _secondary_abi_native_libs_deps += invoker.secondary_abi_shared_libraries
+
+      # To determine the filenames of all dependent shared libraries, write the
+      # runtime deps of |shared_libraries| to a file during "gn gen".
+      # write_build_config.py will then grep this file for *.so to obtain the
+      # complete list.
+      _secondary_abi_runtime_deps_file =
+          "$target_gen_dir/${_template_name}.secondary.abi.native.runtimedeps"
+      group("${_template_name}_secondary_abi__runtime_deps") {
+        deps = _secondary_abi_native_libs_deps
+        write_runtime_deps = _secondary_abi_runtime_deps_file
+      }
+    }
+
+    if (defined(invoker.deps)) {
+      set_sources_assignment_filter([ "*manifest*" ])
+      sources = invoker.deps
+      set_sources_assignment_filter([])
+      if (sources != invoker.deps) {
+        _bad_deps = invoker.deps - sources
+        assert(
+            false,
+            "Possible manifest-generating dep found in deps. Use android_manifest_dep for this instead. Found: $_bad_deps")
+      }
+      sources = []
+    }
+    _android_manifest_deps = []
+    if (defined(invoker.android_manifest_dep)) {
+      _android_manifest_deps = [ invoker.android_manifest_dep ]
+    }
+    _android_manifest = invoker.android_manifest
+
+    _rebased_build_config = rebase_path(_build_config, root_build_dir)
+    _create_abi_split =
+        defined(invoker.create_abi_split) && invoker.create_abi_split
+    _create_density_splits =
+        defined(invoker.create_density_splits) && invoker.create_density_splits
+    _create_language_splits =
+        defined(invoker.language_splits) && invoker.language_splits != []
+    _generate_buildconfig_java = !defined(invoker.apk_under_test)
+    if (defined(invoker.generate_buildconfig_java)) {
+      _generate_buildconfig_java = invoker.generate_buildconfig_java
+    }
+
+    # Help GN understand that _create_abi_split is not unused (bug in GN).
+    assert(_create_abi_split || true)
+
+    _proguard_enabled =
+        defined(invoker.proguard_enabled) && invoker.proguard_enabled
+    if (_proguard_enabled) {
+      _proguard_output_jar_path = "$base_path.proguard.jar"
+    }
+
+    _emma_never_instrument = defined(invoker.testonly) && invoker.testonly
+    _incremental_allowed =
+        !(defined(invoker.never_incremental) && invoker.never_incremental)
+
+    build_config_target = "${_template_name}__build_config"
+
+    # Write the .build_config file describing this apk (jar/dex outputs,
+    # final apk path, manifest, resources zip, runtime deps) for the other
+    # per-apk sub-targets and build scripts to consume.
+    write_build_config(build_config_target) {
+      forward_variables_from(invoker, [ "apk_under_test" ])
+      type = "android_apk"
+      jar_path = _jar_path
+      dex_path = final_dex_path
+      apk_path = _final_apk_path
+      incremental_allowed = _incremental_allowed
+      incremental_apk_path = "${_final_apk_path_no_ext}_incremental.apk"
+      incremental_install_script_path = _incremental_install_script_path
+      resources_zip = resources_zip_path
+      build_config = _build_config
+      android_manifest = _android_manifest
+
+      if (defined(_java_sources_file)) {
+        java_sources_file = _java_sources_file
+      }
+
+      deps = _android_manifest_deps
+
+      if (defined(invoker.deps)) {
+        possible_config_deps = invoker.deps
+      }
+
+      # Add emma to the target's classpath via its .build_config.
+      if (emma_coverage && !_emma_never_instrument) {
+        # possible_config_deps is only assigned above when invoker.deps is
+        # defined; guard against "+=" on an undefined identifier, which is an
+        # error in GN.
+        if (!defined(possible_config_deps)) {
+          possible_config_deps = []
+        }
+        possible_config_deps +=
+            [ "//third_party/android_tools:emma_device_java" ]
+      }
+
+      proguard_enabled = _proguard_enabled
+      if (_proguard_enabled) {
+        proguard_info = "$_proguard_output_jar_path.info"
+      }
+
+      # Don't depend on the runtime_deps target in order to avoid having to
+      # build the native libraries just to create the .build_config file.
+      # The dep is unnecessary since the runtime_deps file is created by gn gen
+      # and the runtime_deps file is added to write_build_config.py's depfile.
+      if (_native_libs_deps != []) {
+        shared_libraries_runtime_deps_file = _runtime_deps_file
+      }
+      if (_secondary_abi_native_libs_deps != []) {
+        secondary_abi_shared_libraries_runtime_deps_file =
+            _secondary_abi_runtime_deps_file
+      }
+    }
+
+    _final_deps = []
+
+    if (enable_multidex) {
+      _generated_proguard_main_dex_config =
+          "$base_path.resources.main-dex-proguard.txt"
+    }
+    _generated_proguard_config = "$base_path.resources.proguard.txt"
+
+    # Compile the apk's Android resources, producing the resources zip, the
+    # generated R.java srcjar, and aapt-generated proguard configs.
+    process_resources_target = "${_template_name}__process_resources"
+    process_resources(process_resources_target) {
+      forward_variables_from(invoker,
+                             [
+                               "alternative_android_sdk_jar",
+                               "android_aapt_path",
+                               "app_as_shared_lib",
+                               "include_all_resources",
+                               "shared_resources",
+                             ])
+      srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+      r_text_out_path = "${target_gen_dir}/${target_name}_R.txt"
+      android_manifest = _android_manifest
+      resource_dirs = [ "//build/android/ant/empty/res" ]
+      zip_path = resources_zip_path
+      all_resources_zip_path = _all_resources_zip_path
+      generate_constant_ids = true
+      proguard_file = _generated_proguard_config
+      if (enable_multidex) {
+        proguard_file_main_dex = _generated_proguard_main_dex_config
+      }
+
+      build_config = _build_config
+      deps = _android_manifest_deps + [ ":$build_config_target" ]
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+    }
+    # The generated R.java srcjar feeds into the apk's java compilation.
+    _srcjar_deps += [ ":$process_resources_target" ]
+
+    if (_native_libs_deps != []) {
+      _enable_chromium_linker_tests = false
+      if (defined(invoker.enable_chromium_linker_tests)) {
+        _enable_chromium_linker_tests = invoker.enable_chromium_linker_tests
+      }
+      # JSON file listing the apk's shared libraries in load order. The file
+      # name is only referenced through this variable (outputs/inputs below),
+      # so correcting the previous "libararies" misspelling is safe.
+      _ordered_libraries_json =
+          "$target_gen_dir/$target_name.ordered_libraries.json"
+      _rebased_ordered_libraries_json =
+          rebase_path(_ordered_libraries_json, root_build_dir)
+      _ordered_libraries_target = "${_template_name}__write_ordered_libraries"
+
+      # TODO(agrieve): Make GN write runtime deps in dependency order so as to
+      # not need this manual sorting step.
+      action(_ordered_libraries_target) {
+        script = "//build/android/gyp/write_ordered_libraries.py"
+        deps = _native_libs_deps + [ ":$build_config_target" ]
+        outputs = [
+          _ordered_libraries_json,
+        ]
+        _rebased_android_readelf = rebase_path(android_readelf, root_build_dir)
+        args = [
+          "--readelf=$_rebased_android_readelf",
+          "--output=$_rebased_ordered_libraries_json",
+          "--libraries-dir=.",
+          "--input-libraries=@FileArg($_rebased_build_config:native:libraries)",
+        ]
+      }
+
+      # Generate NativeLibraries.java from the sorted library list so the
+      # java-side library loader knows what to load and how.
+      java_cpp_template("${_template_name}__native_libraries_java") {
+        package_path = "org/chromium/base/library_loader"
+        sources = [
+          "//base/android/java/templates/NativeLibraries.template",
+        ]
+        inputs = [
+          _ordered_libraries_json,
+        ]
+        deps = [
+          ":${_ordered_libraries_target}",
+        ]
+        if (_native_lib_version_rule != "") {
+          deps += [ _native_lib_version_rule ]
+        }
+
+        defines = [
+          "NATIVE_LIBRARIES_LIST=" +
+              "@FileArg($_rebased_ordered_libraries_json:java_libraries_list)",
+          "NATIVE_LIBRARIES_VERSION_NUMBER=$_native_lib_version_arg",
+        ]
+        if (_use_chromium_linker) {
+          defines += [ "ENABLE_CHROMIUM_LINKER" ]
+        }
+        if (_load_library_from_apk) {
+          defines += [ "ENABLE_CHROMIUM_LINKER_LIBRARY_IN_ZIP_FILE" ]
+        }
+        if (_enable_chromium_linker_tests) {
+          defines += [ "ENABLE_CHROMIUM_LINKER_TESTS" ]
+        }
+      }
+      _srcjar_deps += [ ":${_template_name}__native_libraries_java" ]
+    }
+
+    # Generate BuildConfig.java carrying build-time flags (multidex, dchecks,
+    # locale lists) into java code; skipped when the apk under test already
+    # provides it (see _generate_buildconfig_java above).
+    if (_generate_buildconfig_java) {
+      java_cpp_template("${_template_name}__build_config_java") {
+        package_path = "org/chromium/base"
+        sources = [
+          "//base/android/java/templates/BuildConfig.template",
+        ]
+        deps = [
+          ":$build_config_target",
+        ]
+
+        defines = []
+        if (enable_multidex) {
+          defines += [ "ENABLE_MULTIDEX" ]
+        }
+        if (is_java_debug || dcheck_always_on) {
+          defines += [ "_DCHECK_IS_ON" ]
+        }
+        defines += [
+          "COMPRESSED_LOCALE_LIST=" +
+              "@FileArg($_rebased_build_config:compressed_locales_java_list)",
+          "UNCOMPRESSED_LOCALE_LIST=" +
+              "@FileArg($_rebased_build_config:uncompressed_locales_java_list)",
+        ]
+      }
+      _srcjar_deps += [ ":${_template_name}__build_config_java" ]
+    }
+
+    # Compile the apk's own java sources (plus all generated srcjars above)
+    # into its .jar and per-library .dex file.
+    java_target = "${_template_name}__java"
+    java_library_impl(java_target) {
+      forward_variables_from(invoker,
+                             [
+                               "chromium_code",
+                               "java_files",
+                               "run_findbugs_override",
+                             ])
+      supports_android = true
+      requires_android = true
+      override_build_config = _build_config
+      deps = _android_manifest_deps + [ ":$build_config_target" ]
+
+      android_manifest = _android_manifest
+      srcjar_deps = _srcjar_deps
+      jar_path = _jar_path
+      dex_path = _lib_dex_path
+      emma_never_instrument = _emma_never_instrument
+      if (defined(_java_sources_file)) {
+        java_sources_file = _java_sources_file
+      }
+
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+      if (defined(invoker.apk_under_test)) {
+        deps += [ "${invoker.apk_under_test}__java" ]
+      }
+      if (emma_coverage && !_emma_never_instrument) {
+        deps += [ "//third_party/android_tools:emma_device_java" ]
+      }
+    }
+
+    # TODO(cjhopman): This is only ever needed to calculate the list of tests to
+    # run. See build/android/pylib/instrumentation/test_jar.py. We should be
+    # able to just do that calculation at build time instead.
+    if (defined(invoker.dist_ijar_path)) {
+      # Interface-only jar used by the instrumentation test runner to
+      # enumerate tests.
+      _dist_ijar_path = invoker.dist_ijar_path
+      dist_jar("${_template_name}_dist_ijar") {
+        override_build_config = _build_config
+        output = _dist_ijar_path
+        data = [
+          _dist_ijar_path,
+        ]
+        use_interface_jars = true
+        deps = [
+          ":$build_config_target",
+          ":$java_target",
+        ]
+      }
+    }
+
+    # When proguard is enabled, run it over the compiled jar and dex the
+    # proguarded output; otherwise dex the plain jar (multidex) or reuse the
+    # per-library dex. Either way, _dex_sources/_dex_deps feed the dex step
+    # below.
+    if (_proguard_enabled) {
+      _proguard_configs = [ _generated_proguard_config ]
+      if (defined(invoker.proguard_configs)) {
+        _proguard_configs += invoker.proguard_configs
+      }
+      if (enable_multidex) {
+        _proguard_configs += [ "//build/android/multidex.flags" ]
+      }
+      assert(_proguard_configs != [])  # Mark as used.
+      _proguard_target = "${_template_name}__proguard"
+      proguard(_proguard_target) {
+        forward_variables_from(invoker,
+                               [
+                                 "alternative_android_sdk_jar",
+                                 "deps",
+                                 "proguard_jar_path",
+                               ])
+        if (!defined(deps)) {
+          deps = []
+        }
+        deps += [
+          ":$build_config_target",
+          ":$java_target",
+          ":$process_resources_target",
+        ]
+        inputs = [
+                   _build_config,
+                   _jar_path,
+                 ] + _proguard_configs
+
+        output_jar_path = _proguard_output_jar_path
+        _rebased_proguard_configs =
+            rebase_path(_proguard_configs, root_build_dir)
+        args = [
+          "--proguard-configs=$_rebased_proguard_configs",
+          "--proguard-configs=@FileArg($_rebased_build_config:proguard:lib_configs)",
+          "--input-paths=@FileArg($_rebased_build_config:proguard:input_paths)",
+          "--classpath=@FileArg($_rebased_build_config:proguard:lib_paths)",
+        ]
+        # For instrumentation apks, feed in the apk under test's proguard
+        # info so shared code is kept consistent between the two apks.
+        if (defined(invoker.apk_under_test)) {
+          deps += [
+            "${invoker.apk_under_test}__build_config",
+            "${invoker.apk_under_test}__proguard",
+          ]
+          _apk_under_test_build_config =
+              get_label_info(invoker.apk_under_test, "target_gen_dir") + "/" +
+              get_label_info(invoker.apk_under_test, "name") + ".build_config"
+          _rebased_apk_under_test_build_config =
+              rebase_path(_apk_under_test_build_config, root_build_dir)
+          args += [ "--tested-apk-info=@FileArg($_rebased_apk_under_test_build_config:deps_info:proguard_info)" ]
+        }
+      }
+      _dex_sources = [ _proguard_output_jar_path ]
+      _dex_deps = [ ":$_proguard_target" ]
+
+      # Make the proguard .mapping file available next to the final apk.
+      _copy_proguard_mapping_target = "${_template_name}__copy_proguard_mapping"
+      copy(_copy_proguard_mapping_target) {
+        sources = [
+          "$_proguard_output_jar_path.mapping",
+        ]
+        outputs = [
+          "$_final_apk_path.mapping",
+        ]
+        deps = [
+          ":$_proguard_target",
+        ]
+      }
+    } else {
+      if (enable_multidex) {
+        _dex_sources = [ _jar_path ]
+      } else {
+        _dex_sources = [ _lib_dex_path ]
+      }
+      _dex_deps = [ ":$java_target" ]
+    }
+
+    # Produce the apk's final classes.dex from _dex_sources (set above by the
+    # proguard/non-proguard branches).
+    dex("$final_dex_target_name") {
+      deps = _dex_deps + [ ":$build_config_target" ]
+      inputs = [
+        _build_config,
+      ]
+      sources = _dex_sources
+      output = final_dex_path
+
+      # All deps are already included in _dex_sources when proguard is used.
+      if (!_proguard_enabled) {
+        if (enable_multidex) {
+          _dex_arg_key = "${_rebased_build_config}:dist_jar:dependency_jars"
+          extra_main_dex_proguard_config = _generated_proguard_main_dex_config
+          deps += [ ":$process_resources_target" ]
+        } else {
+          _dex_arg_key =
+              "${_rebased_build_config}:final_dex:dependency_dex_files"
+        }
+        args = [ "--inputs=@FileArg($_dex_arg_key)" ]
+      }
+    }
+
+    # By default the native library lists come straight out of the
+    # .build_config; when relocation packing is enabled they are replaced
+    # below by the filelist.json written by pack_relocation_section.
+    _native_libs_file_arg_dep = ":$build_config_target"
+    _native_libs_file_arg = "@FileArg($_rebased_build_config:native:libraries)"
+    _secondary_abi_native_libs_file_arg_dep = ":$build_config_target"
+    _secondary_abi_native_libs_file_arg =
+        "@FileArg($_rebased_build_config:native:secondary_abi_libraries)"
+    assert(_secondary_abi_native_libs_file_arg != "" &&
+           _secondary_abi_native_libs_file_arg_dep != "")  # Mark as used.
+
+    if (_native_libs_deps != [] && _enable_relocation_packing) {
+      _prepare_native_target_name = "${_template_name}__prepare_native"
+      _native_libs_json = "$gen_dir/packed-libs/filelist.json"
+      _rebased_native_libs_json = rebase_path(_native_libs_json, root_build_dir)
+      _native_libs_file_arg_dep = ":$_prepare_native_target_name"
+      _native_libs_file_arg = "@FileArg($_rebased_native_libs_json:files)"
+
+      # Pack relocations for the primary-abi libraries.
+      pack_relocation_section(_prepare_native_target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "deps",
+                                 "public_deps",
+                               ])
+        file_list_json = _native_libs_json
+        libraries_filearg =
+            "@FileArg(${_rebased_build_config}:native:libraries)"
+        inputs = [
+          _build_config,
+        ]
+
+        deps += _native_libs_deps
+        deps += [ ":$build_config_target" ]
+      }
+      # Same again for the secondary abi, when one is configured.
+      if (_secondary_abi_native_libs_deps != []) {
+        _prepare_native_target_name =
+            "${_template_name}_secondary_abi__prepare_native"
+        _native_libs_json =
+            "$gen_dir/packed-libs/$android_secondary_abi_cpu/filelist.json"
+        _rebased_native_libs_json =
+            rebase_path(_native_libs_json, root_build_dir)
+        _secondary_abi_native_libs_file_arg_dep =
+            ":$_prepare_native_target_name"
+        _secondary_abi_native_libs_file_arg =
+            "@FileArg($_rebased_native_libs_json:files)"
+
+        pack_relocation_section(_prepare_native_target_name) {
+          forward_variables_from(invoker,
+                                 [
+                                   "deps",
+                                   "public_deps",
+                                 ])
+          file_list_json = _native_libs_json
+          libraries_filearg = "@FileArg(${_rebased_build_config}:native:secondary_abi_libraries)"
+          inputs = [
+            _build_config,
+          ]
+
+          deps += _secondary_abi_native_libs_deps
+          deps += [ ":$build_config_target" ]
+        }
+      }
+    }
+
+    # Extra libraries bundled alongside the apk's own: the chromium linker
+    # (when in use) and any invoker-supplied loadable_modules.
+    _extra_native_libs = []
+    _extra_native_libs_deps = []
+    assert(_extra_native_libs_deps == [])  # Mark as used.
+    _extra_native_libs_even_when_incremental = []
+    if (_native_libs_deps != []) {
+      if (_use_chromium_linker) {
+        _extra_native_libs =
+            [ "$root_shlib_dir/libchromium_android_linker$shlib_extension" ]
+        _extra_native_libs_deps +=
+            [ "//base/android/linker:chromium_android_linker" ]
+      }
+
+      # Generate the stack-symbolization helper script for this apk.
+      _create_stack_script_rule_name = "${_template_name}__stack_script"
+      _final_deps += [ ":${_create_stack_script_rule_name}" ]
+      stack_script(_create_stack_script_rule_name) {
+        stack_target_name = invoker.target_name
+        deps = _native_libs_deps
+        if (_native_libs_deps != [] && _enable_relocation_packing) {
+          packed_libraries = _native_libs_file_arg
+          deps += [ _native_libs_file_arg_dep ]
+        }
+      }
+    }
+    if (defined(invoker.loadable_modules) && invoker.loadable_modules != []) {
+      _extra_native_libs_even_when_incremental += invoker.loadable_modules
+    }
+
+    # Assemble, sign and finalize the apk itself from the manifest, resources,
+    # final dex and native libraries produced above.
+    _final_deps += [ ":${_template_name}__create" ]
+    create_apk("${_template_name}__create") {
+      forward_variables_from(invoker,
+                             [
+                               "aapt_locale_whitelist",
+                               "alternative_android_sdk_jar",
+                               "android_aapt_path",
+                               "app_as_shared_lib",
+                               "deps",
+                               "extensions_to_not_compress",
+                               "language_splits",
+                               "public_deps",
+                               "secondary_native_libs",
+                               "shared_resources",
+                               "uncompress_shared_libraries",
+                               "write_asset_list",
+                             ])
+      if (!defined(deps)) {
+        deps = []
+      }
+      apk_path = _final_apk_path
+      android_manifest = _android_manifest
+      assets_build_config = _build_config
+      resources_zip = _all_resources_zip_path
+      dex_path = final_dex_path
+      load_library_from_apk = _load_library_from_apk
+      create_density_splits = _create_density_splits
+
+      if (!defined(extensions_to_not_compress)) {
+        # Allow icu data, v8 snapshots, and pak files to be loaded directly from
+        # the .apk.
+        # Note: These are actually suffix matches, not necessarily extensions.
+        extensions_to_not_compress = ".dat,.bin,.pak"
+      }
+
+      version_code = _version_code
+      version_name = _version_name
+
+      keystore_name = _keystore_name
+      keystore_path = _keystore_path
+      keystore_password = _keystore_password
+
+      # Incremental apk does not use native libs nor final dex.
+      incremental_deps = deps + _android_manifest_deps + [
+                           ":$build_config_target",
+                           ":$process_resources_target",
+                         ]
+
+      # This target generates the input file _all_resources_zip_path.
+      deps += _android_manifest_deps + [
+                ":$build_config_target",
+                ":$process_resources_target",
+                ":$final_dex_target_name",
+              ]
+
+      # Native libraries are packed into the main apk only when no abi split
+      # is being created; otherwise the split apk below carries them.
+      if ((_native_libs_deps != [] ||
+           _extra_native_libs_even_when_incremental != []) &&
+          !_create_abi_split) {
+        deps += _native_libs_deps + _extra_native_libs_deps +
+                [ _native_libs_file_arg_dep ]
+        native_libs_filearg = _native_libs_file_arg
+        native_libs = _extra_native_libs
+        native_libs_even_when_incremental =
+            _extra_native_libs_even_when_incremental
+      }
+
+      if (_secondary_abi_native_libs_deps != [] && !_create_abi_split) {
+        deps += _secondary_abi_native_libs_deps +
+                [ _secondary_abi_native_libs_file_arg_dep ]
+        secondary_abi_native_libs_filearg = _secondary_abi_native_libs_file_arg
+      }
+
+      # Placeholders necessary for some older devices.
+      # http://crbug.com/395038
+      forward_variables_from(invoker, [ "native_lib_placeholders" ])
+    }
+
+    # When create_abi_split is requested, the native libraries go into a
+    # separate per-abi split apk with its own generated manifest.
+    if ((_native_libs_deps != [] ||
+         _extra_native_libs_even_when_incremental != []) && _create_abi_split) {
+      _manifest_rule =
+          "${_template_name}__split_manifest_abi_${android_app_abi}"
+      generate_split_manifest(_manifest_rule) {
+        main_manifest = _android_manifest
+        out_manifest =
+            "$gen_dir/split-manifests/${android_app_abi}/AndroidManifest.xml"
+        split_name = "abi_${android_app_abi}"
+        deps = _android_manifest_deps
+      }
+
+      _apk_rule = "${_template_name}__split_apk_abi_${android_app_abi}"
+      _final_deps += [ ":$_apk_rule" ]
+
+      create_apk(_apk_rule) {
+        apk_path = "${_final_apk_path_no_ext}-abi-${android_app_abi}.apk"
+        base_path = "$gen_dir/$_apk_rule"
+
+        manifest_outputs = get_target_outputs(":${_manifest_rule}")
+        android_manifest = manifest_outputs[1]
+        load_library_from_apk = _load_library_from_apk
+
+        version_code = _version_code
+        version_name = _version_name
+
+        keystore_name = _keystore_name
+        keystore_path = _keystore_path
+        keystore_password = _keystore_password
+
+        # Placeholders necessary for some older devices.
+        # http://crbug.com/395038
+        deps = []
+        forward_variables_from(invoker,
+                               [
+                                 "alternative_android_sdk_jar",
+                                 "android_aapt_path",
+                                 "deps",
+                                 "native_lib_placeholders",
+                                 "public_deps",
+                               ])
+
+        incremental_deps = deps + [ ":$_manifest_rule" ]
+        deps = []
+        deps = incremental_deps + _native_libs_deps + _extra_native_libs_deps +
+               [ _native_libs_file_arg_dep ]
+        native_libs_filearg = _native_libs_file_arg
+        native_libs = _extra_native_libs
+        native_libs_even_when_incremental =
+            _extra_native_libs_even_when_incremental
+      }
+    }
+
+    # Generate the incremental-install script that side-loads the apk's dex
+    # files and native libraries instead of packaging them into the apk.
+    _create_incremental_script_rule_name =
+        "${_template_name}__incremental_script"
+    action(_create_incremental_script_rule_name) {
+      script = "//build/android/incremental_install/create_install_script.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      deps = [
+        _native_libs_file_arg_dep,
+      ]
+
+      outputs = [
+        _incremental_install_script_path,
+      ]
+
+      _rebased_apk_path_no_ext =
+          rebase_path(_final_apk_path_no_ext, root_build_dir)
+      _rebased_incremental_install_script_path =
+          rebase_path(_incremental_install_script_path, root_build_dir)
+      _rebased_depfile = rebase_path(depfile, root_build_dir)
+      _dex_arg_key = "${_rebased_build_config}:final_dex:dependency_dex_files"
+      args = [
+        "--apk-path=${_rebased_apk_path_no_ext}_incremental.apk",
+        "--script-output-path=$_rebased_incremental_install_script_path",
+        "--dex-file=$_rebased_lib_dex_path",
+        "--dex-file-list=@FileArg($_dex_arg_key)",
+        "--depfile=$_rebased_depfile",
+      ]
+      if (_proguard_enabled) {
+        args += [ "--show-proguard-warning" ]
+      }
+      if (defined(_native_libs_file_arg)) {
+        args += [ "--native-libs=$_native_libs_file_arg" ]
+      }
+      if (_extra_native_libs != []) {
+        # Don't pass in _extra_native_libs_even_when_incremental, since these
+        # end up in the apk and are not side-loaded.
+        _rebased_extra_native_libs =
+            rebase_path(_extra_native_libs, root_build_dir)
+        args += [ "--native-libs=$_rebased_extra_native_libs" ]
+      }
+      if (_create_density_splits) {
+        args += [ "--split=${_rebased_apk_path_no_ext}-density-*.apk" ]
+      }
+      if (_create_language_splits) {
+        args += [ "--split=${_rebased_apk_path_no_ext}-language-*.apk" ]
+      }
+      if (_load_library_from_apk) {
+        args += [ "--dont-even-try=Incremental builds do not work with load_library_from_apk. Try setting is_component_build=true in your GN args." ]
+      }
+    }
+
+    # The public target: either the full apk (via _final_deps) or, when
+    # incremental_apk_by_default is set, a redirect to the incremental group.
+    group(target_name) {
+      if (_incremental_allowed && incremental_apk_by_default) {
+        deps = [
+          ":${target_name}_incremental",
+        ]
+      } else {
+        forward_variables_from(invoker,
+                               [
+                                 "data",
+                                 "data_deps",
+                               ])
+        public_deps = _final_deps
+
+        # Make the proguard .mapping file easy to find by putting it beside the .apk.
+        if (_proguard_enabled) {
+          deps = [
+            ":$_copy_proguard_mapping_target",
+          ]
+        }
+      }
+    }
+
+    if (_incremental_allowed) {
+      group("${target_name}_incremental") {
+        forward_variables_from(invoker,
+                               [
+                                 "data",
+                                 "data_deps",
+                               ])
+        if (!defined(data_deps)) {
+          data_deps = []
+        }
+
+        # device/commands is used by the installer script to push files via .zip.
+        data_deps += [ "//build/android/pylib/device/commands" ] +
+                     _native_libs_deps + _extra_native_libs_deps
+
+        # Since the _incremental.apk does not include the .so or .dex files
+        # from the actual target, but instead loads them at runtime, we need to
+        # explicitly depend on them here.
+        public_deps = [
+          ":${_create_incremental_script_rule_name}",
+          ":${_template_name}__create_incremental",
+          ":${java_target}",
+        ]
+      }
+    }
+  }
+
+  # Declare an Android instrumentation test apk
+  #
+  # This target creates an Android instrumentation test apk.
+  #
+  # Variables
+  #   android_manifest: Path to AndroidManifest.xml.
+  #   data_deps: List of dependencies needed at runtime. These will be built but
+  #     won't change the generated .apk in any way (in fact they may be built
+  #     after the .apk is).
+  #   deps: List of dependencies. All Android java resources and libraries in the
+  #     "transitive closure" of these dependencies will be included in the apk.
+  #     Note: this "transitive closure" actually only includes such targets if
+  #     they are depended on through android_library or android_resources targets
+  #     (and so not through builtin targets like 'action', 'group', etc).
+  #   java_files: List of .java files to include in the apk.
+  #   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+  #      will be added to java_files and be included in this apk.
+  #   apk_name: Name for final apk.
+  #   final_apk_path: Path to final built apk. Default is
+  #     $root_out_dir/apks/$apk_name.apk. Setting this will override apk_name.
+  #   shared_libraries: List shared_library targets to bundle. If these
+  #     libraries depend on other shared_library targets, those dependencies will
+  #     also be included in the apk (e.g. for is_component_build).
+  #   apk_under_test: The apk being tested.
+  #
+  # Example
+  #   instrumentation_test_apk("foo_test_apk") {
+  #     android_manifest = "AndroidManifest.xml"
+  #     apk_name = "FooTest"
+  #     apk_under_test = "Foo"
+  #     java_files = [
+  #       "android/org/chromium/foo/FooTestCase.java",
+  #       "android/org/chromium/foo/FooExampleTest.java",
+  #     ]
+  #     deps = [
+  #       ":foo_test_support_java"
+  #     ]
+  #   }
+  template("instrumentation_test_apk") {
+    assert(defined(invoker.apk_name))
+    testonly = true
+    _apk_target_name = "${target_name}__apk"
+    _test_runner_target_name = "${target_name}__test_runner_script"
+    _install_script_name = "install_$target_name"
+    # Interface jar used by the test runner to enumerate the test list.
+    _dist_ijar_path =
+        "$root_build_dir/test.lib.java/" + invoker.apk_name + ".jar"
+    _incremental_test_runner_target_name =
+        "${_test_runner_target_name}_incremental"
+    _incremental_test_name = "${invoker.target_name}_incremental"
+    # When incremental_apk_by_default is set, the incremental runner script
+    # takes over the non-incremental names.
+    if (incremental_apk_by_default) {
+      _incremental_test_runner_target_name = _test_runner_target_name
+      _incremental_test_name = invoker.target_name
+    }
+
+    # Wrapper script for running the tests against the regular apk.
+    if (!incremental_apk_by_default) {
+      test_runner_script(_test_runner_target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "additional_apks",
+                                 "apk_under_test",
+                                 "data",
+                                 "data_deps",
+                                 "deps",
+                                 "ignore_all_data_deps",
+                                 "public_deps",
+                               ])
+        test_name = invoker.target_name
+        test_type = "instrumentation"
+        apk_target = ":$_apk_target_name"
+        test_jar = _dist_ijar_path
+      }
+    }
+    # Wrapper script for running the tests against the incremental apk.
+    test_runner_script(_incremental_test_runner_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "additional_apks",
+                               "apk_under_test",
+                               "data",
+                               "data_deps",
+                               "deps",
+                               "ignore_all_data_deps",
+                               "public_deps",
+                             ])
+      test_name = _incremental_test_name
+      test_type = "instrumentation"
+      apk_target = ":$_apk_target_name"
+      test_jar = _dist_ijar_path
+      incremental_install = true
+    }
+
+    # The test apk itself, built via the android_apk template above with the
+    # test-support java and tooling deps added.
+    android_apk(_apk_target_name) {
+      deps = []
+      data_deps = []
+      forward_variables_from(invoker, "*")
+      install_script_name = _install_script_name
+      deps += [ "//testing/android/broker:broker_java" ]
+      data_deps += [
+        "//build/android/pylib/device/commands",
+        "//tools/android/forwarder2",
+        "//tools/android/md5sum",
+      ]
+      if (defined(invoker.additional_apks)) {
+        data_deps += invoker.additional_apks
+      }
+
+      if (defined(invoker.proguard_enabled) && invoker.proguard_enabled) {
+        # When ProGuard is on, we use ProGuard to combine the under test java
+        # code and the test java code. This is to allow us to apply all ProGuard
+        # optimizations that we ship with, but not have them break tests. The
+        # apk under test will still have the same resources, assets, and
+        # manifest, all of which are the ones used in the tests.
+        if (!defined(invoker.proguard_configs)) {
+          proguard_configs = []
+        }
+        proguard_configs += [ "//testing/android/proguard_for_test.flags" ]
+      }
+
+      dist_ijar_path = _dist_ijar_path
+      if (defined(invoker.run_findbugs_override)) {
+        # Only allow findbugs when there are java files.
+        run_findbugs_override =
+            invoker.run_findbugs_override && defined(invoker.java_files)
+      }
+    }
+
+    # The public target: runner script + apk + dist_ijar (or the incremental
+    # equivalents when incremental_apk_by_default is set).
+    group(target_name) {
+      if (incremental_apk_by_default) {
+        deps = [
+          ":${target_name}_incremental",
+        ]
+      } else {
+        public_deps = [
+          ":$_apk_target_name",
+          ":$_test_runner_target_name",
+
+          # Required by test runner to enumerate test list.
+          ":${_apk_target_name}_dist_ijar",
+        ]
+        if (defined(invoker.apk_under_test)) {
+          public_deps += [ invoker.apk_under_test ]
+        }
+      }
+    }
+
+    # TODO: Delete once recipes no longer use this target.
+    group("${target_name}_run") {
+      public_deps = [
+        ":${invoker.target_name}",
+      ]
+    }
+    group("${target_name}_incremental") {
+      public_deps = [
+        ":$_incremental_test_runner_target_name",
+        ":${_apk_target_name}_dist_ijar",
+        ":${_apk_target_name}_incremental",
+      ]
+      if (defined(invoker.apk_under_test)) {
+        public_deps += [ "${invoker.apk_under_test}_incremental" ]
+      }
+    }
+  }
+
+  # Declare an Android gtest apk
+  #
+  # This target creates an Android apk for running gtest-based unittests.
+  #
+  # Variables
+  #   deps: Specifies the dependencies of this target. These will be passed to
+  #     the underlying android_apk invocation and should include the java and
+  #     resource dependencies of the apk.
+  #   shared_library: shared_library target that contains the unit tests.
+  #   apk_name: The name of the produced apk. If unspecified, it uses the name
+  #             of the shared_library target suffixed with "_apk"
+  #   use_default_launcher: Whether the default activity (NativeUnitTestActivity)
+  #     should be used for launching tests.
+  #   use_native_activity: Test implements ANativeActivity_onCreate().
+  #
+  # Example
+  #   unittest_apk("foo_unittests_apk") {
+  #     deps = [ ":foo_java", ":foo_resources" ]
+  #     shared_library = ":foo_unittests"
+  #   }
+  template("unittest_apk") {
+    _use_native_activity =
+        defined(invoker.use_native_activity) && invoker.use_native_activity
+    _android_manifest = "$target_gen_dir/$target_name/AndroidManifest.xml"
+    assert(invoker.shared_library != "")
+
+    # This trivial assert is needed in case android_manifest is defined,
+    # as otherwise _use_native_activity and _android_manifest would not be used.
+    assert(_use_native_activity != "" && _android_manifest != "")
+
+    # Generate a manifest from the jinja template unless the invoker supplied
+    # its own android_manifest.
+    if (!defined(invoker.android_manifest)) {
+      jinja_template("${target_name}_manifest") {
+        _native_library_name = get_label_info(invoker.shared_library, "name")
+        input = "//testing/android/native_test/java/AndroidManifest.xml.jinja2"
+        output = _android_manifest
+        variables = [
+          "is_component_build=${is_component_build}",
+          "native_library_name=${_native_library_name}",
+          "use_native_activity=${_use_native_activity}",
+        ]
+      }
+    }
+
+    # Wrap the gtest shared library in an apk with the native-test java
+    # harness and device tooling deps.
+    android_apk(target_name) {
+      set_sources_assignment_filter([])
+      data_deps = []
+      deps = []
+      forward_variables_from(invoker, "*")
+      testonly = true
+
+      assert(!defined(invoker.proguard_enabled) || !invoker.proguard_enabled ||
+             invoker.proguard_configs != [])
+
+      if (!defined(apk_name)) {
+        apk_name = get_label_info(invoker.shared_library, "name")
+      }
+
+      if (!defined(android_manifest)) {
+        android_manifest_dep = ":${target_name}_manifest"
+        android_manifest = _android_manifest
+      }
+
+      final_apk_path = "$root_build_dir/${apk_name}_apk/${apk_name}-debug.apk"
+
+      if (!defined(use_default_launcher) || use_default_launcher) {
+        deps += [ "//testing/android/native_test:native_test_java" ]
+      }
+      shared_libraries = [ invoker.shared_library ]
+      deps += [
+        "//base:base_java",
+        "//testing/android/appurify_support:appurify_support_java",
+        "//testing/android/reporter:reporter_java",
+      ]
+      data_deps += [
+        "//build/android/pylib/device/commands",
+        "//tools/android/md5sum",
+      ]
+      # forwarder2 is only built/used on linux hosts.
+      if (host_os == "linux") {
+        data_deps += [ "//tools/android/forwarder2" ]
+      }
+    }
+  }
+
+  # Generate .java files from .aidl files.
+  #
+  # This target will store the .java files in a srcjar and should be included in
+  # an android_library or android_apk's srcjar_deps.
+  #
+  # Variables
+  #   sources: Paths to .aidl files to compile.
+  #   import_include: Path to directory containing .java files imported by the
+  #     .aidl files.
+  #   interface_file: Preprocessed aidl file to import.
+  #
+  # Example
+  #   android_aidl("foo_aidl") {
+  #     import_include = "java/src"
+  #     sources = [
+  #       "java/src/com/foo/bar/FooBarService.aidl",
+  #       "java/src/com/foo/bar/FooBarServiceCallback.aidl",
+  #     ]
+  #   }
+  template("android_aidl") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+
+    srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+    aidl_path = "${android_sdk_build_tools}/aidl"
+    framework_aidl = "$android_sdk/framework.aidl"
+
+    action(target_name) {
+      script = "//build/android/gyp/aidl.py"
+      sources = invoker.sources
+
+      # The SDK's framework.aidl is always made available for import.
+      imports = [ framework_aidl ]
+      if (defined(invoker.interface_file)) {
+        assert(invoker.interface_file != "")
+        imports += [ invoker.interface_file ]
+      }
+
+      inputs = [ aidl_path ] + imports
+
+      depfile = "${target_gen_dir}/${target_name}.d"
+      outputs = [
+        srcjar_path,
+      ]
+      rebased_imports = rebase_path(imports, root_build_dir)
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--aidl-path",
+        rebase_path(aidl_path, root_build_dir),
+        "--imports=$rebased_imports",
+        "--srcjar",
+        rebase_path(srcjar_path, root_build_dir),
+      ]
+      if (defined(invoker.import_include) && invoker.import_include != []) {
+        # TODO(cjhopman): aidl supports creating a depfile. We should be able to
+        # switch to constructing a depfile for the overall action from that
+        # instead of having all the .java files in the include paths as inputs.
+        rebased_import_paths = []
+        foreach(import_path, invoker.import_include) {
+          _rebased_import_path = []
+          _rebased_import_path += rebase_path([ import_path ], root_build_dir)
+          rebased_import_paths += _rebased_import_path
+          _java_files_build_rel = []
+          _java_files_build_rel = exec_script("//build/android/gyp/find.py",
+                                              _rebased_import_path,
+                                              "list lines")
+          inputs += rebase_path(_java_files_build_rel, ".", root_build_dir)
+        }
+        args += [ "--includes=$rebased_import_paths" ]
+      }
+      args += rebase_path(sources, root_build_dir)
+    }
+  }
+
+  # Compile a protocol buffer to java.
+  #
+  # This generates java files from protocol buffers and creates an Android library
+  # containing the classes.
+  #
+  # Variables
+  #   sources: Paths to .proto files to compile.
+  #   proto_path: Root directory of .proto files.
+  #
+  # Example:
+  #  proto_java_library("foo_proto_java") {
+  #    proto_path = "src/foo"
+  #    sources = [ "$proto_path/foo.proto" ]
+  #  }
+  template("proto_java_library") {
+    set_sources_assignment_filter([])
+    forward_variables_from(invoker, [ "testonly" ])
+    # protoc runs on the host, so reference it through $host_toolchain.
+    _protoc_dep =
+        "//third_party/android_protobuf:android_protoc($host_toolchain)"
+    _protoc_out_dir = get_label_info(_protoc_dep, "root_out_dir")
+    _protoc_bin = "$_protoc_out_dir/android_protoc"
+    _proto_path = invoker.proto_path
+
+    _template_name = target_name
+
+    action("${_template_name}__protoc_java") {
+      srcjar_path = "$target_gen_dir/$target_name.srcjar"
+      script = "//build/protoc_java.py"
+
+      deps = [
+        _protoc_dep,
+      ]
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+
+      sources = invoker.sources
+      depfile = "$target_gen_dir/$target_name.d"
+      outputs = [
+        srcjar_path,
+      ]
+      args = [
+               "--depfile",
+               rebase_path(depfile, root_build_dir),
+               "--protoc",
+               rebase_path(_protoc_bin, root_build_dir),
+               "--proto-path",
+               rebase_path(_proto_path, root_build_dir),
+               "--srcjar",
+               rebase_path(srcjar_path, root_build_dir),
+             ] + rebase_path(sources, root_build_dir)
+    }
+
+    # Wrap the generated srcjar in an Android library that pulls in the
+    # protobuf nano runtime.
+    android_library(target_name) {
+      chromium_code = false
+      java_files = []
+      srcjar_deps = [ ":${_template_name}__protoc_java" ]
+      deps = [
+        "//third_party/android_protobuf:protobuf_nano_javalib",
+      ]
+    }
+  }
+
+  # Declare an Android library target for a prebuilt AAR.
+  #
+  # This target creates an Android library containing java code and Android
+  # resources. For libraries without resources, it will not generate
+  # corresponding android_resources targets.
+  #
+  # Variables
+  #   aar_path: Path to the AAR.
+  #   proguard_configs: List of proguard configs to use in final apk step for
+  #     any apk that depends on this library.
+  #   ignore_aidl: Whether to ignore .aidl files found with the .aar.
+  #   ignore_assets: Whether to ignore assets found in the .aar.
+  #   ignore_manifest: Whether to ignore merging of AndroidManifest.xml.
+  #   ignore_native_libraries: Whether to ignore .so files found in the .aar.
+  #   TODO(jbudorick@): remove these arguments after crbug.com/522043 is fixed.
+  #   requires_android: Whether this target can only be used for compiling Android related targets.
+  #
+  # Example
+  #   android_aar_prebuilt("foo_java") {
+  #     aar_path = "foo.aar"
+  #   }
+  template("android_aar_prebuilt") {
+    _output_path = "${target_gen_dir}/${target_name}"
+    _unpack_target_name = "${target_name}__unpack_aar"
+    _ignore_aidl = defined(invoker.ignore_aidl) && invoker.ignore_aidl
+    _ignore_assets = defined(invoker.ignore_assets) && invoker.ignore_assets
+    _ignore_manifest =
+        defined(invoker.ignore_manifest) && invoker.ignore_manifest
+    _ignore_native_libraries = defined(invoker.ignore_native_libraries) &&
+                               invoker.ignore_native_libraries
+
+    # Scan the AAR file and determine the resources and jar files.
+    # Some libraries might not have resources; others might have two jars.
+    _scanned_files =
+        exec_script("//build/android/gyp/aar.py",
+                    [
+                      "--input-file",
+                      rebase_path(invoker.aar_path, root_build_dir),
+                      "--list",
+                    ],
+                    "scope")
+
+    assert(_ignore_aidl || _scanned_files.aidl == [],
+           "android_aar_prebuilt() aidl not yet supported." +
+               " Implement or use ignore_aidl = true." +
+               " http://crbug.com/644439")
+    assert(_ignore_assets || _scanned_files.assets == [],
+           "android_aar_prebuilt() assets not yet supported." +
+               " Implement or use ignore_assets = true." +
+               " http://crbug.com/643966")
+    assert(_ignore_native_libraries || !_scanned_files.has_native_libraries,
+           "android_aar_prebuilt() with .so files is not supported." +
+               " Use ignore_native_libraries = true to silence this error.")
+    assert(_ignore_manifest || _scanned_files.is_manifest_empty,
+           "android_aar_prebuilt() manifest merging not yet supported and" +
+               " non-trivial AndroidManifest.xml detected." +
+               " Implement or use ignore_manifest = true." +
+               " http://crbug.com/643967")
+    # Sanity check: subjars cannot be present without a classes.jar.
+    assert(_scanned_files.has_classes_jar || _scanned_files.subjars == [])
+
+    action(_unpack_target_name) {
+      script = "//build/android/gyp/aar.py"  # Unzips the AAR
+      args = [
+        "--input-file",
+        rebase_path(invoker.aar_path, root_build_dir),
+        "--output-dir",
+        rebase_path(_output_path, root_build_dir),
+        "--extract",
+      ]
+      inputs = [
+        invoker.aar_path,
+      ]
+      outputs = [
+        "${_output_path}/AndroidManifest.xml",
+      ]
+
+      if (_scanned_files.has_r_text_file) {
+        # Certain packages, in particular Play Services have no R.txt even
+        # though its presence is mandated by AAR spec. Such packages cause
+        # spurious rebuilds if this output is specified unconditionally.
+        outputs += [ "${_output_path}/R.txt" ]
+      }
+
+      if (_scanned_files.resources != []) {
+        outputs += get_path_info(
+                rebase_path(_scanned_files.resources, "", _output_path),
+                "abspath")
+      }
+      if (_scanned_files.has_classes_jar) {
+        outputs += [ "${_output_path}/classes.jar" ]
+      }
+      outputs +=
+          get_path_info(rebase_path(_scanned_files.subjars, "", _output_path),
+                        "abspath")
+      if (_scanned_files.has_proguard_flags) {
+        outputs += [ "${_output_path}/proguard.txt" ]
+      }
+    }
+
+    # Create the android_resources target for resources.
+    if (_scanned_files.resources != [] || _scanned_files.has_r_text_file) {
+      _res_target_name = "${target_name}__res"
+      android_resources(_res_target_name) {
+        forward_variables_from(invoker, [ "deps" ])
+        if (!defined(deps)) {
+          deps = []
+        }
+        deps += [ ":$_unpack_target_name" ]
+        resource_dirs = []
+        generated_resource_dirs = []
+        if (_scanned_files.resources != []) {
+          generated_resource_dirs += [ "${_output_path}/res" ]
+        }
+        generated_resource_files =
+            rebase_path(_scanned_files.resources, "", _output_path)
+        android_manifest_dep = ":$_unpack_target_name"
+        android_manifest = "${_output_path}/AndroidManifest.xml"
+        if (_scanned_files.has_r_text_file) {
+          r_text_file = "${_output_path}/R.txt"
+        }
+        v14_skip = true
+      }
+    }
+
+    # Create android_java_prebuilt target for extra jars within jars/.
+    _subjar_targets = []
+    foreach(_tuple, _scanned_files.subjar_tuples) {
+      _current_target = "${target_name}__subjar_${_tuple[0]}"
+      _subjar_targets += [ ":$_current_target" ]
+      java_prebuilt(_current_target) {
+        forward_variables_from(invoker,
+                               [
+                                 "jar_excluded_patterns",
+                                 "requires_android",
+                               ])
+        deps = [
+          ":$_unpack_target_name",
+        ]
+        if (!defined(requires_android)) {
+          requires_android = true
+        }
+        supports_android = true
+        jar_path = "$_output_path/${_tuple[1]}"
+        _base_output_name = get_path_info(jar_path, "name")
+        output_name = "${invoker.target_name}-$_base_output_name"
+      }
+    }
+
+    # Create android_java_prebuilt target for classes.jar.
+    if (_scanned_files.has_classes_jar) {
+      _jar_target_name = "${target_name}__classes"
+      java_prebuilt(_jar_target_name) {
+        forward_variables_from(invoker,
+                               [
+                                 "deps",
+                                 "input_jars_paths",
+                                 "jar_excluded_patterns",
+                                 "proguard_configs",
+                                 "requires_android",
+                               ])
+        if (!defined(deps)) {
+          deps = []
+        }
+        deps += _subjar_targets + [ ":$_unpack_target_name" ]
+        if (defined(_res_target_name)) {
+          deps += [ ":$_res_target_name" ]
+        }
+        if (!defined(requires_android)) {
+          requires_android = true
+        }
+        supports_android = true
+        jar_path = "$_output_path/classes.jar"
+        output_name = invoker.target_name
+
+        if (_scanned_files.has_proguard_flags) {
+          if (!defined(proguard_configs)) {
+            proguard_configs = []
+          }
+          proguard_configs += [ "$_output_path/proguard.txt" ]
+        }
+      }
+    }
+
+    # The public target: groups the classes.jar, subjars, and resources
+    # targets that exist for this AAR.
+    java_group(target_name) {
+      deps = []
+      if (defined(_jar_target_name)) {
+        deps += [ ":$_jar_target_name" ]
+
+        # Although subjars are meant to be private, we add them as deps here
+        # because in practice they seem to contain classes required to be in the
+        # classpath.
+        deps += _subjar_targets
+      }
+      if (defined(_res_target_name)) {
+        deps += [ ":$_res_target_name" ]
+      }
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/arm.gni
@@ -0,0 +1,126 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/v8_target_cpu.gni")
+
+# These are primarily relevant in current_cpu == "arm" contexts, where
+# ARM code is being compiled.  But they can also be relevant in the
+# other contexts when the code will change its behavior based on the
+# cpu it wants to generate code for.
+if (current_cpu == "arm" || v8_current_cpu == "arm") {
+  declare_args() {
+    # Version of the ARM processor when compiling on ARM. Ignored on non-ARM
+    # platforms.
+    arm_version = 7
+
+    # The ARM architecture. This will be a string like "armv6" or "armv7-a".
+    # An empty string means to use the default for the arm_version.
+    arm_arch = ""
+
+    # The ARM floating point hardware. This will be a string like "neon" or
+    # "vfpv3". An empty string means to use the default for the arm_version.
+    arm_fpu = ""
+
+    # The ARM floating point mode. This is either the string "hard", "soft", or
+    # "softfp". An empty string means to use the default one for the
+    # arm_version.
+    arm_float_abi = ""
+
+    # The ARM variant-specific tuning mode. This will be a string like "armv6"
+    # or "cortex-a15". An empty string means to use the default for the
+    # arm_version.
+    arm_tune = ""
+
+    # Whether to use the neon FPU instruction set or not. An empty string
+    # means to use the default: enabled, except for V8 simulator builds on
+    # Linux (see below).
+    arm_use_neon = ""
+
+    # Whether to enable optional NEON code paths.
+    arm_optionally_use_neon = false
+
+    # Thumb is a reduced instruction set available on some ARM processors that
+    # has increased code density.
+    arm_use_thumb = true
+  }
+
+  assert(arm_float_abi == "" || arm_float_abi == "hard" ||
+         arm_float_abi == "soft" || arm_float_abi == "softfp")
+
+  # Resolve the tri-state arm_use_neon default.
+  if (arm_use_neon == "") {
+    if (current_os == "linux" && target_cpu != v8_target_cpu) {
+      # Don't use neon on V8 simulator builds as a default.
+      arm_use_neon = false
+    } else {
+      arm_use_neon = true
+    }
+  }
+
+  # Fill in per-arm_version defaults for any argument left as "".
+  if (arm_version == 6) {
+    if (arm_arch == "") {
+      arm_arch = "armv6"
+    }
+    if (arm_tune != "") {
+      arm_tune = ""
+    }
+    if (arm_float_abi == "") {
+      arm_float_abi = "softfp"
+    }
+    if (arm_fpu == "") {
+      arm_fpu = "vfp"
+    }
+    arm_use_thumb = false
+  } else if (arm_version == 7) {
+    if (arm_arch == "") {
+      arm_arch = "armv7-a"
+    }
+    if (arm_tune == "") {
+      arm_tune = "generic-armv7-a"
+    }
+
+    if (arm_float_abi == "") {
+      if (current_os == "android" || target_os == "android") {
+        arm_float_abi = "softfp"
+      } else if (current_os == "linux" && target_cpu != v8_target_cpu) {
+        # Default to the same as Android for V8 simulator builds.
+        arm_float_abi = "softfp"
+      } else {
+        arm_float_abi = "hard"
+      }
+    }
+
+    if (arm_fpu == "") {
+      if (arm_use_neon) {
+        arm_fpu = "neon"
+      } else {
+        arm_fpu = "vfpv3-d16"
+      }
+    }
+  } else if (arm_version == 8) {
+    if (arm_arch == "") {
+      arm_arch = "armv8-a"
+    }
+    if (arm_tune == "") {
+      arm_tune = "generic-armv8-a"
+    }
+
+    if (arm_float_abi == "") {
+      if (current_os == "android" || target_os == "android") {
+        arm_float_abi = "softfp"
+      } else {
+        arm_float_abi = "hard"
+      }
+    }
+
+    if (arm_fpu == "") {
+      if (arm_use_neon) {
+        arm_fpu = "neon"
+      } else {
+        arm_fpu = "vfpv3-d16"
+      }
+    }
+  }
+} else if (current_cpu == "arm64" || v8_current_cpu == "arm64") {
+  # arm64 supports only "hard".
+  # Note these are plain assignments (not declare_args), so they cannot be
+  # overridden on arm64.
+  arm_float_abi = "hard"
+  arm_use_neon = true
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/chrome_build.gni
@@ -0,0 +1,22 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Select the desired branding flavor. False means normal Chromium branding,
+  # true means official Google Chrome branding (requires extra Google-internal
+  # resources).
+  is_chrome_branded = false
+
+  # Break chrome.dll into multiple pieces based on process type. Only available
+  # on Windows.
+  is_multi_dll_chrome = is_win && !is_component_build
+}
+
+# Refers to the subdirectory for branding in various places including
+# chrome/app/theme.
+if (is_chrome_branded) {
+  branding_path_component = "google_chrome"
+} else {
+  branding_path_component = "chromium"
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/chromecast/BUILD.gn
@@ -0,0 +1,85 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chromecast_build.gni")
+
+assert(is_chromecast)
+
+# Linker flags used when statically linking the C++ runtime (non-clang only).
+config("static_config") {
+  if (!is_clang) {
+    ldflags = [
+      # Don't allow visible symbols from libraries that contain
+      # assembly code with symbols that aren't hidden properly.
+      # http://b/26390825
+      "-Wl,--exclude-libs=libffmpeg.a",
+    ]
+
+    if (!is_android) {
+      ldflags += [
+        # We want to statically link libstdc++/libgcc on Linux.
+        # (On Android, libstdc++ and libgcc aren't used.)
+        "-static-libstdc++",
+        "-static-libgcc",
+      ]
+    }
+  }
+}
+
+config("ldconfig") {
+  visibility = [ ":*" ]
+
+  # Chromecast executables depend on several shared libraries in
+  # /oem_cast_shlib, $ORIGIN, and $ORIGIN/lib. Add these rpaths to each binary.
+  # This is explicitly disabled in Chrome for security reasons (see comments in
+  # //build/config/gcc/BUILD.gn), but necessary on Chromecast so that OEM's may
+  # override the default libraries shipped in the Cast receiver package.
+  # The "\$" stops GN from expanding the variable, so "$ORIGIN" is passed
+  # literally to the linker.
+  ldflags = [
+    "-Wl,-rpath=/oem_cast_shlib",
+    "-Wl,-rpath=\$ORIGIN/lib",
+    "-Wl,-rpath=\$ORIGIN",
+  ]
+
+  # Binaries which don't live in the same directory as Chrome component
+  # libraries may still depend on them. Explicitly add the component library
+  # directory to the rpath for the component build.
+  if (is_component_build) {
+    ldflags += [ "-Wl,-rpath=/system/chrome" ]
+  }
+}
+
+# Linker configuration applied to Chromecast executables.
+config("executable_config") {
+  configs = [ ":ldconfig" ]
+
+  if (!is_clang && current_cpu == "arm") {
+    ldflags = [
+      # Export stdlibc++ and libgcc symbols to force shlibs to refer to these
+      # symbols from the executable.
+      "-Wl,--export-dynamic",
+
+      "-lm",  # stdlibc++ requires math.h
+
+      # In case we redefined stdlibc++ symbols (e.g. tc_malloc)
+      "-Wl,--allow-multiple-definition",
+
+      "-Wl,--whole-archive",
+      "-l:libstdc++.a",
+      "-l:libgcc.a",
+      "-Wl,--no-whole-archive",
+    ]
+
+    # Despite including libstdc++/libgcc archives, we still need to specify
+    # static linking for them in order to prevent the executable from having a
+    # dynamic dependency on them.
+    configs += [ ":static_config" ]
+  }
+}
+
+# Shared libraries should not have RPATH or RUNPATH set. This allows the
+# shared libs to inherit RPATH from the parent executable that is loading
+# the shared library. (See internal b/37514052 for more details.)
+config("shared_library_config") {
+  if (current_cpu == "arm") {
+    configs = [ ":static_config" ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/chromecast_build.gni
@@ -0,0 +1,36 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# The args declared in this file should be referenced by components outside of
+# //chromecast. Args needed only in //chromecast should be declared in
+# //chromecast/chromecast.gni.
+declare_args() {
+  # Set this true for a Chromecast build. Chromecast builds are supported on
+  # Linux and Android.
+  is_chromecast = false
+
+  # Set this true for an audio-only Chromecast build.
+  is_cast_audio_only = false
+}
+
+# Note(slan): This arg depends on the value of is_chromecast, and thus must be
+# declared in a separate block. These blocks can be combined when/if
+# crbug.com/542846 is resolved.
+declare_args() {
+  # True if Chromecast build is targeted for linux desktop. This type of build
+  # is useful for testing and development, but currently supports only a subset
+  # of Cast functionality. Though this defaults to true for x86 Linux devices,
+  # this should be overridden manually for an embedded x86 build.
+  # TODO(slan): Remove instances of this when x86 is a fully supported platform.
+  is_cast_desktop_build = is_chromecast && target_os == "linux" &&
+                          (target_cpu == "x86" || target_cpu == "x64")
+}
+
+# Assert that Chromecast is being built for a supported platform.
+assert(is_linux || is_android || !is_chromecast,
+       "Chromecast builds are not supported on $target_os")
+
+# Assert that is_cast_audio_only and is_cast_desktop_build are both false on a
+# non-Chromecast build.
+assert(is_chromecast || (!is_cast_audio_only && !is_cast_desktop_build))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/clang/BUILD.gn
@@ -0,0 +1,65 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("clang.gni")
+
+# Loads and enables the Chrome clang plugin ("find-bad-constructs"), which
+# enforces Chromium coding rules at compile time. No-op when
+# clang_use_chrome_plugins is false.
+config("find_bad_constructs") {
+  if (clang_use_chrome_plugins) {
+    cflags = []
+
+    # On Windows, the plugin is built directly into clang, so there's
+    # no need to load it dynamically.
+
+    if (is_mac || is_ios) {
+      cflags += [
+        "-Xclang",
+        "-load",
+        "-Xclang",
+        rebase_path("${clang_base_path}/lib/libFindBadConstructs.dylib",
+                    root_build_dir),
+      ]
+    } else if (is_linux || is_android) {
+      cflags += [
+        "-Xclang",
+        "-load",
+        "-Xclang",
+        rebase_path("${clang_base_path}/lib/libFindBadConstructs.so",
+                    root_build_dir),
+      ]
+    }
+
+    cflags += [
+      "-Xclang",
+      "-add-plugin",
+      "-Xclang",
+      "find-bad-constructs",
+      "-Xclang",
+      "-plugin-arg-find-bad-constructs",
+      "-Xclang",
+      "check-auto-raw-pointer",
+    ]
+
+    # The check-ipc plugin argument is only passed on Linux/Android.
+    if (is_linux || is_android) {
+      cflags += [
+        "-Xclang",
+        "-plugin-arg-find-bad-constructs",
+        "-Xclang",
+        "check-ipc",
+      ]
+    }
+  }
+}
+
+# Enables some extra Clang-specific warnings. Some third-party code won't
+# compile with these so may want to remove this config.
+config("extra_warnings") {
+  cflags = [
+    "-Wheader-hygiene",
+
+    # Warns when a const char[] is converted to bool.
+    "-Wstring-conversion",
+
+    # Warns on comparisons whose overlapping ranges make them always true or
+    # always false.
+    "-Wtautological-overlap-compare",
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/clang/clang.gni
@@ -0,0 +1,13 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # Indicates if the build should use the Chrome-specific plugins for enforcing
+  # coding guidelines, etc. Only used when compiling with Clang.
+  clang_use_chrome_plugins = is_clang && !is_nacl && !use_xcode_clang
+
+  # Root of the checked-in prebuilt clang/llvm package.
+  clang_base_path = "//third_party/llvm-build/Release+Asserts"
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/compiler/BUILD.gn
@@ -0,0 +1,1763 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build_overrides/build.gni")
+
+if (current_cpu == "arm" || current_cpu == "arm64") {
+  import("//build/config/arm.gni")
+}
+if (current_cpu == "mipsel" || current_cpu == "mips64el") {
+  import("//build/config/mips.gni")
+}
+if (is_mac) {
+  import("//build/config/mac/symbols.gni")
+}
+if (is_ios) {
+  import("//build/config/ios/ios_sdk.gni")
+}
+if (is_nacl) {
+  # To keep NaCl variables out of builds that don't include NaCl, all
+  # variables defined in nacl/config.gni referenced here should be protected by
+  # is_nacl conditions.
+  import("//build/config/nacl/config.gni")
+}
+
+declare_args() {
+  # Default to warnings as errors for default workflow, where we catch
+  # warnings with known toolchains. Allow overriding this e.g. for Chromium
+  # builds on Linux that could use a different version of the compiler.
+  # With GCC, warnings in no-Chromium code are always not treated as errors.
+  treat_warnings_as_errors = true
+
+  # Normally, Android builds are lightly optimized, even for debug builds, to
+  # keep binary size down. Setting this flag to true disables such optimization.
+  android_full_debug = false
+
+  # Whether to use the binary binutils checked into third_party/binutils.
+  # These are not multi-arch so cannot be used except on x86 and x86-64 (the
+  # only two architectures that are currently checked in). Turn this off when
+  # you are using a custom toolchain and need to control -B in cflags.
+  linux_use_bundled_binutils =
+      linux_use_bundled_binutils_override && is_linux &&
+      (current_cpu == "x64" || current_cpu == "x86")
+  binutils_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+                              root_build_dir)
+
+  # Compile in such a way as to make it possible for the profiler to unwind full
+  # stack frames. Setting this flag has a larger effect on the performance of
+  # the generated code than setting profiling alone, but gives the profiler more
+  # information to analyze.
+  # Requires profiling to be set to true.
+  enable_full_stack_frames_for_profiling = false
+
+  # When we are going to use gold we need to find it.
+  # This is initialized below, after use_gold might have been overridden.
+  gold_path = false
+
+  if (is_win) {
+    # Whether the VS xtree header has been patched to disable warning 4702. If
+    # it has, then we don't need to disable 4702 (unreachable code warning).
+    # The patch is preapplied to the internal toolchain and hence all bots.
+    msvs_xtree_patched = false
+  }
+
+  # Omit unwind support in official builds to save space.
+  # We can use breakpad for these builds.
+  exclude_unwind_tables = (is_chrome_branded && is_official_build) ||
+                          (is_chromecast && !is_cast_desktop_build && !is_debug)
+
+  # If true, optimize for size. Does not affect windows builds.
+  # Linux & Mac favor speed over size.
+  # TODO(brettw) it's weird that Mac and desktop Linux are different. We should
+  # explore favoring size over speed in this case as well.
+  optimize_for_size = is_android || is_ios
+
+  # Enable fatal linker warnings. Building Chromium with certain versions
+  # of binutils can cause linker warnings.
+  # See: https://bugs.chromium.org/p/chromium/issues/detail?id=457359
+  fatal_linker_warnings = true
+
+  # Build with C++ RTTI enabled. Chromium builds without RTTI by default,
+  # but some sanitizers are known to require it, like CFI diagnostics
+  # and UBsan variants.
+  use_rtti = use_cfi_diag || is_ubsan_vptr || is_ubsan_security
+
+  # AFDO (Automatic Feedback Directed Optimizer) is a form of profile-guided
+  # optimization that GCC supports. It is used by ChromeOS in its official
+  # builds. To use it, set auto_profile_path to the path to a file containing
+  # the needed gcov profiling data.
+  auto_profile_path = ""
+
+  # Optimize for coverage guided fuzzing (balance between speed and number of
+  # branches)
+  optimize_for_fuzzing = false
+
+  # Optimize symbol files for maximizing goma cache hit rate. This isn't
+  # on by default when goma is enabled because setting this to true may make
+  # it harder to debug binaries.
+  strip_absolute_paths_from_debug_symbols = false
+}
+
+#if (is_clang && !is_nacl) {
+#  update_args = [ "--print-revision" ]
+#  if (llvm_force_head_revision) {
+#    update_args += [ "--llvm-force-head-revision" ]
+#  }
+#  clang_revision =
+#      exec_script("//tools/clang/scripts/update.py", update_args, "trim string")
+#}
+
+## Apply the default logic for these values if they were not set explicitly.
+#if (gold_path == false) {
+#  if (use_gold) {
+#    gold_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+#                            root_build_dir)
+#  } else {
+#    gold_path = ""
+#  }
+#}
+
+if (use_debug_fission == "default") {
+  use_debug_fission =
+      is_debug && !is_android && !is_win && use_gold && cc_wrapper == ""
+}
+
+# default_include_dirs ---------------------------------------------------------
+#
+# This is a separate config so that third_party code (which would not use the
+# source root and might have conflicting versions of some headers) can remove
+# this and specify their own include paths.
+config("default_include_dirs") {
+  include_dirs = [
+    "//",
+    root_gen_dir,
+  ]
+}
+
+# compiler ---------------------------------------------------------------------
+#
+# Base compiler configuration.
+#
+# See also "runtime_library" below for related stuff and a discussion about
+# where stuff should go. Put warning related stuff in the "warnings" config.
+
+config("compiler") {
+  asmflags = []
+  cflags = []
+  cflags_c = []
+  cflags_cc = []
+  cflags_objc = []
+  cflags_objcc = []
+  ldflags = []
+  defines = []
+  configs = []
+
+  # System-specific flags. If your compiler flags apply to one of the
+  # categories here, add it to the associated file to keep this shared config
+  # smaller.
+  if (is_win) {
+    configs += [ "//build/config/win:compiler" ]
+  } else if (is_android) {
+    configs += [ "//build/config/android:compiler" ]
+  } else if (is_linux) {
+    configs += [ "//build/config/linux:compiler" ]
+  } else if (is_nacl) {
+    configs += [ "//build/config/nacl:compiler" ]
+  } else if (is_mac) {
+    configs += [ "//build/config/mac:compiler" ]
+  } else if (is_ios) {
+    configs += [ "//build/config/ios:compiler" ]
+  } else if (is_fuchsia) {
+    configs += [ "//build/config/fuchsia:compiler" ]
+  } else if (current_os == "aix") {
+    configs += [ "//build/config/aix:compiler" ]
+  }
+
+  # See the definitions below.
+  configs += [
+    ":compiler_cpu_abi",
+    ":compiler_codegen",
+  ]
+
+  # In general, Windows is totally different, but all the other builds share
+  # some common GCC configuration.
+  if (!is_win) {
+    # Common GCC compiler flags setup.
+    # --------------------------------
+    cflags += [ "-fno-strict-aliasing" ]  # See http://crbug.com/32204
+
+    # Stack protection.
+    if (is_mac) {
+      # The strong variant of the stack protector significantly increases
+      # binary size, so only enable it in debug mode.
+      if (is_debug) {
+        cflags += [ "-fstack-protector-strong" ]
+      } else {
+        cflags += [ "-fstack-protector" ]
+      }
+    } else if (is_posix && !is_chromeos && !is_nacl) {
+      # TODO(phajdan.jr): Use -fstack-protector-strong when our gcc supports it.
+      # See also https://crbug.com/533294
+      cflags += [ "--param=ssp-buffer-size=4" ]
+
+      # The x86 toolchain currently has problems with stack-protector.
+      if (is_android && current_cpu == "x86") {
+        cflags += [ "-fno-stack-protector" ]
+      } else if (current_os != "aix") {
+        # Not available on aix.
+        cflags += [ "-fstack-protector" ]
+      }
+    }
+
+    # Linker warnings.
+    if (fatal_linker_warnings && !(is_chromeos && current_cpu == "arm") &&
+        !(is_android && use_order_profiling) && !is_mac && !is_ios &&
+        current_os != "aix") {
+      # TODO(jochen): Enable this on chromeos on arm. http://crbug.com/356580
+      # TODO(lizeb,pasko): Fix link errors when linking with order_profiling=1
+      # crbug.com/485542
+      ldflags += [ "-Wl,--fatal-warnings" ]
+    }
+  }
+
+  # Eliminate build metadata (__DATE__, __TIME__ and __TIMESTAMP__) for
+  # deterministic build.  See https://crbug.com/314403
+  if (!is_official_build) {
+    if (is_win && !is_clang) {
+      cflags += [
+        "/wd4117",  # Trying to define or undefine a predefined macro.
+        "/D__DATE__=",
+        "/D__TIME__=",
+        "/D__TIMESTAMP__=",
+      ]
+    } else {
+      cflags += [
+        "-Wno-builtin-macro-redefined",
+        "-D__DATE__=",
+        "-D__TIME__=",
+        "-D__TIMESTAMP__=",
+      ]
+    }
+  }
+
+  if (is_clang && is_debug) {
+    # Allow comparing the address of references and 'this' against 0
+    # in debug builds. Technically, these can never be null in
+    # well-defined C/C++ and Clang can optimize such checks away in
+    # release builds, but they may be used in asserts in debug builds.
+    cflags_cc += [
+      "-Wno-undefined-bool-conversion",
+      "-Wno-tautological-undefined-compare",
+    ]
+  }
+
+  if (is_clang && !is_nacl) {
+    # This is here so that all files get recompiled after a clang roll and
+    # when turning clang on or off. (defines are passed via the command line,
+    # and build system rebuild things when their commandline changes). Nothing
+    # should ever read this define.
+    #defines += [ "CR_CLANG_REVISION=\"$clang_revision\"" ]
+  }
+
+  # Non-Mac Posix compiler flags setup.
+  # -----------------------------------
+  if (is_posix && !(is_mac || is_ios)) {
+    if (enable_profiling) {
+      if (!is_debug) {
+        cflags += [ "-g" ]
+
+        if (enable_full_stack_frames_for_profiling) {
+          cflags += [
+            "-fno-inline",
+            "-fno-optimize-sibling-calls",
+          ]
+        }
+      }
+    }
+
+    if (is_official_build) {
+      # Explicitly pass --build-id to ld. Compilers used to always pass this
+      # implicitly but don't any more (in particular clang when built without
+      # ENABLE_LINKER_BUILD_ID=ON). The crash infrastructure does need a build
+      # id, so explicitly enable it in official builds. It's not needed in
+      # unofficial builds and computing it does slow down the link, so go with
+      # faster links in unofficial builds.
+      ldflags += [ "-Wl,--build-id=sha1" ]
+    }
+
+    defines += [ "_FILE_OFFSET_BITS=64" ]
+
+    if (!is_android) {
+      defines += [
+        "_LARGEFILE_SOURCE",
+        "_LARGEFILE64_SOURCE",
+      ]
+    }
+
+    if (!is_nacl) {
+      if (exclude_unwind_tables) {
+        cflags += [
+          "-fno-unwind-tables",
+          "-fno-asynchronous-unwind-tables",
+        ]
+        defines += [ "NO_UNWIND_TABLES" ]
+      } else {
+        cflags += [ "-funwind-tables" ]
+      }
+    }
+  }
+
+  # Linux/Android common flags setup.
+  # ---------------------------------
+  if (is_linux || is_android || is_fuchsia) {
+    if (use_pic) {
+      cflags += [ "-fPIC" ]
+      ldflags += [ "-fPIC" ]
+    }
+
+    # Use pipes for communicating between sub-processes. Faster.
+    cflags += [ "-pipe" ]
+
+    ldflags += [
+      "-Wl,-z,noexecstack",
+      "-Wl,-z,now",
+      "-Wl,-z,relro",
+    ]
+    if (!using_sanitizer) {
+      if (!use_cfi_diag) {
+        ldflags += [ "-Wl,-z,defs" ]
+      }
+
+      # Functions interposed by the sanitizers can make ld think
+      # that some libraries aren't needed when they actually are,
+      # http://crbug.com/234010. As workaround, disable --as-needed.
+      if (!is_nacl && !is_android) {
+        # TODO(pcc): Fix linker bug which requires us to link pthread
+        # unconditionally here (crbug.com/623236).
+        ldflags += [
+          "-Wl,--no-as-needed",
+          "-lpthread",
+        ]
+      }
+      ldflags += [ "-Wl,--as-needed" ]
+    }
+  }
+
+  # Linux-specific compiler flags setup.
+  # ------------------------------------
+  if (is_android && is_clang) {
+    _rebased_android_toolchain_root =
+        rebase_path(android_toolchain_root, root_build_dir)
+
+    # Let clang find the linker in the NDK.
+    ldflags += [ "--gcc-toolchain=$_rebased_android_toolchain_root" ]
+  }
+
+  if (is_posix && use_lld && !is_nacl) {
+    ldflags += [ "-fuse-ld=lld" ]
+  } else if (use_gold) {
+    ldflags += [ "-fuse-ld=gold" ]
+    if (is_android) {
+      # Use -mstackrealign due to a bug on ia32 Jelly Bean.
+      # See crbug.com/521527
+      if (current_cpu == "x86") {
+        cflags += [ "-mstackrealign" ]
+      }
+    } else {
+      # On Android, this isn't needed.  gcc in the NDK knows to look next to
+      # it with -fuse-ld=gold, and clang gets a --gcc-toolchain flag passed
+      # above.
+      ldflags += [ "-B$gold_path" ]
+
+      if (linux_use_bundled_binutils) {
+        ldflags += [
+          # Experimentation found that using four linking threads
+          # saved ~20% of link time.
+          # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
+          # Only apply this to the target linker, since the host
+          # linker might not be gold, but isn't used much anyway.
+          "-Wl,--threads",
+          "-Wl,--thread-count=4",
+        ]
+      }
+    }
+
+    # TODO(thestig): Make this flag work with GN.
+    #if (!is_official_build && !is_chromeos && !(is_asan || is_lsan || is_tsan || is_msan)) {
+    #  ldflags += [
+    #    "-Wl,--detect-odr-violations",
+    #  ]
+    #}
+  } else if (linux_use_bundled_binutils) {
+    # Gold is the default linker for the bundled binutils so we explicitly
+    # enable the bfd linker when use_gold is not set.
+    ldflags += [ "-fuse-ld=bfd" ]
+  }
+
+  if (is_posix && (use_gold || (use_lld && !is_nacl)) && !using_sanitizer &&
+      !(is_android && use_order_profiling)) {
+    # TODO(thakis): Remove `!is_android` below once NDK gold has been rolled
+    # with the fix for https://sourceware.org/bugzilla/show_bug.cgi?id=17704
+    # merged.  See also https://crbug.com/663886
+    # `linux_use_bundled_binutils` is to avoid breaking Linux distros which may
+    # still have a buggy gold.
+    # The bug only affects x86 and x64, so we can still use ICF when targeting
+    # other architectures.
+    if ((!is_android && linux_use_bundled_binutils) ||
+        !(current_cpu == "x86" || current_cpu == "x64")) {
+      ldflags += [ "-Wl,--icf=all" ]
+    }
+  }
+
+  if (linux_use_bundled_binutils) {
+    cflags += [ "-B$binutils_path" ]
+  }
+
+  # Clang-specific compiler flags setup.
+  # ------------------------------------
+  if (is_clang) {
+    cflags += [ "-fcolor-diagnostics" ]
+  }
+
+  # Print absolute paths in diagnostics. There is no precedent for doing this
+  # on Linux/Mac (GCC doesn't support it), but MSVC does this with /FC and
+  # Windows developers rely on it (crbug.com/636109) so only do this on Windows.
+  if (is_clang && is_win) {
+    cflags += [ "-fdiagnostics-absolute-paths" ]
+  }
+
+  # Makes builds independent of absolute file path.
+  # clang-cl (used if is_win) doesn't expose this flag.
+  # Currently disabled for nacl since its toolchain lacks this flag (too old).
+  # TODO(zforman): Once nacl's toolchain is updated, remove check.
+  if (is_clang && is_linux && strip_absolute_paths_from_debug_symbols) {
+    absolute_path = rebase_path("//.")
+    cflags += [ "-fdebug-prefix-map=$absolute_path=." ]
+  }
+
+  # C++11 compiler flags setup.
+  # ---------------------------
+  if (is_linux || is_android || (is_nacl && is_clang) || current_os == "aix") {
+    # gnu++11 instead of c++11 is needed because some code uses typeof() (a
+    # GNU extension).
+    # TODO(thakis): Eventually switch this to c++11 instead,
+    # http://crbug.com/427584
+    cflags_cc += [ "-std=gnu++11" ]
+  } else if (!is_win && !is_nacl) {
+    # TODO(mcgrathr) - the NaCl GCC toolchain doesn't support either gnu++11
+    # or c++11; we technically don't need this toolchain any more, but there
+    # are still a few buildbots using it, so until those are turned off
+    # we need the !is_nacl clause and the (is_nacl && is_clang) clause, above.
+    cflags_cc += [ "-std=c++11" ]
+  }
+
+  if (is_mac) {
+    cflags_cc += [ "-stdlib=libc++" ]
+    ldflags += [ "-stdlib=libc++" ]
+  }
+
+  # Add flags for link-time optimization. These flags enable
+  # optimizations/transformations that require whole-program visibility at link
+  # time, so they need to be applied to all translation units, and we may end up
+  # with miscompiles if only part of the program is compiled with LTO flags. For
+  # that reason, we cannot allow targets to enable or disable these flags, for
+  # example by disabling the optimize configuration.
+  # TODO(pcc): Make this conditional on is_official_build rather than on gn
+  # flags for specific features.
+  if (!is_debug && (allow_posix_link_time_opt || is_cfi) && !is_nacl) {
+    if (use_thin_lto) {
+      cflags += [ "-flto=thin" ]
+      ldflags += [ "-flto=thin" ]
+
+      # Limit the parallelism to avoid too aggressive competition between
+      # linker jobs. This is still suboptimal to a potential dynamic
+      # resource allocation scheme, but should be good enough.
+      if (use_lld) {
+        ldflags += [
+          "-Wl,--thinlto-jobs=8",
+          "-Wl,--thinlto-cache-dir=" +
+              rebase_path("$root_out_dir/thinlto-cache", root_build_dir),
+        ]
+      } else {
+        ldflags += [ "-Wl,-plugin-opt,jobs=8" ]
+      }
+    } else {
+      cflags += [ "-flto" ]
+      ldflags += [ "-flto" ]
+
+      # Apply a lower LTO optimization level as the default is too slow.
+      if (is_linux) {
+        if (use_lld) {
+          ldflags += [ "-Wl,--lto-O1" ]
+        } else {
+          ldflags += [ "-Wl,-plugin-opt,O1" ]
+        }
+      } else if (is_mac) {
+        ldflags += [ "-Wl,-mllvm,-O1" ]
+      }
+    }
+
+    cflags += [ "-fwhole-program-vtables" ]
+    ldflags += [ "-fwhole-program-vtables" ]
+
+    # Work-around for http://openradar.appspot.com/20356002
+    if (is_mac) {
+      ldflags += [ "-Wl,-all_load" ]
+    }
+
+    # Allows the linker to apply --gc-sections and ICF to the LTO object file.
+    # Also, when targeting ARM, without this flag, LTO produces a .text section
+    # that is larger than the maximum call displacement, preventing the linker
+    # from relocating calls (http://llvm.org/PR22999).
+    if (is_linux) {
+      if (use_lld) {
+        ldflags += [
+          "-Wl,-mllvm,-function-sections",
+          "-Wl,-mllvm,-data-sections",
+        ]
+      } else {
+        ldflags += [
+          "-Wl,-plugin-opt,-function-sections",
+          "-Wl,-plugin-opt,-data-sections",
+        ]
+      }
+    }
+  }
+
+  # Pass the same C/C++ flags to the objective C/C++ compiler.
+  cflags_objc += cflags_c
+  cflags_objcc += cflags_cc
+
+  # Assign any flags set for the C compiler to asmflags so that they are sent
+  # to the assembler. The Windows assembler takes different types of flags
+  # so only do so for posix platforms.
+  if (is_posix) {
+    asmflags += cflags
+    asmflags += cflags_c
+  }
+}
+
+# This is separate from :compiler (and not even a sub-config there)
+# so that some targets can remove it from the list with:
+#   configs -= [ "//build/config/compiler:pthread" ]
+config("pthread") {
+  if (is_linux) {
+    cflags = [ "-pthread" ]
+    ldflags = [ "-pthread" ]
+  }
+}
+
+# This provides the basic options to select the target CPU and ABI.
+# It is factored out of "compiler" so that special cases can use this
+# without using everything that "compiler" brings in.  Options that
+# tweak code generation for a particular CPU do not belong here!
+# See "compiler_codegen", below.
+config("compiler_cpu_abi") {
+  cflags = []
+  ldflags = []
+
+  if (is_posix && !(is_mac || is_ios)) {
+    # CPU architecture. We may or may not be doing a cross compile now, so for
+    # simplicity we always explicitly set the architecture.
+    if (current_cpu == "x64") {
+      cflags += [
+        "-m64",
+        "-march=x86-64",
+      ]
+      ldflags += [ "-m64" ]
+    } else if (current_cpu == "x86") {
+      cflags += [ "-m32" ]
+      ldflags += [ "-m32" ]
+      if (!is_nacl) {
+        cflags += [
+          "-msse2",
+          "-mfpmath=sse",
+          "-mmmx",
+        ]
+      }
+    } else if (current_cpu == "arm") {
+      if (is_clang && !is_android && !is_nacl) {
+        cflags += [ "--target=arm-linux-gnueabihf" ]
+        ldflags += [ "--target=arm-linux-gnueabihf" ]
+      }
+      if (!is_nacl) {
+        cflags += [
+          "-march=$arm_arch",
+          "-mfloat-abi=$arm_float_abi",
+        ]
+      }
+      if (arm_tune != "") {
+        cflags += [ "-mtune=$arm_tune" ]
+      }
+    } else if (current_cpu == "arm64") {
+      if (is_clang && !is_android && !is_nacl) {
+        cflags += [ "--target=aarch64-linux-gnu" ]
+        ldflags += [ "--target=aarch64-linux-gnu" ]
+      }
+    } else if (current_cpu == "mipsel" && !is_nacl) {
+      if (mips_arch_variant == "r6") {
+        if (is_clang) {
+          cflags += [
+            "--target=mipsel-linux-gnu",
+            "-march=mips32r6",
+          ]
+          ldflags += [ "--target=mipsel-linux-gnu" ]
+        } else {
+          cflags += [
+            "-mips32r6",
+            "-Wa,-mips32r6",
+          ]
+          if (is_android) {
+            ldflags += [
+              "-mips32r6",
+              "-Wl,-melf32ltsmip",
+            ]
+          }
+        }
+        if (mips_use_msa == true) {
+          cflags += [
+            "-mmsa",
+            "-mfp64",
+          ]
+        }
+      } else if (mips_arch_variant == "r2") {
+        if (is_clang) {
+          if (is_android) {
+            cflags += [
+              "--target=mipsel-linux-android",
+              "-march=mipsel",
+              "-mcpu=mips32r2",
+            ]
+            ldflags += [ "--target=mipsel-linux-android" ]
+          } else {
+            cflags += [
+              "--target=mipsel-linux-gnu",
+              "-march=mipsel",
+              "-mcpu=mips32r2",
+            ]
+            ldflags += [ "--target=mipsel-linux-gnu" ]
+          }
+        } else {
+          cflags += [
+            "-mips32r2",
+            "-Wa,-mips32r2",
+          ]
+          if (mips_float_abi == "hard" && mips_fpu_mode != "") {
+            cflags += [ "-m$mips_fpu_mode" ]
+          }
+        }
+      } else if (mips_arch_variant == "r1") {
+        if (is_clang) {
+          if (is_android) {
+            cflags += [
+              "--target=mipsel-linux-android",
+              "-march=mipsel",
+              "-mcpu=mips32",
+            ]
+            ldflags += [ "--target=mipsel-linux-android" ]
+          } else {
+            cflags += [
+              "--target=mipsel-linux-gnu",
+              "-march=mipsel",
+              "-mcpu=mips32",
+            ]
+            ldflags += [ "--target=mipsel-linux-gnu" ]
+          }
+        } else {
+          cflags += [
+            "-mips32",
+            "-Wa,-mips32",
+          ]
+        }
+      }
+
+      if (mips_dsp_rev == 1) {
+        cflags += [ "-mdsp" ]
+      } else if (mips_dsp_rev == 2) {
+        cflags += [ "-mdspr2" ]
+      }
+
+      cflags += [ "-m${mips_float_abi}-float" ]
+    } else if (current_cpu == "mips64el") {
+      if (mips_arch_variant == "r6") {
+        if (is_clang) {
+          if (is_android) {
+            cflags += [
+              "--target=mips64el-linux-android",
+              "-march=mips64el",
+              "-mcpu=mips64r6",
+            ]
+            ldflags += [ "--target=mips64el-linux-android" ]
+          }
+        } else {
+          cflags += [
+            "-mips64r6",
+            "-Wa,-mips64r6",
+          ]
+          ldflags += [ "-mips64r6" ]
+        }
+        if (mips_use_msa == true) {
+          cflags += [
+            "-mmsa",
+            "-mfp64",
+          ]
+        }
+      } else if (mips_arch_variant == "r2") {
+        cflags += [
+          "-mips64r2",
+          "-Wa,-mips64r2",
+        ]
+        ldflags += [ "-mips64r2" ]
+      }
+    } else if (current_cpu == "pnacl" && is_nacl_nonsfi) {
+      if (target_cpu == "x86" || target_cpu == "x64") {
+        cflags += [
+          "-arch",
+          "x86-32-nonsfi",
+          "--pnacl-bias=x86-32-nonsfi",
+          "--target=i686-unknown-nacl",
+        ]
+        ldflags += [
+          "-arch",
+          "x86-32-nonsfi",
+          "--target=i686-unknown-nacl",
+        ]
+      } else if (target_cpu == "arm") {
+        cflags += [
+          "-arch",
+          "arm-nonsfi",
+          "-mfloat-abi=hard",
+          "--pnacl-bias=arm-nonsfi",
+          "--target=armv7-unknown-nacl-gnueabihf",
+        ]
+        ldflags += [
+          "-arch",
+          "arm-nonsfi",
+          "--target=armv7-unknown-nacl-gnueabihf",
+        ]
+      }
+    }
+  }
+
+  asmflags = cflags
+}
+
+# This provides options to tweak code generation that are necessary
+# for particular Chromium code or for working around particular
+# compiler bugs (or the combination of the two).
+config("compiler_codegen") {
+  configs = []
+  cflags = []
+
+  if (is_nacl) {
+    configs += [ "//build/config/nacl:compiler_codegen" ]
+  } else if (is_posix && !is_mac && !is_ios) {
+    if (current_cpu == "x86") {
+      if (is_clang) {
+        cflags += [
+          # Else building libyuv gives clang's register allocator issues,
+          # see llvm.org/PR15798 / crbug.com/233709
+          "-momit-leaf-frame-pointer",
+        ]
+      }
+    } else if (current_cpu == "arm") {
+      if (is_android && !is_clang) {
+        # Clang doesn't support these flags.
+        cflags += [
+          # The tree-sra optimization (scalar replacement for
+          # aggregates enabling subsequent optimizations) leads to
+          # invalid code generation when using the Android NDK's
+          # compiler (r5-r7). This can be verified using
+          # webkit_unit_tests' WTF.Checked_int8_t test.
+          "-fno-tree-sra",
+
+          # The following option is disabled to improve binary
+          # size and performance in gcc 4.9.
+          "-fno-caller-saves",
+        ]
+      }
+    }
+  }
+
+  asmflags = cflags
+}
+
+# This is separate from :compiler_codegen (and not even a sub-config there)
+# so that some targets can remove it from the list with:
+#   configs -= [ "//build/config/compiler:clang_stackrealign" ]
+# See https://crbug.com/556393 for details of where it must be avoided.
+config("clang_stackrealign") {
+  if (is_clang && current_cpu == "x86" && is_linux) {
+    cflags = [
+      # Align the stack on 16-byte boundaries, http://crbug.com/418554.
+      "-mstack-alignment=16",
+      "-mstackrealign",
+    ]
+  }
+}
+
+config("compiler_arm_fpu") {
+  if (current_cpu == "arm" && !is_ios && !is_nacl) {
+    cflags = [ "-mfpu=$arm_fpu" ]
+    asmflags = cflags
+  }
+}
+
+config("compiler_arm_thumb") {
+  if (current_cpu == "arm" && arm_use_thumb && is_posix &&
+      !(is_mac || is_ios || is_nacl)) {
+    cflags = [ "-mthumb" ]
+    if (is_android && !is_clang) {
+      # Clang doesn't support this option.
+      cflags += [ "-mthumb-interwork" ]
+    }
+  }
+}
+
+config("compiler_arm") {
+  if (current_cpu == "arm" && is_chromeos) {
+    # arm is normally the default mode for clang, but on chromeos a wrapper
+    # is used to pass -mthumb, and therefore changes the default.
+    cflags = [ "-marm" ]
+  }
+}
+
+# runtime_library -------------------------------------------------------------
+#
+# Sets the runtime library and associated options.
+#
+# How do you determine what should go in here vs. "compiler" above? Consider if
+# a target might choose to use a different runtime library (ignore for a moment
+# if this is possible or reasonable on your system). If such a target would want
+# to change or remove your option, put it in the runtime_library config. If a
+# target wants the option regardless, put it in the compiler config.
+
+config("runtime_library") {
+  defines = []
+  configs = []
+
+  # System-specific flags. If your compiler flags apply to one of the
+  # categories here, add it to the associated file to keep this shared config
+  # smaller.
+  if (is_win) {
+    configs += [ "//build/config/win:runtime_library" ]
+  } else if (is_linux) {
+    configs += [ "//build/config/linux:runtime_library" ]
+  } else if (is_ios) {
+    configs += [ "//build/config/ios:runtime_library" ]
+  } else if (is_mac) {
+    configs += [ "//build/config/mac:runtime_library" ]
+  } else if (is_android) {
+    configs += [ "//build/config/android:runtime_library" ]
+  }
+
+  if (is_posix) {
+    configs += [ "//build/config/posix:runtime_library" ]
+  }
+
+  if (is_component_build) {
+    defines += [ "COMPONENT_BUILD" ]
+  }
+}
+
+# default_warnings ------------------------------------------------------------
+#
+# Collects all warning flags that are used by default.  This is used as a
+# subconfig of both chromium_code and no_chromium_code.  This way these
+# flags are guaranteed to appear on the compile command line after -Wall.
+config("default_warnings") {
+  cflags = []
+  cflags_cc = []
+  ldflags = []
+
+  if (is_win) {
+    if (treat_warnings_as_errors) {
+      cflags += [ "/WX" ]
+    }
+    if (fatal_linker_warnings) {
+      ldflags += [ "/WX" ]
+    }
+
+    cflags += [
+      # Assume UTF-8 by default to avoid code page dependencies.
+      "/utf-8",
+    ]
+
+    cflags += [
+      # Warnings permanently disabled:
+
+      # C4091: 'typedef ': ignored on left of 'X' when no variable is
+      #                    declared.
+      # This happens in a number of Windows headers. Dumb.
+      "/wd4091",
+
+      # C4127: conditional expression is constant
+      # This warning can in theory catch dead code and other problems, but
+      # triggers in far too many desirable cases where the conditional
+      # expression is either set by macros or corresponds to some legitimate
+      # compile-time constant expression (due to constant template args,
+      # conditionals comparing the sizes of different types, etc.).  Some of
+      # these can be worked around, but it's not worth it.
+      "/wd4127",
+
+      # C4251: 'identifier' : class 'type' needs to have dll-interface to be
+      #        used by clients of class 'type2'
+      # This is necessary for the shared library build.
+      "/wd4251",
+
+      # C4312 is a VS 2015 64-bit warning for integer to larger pointer.
+      # TODO(brucedawson): fix warnings, crbug.com/554200
+      "/wd4312",
+
+      # C4351: new behavior: elements of array 'array' will be default
+      #        initialized
+      # This is a silly "warning" that basically just alerts you that the
+      # compiler is going to actually follow the language spec like it's
+      # supposed to, instead of not following it like old buggy versions did.
+      # There's absolutely no reason to turn this on.
+      "/wd4351",
+
+      # C4355: 'this': used in base member initializer list
+      # It's commonly useful to pass |this| to objects in a class' initializer
+      # list.  While this warning can catch real bugs, most of the time the
+      # constructors in question don't attempt to call methods on the passed-in
+      # pointer (until later), and annotating every legit usage of this is
+      # simply more hassle than the warning is worth.
+      "/wd4355",
+
+      # C4503: 'identifier': decorated name length exceeded, name was
+      #        truncated
+      # This only means that some long error messages might have truncated
+      # identifiers in the presence of lots of templates.  It has no effect on
+      # program correctness and there's no real reason to waste time trying to
+      # prevent it.
+      "/wd4503",
+
+      # Warning C4589 says: "Constructor of abstract class ignores
+      # initializer for virtual base class." Disable this warning because it
+      # is flaky in VS 2015 RTM. It triggers on compiler generated
+      # copy-constructors in some cases.
+      "/wd4589",
+
+      # C4611: interaction between 'function' and C++ object destruction is
+      #        non-portable
+      # This warning is unavoidable when using e.g. setjmp/longjmp.  MSDN
+      # suggests using exceptions instead of setjmp/longjmp for C++, but
+      # Chromium code compiles without exception support.  We therefore have to
+      # use setjmp/longjmp for e.g. JPEG decode error handling, which means we
+      # have to turn off this warning (and be careful about how object
+      # destruction happens in such cases).
+      "/wd4611",
+
+      # Warnings to evaluate and possibly fix/reenable later:
+
+      "/wd4100",  # Unreferenced formal function parameter.
+      "/wd4121",  # Alignment of a member was sensitive to packing.
+      "/wd4244",  # Conversion: possible loss of data.
+      "/wd4505",  # Unreferenced local function has been removed.
+      "/wd4510",  # Default constructor could not be generated.
+      "/wd4512",  # Assignment operator could not be generated.
+      "/wd4610",  # Class can never be instantiated, constructor required.
+      "/wd4838",  # Narrowing conversion. Doesn't seem to be very useful.
+      "/wd4995",  # 'X': name was marked as #pragma deprecated
+      "/wd4996",  # Deprecated function warning.
+
+      # These are variable shadowing warnings that are new in VS2015. We
+      # should work through these at some point -- they may be removed from
+      # the RTM release in the /W4 set.
+      "/wd4456",
+      "/wd4457",
+      "/wd4458",
+      "/wd4459",
+    ]
+
+    cflags_cc += [
+      # Allow "noexcept" annotations even though we compile with exceptions
+      # disabled.
+      "/wd4577",
+    ]
+
+    if (current_cpu == "x86") {
+      cflags += [
+        # VC++ 2015 changes 32-bit size_t truncation warnings from 4244 to
+        # 4267. Example: short TruncTest(size_t x) { return x; }
+        # Since we disable 4244 we need to disable 4267 during migration.
+        # TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
+        "/wd4267",
+      ]
+    }
+
+    # VS xtree header file needs to be patched or 4702 (unreachable code
+    # warning) is reported if _HAS_EXCEPTIONS=0. Disable the warning if xtree is
+    # not patched.
+    if (!msvs_xtree_patched &&
+        exec_script("../../win_is_xtree_patched.py", [], "value") == 0) {
+      cflags += [ "/wd4702" ]  # Unreachable code.
+    }
+
+    # Building with Clang on Windows is a work in progress and very
+    # experimental. See crbug.com/82385.
+    if (is_clang) {
+      cflags += [
+        # TODO(hans): Make this list shorter eventually, http://crbug.com/504657
+        "-Wno-microsoft-enum-value",  # http://crbug.com/505296
+        "-Wno-unknown-pragmas",  # http://crbug.com/505314
+        "-Wno-microsoft-cast",  # http://crbug.com/550065
+        "-Wno-microsoft-enum-forward-reference",  # http://crbug.com/718880
+      ]
+    }
+  } else {
+    if (is_mac && !is_nacl) {
+      # When compiling Objective-C, warns if a method is used whose
+      # availability is newer than the deployment target. This is not
+      # required when compiling Chrome for iOS.
+      cflags += [ "-Wpartial-availability" ]
+    }
+
+    if (is_ios) {
+      # When compiling Objective-C, warns if a selector named via @selector has
+      # not been defined in any visible interface.
+      cflags += [ "-Wundeclared-selector" ]
+    }
+
+    # Suppress warnings about ABI changes on ARM (Clang doesn't give this
+    # warning).
+    if (current_cpu == "arm" && !is_clang) {
+      cflags += [ "-Wno-psabi" ]
+    }
+
+    if (!is_clang) {
+      cflags_cc += [
+        # See comment for -Wno-c++11-narrowing.
+        "-Wno-narrowing",
+      ]
+
+      # Don't warn about the "typedef 'foo' locally defined but not used"
+      # for gcc 4.8.
+      # TODO: remove this flag once all builds work. See crbug.com/227506
+      cflags += [ "-Wno-unused-local-typedefs" ]
+
+      # Don't warn about "maybe" uninitialized. Clang doesn't include this
+      # in -Wall but gcc does, and it gives false positives.
+      cflags += [ "-Wno-maybe-uninitialized" ]
+    }
+  }
+
+  # Common Clang and GCC warning setup.
+  if (!is_win || is_clang) {
+    cflags += [
+      # Disables.
+      "-Wno-missing-field-initializers",  # "struct foo f = {0};"
+      "-Wno-unused-parameter",  # Unused function parameters.
+    ]
+  }
+
+  if (is_chromeos && is_clang && !is_nacl) {
+    # TODO(thakis): Enable this, crbug.com/507717
+    cflags += [ "-Wno-shift-negative-value" ]
+  }
+
+  if (is_clang) {
+    cflags += [
+      # TODO(thakis): Consider -Wloop-analysis (turns on
+      # -Wrange-loop-analysis too).
+
+      # This warns on using ints as initializers for floats in
+      # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
+      # which happens in several places in chrome code. Not sure if
+      # this is worth fixing.
+      "-Wno-c++11-narrowing",
+
+      # Warns on switches on enums that cover all enum values but
+      # also contain a default: branch. Chrome is full of that.
+      "-Wno-covered-switch-default",
+
+      # TODO(thakis): This used to be implied by -Wno-unused-function,
+      # which we no longer use. Check if it makes sense to remove
+      # this as well. http://crbug.com/316352
+      "-Wno-unneeded-internal-declaration",
+
+      # TODO(hans): Get this cleaned up, http://crbug.com/428099
+      "-Wno-inconsistent-missing-override",
+    ]
+
+    if (is_linux && target_cpu == "x86") {
+      cflags += [
+        # TODO(thakis): Remove from 32-bit Linux eventually, https://707084
+        "-Wno-deprecated-register",
+      ]
+    }
+
+    # use_xcode_clang only refers to the iOS toolchain, host binaries use
+    # chromium's clang always.
+    if (!is_nacl && (!use_xcode_clang || current_toolchain == host_toolchain)) {
+      # Flags NaCl (Clang 3.7) and Xcode 7.3 (Clang clang-703.0.31) do not
+      # recognize.
+      cflags += [
+        # TODO(thakis): https://crbug.com/604888
+        "-Wno-undefined-var-template",
+
+        # TODO(thakis): https://crbug.com/617318
+        "-Wno-nonportable-include-path",
+
+        # TODO(hans): https://crbug.com/637306
+        "-Wno-address-of-packed-member",
+
+        # TODO(hans): https://crbug.com/681136
+        "-Wno-unused-lambda-capture",
+
+        # TODO(thakis ): https://crbug.com/683349
+        "-Wno-user-defined-warnings",
+      ]
+    } else if (use_xcode_clang &&
+               (xcode_version == "0830" || xcode_version == "0831" ||
+                xcode_version == "0832")) {
+      # This is necessary to allow a progressive transition from using xcode 8 to 8.3. Remove when all bots are migrated to 8.3.
+      cflags += [
+        # TODO(thakis): https://crbug.com/604888
+        "-Wno-undefined-var-template",
+
+        # TODO(hans): https://crbug.com/637306
+        "-Wno-address-of-packed-member",
+      ]
+    }
+  }
+}
+
+# chromium_code ---------------------------------------------------------------
+#
+# Toggles between higher and lower warnings for code that is (or isn't)
+# part of Chromium.
+
+config("chromium_code") {
+  if (is_win) {
+    cflags = [ "/W4" ]  # Warning level 4.
+  } else {
+    cflags = [ "-Wall" ]
+    if (treat_warnings_as_errors) {
+      cflags += [ "-Werror" ]
+
+      # The compiler driver can sometimes (rarely) emit warnings before calling
+      # the actual linker.  Make sure these warnings are treated as errors as
+      # well.
+      ldflags = [ "-Werror" ]
+    }
+    if (is_clang) {
+      # Enable -Wextra for chromium_code when we control the compiler.
+      cflags += [ "-Wextra" ]
+    }
+
+    # In Chromium code, we define __STDC_foo_MACROS in order to get the
+    # C99 macros on Mac and Linux.
+    defines = [
+      "__STDC_CONSTANT_MACROS",
+      "__STDC_FORMAT_MACROS",
+    ]
+
+    if (!is_debug && !using_sanitizer &&
+        (!is_linux || !is_clang || is_official_build) &&
+        current_cpu != "s390x" && current_cpu != "s390" &&
+        current_cpu != "ppc64" && current_cpu != "ppc64") {
+      # _FORTIFY_SOURCE isn't really supported by Clang now, see
+      # http://llvm.org/bugs/show_bug.cgi?id=16821.
+      # It seems to work fine with Ubuntu 12 headers though, so use it in
+      # official builds.
+      #
+      # Non-chromium code is not guaranteed to compile cleanly with
+      # _FORTIFY_SOURCE. Also, fortified build may fail when optimizations are
+      # disabled, so only do that for Release build.
+      defines += [ "_FORTIFY_SOURCE=2" ]
+    }
+
+    if (is_mac || is_ios) {
+      cflags_objc = [ "-Wobjc-missing-property-synthesis" ]
+      cflags_objcc = [ "-Wobjc-missing-property-synthesis" ]
+    }
+  }
+
+  configs = [ ":default_warnings" ]
+}
+
+config("no_chromium_code") {
+  cflags = []
+  cflags_cc = []
+  defines = []
+
+  if (is_win) {
+    cflags += [
+      "/W3",  # Warning level 3.
+      "/wd4800",  # Disable warning when forcing value to bool.
+      "/wd4267",  # TODO(jschuh): size_t to int.
+      "/wd4996",  # Deprecated function warning.
+    ]
+    defines += [
+      "_CRT_NONSTDC_NO_WARNINGS",
+      "_CRT_NONSTDC_NO_DEPRECATE",
+    ]
+  } else {
+    # GCC may emit unsuppressible warnings so don't add -Werror for no chromium
+    # code. crbug.com/589724
+    if (treat_warnings_as_errors && is_clang) {
+      cflags += [ "-Werror" ]
+      ldflags = [ "-Werror" ]
+    }
+    if (is_clang && !is_nacl) {
+      # TODO(thakis): Remove !is_nacl once
+      # https://codereview.webrtc.org/1552863002/ made its way into chromium.
+      cflags += [ "-Wall" ]
+    }
+  }
+
+  if (is_clang) {
+    cflags += [
+      # Lots of third-party libraries have unused variables. Instead of
+      # suppressing them individually, we just blanket suppress them here.
+      "-Wno-unused-variable",
+    ]
+  }
+
+  configs = [ ":default_warnings" ]
+}
+
+# rtti ------------------------------------------------------------------------
+#
+# Allows turning Run-Time Type Identification on or off.
+
+config("rtti") {
+  if (is_win) {
+    cflags_cc = [ "/GR" ]
+  } else {
+    cflags_cc = [ "-frtti" ]
+  }
+}
+
+config("no_rtti") {
+  # Some sanitizer configs may require RTTI to be left enabled globally
+  if (!use_rtti) {
+    if (is_win) {
+      cflags_cc = [ "/GR-" ]
+    } else {
+      cflags_cc = [ "-fno-rtti" ]
+      cflags_objcc = cflags_cc
+    }
+  }
+}
+
+# Warnings ---------------------------------------------------------------------
+
+# This will generate warnings when using Clang if code generates exit-time
+# destructors, which will slow down closing the program.
+# TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+config("wexit_time_destructors") {
+  # TODO: Enable on Windows too, http://crbug.com/404525
+  if (is_clang && !is_win) {
+    cflags = [ "-Wexit-time-destructors" ]
+  }
+}
+
+# On Windows compiling on x64, VC will issue a warning when converting
+# size_t to int because it will truncate the value. Our code should not have
+# these warnings and one should use a static_cast or a checked_cast for the
+# conversion depending on the case. However, a lot of code still needs to be
+# fixed. Apply this config to such targets to disable the warning.
+#
+# Note that this can be applied regardless of platform and architecture to
+# clean up the call sites. This will only apply the flag when necessary.
+#
+# TODO(jschuh): crbug.com/167187 fix this and delete this config.
+config("no_size_t_to_int_warning") {
+  if (is_win && current_cpu == "x64") {
+    cflags = [ "/wd4267" ]
+  }
+}
+
+# Some code presumes that pointers to structures/objects are compatible
+# regardless of whether what they point to is already known to be valid.
+# gcc 4.9 and earlier had no way of suppressing this warning without
+# supressing the rest of them.  Here we centralize the identification of
+# the gcc 4.9 toolchains.
+config("no_incompatible_pointer_warnings") {
+  cflags = []
+  if (is_clang) {
+    cflags += [ "-Wno-incompatible-pointer-types" ]
+  } else if (current_cpu == "mipsel") {
+    cflags += [ "-w" ]
+  } else if (is_chromeos && current_cpu == "arm") {
+    cflags += [ "-w" ]
+  }
+}
+
+# Optimization -----------------------------------------------------------------
+#
+# The BUILDCONFIG file sets the "default_optimization" config on targets by
+# default. It will be equivalent to either "optimize" (release) or
+# "no_optimize" (debug) optimization configs.
+#
+# You can override the optimization level on a per-target basis by removing the
+# default config and then adding the named one you want:
+#
+#   configs -= [ "//build/config/compiler:default_optimization" ]
+#   configs += [ "//build/config/compiler:optimize_max" ]
+
+# Shared settings for both "optimize" and "optimize_max" configs.
+# IMPORTANT: On Windows "/O1" and "/O2" must go before the common flags.
+if (is_win) {
+  common_optimize_on_cflags = [
+    "/Ob2",  # Both explicit and auto inlining.
+    "/Oy-",  # Disable omitting frame pointers, must be after /O2.
+    "/d2Zi+",  # Improve debugging of optimized code.
+    "/Zc:inline",  # Remove unreferenced COMDAT (faster links).
+  ]
+  if (!is_asan) {
+    common_optimize_on_cflags += [
+      # Put data in separate COMDATs. This allows the linker
+      # to put bit-identical constants at the same address even if
+      # they're unrelated constants, which saves binary size.
+      # This optimization can't be used when ASan is enabled because
+      # it is not compatible with the ASan ODR checker.
+      "/Gw",
+    ]
+  }
+  common_optimize_on_ldflags = []
+
+  # /OPT:ICF is not desirable in Debug builds, since code-folding can result in
+  # misleading symbols in stack traces. It is also incompatible with
+  # incremental linking, which we enable for both Debug and component builds.
+  if (!is_debug && !is_component_build) {
+    common_optimize_on_ldflags += [ "/OPT:ICF" ]  # Redundant COMDAT folding.
+  }
+
+  if (is_official_build) {
+    common_optimize_on_ldflags += [ "/OPT:REF" ]  # Remove unreferenced data.
+    if (!use_lld) {
+      common_optimize_on_ldflags += [
+        # Set the number of LTCG code-gen threads to eight. The default is four.
+        # This gives a 5-10% link speedup.
+        "/cgthreads:8",
+      ]
+      if (use_incremental_wpo) {
+        # Incremental Link-time code generation.
+        common_optimize_on_ldflags += [ "/LTCG:INCREMENTAL" ]
+      } else {
+        common_optimize_on_ldflags += [ "/LTCG" ]  # Link-time code generation.
+      }
+      if (full_wpo_on_official) {
+        if (use_incremental_wpo) {
+          arflags = [ "/LTCG:INCREMENTAL" ]
+        } else {
+          arflags = [ "/LTCG" ]
+        }
+      }
+    }
+  }
+} else {
+  common_optimize_on_cflags = []
+  common_optimize_on_ldflags = []
+
+  if (is_android) {
+    # TODO(jdduke) Re-enable on mips after resolving linking
+    # issues with libc++ (crbug.com/456380).
+    if (current_cpu != "mipsel" && current_cpu != "mips64el") {
+      common_optimize_on_ldflags += [
+        # Warn in case of text relocations.
+        "-Wl,--warn-shared-textrel",
+      ]
+    }
+  }
+
+  if (is_mac || is_ios) {
+    if (symbol_level == 2) {
+      # Mac dead code stripping requires symbols.
+      common_optimize_on_ldflags += [ "-Wl,-dead_strip" ]
+    }
+  } else if (current_os != "aix") {
+    # Non-Mac Posix flags.
+    # Aix does not support these.
+
+    common_optimize_on_cflags += [
+      # Don't emit the GCC version ident directives, they just end up in the
+      # .comment section taking up binary size.
+      "-fno-ident",
+
+      # Put data and code in their own sections, so that unused symbols
+      # can be removed at link time with --gc-sections.
+      "-fdata-sections",
+      "-ffunction-sections",
+    ]
+
+    common_optimize_on_ldflags += [
+      # Specifically tell the linker to perform optimizations.
+      # See http://lwn.net/Articles/192624/ .
+      "-Wl,-O1",
+      "-Wl,--gc-sections",
+    ]
+  }
+}
+
+config("default_stack_frames") {
+  if (is_posix) {
+    if (enable_frame_pointers) {
+      cflags = [ "-fno-omit-frame-pointer" ]
+    } else {
+      cflags = [ "-fomit-frame-pointer" ]
+    }
+  }
+  # On Windows, the flag to enable framepointers "/Oy-" must always come after
+  # the optimization flag [e.g. "/O2"]. The optimization flag is set by one of
+  # the "optimize" configs, see rest of this file. The ordering that cflags are
+  # applied is well-defined by the GN spec, and there is no way to ensure that
+  # cflags set by "default_stack_frames" is applied after those set by an
+  # "optimize" config. Similarly, there is no way to propagate state from this
+  # config into the "optimize" config. We always apply the "/Oy-" config in the
+  # definition for common_optimize_on_cflags definition, even though this may
+  # not be correct.
+}
+
+# Default "optimization on" config.
+config("optimize") {
+  if (is_win) {
+    # TODO(thakis): Remove is_clang here, https://crbug.com/598772
+    if (is_official_build && full_wpo_on_official && !is_clang) {
+      common_optimize_on_cflags += [
+        "/GL",  # Whole program optimization.
+
+        # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+        # Probably anything that this would catch that wouldn't be caught in a
+        # normal build isn't going to actually be a bug, so the incremental
+        # value of C4702 for PGO builds is likely very small.
+        "/wd4702",
+      ]
+    }
+
+    # Favor size over speed, /O1 must be before the common flags. The GYP
+    # build also specifies /Os and /GF but these are implied by /O1.
+    cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
+  } else if (optimize_for_size && !is_nacl) {
+    # Favor size over speed.
+    # TODO(crbug.com/718650): Fix -Os in PNaCl compiler and remove the is_nacl
+    # guard above.
+    if (is_clang) {
+      cflags = [ "-Oz" ] + common_optimize_on_cflags
+    } else {
+      cflags = [ "-Os" ] + common_optimize_on_cflags
+    }
+  } else {
+    cflags = [ "-O2" ] + common_optimize_on_cflags
+  }
+  ldflags = common_optimize_on_ldflags
+}
+
+# Same config as 'optimize' but without the WPO flag.
+config("optimize_no_wpo") {
+  if (is_win) {
+    # Favor size over speed, /O1 must be before the common flags. The GYP
+    # build also specifies /Os and /GF but these are implied by /O1.
+    cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
+  } else if (optimize_for_size && !is_nacl) {
+    # Favor size over speed.
+    # TODO(crbug.com/718650): Fix -Os in PNaCl compiler and remove the is_nacl
+    # guard above.
+    if (is_clang) {
+      cflags = [ "-Oz" ] + common_optimize_on_cflags
+    } else {
+      cflags = [ "-Os" ] + common_optimize_on_cflags
+    }
+  } else if (optimize_for_fuzzing) {
+    cflags = [ "-O1" ] + common_optimize_on_cflags
+  } else {
+    cflags = [ "-O2" ] + common_optimize_on_cflags
+  }
+  ldflags = common_optimize_on_ldflags
+}
+
+# Turn off optimizations.
+config("no_optimize") {
+  if (is_win) {
+    cflags = [
+      "/Od",  # Disable optimization.
+      "/Ob0",  # Disable all inlining (on by default).
+      "/GF",  # Enable string pooling (off by default).
+    ]
+  } else if (is_android && !android_full_debug) {
+    # On Android we kind of optimize some things that don't affect debugging
+    # much even when optimization is disabled to get the binary size down.
+    if (is_clang) {
+      cflags = [ "-Oz" ] + common_optimize_on_cflags
+    } else {
+      cflags = [ "-Os" ] + common_optimize_on_cflags
+    }
+  } else {
+    cflags = [ "-O0" ]
+    ldflags = []
+  }
+}
+
+# Turns up the optimization level. On Windows, this implies whole program
+# optimization and link-time code generation which is very expensive and should
+# be used sparingly.
+config("optimize_max") {
+  if (is_nacl && is_nacl_irt) {
+    # The NaCl IRT is a special case and always wants its own config.
+    # Various components do:
+    #   if (!is_debug) {
+    #     configs -= [ "//build/config/compiler:default_optimization" ]
+    #     configs += [ "//build/config/compiler:optimize_max" ]
+    #   }
+    # So this config has to have the selection logic just like
+    # "default_optimization", below.
+    configs = [ "//build/config/nacl:irt_optimize" ]
+  } else {
+    ldflags = common_optimize_on_ldflags
+    if (is_win) {
+      # Favor speed over size, /O2 must be before the common flags. The GYP
+      # build also specifies /Ot, /Oi, and /GF, but these are implied by /O2.
+      cflags = [ "/O2" ] + common_optimize_on_cflags
+
+      if (is_official_build) {
+        if (!is_clang) {
+          cflags += [
+            "/GL",  # Whole program optimization.
+
+            # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+            # Probably anything that this would catch that wouldn't be caught
+            # in a normal build isn't going to actually be a bug, so the
+            # incremental value of C4702 for PGO builds is likely very small.
+            "/wd4702",
+          ]
+        }
+        # TODO(crbug.com/598772): Enable -flto for Clang.
+      }
+    } else if (optimize_for_fuzzing) {
+      cflags = [ "-O1" ] + common_optimize_on_cflags
+    } else {
+      cflags = [ "-O2" ] + common_optimize_on_cflags
+    }
+  }
+}
+
+# This config can be used to override the default settings for per-component
+# and whole-program optimization, optimizing the particular target for speed
+# instead of code size. This config is exactly the same as "optimize_max"
+# except that we use -O3 instead of -O2 on non-win, non-IRT platforms.
+#
+# TODO(crbug.com/621335) - rework how all of these configs are related
+# so that we don't need this disclaimer.
+config("optimize_speed") {
+  if (is_nacl && is_nacl_irt) {
+    # The NaCl IRT is a special case and always wants its own config.
+    # Various components do:
+    #   if (!is_debug) {
+    #     configs -= [ "//build/config/compiler:default_optimization" ]
+    #     configs += [ "//build/config/compiler:optimize_max" ]
+    #   }
+    # So this config has to have the selection logic just like
+    # "default_optimization", below.
+    configs = [ "//build/config/nacl:irt_optimize" ]
+  } else {
+    ldflags = common_optimize_on_ldflags
+    if (is_win) {
+      # Favor speed over size, /O2 must be before the common flags. The GYP
+      # build also specifies /Ot, /Oi, and /GF, but these are implied by /O2.
+      cflags = [ "/O2" ] + common_optimize_on_cflags
+
+      # TODO(thakis): Remove is_clang here, https://crbug.com/598772
+      if (is_official_build && !is_clang) {
+        cflags += [
+          "/GL",  # Whole program optimization.
+
+          # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+          # Probably anything that this would catch that wouldn't be caught in a
+          # normal build isn't going to actually be a bug, so the incremental
+          # value of C4702 for PGO builds is likely very small.
+          "/wd4702",
+        ]
+      }
+    } else if (optimize_for_fuzzing) {
+      cflags = [ "-O1" ] + common_optimize_on_cflags
+    } else {
+      cflags = [ "-O3" ] + common_optimize_on_cflags
+    }
+  }
+}
+
+config("optimize_fuzzing") {
+  cflags = [ "-O1" ] + common_optimize_on_cflags
+  ldflags = common_optimize_on_ldflags
+  visibility = [ ":default_optimization" ]
+}
+
+# The default optimization applied to all targets. This will be equivalent to
+# either "optimize" or "no_optimize", depending on the build flags.
+config("default_optimization") {
+  if (is_nacl && is_nacl_irt) {
+    # The NaCl IRT is a special case and always wants its own config.
+    # It gets optimized the same way regardless of the type of build.
+    configs = [ "//build/config/nacl:irt_optimize" ]
+  } else if (is_debug) {
+    configs = [ ":no_optimize" ]
+  } else if (optimize_for_fuzzing) {
+    assert(!is_win, "Fuzzing optimize level not supported on Windows")
+    configs = [ ":optimize_fuzzing" ]
+  } else {
+    configs = [ ":optimize" ]
+  }
+}
+
+# GCC supports a form of profile-guided optimization called AFDO, which
+# is used by ChromeOS in their official builds. However,
+# //base/allocator:tcmalloc currently doesn't work correctly with AFDO
+# so we provide separate config so that the flag can be disabled per-target.
+# TODO(crbug.com/633719): Remove this config once tcmalloc works with AFDO
+# or we remove tcmalloc or we stop using AFDO.
+config("afdo") {
+  if (auto_profile_path != "" && current_toolchain == default_toolchain) {
+    cflags = [ "-fauto-profile=${auto_profile_path}" ]
+  }
+}
+
+# Symbols ----------------------------------------------------------------------
+
+# The BUILDCONFIG file sets the "default_symbols" config on targets by
+# default. It will be equivalent to one the three specific symbol levels.
+#
+# You can override the symbol level on a per-target basis by removing the
+# default config and then adding the named one you want:
+#
+#   configs -= [ "//build/config/compiler:default_symbols" ]
+#   configs += [ "//build/config/compiler:symbols" ]
+
+# Full symbols.
+config("symbols") {
+  if (is_win) {
+    if (use_goma) {
+      # Note that this requires is_win_fastlink, enforced elsewhere.
+      cflags = [ "/Z7" ]  # Debug information in the .obj files.
+    } else {
+      cflags = [ "/Zi" ]  # Produce PDB file, no edit and continue.
+    }
+
+    if (is_win_fastlink) {
+      # Tell VS 2015+ to create a PDB that references debug
+      # information in .obj and .lib files instead of copying
+      # it all. This flag is incompatible with /PROFILE
+      ldflags = [ "/DEBUG:FASTLINK" ]
+    } else {
+      ldflags = [ "/DEBUG" ]
+    }
+
+    if (is_clang) {
+      # /DEBUG:FASTLINK requires every object file to have standalone debug
+      # information.
+      if (is_win_fastlink) {
+        cflags += [ "-fstandalone-debug" ]
+      } else {
+        cflags += [ "-fno-standalone-debug" ]
+      }
+    }
+  } else {
+    if (is_mac || is_ios) {
+      cflags = [ "-gdwarf-2" ]
+      if (is_mac && enable_dsyms) {
+        # If generating dSYMs, specify -fno-standalone-debug. This was
+        # originally specified for https://crbug.com/479841 because dsymutil
+        # could not handle a 4GB dSYM file. But dsymutil from Xcodes prior to
+        # version 7 also produces debug data that is incompatible with Breakpad
+        # dump_syms, so this is still required (https://crbug.com/622406).
+        cflags += [ "-fno-standalone-debug" ]
+      }
+    } else if (is_android) {
+      # Breakpad can't handle DWARF 4 symbols properly yet, so use DWARF 3
+      # explicitly on android where we are hitting https://crbug.com/638485.
+      # The arguments MUST be in this order because of a gcc arg parsing bug.
+      cflags = [
+        "-gdwarf-3",
+        "-g2",
+      ]
+    } else {
+      cflags = [ "-g2" ]
+    }
+    if (use_debug_fission) {
+      cflags += [ "-gsplit-dwarf" ]
+    }
+    asmflags = cflags
+    ldflags = []
+
+    # TODO(thakis): Figure out if there's a way to make this go for 32-bit,
+    # currently we get "warning:
+    # obj/native_client/src/trusted/service_runtime/sel_asm/nacl_switch_32.o:
+    # DWARF info may be corrupt; offsets in a range list entry are in different
+    # sections" there.  Maybe just a bug in nacl_switch_32.S.
+    # TODO(thakis): Figure out if there's a way to make this go for official
+    # builds, currently get
+    # "third_party/binutils/Linux_x64/Release/bin/ld.gold: warning:
+    # /tmp/lto-llvm-0b5201.o: corrupt debug info in .debug_info"
+    if (!is_mac && !is_ios && !is_nacl && target_cpu != "x86" &&
+        (use_gold || use_lld) && !allow_posix_link_time_opt &&
+        !is_official_build) {
+      ldflags += [ "-Wl,--gdb-index" ]
+    }
+  }
+}
+
+# Minimal symbols.
+config("minimal_symbols") {
+  if (is_win) {
+    # Linker symbols for backtraces only.
+    cflags = []
+    ldflags = [ "/DEBUG" ]
+  } else {
+    if (is_android) {
+      # Breakpad can't handle DWARF 4 symbols properly yet, so use DWARF 3
+      # explicitly on android where we are hitting https://crbug.com/638485.
+      # The arguments MUST be in this order because of a gcc arg parsing bug.
+      cflags = [
+        "-gdwarf-3",
+        "-g1",
+      ]
+    } else {
+      cflags = [ "-g1" ]
+    }
+
+    # Note: -gsplit-dwarf implicitly turns on -g2 with clang, so don't pass it.
+    asmflags = cflags
+    ldflags = []
+  }
+}
+
+# No symbols.
+config("no_symbols") {
+  if (!is_win) {
+    cflags = [ "-g0" ]
+    asmflags = cflags
+  }
+}
+
+# Default symbols.
+config("default_symbols") {
+  if (symbol_level == 0) {
+    configs = [ ":no_symbols" ]
+  } else if (symbol_level == 1) {
+    configs = [ ":minimal_symbols" ]
+  } else if (symbol_level == 2) {
+    configs = [ ":symbols" ]
+  } else {
+    assert(false)
+  }
+}
+
+if (is_ios || is_mac) {
+  # On Mac and iOS, this enables support for ARC (automatic ref-counting).
+  # See http://clang.llvm.org/docs/AutomaticReferenceCounting.html.
+  config("enable_arc") {
+    common_flags = [ "-fobjc-arc" ]
+    cflags_objc = common_flags
+    cflags_objcc = common_flags
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/compiler/compiler.gni
@@ -0,0 +1,182 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/arm.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/compiler/pgo/pgo.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build_overrides/build.gni")
+
+declare_args() {
+  # How many symbols to include in the build. This affects the performance of
+  # the build since the symbols are large and dealing with them is slow.
+  #   2 means regular build with symbols.
+  #   1 means minimal symbols, usually enough for backtraces only. Symbols with
+  # internal linkage (static functions or those in anonymous namespaces) may not
+  # appear when using this level.
+  #   0 means no symbols.
+  #   -1 means auto-set according to debug/release and platform.
+  symbol_level = -1
+
+  # Compile in such a way as to enable profiling of the generated code. For
+  # example, don't omit the frame pointer and leave in symbols.
+  enable_profiling = false
+
+  # use_debug_fission: whether to use split DWARF debug info
+  # files. This can reduce link time significantly, but is incompatible
+  # with some utilities such as icecc and ccache. Requires gold and
+  # gcc >= 4.8 or clang.
+  # http://gcc.gnu.org/wiki/DebugFission
+  #
+  # This is a placeholder value indicating that the code below should set
+  # the default.  This is necessary to delay the evaluation of the default
+  # value expression until after its input values such as use_gold have
+  # been set, e.g. by a toolchain_args() block.
+  use_debug_fission = "default"
+
+  # Tell VS to create a PDB that references information in .obj files rather
+  # than copying it all. This should improve linker performance. mspdbcmf.exe
+  # can be used to convert a fastlink pdb to a normal one.
+  is_win_fastlink = false
+
+  # Whether or not we should turn on incremental WPO. Only affects the VS
+  # Windows build.
+  use_incremental_wpo = false
+
+  # Root directory that will store the MSVC link repro. This should only be
+  # used for debugging purposes on the builders where an MSVC linker flakiness
+  # has been observed. The targets for which a link repro should be generated
+  # should add something like this to their configuration:
+  #   if (linkrepro_root_dir != "") {
+  #     ldflags = ["/LINKREPRO:" + linkrepro_root_dir + "/" + target_name]
+  #   }
+  #
+  # Note that doing a link repro uses a lot of disk space and slows down the
+  # build, so this shouldn't be enabled on too many targets.
+  #
+  # See crbug.com/669854.
+  linkrepro_root_dir = ""
+
+  # Whether or not we should use position independent code.
+  use_pic = true
+}
+
+# Determine whether to enable or disable frame pointers, based on the platform
+# and build arguments.
+if (is_mac || is_ios) {
+  enable_frame_pointers = true
+} else if (is_win) {
+  # 64-bit Windows ABI doesn't support frame pointers.
+  if (target_cpu == "x64") {
+    enable_frame_pointers = false
+  } else {
+    enable_frame_pointers = true
+  }
+} else if (is_chromeos) {
+  # ChromeOS requires frame pointers in x64 builds, to support CWP.
+  # TODO(711784): Building ARM Thumb without frame pointers can lead to code
+  # in ChromeOS which triggers some ARM A12/A17 errata. They can be disabled
+  # on non-x64 once that is resolved.
+  enable_frame_pointers = true
+} else if (current_cpu == "arm64") {
+  # Ensure that stacks from arm64 crash dumps are usable (crbug.com/391706).
+  enable_frame_pointers = true
+} else {
+  # Explicitly ask for frame pointers, otherwise:
+  # * Stacks may be missing for sanitizer and profiling builds.
+  # * Debug tcmalloc can crash (crbug.com/636489).
+  enable_frame_pointers = using_sanitizer || enable_profiling || is_debug
+}
+
+# In general assume that if we have frame pointers then we can use them to
+# unwind the stack. However, this requires that they are enabled by default for
+# most translation units, that they are emitted correctly, and that the
+# compiler or platform provides a way to access them.
+can_unwind_with_frame_pointers = enable_frame_pointers
+if (current_cpu == "arm" && arm_use_thumb) {
+  # We cannot currently unwind ARM Thumb frame pointers correctly.
+  can_unwind_with_frame_pointers = false
+} else if (is_win) {
+  # Windows 32-bit does provide frame pointers, but the compiler does not
+  # provide intrinsics to access them, so we don't use them.
+  can_unwind_with_frame_pointers = false
+}
+
+assert(!can_unwind_with_frame_pointers || enable_frame_pointers)
+
+declare_args() {
+  # Whether or not the official builds should be built with full WPO. Enabled by
+  # default for the PGO and the x64 builds.
+  if (chrome_pgo_phase > 0) {
+    full_wpo_on_official = true
+  } else {
+    full_wpo_on_official = false
+  }
+}
+
+declare_args() {
+  # Whether to use the gold linker from binutils instead of lld or bfd.
+  use_gold =
+      !use_lld && !(is_chromecast && is_linux &&
+                    (current_cpu == "arm" || current_cpu == "mipsel")) &&
+      ((is_linux && (current_cpu == "x64" || current_cpu == "x86" ||
+                     current_cpu == "arm" || current_cpu == "mipsel")) ||
+       (is_android && (current_cpu == "x86" || current_cpu == "x64" ||
+                       current_cpu == "arm" || current_cpu == "arm64")) ||
+       is_fuchsia)
+}
+
+# If it wasn't manually set, set to an appropriate default.
+assert(symbol_level >= -1 && symbol_level <= 2, "Invalid symbol_level")
+if (symbol_level == -1) {
+  if (is_android && use_order_profiling) {
+    # With instrumentation enabled, debug info puts libchrome.so over 4gb, which
+    # causes the linker to produce an invalid ELF. http://crbug.com/574476
+    symbol_level = 0
+  } else if (is_android && !is_component_build &&
+             !(android_64bit_target_cpu && !build_apk_secondary_abi)) {
+    # Reduce symbol level when it will cause invalid elf files to be created
+    # (due to file size). https://crbug.com/648948.
+    symbol_level = 1
+  } else if (is_win && use_goma && !is_clang) {
+    # goma doesn't support PDB files, so we disable symbols during goma
+    # compilation because otherwise the redundant debug information generated
+    # by visual studio (repeated in every .obj file) makes linker
+    # memory consumption and link times unsustainable (crbug.com/630074).
+    # Clang on windows does not have this issue.
+    # If you use is_win_fastlink = true then you can set symbol_level = 2 when
+    # using goma.
+    symbol_level = 1
+  } else if ((!is_nacl && !is_linux) || is_debug || is_official_build ||
+             is_chromecast) {
+    # Linux builds slower by having symbols as part of the target binary,
+    # whereas Mac and Windows have them separate, so in Release Linux, default
+    # them off, but keep them on for Official builds and Chromecast builds.
+    symbol_level = 2
+  } else if (using_sanitizer) {
+    # Sanitizers require symbols for filename suppressions to work.
+    symbol_level = 1
+  } else {
+    symbol_level = 0
+  }
+} else if (symbol_level == 2) {
+  if (is_win) {
+    # See crbug.com/630074
+    assert(is_win_fastlink || !use_goma,
+           "Goma builds that use symbol_level 2 must use is_win_fastlink.")
+  }
+}
+
+# Assert that the configuration isn't going to hit https://crbug.com/648948.
+assert(ignore_elf32_limitations || !is_android ||
+           (android_64bit_target_cpu && !build_apk_secondary_abi) ||
+           is_component_build || symbol_level < 2,
+       "Android 32-bit non-component builds cannot have symbol_level=2 " +
+           "due to 4GiB file size limit, see https://crbug.com/648948. " +
+           "If you really want to try this out, " +
+           "set ignore_elf32_limitations=true.")
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/compiler/pgo/BUILD.gn
@@ -0,0 +1,101 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/compiler/pgo/pgo.gni")
+
+# Configuration that enables PGO instrumentation.
+config("pgo_instrumentation_flags") {
+  visibility = [ ":default_pgo_flags" ]
+  cflags = []
+  ldflags = []
+
+  # Only add flags when chrome_pgo_phase == 1, so that variables we would use
+  # are not required to be defined when we're not actually using PGO.
+  if (chrome_pgo_phase == 1) {
+    if (is_clang) {
+      cflags = [ "-fprofile-instr-generate" ]
+      if (is_win) {
+        # Normally, we pass -fprofile-instr-generate to the compiler and it
+        # automatically passes the right flags to the linker.
+        # However, on Windows, we call the linker directly, without going
+        # through the compiler driver. This means we need to pass the right
+        # flags ourselves.
+        _clang_rt_base_path =
+            "$clang_base_path/lib/clang/$clang_version/lib/windows"
+        if (target_cpu == "x86") {
+          _clang_rt_suffix = "-i386.lib"
+        } else if (target_cpu == "x64") {
+          _clang_rt_suffix = "-x86_64.lib"
+        }
+        assert(_clang_rt_suffix != "", "target CPU $target_cpu not supported")
+        ldflags += [ "$_clang_rt_base_path/clang_rt.profile$_clang_rt_suffix" ]
+      } else {
+        ldflags += [ "-fprofile-instr-generate" ]
+      }
+    } else if (is_win) {
+      ldflags = [
+        # In MSVC, we must use /LTCG when using PGO.
+        "/LTCG",
+
+        # Make sure that enough memory gets allocated for the PGO profiling
+        # buffers and also cap this memory. Usually a PGI instrumented build
+        # of chrome_child.dll requires ~55MB of memory for storing its counter
+        # etc, normally the linker should automatically choose an appropriate
+        # amount of memory but it doesn't always do a good estimate and
+        # sometime allocates too little or too much (and so the instrumented
+        # image fails to start). Making sure that the buffer has a size in the
+        # [128 MB, 512 MB] range should prevent this from happening.
+        "/GENPROFILE:MEMMIN=134217728",
+        "/GENPROFILE:MEMMAX=536870912",
+        "/PogoSafeMode",
+      ]
+    }
+  }
+}
+
+# Configuration that enables optimization using profile data.
+config("pgo_optimization_flags") {
+  visibility = [ ":default_pgo_flags" ]
+  cflags = []
+  ldflags = []
+
+  # Only add flags when chrome_pgo_phase == 2, so that variables we would use
+  # are not required to be defined when we're not actually using PGO.
+  if (chrome_pgo_phase == 2) {
+    if (is_clang) {
+      assert(pgo_data_path != "",
+             "Please set pgo_data_path to point at the profile data")
+      cflags += [
+        "-fprofile-instr-use=$pgo_data_path",
+
+        # It's possible to have some profile data legitimately missing,
+        # and at least some profile data always ends up being considered
+        # out of date, so make sure we don't error for those cases.
+        "-Wno-profile-instr-unprofiled",
+        "-Wno-error=profile-instr-out-of-date",
+      ]
+    } else if (is_win) {
+      ldflags += [
+        # In MSVC, we must use /LTCG when using PGO.
+        "/LTCG",
+        "/USEPROFILE",
+      ]
+    }
+  }
+}
+
+# Applies flags necessary when profile-guided optimization is used.
+# Flags are only added if PGO is enabled, so that this config is safe to
+# include by default.
+config("default_pgo_flags") {
+  if (chrome_pgo_phase == 0) {
+    # Nothing. This config should be a no-op when chrome_pgo_phase == 0.
+  } else if (chrome_pgo_phase == 1) {
+    configs = [ ":pgo_instrumentation_flags" ]
+  } else if (chrome_pgo_phase == 2) {
+    configs = [ ":pgo_optimization_flags" ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/compiler/pgo/pgo.gni
@@ -0,0 +1,17 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Specify the current PGO phase.
+  # Here's the different values that can be used:
+  #     0 : Means that PGO is turned off.
+  #     1 : Used during the PGI (instrumentation) phase.
+  #     2 : Used during the PGO (optimization) phase.
+  #
+  # TODO(sebmarchand): Add support for the PGU (update) phase.
+  chrome_pgo_phase = 0
+
+  # When using chrome_pgo_phase = 2, read profile data from this path.
+  pgo_data_path = ""
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/crypto.gni
@@ -0,0 +1,23 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file declares build flags for the SSL library configuration.
+#
+# TODO(brettw) this should probably be moved to src/crypto or somewhere, and
+# the global build dependency on it should be removed.
+#
+# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't
+# required. See the declare_args block of BUILDCONFIG.gn for advice on how
+# to set up feature flags.
+
+# True when we're using OpenSSL for representing certificates. When targeting
+# Android, the platform certificate library is used for certificate
+# verification. On NaCl, verification isn't supported. On other targets, this
+# flag also enables OpenSSL for certificate verification, but this configuration
+# is unsupported.
+use_openssl_certs = is_android || is_nacl
+
+# True if NSS is used for certificate handling. It is possible to use OpenSSL
+# for the crypto library, but NSS for the platform certificate library.
+use_nss_certs = is_linux
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/dcheck_always_on.gni
@@ -0,0 +1,8 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Set to true to enable dcheck in Release builds.
+  dcheck_always_on = false
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/features.gni
@@ -0,0 +1,73 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================
+#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# These flags are effectively global. Your feature flag should go near the
+# code it controls. Most of these items are here now because they control
+# legacy global #defines passed to the compiler (now replaced with generated
+# buildflag headers -- see //build/buildflag_header.gni).
+#
+# There is more advice on where to put build flags in the "Build flag" section
+# of //build/config/BUILDCONFIG.gn.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+if (is_android) {
+  import("//build/config/android/config.gni")
+}
+
+declare_args() {
+  # Enables Native Client support.
+  # Temporarily disable nacl on arm64 linux to get rid of compilation errors.
+  # TODO(mcgrathr): When mipsel-nacl-clang is available, drop the exclusion.
+  enable_nacl = !is_ios && !is_android && !is_chromecast &&
+                current_cpu != "mipsel" && !(is_linux && target_cpu == "arm64")
+
+  # Non-SFI is not yet supported on mipsel
+  enable_nacl_nonsfi = current_cpu != "mipsel"
+
+  # Enables proprietary codecs and demuxers; e.g. H264, AAC, MP3, and MP4.
+  # We always build Google Chrome and Chromecast with proprietary codecs.
+  #
+  # Note: this flag is used by WebRTC which is DEPSed into Chrome. Moving it
+  # out of //build will require using the build_overrides directory.
+  proprietary_codecs = is_chrome_branded || is_chromecast
+
+  # Variable safe_browsing is used to control the build time configuration for
+  # safe browsing feature. Safe browsing can be compiled in 3 different levels:
+  # 0 disables it, 1 enables it fully, and 2 enables mobile protection via an
+  # external API.
+  if (is_ios || is_chromecast) {
+    safe_browsing_mode = 0
+  } else if (is_android) {
+    safe_browsing_mode = 2
+  } else {
+    safe_browsing_mode = 1
+  }
+
+  # Set to true make a build that disables activation of field trial tests
+  # specified in testing/variations/fieldtrial_testing_config_*.json.
+  # Note: this setting is ignored if is_chrome_branded.
+  fieldtrial_testing_like_official_build = is_chrome_branded
+
+  # libudev usage. This currently only affects the content layer.
+  use_udev = is_linux && !is_chromecast
+
+  use_dbus = is_linux && !is_chromecast
+
+  # Option controlling the use of GConf (the classic GNOME configuration
+  # system).
+  use_gconf = is_linux && !is_chromeos && !is_chromecast
+
+  use_gio = is_linux && !is_chromeos && !is_chromecast
+}
+#
+# =============================================
+#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# See comment at the top.
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/freetype/BUILD.gn
@@ -0,0 +1,16 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/freetype/freetype.gni")
+
+group("freetype") {
+  if (use_system_freetype) {
+    public_configs = [ "//build/linux:freetype_from_pkgconfig" ]
+  } else {
+    public_deps = [
+      "//third_party/freetype",
+    ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/freetype/OWNERS
@@ -0,0 +1,2 @@
+bungeman@chromium.org
+drott@chromium.org
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/freetype/freetype.gni
@@ -0,0 +1,14 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Blink needs a recent and properly build-configured FreeType version to
+  # support OpenType variations, color emoji and avoid security bugs. By default
+  # we ship and link such a version as part of Chrome. For distributions that
+  # prefer to keep linking to the version the system provides, FreeType must be newer
+  # than version 2.7.1 and have color bitmap support compiled in. WARNING:
+  # System FreeType configurations other than as described WILL INTRODUCE TEXT
+  # RENDERING AND SECURITY REGRESSIONS.
+  use_system_freetype = false
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/fuchsia/BUILD.gn
@@ -0,0 +1,40 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/fuchsia/config.gni")
+import("//build/config/sysroot.gni")
+
+assert(is_fuchsia)
+assert(is_posix)
+
+config("compiler") {
+  defines = [ "SYSROOT_VERSION=$sysroot_version" ]
+  cflags = []
+  ldflags = []
+  if (current_cpu == "arm64") {
+    cflags += [ "--target=aarch64-fuchsia" ]
+    ldflags += [ "--target=aarch64-fuchsia" ]
+  } else if (current_cpu == "x64") {
+    cflags += [ "--target=x86_64-fuchsia" ]
+    ldflags += [ "--target=x86_64-fuchsia" ]
+  } else {
+    assert(false, "Unsupported architecture")
+  }
+  asmflags = cflags
+
+  # TODO(thakis): Once Fuchsia's libclang_rt.builtin no longer has upstream
+  # patches, we might want to make tools/clang/scripts/update.py build it
+  # and bundle it with the clang package instead of using the library from
+  # the SDK, https://crbug.com/724204
+  ldflags += [
+    "-resource-dir",
+    rebase_path(fuchsia_sdk, root_build_dir) + "/toolchain_libs/clang/5.0.0",
+  ]
+
+  libs = [
+    "mxio",
+    "magenta",
+    "unwind",
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/fuchsia/OWNERS
@@ -0,0 +1,1 @@
+scottmg@chromium.org
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/fuchsia/config.gni
@@ -0,0 +1,10 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(current_os == "fuchsia")
+
+declare_args() {
+  # Path to Fuchsia SDK.
+  fuchsia_sdk = "//third_party/fuchsia-sdk"
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/gcc/BUILD.gn
@@ -0,0 +1,151 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # When non empty, overrides the target rpath value. This allows a user to
+  # make a Chromium build where binaries and shared libraries are meant to be
+  # installed into separate directories, like /usr/bin/chromium and
+  # /usr/lib/chromium for instance. It is useful when a build system that
+  # generates a whole target root filesystem (like Yocto) is used on top of gn,
+  # especially when cross-compiling.
+  # Note: this gn arg is similar to gyp target_rpath generator flag.
+  gcc_target_rpath = ""
+}
+
+# This config causes functions not to be automatically exported from shared
+# libraries. By default, all symbols are exported but this means there are
+# lots of exports that slow everything down. In general we explicitly mark
+# which functions we want to export from components.
+#
+# Some third_party code assumes all functions are exported so this is separated
+# into its own config so such libraries can remove this config to make symbols
+# public again.
+#
+# See http://gcc.gnu.org/wiki/Visibility
+config("symbol_visibility_hidden") {
+  # Note that -fvisibility-inlines-hidden is set globally in the compiler
+  # config since that can almost always be applied.
+  cflags = [ "-fvisibility=hidden" ]
+
+  # Visibility attribute is not supported on AIX.
+  if (current_os != "aix") {
+    cflags_cc = [
+      # Not exporting C++ inline functions can generally be applied anywhere
+      # so we do so here. Normal function visibility is controlled by
+      # //build/config/gcc:symbol_visibility_hidden.
+      "-fvisibility-inlines-hidden",
+    ]
+  }
+}
+
+# This config is usually set when :symbol_visibility_hidden is removed.
+# It's often a good idea to set visibility explicitly, as there are flags
+# which would error out otherwise (e.g. -fsanitize=cfi-unrelated-cast)
+config("symbol_visibility_default") {
+  cflags = [ "-fvisibility=default" ]
+}
+
+# The rpath is the dynamic library search path. Setting this config on a link
+# step will put the directory where the build generates shared libraries into
+# the rpath.
+#
+# It's important that this *not* be used for release builds we push out.
+# Chrome uses some setuid binaries, and hard links preserve setuid bits. An
+# unprivileged user could gain root privileges by hardlinking a setuid
+# executable and then adding in whatever binaries they want to run into the lib
+# directory.
+#
+# Example bug: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=520126
+#
+# This is required for component builds since the build generates many shared
+# libraries in the build directory that we expect to be automatically loaded.
+# It will be automatically applied in this case by :executable_ldconfig.
+#
+# In non-component builds, certain test binaries may expect to load dynamic
+# libraries from the current directory. As long as these aren't distributed,
+# this is OK. For these cases use something like this:
+#
+#  if (is_linux && !is_component_build) {
+#    configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
+#  }
+config("rpath_for_built_shared_libraries") {
+  if (!is_android) {
+    # Note: Android doesn't support rpath.
+    if (shlib_subdir != ".") {
+      rpath_link = "${shlib_subdir}/"
+    } else {
+      rpath_link = "."
+    }
+    if (current_toolchain != default_toolchain || gcc_target_rpath == "") {
+      ldflags = [
+        # Want to pass "\$". GN will re-escape as required for ninja.
+        "-Wl,-rpath=\$ORIGIN/${rpath_link}",
+        "-Wl,-rpath-link=${rpath_link}",
+      ]
+    } else {
+      ldflags = [
+        "-Wl,-rpath=${gcc_target_rpath}",
+        "-Wl,-rpath-link=${rpath_link}",
+      ]
+    }
+  }
+}
+
+# Settings for executables.
+config("executable_ldconfig") {
+  # WARNING! //sandbox/linux:chrome_sandbox will not pick up this
+  # config, because it is a setuid binary that needs special flags.
+  # If you add things to this config, make sure you check to see
+  # if they should be added to that target as well.
+  ldflags = []
+  if (is_android) {
+    ldflags += [
+      "-Bdynamic",
+      "-Wl,-z,nocopyreloc",
+    ]
+  } else {
+    # See the rpath_for... config above for why this is necessary for component
+    # builds. Sanitizers use a custom libc++ where this is also necessary.
+    if (is_component_build || using_sanitizer) {
+      configs = [ ":rpath_for_built_shared_libraries" ]
+    }
+    if (current_cpu == "mipsel") {
+      ldflags += [ "-pie" ]
+    }
+  }
+
+  if ((!is_android || !use_gold) && current_os != "aix") {
+    # Find the path containing shared libraries for this toolchain
+    # relative to the build directory. ${root_out_dir} will be a
+    # subdirectory of ${root_build_dir} when cross compiling.
+    _rpath_link = rebase_path(root_out_dir, root_build_dir)
+    if (shlib_subdir != ".") {
+      _rpath_link += "/$shlib_subdir"
+    }
+    if (is_android) {
+      _rebased_sysroot = rebase_path(sysroot, root_build_dir)
+      _rpath_link += ":$_rebased_sysroot/usr/lib"
+    }
+
+    ldflags += [
+      "-Wl,-rpath-link=$_rpath_link",
+
+      # TODO(GYP): Do we need a check on the binutils version here?
+      #
+      # Newer binutils don't set DT_RPATH unless you disable "new" dtags
+      # and the new DT_RUNPATH doesn't work without --no-as-needed flag.
+      "-Wl,--disable-new-dtags",
+    ]
+  }
+}
+
+config("no_exceptions") {
+  cflags_cc = [ "-fno-exceptions" ]
+  cflags_objcc = cflags_cc
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/get_host_byteorder.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Get Byteorder of host architecture"""
+
+
+import sys
+
+print(sys.byteorder)
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/host_byteorder.gni
@@ -0,0 +1,27 @@
+# Copyright (c) 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This header file defines the "host_byteorder" variable.
+# Note that this is currently used only for building v8.
+# The chromium code generally assumes little-endianness.
+declare_args() {
+  host_byteorder = "undefined"
+}
+
+# Detect host byteorder
+# ppc64 can be either BE or LE
+if (host_cpu == "ppc64") {
+  if (current_os == "aix") {
+    host_byteorder = "big"
+  } else {
+    # Only use the script when absolutely necessary
+    host_byteorder =
+        exec_script("//build/config/get_host_byteorder.py", [], "trim string")
+  }
+} else if (host_cpu == "ppc" || host_cpu == "s390" || host_cpu == "s390x" ||
+           host_cpu == "mips" || host_cpu == "mips64") {
+  host_byteorder = "big"
+} else {
+  host_byteorder = "little"
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/BUILD.gn
@@ -0,0 +1,155 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/ios_sdk.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # Enabling this option makes clang compile to an intermediate
+  # representation ("bitcode"), and not to native code. This is preferred
+  # when including WebRTC in the apps that will be sent to Apple's App Store
+  # and mandatory for the apps that run on watchOS or tvOS.
+  # The option only works when building with Xcode (use_xcode_clang = true).
+  # Mimicking how Xcode handles it, the production builds (is_debug = false)
+  # get real bitcode sections added, while the debug builds (is_debug = true)
+  # only get bitcode-section "markers" added in them.
+  # NOTE: This option is ignored when building versions for the iOS simulator,
+  # where a part of libvpx is compiled from the assembly code written using
+  # Intel assembly syntax; Yasm / Nasm do not support emitting bitcode parts.
+  # That is not a limitation for now as Xcode mandates the presence of bitcode
+  # only when building bitcode-enabled projects for real devices (ARM CPUs).
+  enable_ios_bitcode = false
+}
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic.
+config("compiler") {
+  # These flags are shared between the C compiler and linker.
+  common_ios_flags = []
+
+  # CPU architecture.
+  if (current_cpu == "x64") {
+    common_ios_flags += [
+      "-arch",
+      "x86_64",
+    ]
+  } else if (current_cpu == "x86") {
+    common_ios_flags += [
+      "-arch",
+      "i386",
+    ]
+  } else if (current_cpu == "armv7" || current_cpu == "arm") {
+    common_ios_flags += [
+      "-arch",
+      "armv7",
+    ]
+  } else if (current_cpu == "arm64") {
+    common_ios_flags += [
+      "-arch",
+      "arm64",
+    ]
+  }
+
+  # This is here so that all files get recompiled after an Xcode update.
+  # (defines are passed via the command line, and build system rebuild things
+  # when their commandline changes). Nothing should ever read this define.
+  defines = [ "CR_XCODE_VERSION=$xcode_version" ]
+
+  asmflags = common_ios_flags
+  cflags = common_ios_flags
+
+  # Without this, the constructors and destructors of a C++ object inside
+  # an Objective C struct won't be called, which is very bad.
+  cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
+
+  cflags_c = [ "-std=c99" ]
+  cflags_objc = cflags_c
+
+  ldflags = common_ios_flags
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is iOS-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  common_flags = [
+    "-isysroot",
+    sysroot,
+
+    "-stdlib=libc++",
+  ]
+
+  if (use_ios_simulator) {
+    common_flags += [ "-mios-simulator-version-min=$ios_deployment_target" ]
+  } else {
+    common_flags += [ "-miphoneos-version-min=$ios_deployment_target" ]
+  }
+
+  if (use_xcode_clang && enable_ios_bitcode && !use_ios_simulator) {
+    if (is_debug) {
+      common_flags += [ "-fembed-bitcode-marker" ]
+    } else {
+      common_flags += [ "-fembed-bitcode" ]
+    }
+  }
+
+  asmflags = common_flags
+  cflags = common_flags
+  ldflags = common_flags
+
+  # TODO(crbug.com/634373): Remove once Xcode's libc++ has LLVM r256325. Most
+  # likely this means one Xcode 8 is released and required.
+  if (use_xcode_clang && get_path_info(ios_sdk_version, "name") != "10") {
+    common_cc_flags = [
+      "-isystem",
+      rebase_path("//third_party/llvm-build/Release+Asserts/include/c++/v1",
+                  root_build_dir),
+    ]
+
+    cflags_cc = common_cc_flags
+    cflags_objcc = common_cc_flags
+  }
+
+  if (ios_enable_coverage) {
+    configs = [ ":enable_coverage" ]
+  }
+}
+
+config("ios_executable_flags") {
+}
+
+config("ios_dynamic_flags") {
+  ldflags = [ "-Wl,-ObjC" ]  # Always load Objective-C categories and class.
+}
+
+config("xctest_config") {
+  common_flags = [
+    "-F",
+    "$ios_sdk_platform_path/Developer/Library/Frameworks",
+  ]
+
+  cflags = common_flags
+  ldflags = common_flags
+
+  libs = [
+    "Foundation.framework",
+    "XCTest.framework",
+  ]
+}
+
+# This enables support for LLVM code coverage. See
+# http://llvm.org/docs/CoverageMappingFormat.html.
+config("enable_coverage") {
+  cflags = [
+    "-fprofile-instr-generate",
+    "-fcoverage-mapping",
+  ]
+  ldflags = [ "-fprofile-instr-generate" ]
+}
+
+group("xctest") {
+  public_configs = [ ":xctest_config" ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/BuildInfo.plist
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>BuildMachineOSBuild</key>
+  <string>${BUILD_MACHINE_OS_BUILD}</string>
+  <key>CFBundleSupportedPlatforms</key>
+  <array>
+    <string>${IOS_SUPPORTED_PLATFORM}</string>
+  </array>
+  <key>DTCompiler</key>
+  <string>${GCC_VERSION}</string>
+  <key>DTPlatformName</key>
+  <string>${IOS_PLATFORM_NAME}</string>
+  <key>DTPlatformVersion</key>
+  <string>${IOS_PLATFORM_VERSION}</string>
+  <key>DTPlatformBuild</key>
+  <string>${IOS_PLATFORM_BUILD}</string>
+  <key>DTSDKBuild</key>
+  <string>${IOS_SDK_BUILD}</string>
+  <key>DTSDKName</key>
+  <string>${IOS_SDK_NAME}</string>
+  <key>MinimumOSVersion</key>
+  <string>${IOS_DEPLOYMENT_TARGET}</string>
+  <key>DTXcode</key>
+  <string>${XCODE_VERSION}</string>
+  <key>DTXcodeBuild</key>
+  <string>${XCODE_BUILD}</string>
+  <key>UIDeviceFamily</key>
+  <array>
+    <integer>1</integer>
+    <integer>2</integer>
+  </array>
+</dict>
+</plist>
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/Host-Info.plist
@@ -0,0 +1,126 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>CFBundleDevelopmentRegion</key>
+	<string>en</string>
+	<key>CFBundleDisplayName</key>
+	<string>${PRODUCT_NAME}</string>
+	<key>CFBundleExecutable</key>
+	<string>${EXECUTABLE_NAME}</string>
+	<key>CFBundleIdentifier</key>
+	<string>${IOS_BUNDLE_ID_PREFIX}.test.${EXECUTABLE_NAME:rfc1034identifier}</string>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleName</key>
+	<string>${PRODUCT_NAME}</string>
+	<key>CFBundlePackageType</key>
+	<string>APPL</string>
+	<key>CFBundleShortVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleSignature</key>
+	<string>????</string>
+	<key>CFBundleVersion</key>
+	<string>1.0</string>
+	<key>LSRequiresIPhoneOS</key>
+	<true/>
+	<key>NSAppTransportSecurity</key>
+	<dict>
+		<key>NSAllowsArbitraryLoads</key>
+		<true/>
+	</dict>
+	<key>UIRequiredDeviceCapabilities</key>
+	<array>
+		<string>armv7</string>
+	</array>
+	<key>UILaunchImages</key>
+	<array>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>7.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Portrait</string>
+			<key>UILaunchImageSize</key>
+			<string>{320, 480}</string>
+		</dict>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>7.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Portrait</string>
+			<key>UILaunchImageSize</key>
+			<string>{320, 568}</string>
+		</dict>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>8.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Portrait</string>
+			<key>UILaunchImageSize</key>
+			<string>{375, 667}</string>
+		</dict>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>8.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Portrait</string>
+			<key>UILaunchImageSize</key>
+			<string>{414, 736}</string>
+		</dict>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>8.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Landscape</string>
+			<key>UILaunchImageSize</key>
+			<string>{414, 736}</string>
+		</dict>
+	</array>
+	<key>UILaunchImages~ipad</key>
+	<array>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>7.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Portrait</string>
+			<key>UILaunchImageSize</key>
+			<string>{768, 1024}</string>
+		</dict>
+		<dict>
+			<key>UILaunchImageMinimumOSVersion</key>
+			<string>7.0</string>
+			<key>UILaunchImageName</key>
+			<string>Default</string>
+			<key>UILaunchImageOrientation</key>
+			<string>Landscape</string>
+			<key>UILaunchImageSize</key>
+			<string>{768, 1024}</string>
+		</dict>
+	</array>
+	<key>UISupportedInterfaceOrientations</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+		<string>UIInterfaceOrientationLandscapeLeft</string>
+		<string>UIInterfaceOrientationLandscapeRight</string>
+	</array>
+	<key>UISupportedInterfaceOrientations~ipad</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+		<string>UIInterfaceOrientationPortraitUpsideDown</string>
+		<string>UIInterfaceOrientationLandscapeLeft</string>
+		<string>UIInterfaceOrientationLandscapeRight</string>
+	</array>
+</dict>
+</plist>
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/Module-Info.plist
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>CFBundleDevelopmentRegion</key>
+  <string>en</string>
+  <key>CFBundleExecutable</key>
+  <string>${MODULE_NAME}</string>
+  <key>CFBundleIdentifier</key>
+  <string>${IOS_BUNDLE_ID_PREFIX}.test.${EXECUTABLE_NAME:rfc1034identifier}.${MODULE_NAME:rfc1034identifier}</string>
+  <key>CFBundleInfoDictionaryVersion</key>
+  <string>6.0</string>
+  <key>CFBundleName</key>
+  <string>${PRODUCT_NAME}</string>
+  <key>CFBundlePackageType</key>
+  <string>BNDL</string>
+  <key>CFBundleShortVersionString</key>
+  <string>1.0</string>
+  <key>CFBundleSignature</key>
+  <string>????</string>
+  <key>CFBundleVersion</key>
+  <string>1</string>
+</dict>
+</plist>
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/OWNERS
@@ -0,0 +1,1 @@
+file://build/config/mac/OWNERS
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/codesign.py
@@ -0,0 +1,437 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import datetime
+import fnmatch
+import glob
+import os
+import plistlib
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+def GetProvisioningProfilesDir():
+  """Returns the location of the installed mobile provisioning profiles.
+
+  Returns:
+    The path to the directory containing the installed mobile provisioning
+    profiles as a string.
+  """
+  return os.path.join(
+      os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
+
+
+def LoadPlistFile(plist_path):
+  """Loads property list file at |plist_path|.
+
+  Args:
+    plist_path: path to the property list file to load.
+
+  Returns:
+    The content of the property list file as a python object.
+  """
+  return plistlib.readPlistFromString(subprocess.check_output([
+      'xcrun', 'plutil', '-convert', 'xml1', '-o', '-', plist_path]))
+
+
+class Bundle(object):
+  """Wraps a bundle."""
+
+  def __init__(self, bundle_path):
+    """Initializes the Bundle object with data from bundle Info.plist file."""
+    self._path = bundle_path
+    self._data = LoadPlistFile(os.path.join(self._path, 'Info.plist'))
+
+  @property
+  def path(self):
+    return self._path
+
+  @property
+  def identifier(self):
+    return self._data['CFBundleIdentifier']
+
+  @property
+  def binary_path(self):
+    return os.path.join(self._path, self._data['CFBundleExecutable'])
+
+
+class ProvisioningProfile(object):
+  """Wraps a mobile provisioning profile file."""
+
+  def __init__(self, provisioning_profile_path):
+    """Initializes the ProvisioningProfile with data from profile file."""
+    self._path = provisioning_profile_path
+    self._data = plistlib.readPlistFromString(subprocess.check_output([
+        'xcrun', 'security', 'cms', '-D', '-u', 'certUsageAnyCA',
+        '-i', provisioning_profile_path]))
+
+  @property
+  def path(self):
+    return self._path
+
+  @property
+  def application_identifier_pattern(self):
+    return self._data.get('Entitlements', {}).get('application-identifier', '')
+
+  @property
+  def team_identifier(self):
+    return self._data.get('TeamIdentifier', [''])[0]
+
+  @property
+  def entitlements(self):
+    return self._data.get('Entitlements', {})
+
+  @property
+  def expiration_date(self):
+    return self._data.get('ExpirationDate', datetime.datetime.now())
+
+  def ValidToSignBundle(self, bundle_identifier):
+    """Checks whether the provisioning profile can sign bundle_identifier.
+
+    Args:
+      bundle_identifier: the identifier of the bundle that needs to be signed.
+
+    Returns:
+      True if the mobile provisioning profile can be used to sign a bundle
+      with the corresponding bundle_identifier, False otherwise.
+    """
+    return fnmatch.fnmatch(
+        '%s.%s' % (self.team_identifier, bundle_identifier),
+        self.application_identifier_pattern)
+
+  def Install(self, installation_path):
+    """Copies mobile provisioning profile info to |installation_path|."""
+    shutil.copy2(self.path, installation_path)
+
+
+class Entitlements(object):
+  """Wraps an Entitlement plist file."""
+
+  def __init__(self, entitlements_path):
+    """Initializes Entitlements object from entitlement file."""
+    self._path = entitlements_path
+    self._data = LoadPlistFile(self._path)
+
+  @property
+  def path(self):
+    return self._path
+
+  def ExpandVariables(self, substitutions):
+    self._data = self._ExpandVariables(self._data, substitutions)
+
+  def _ExpandVariables(self, data, substitutions):
+    if isinstance(data, str):
+      for key, substitution in substitutions.iteritems():
+        data = data.replace('$(%s)' % (key,), substitution)
+      return data
+
+    if isinstance(data, dict):
+      for key, value in data.iteritems():
+        data[key] = self._ExpandVariables(value, substitutions)
+      return data
+
+    if isinstance(data, list):
+      for i, value in enumerate(data):
+        data[i] = self._ExpandVariables(value, substitutions)
+
+    return data
+
+  def LoadDefaults(self, defaults):
+    for key, value in defaults.iteritems():
+      if key not in self._data:
+        self._data[key] = value
+
+  def WriteTo(self, target_path):
+    plistlib.writePlist(self._data, target_path)
+
+
+def FindProvisioningProfile(bundle_identifier, required):
+  """Finds mobile provisioning profile to use to sign bundle.
+
+  Args:
+    bundle_identifier: the identifier of the bundle to sign.
+    required: whether to exit with an error if no profile matches.
+  Returns:
+    The ProvisioningProfile object that can be used to sign the Bundle
+    object or None if no matching provisioning profile was found.
+  """
+  provisioning_profile_paths = glob.glob(
+      os.path.join(GetProvisioningProfilesDir(), '*.mobileprovision'))
+
+  # Iterate over all installed mobile provisioning profiles and filter those
+  # that can be used to sign the bundle, ignoring expired ones.
+  now = datetime.datetime.now()
+  valid_provisioning_profiles = []
+  one_hour = datetime.timedelta(0, 3600)
+  for provisioning_profile_path in provisioning_profile_paths:
+    provisioning_profile = ProvisioningProfile(provisioning_profile_path)
+    if provisioning_profile.expiration_date - now < one_hour:
+      sys.stderr.write(
+          'Warning: ignoring expired provisioning profile: %s.\n' %
+          provisioning_profile_path)
+      continue
+    if provisioning_profile.ValidToSignBundle(bundle_identifier):
+      valid_provisioning_profiles.append(provisioning_profile)
+
+  if not valid_provisioning_profiles:
+    if required:
+      sys.stderr.write(
+          'Error: no mobile provisioning profile found for "%s".\n' %
+          bundle_identifier)
+      sys.exit(1)
+    return None
+
+  # Select the most specific mobile provisioning profile, i.e. the one with
+  # the longest application identifier pattern (prefer the one with the latest
+  # expiration date as a secondary criterion).
+  selected_provisioning_profile = max(
+      valid_provisioning_profiles,
+      key=lambda p: (len(p.application_identifier_pattern), p.expiration_date))
+
+  one_week = datetime.timedelta(7)
+  if selected_provisioning_profile.expiration_date - now < 2 * one_week:
+    sys.stderr.write(
+        'Warning: selected provisioning profile will expire soon: %s' %
+        selected_provisioning_profile.path)
+  return selected_provisioning_profile
+
+
+def CodeSignBundle(bundle_path, identity, extra_args):
+  process = subprocess.Popen(['xcrun', 'codesign', '--force', '--sign',
+      identity, '--timestamp=none'] + list(extra_args) + [bundle_path],
+      stderr=subprocess.PIPE)
+  _, stderr = process.communicate()
+  if process.returncode:
+    sys.stderr.write(stderr)
+    sys.exit(process.returncode)
+  for line in stderr.splitlines():
+    if line.endswith(': replacing existing signature'):
+      # Ignore warning about replacing existing signature as this should only
+      # happen when re-signing system frameworks (and then it is expected).
+      continue
+    sys.stderr.write(line)
+    sys.stderr.write('\n')
+
+
+def InstallSystemFramework(framework_path, bundle_path, args):
+  """Install framework from |framework_path| to |bundle| and code-re-sign it."""
+  installed_framework_path = os.path.join(
+      bundle_path, 'Frameworks', os.path.basename(framework_path))
+
+  if os.path.exists(installed_framework_path):
+    shutil.rmtree(installed_framework_path)
+
+  shutil.copytree(framework_path, installed_framework_path)
+  CodeSignBundle(installed_framework_path, args.identity,
+      ['--deep', '--preserve-metadata=identifier,entitlements'])
+
+
+def GenerateEntitlements(path, provisioning_profile, bundle_identifier):
+  """Generates an entitlements file.
+
+  Args:
+    path: path to the entitlements template file
+    provisioning_profile: ProvisioningProfile object to use, may be None
+    bundle_identifier: identifier of the bundle to sign.
+  """
+  entitlements = Entitlements(path)
+  if provisioning_profile:
+    entitlements.LoadDefaults(provisioning_profile.entitlements)
+    app_identifier_prefix = provisioning_profile.team_identifier + '.'
+  else:
+    app_identifier_prefix = '*.'
+  entitlements.ExpandVariables({
+      'CFBundleIdentifier': bundle_identifier,
+      'AppIdentifierPrefix': app_identifier_prefix,
+  })
+  return entitlements
+
+
+class Action(object):
+  """Class implementing one action supported by the script."""
+
+  @classmethod
+  def Register(cls, subparsers):
+    parser = subparsers.add_parser(cls.name, help=cls.help)
+    parser.set_defaults(func=cls._Execute)
+    cls._Register(parser)
+
+
+class CodeSignBundleAction(Action):
+  """Class implementing the code-sign-bundle action."""
+
+  name = 'code-sign-bundle'
+  help = 'perform code signature for a bundle'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument(
+        '--entitlements', '-e', dest='entitlements_path',
+        help='path to the entitlements file to use')
+    parser.add_argument(
+        'path', help='path to the iOS bundle to codesign')
+    parser.add_argument(
+        '--identity', '-i', required=True,
+        help='identity to use to codesign')
+    parser.add_argument(
+        '--binary', '-b', required=True,
+        help='path to the iOS bundle binary')
+    parser.add_argument(
+        '--framework', '-F', action='append', default=[], dest='frameworks',
+        help='install and resign system framework')
+    parser.add_argument(
+        '--disable-code-signature', action='store_true', dest='no_signature',
+        help='disable code signature')
+    parser.add_argument(
+        '--platform', '-t', required=True,
+        help='platform the signed bundle is targeting')
+    parser.set_defaults(no_signature=False)
+
+  @staticmethod
+  def _Execute(args):
+    if not args.identity:
+      args.identity = '-'
+
+    bundle = Bundle(args.path)
+
+    # Delete existing embedded mobile provisioning.
+    embedded_provisioning_profile = os.path.join(
+        bundle.path, 'embedded.mobileprovision')
+    if os.path.isfile(embedded_provisioning_profile):
+      os.unlink(embedded_provisioning_profile)
+
+    # Delete existing code signature.
+    signature_file = os.path.join(args.path, '_CodeSignature', 'CodeResources')
+    if os.path.isfile(signature_file):
+      shutil.rmtree(os.path.dirname(signature_file))
+
+    # Install system frameworks if requested.
+    for framework_path in args.frameworks:
+      InstallSystemFramework(framework_path, args.path, args)
+
+    # Copy main binary into bundle.
+    if os.path.isfile(bundle.binary_path):
+      os.unlink(bundle.binary_path)
+    shutil.copy(args.binary, bundle.binary_path)
+
+    if args.no_signature:
+      return
+
+    codesign_extra_args = []
+
+    # Find the mobile provisioning profile and embed it into the bundle (if a
+    # code signing identity has been provided, fail if no valid mobile
+    # provisioning profile is found).
+    provisioning_profile_required = args.identity != '-'
+    provisioning_profile = FindProvisioningProfile(
+        bundle.identifier, provisioning_profile_required)
+    if provisioning_profile and args.platform != 'iphonesimulator':
+      provisioning_profile.Install(embedded_provisioning_profile)
+
+      temporary_entitlements_file = tempfile.NamedTemporaryFile(suffix='.xcent')
+      codesign_extra_args.extend(
+          ['--entitlements', temporary_entitlements_file.name])
+
+      entitlements = GenerateEntitlements(
+          args.entitlements_path, provisioning_profile, bundle.identifier)
+      entitlements.WriteTo(temporary_entitlements_file.name)
+
+    CodeSignBundle(bundle.path, args.identity, codesign_extra_args)
+
+
+class CodeSignFileAction(Action):
+  """Class implementing code signature for a single file."""
+
+  name = 'code-sign-file'
+  help = 'code-sign a single file'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument(
+        'path', help='path to the file to codesign')
+    parser.add_argument(
+        '--identity', '-i', required=True,
+        help='identity to use to codesign')
+    parser.add_argument(
+        '--output', '-o',
+        help='if specified copy the file to that location before signing it')
+    parser.set_defaults(sign=True)
+
+  @staticmethod
+  def _Execute(args):
+    if not args.identity:
+      args.identity = '-'
+
+    install_path = args.path
+    if args.output:
+
+      if os.path.isfile(args.output):
+        os.unlink(args.output)
+      elif os.path.isdir(args.output):
+        shutil.rmtree(args.output)
+
+      if os.path.isfile(args.path):
+        shutil.copy(args.path, args.output)
+      elif os.path.isdir(args.path):
+        shutil.copytree(args.path, args.output)
+
+      install_path = args.output
+
+    CodeSignBundle(install_path, args.identity,
+      ['--deep', '--preserve-metadata=identifier,entitlements'])
+
+
+class GenerateEntitlementsAction(Action):
+  """Class implementing the generate-entitlements action."""
+
+  name = 'generate-entitlements'
+  help = 'generate entitlements file'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument(
+        '--entitlements', '-e', dest='entitlements_path',
+        help='path to the entitlements file to use')
+    parser.add_argument(
+        'path', help='path to the entitlements file to generate')
+    parser.add_argument(
+        '--info-plist', '-p', required=True,
+        help='path to the bundle Info.plist')
+
+  @staticmethod
+  def _Execute(args):
+    info_plist = LoadPlistFile(args.info_plist)
+    bundle_identifier = info_plist['CFBundleIdentifier']
+    provisioning_profile = FindProvisioningProfile(bundle_identifier, False)
+    entitlements = GenerateEntitlements(
+        args.entitlements_path, provisioning_profile, bundle_identifier)
+    entitlements.WriteTo(args.path)
+
+
+def Main():
+  parser = argparse.ArgumentParser('codesign iOS bundles')
+  parser.add_argument('--developer_dir', required=False,
+                      help='Path to Xcode.')
+  subparsers = parser.add_subparsers()
+
+  actions = [
+      CodeSignBundleAction,
+      CodeSignFileAction,
+      GenerateEntitlementsAction,
+  ]
+
+  for action in actions:
+    action.Register(subparsers)
+
+  args = parser.parse_args()
+  if args.developer_dir:
+    os.environ['DEVELOPER_DIR'] = args.developer_dir
+  args.func(args)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/entitlements.plist
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>application-identifier</key>
+	<string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
+	<key>keychain-access-groups</key>
+	<array>
+		<string>$(AppIdentifierPrefix)$(CFBundleIdentifier)</string>
+	</array>
+</dict>
+</plist>
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/find_signing_identity.py
@@ -0,0 +1,47 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+import re
+
+def ListIdentities():
+  return subprocess.check_output([
+    'xcrun',
+    'security',
+    'find-identity',
+    '-v',
+    '-p',
+    'codesigning',
+  ])
+
+
+def FindValidIdentity(identity_description):
+  lines = list(map(str.strip, ListIdentities().splitlines()))
+  # Look for something like "2) XYZ "iPhone Developer: Name (ABC)""
+  exp = re.compile(r'[0-9]+\) ([A-F0-9]+) "([^"]*)"')
+  for line in lines:
+    res = exp.match(line)
+    if res is None:
+      continue
+    if identity_description in res.group(2):
+      yield res.group(1)
+
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser('codesign iOS bundles')
+  parser.add_argument(
+      '--developer_dir', required=False,
+      help='Path to Xcode.')
+  parser.add_argument(
+      '--identity-description', required=True,
+      help='Text description used to select the code signing identity.')
+  args = parser.parse_args()
+  if args.developer_dir:
+    os.environ['DEVELOPER_DIR'] = args.developer_dir
+
+  for identity in FindValidIdentity(args.identity_description):
+    print identity
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/ios_sdk.gni
@@ -0,0 +1,168 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # SDK path to use. When empty this will use the default SDK based on the
+  # value of use_ios_simulator.
+  ios_sdk_path = ""
+  ios_sdk_name = ""
+  ios_sdk_version = ""
+  ios_sdk_platform = ""
+  ios_sdk_platform_path = ""
+  xcode_version = ""
+  xcode_build = ""
+  machine_os_build = ""
+
+  # Version of iOS that we're targeting.
+  ios_deployment_target = "9.0"
+
+  # The iOS Code signing identity to use
+  # TODO(GYP), TODO(sdfresne): Consider having a separate
+  # ios_enable_code_signing_flag=<bool> flag to make the invocation clearer.
+  ios_enable_code_signing = true
+  ios_code_signing_identity = ""
+  ios_code_signing_identity_description = "iPhone Developer"
+
+  # Prefix for CFBundleIdentifier property of iOS bundles (corresponds to the
+  # "Organization Identifier" in Xcode). Code signing will fail if no mobile
+  # provisioning profile for the selected identity supports that prefix.
+  ios_app_bundle_id_prefix = "org.chromium"
+
+  # If true, then allow using Xcode to automatically manage certificates. This
+  # requires loading a separate Xcode project and enable automatically managed
+  # certificates. When true, all test application will use the same bundle id
+  # to avoid running out of certificates if using a free account.
+  ios_automatically_manage_certs = true
+
+  # Enabling this option makes clang compile for profiling to gather code
+  # coverage metrics.
+  ios_enable_coverage = false
+
+  # If non-empty, this list must contain valid cpu architecture, and the final
+  # build will be a multi-architecture build (aka fat build) supporting the
+  # main $target_cpu architecture and all of $additional_target_cpus.
+  #
+  # For example to build an application that will run on both arm64 and armv7
+  # devices, you would use the following in args.gn file when running "gn args":
+  #
+  #   target_os = "ios"
+  #   target_cpu = "arm64"
+  #   additional_target_cpus = [ "arm" ]
+  #
+  # You can also pass the value via "--args" parameter for "gn gen" command by
+  # using the syntax --args='additional_target_cpus=["arm"] target_cpu="arm64"'.
+  additional_target_cpus = []
+}
+
+assert(custom_toolchain == "" || additional_target_cpus == [],
+       "cannot define both custom_toolchain and additional_target_cpus")
+
+use_ios_simulator = current_cpu == "x86" || current_cpu == "x64"
+
+ios_generic_test_bundle_id_suffix = "generic-unit-test"
+
+# Initialize additional_toolchains from additional_target_cpus. Assert here
+# that the list does not contains $target_cpu nor duplicates as this would
+# cause weird errors during the build.
+additional_toolchains = []
+if (additional_target_cpus != []) {
+  foreach(_additional_target_cpu, additional_target_cpus) {
+    assert(_additional_target_cpu != target_cpu,
+           "target_cpu must not be listed in additional_target_cpus")
+
+    _toolchain = "//build/toolchain/mac:ios_clang_$_additional_target_cpu"
+    foreach(_additional_toolchain, additional_toolchains) {
+      assert(_toolchain != _additional_toolchain,
+             "additional_target_cpus must not contains duplicate values")
+    }
+
+    additional_toolchains += [ _toolchain ]
+  }
+}
+
+if (ios_sdk_path == "") {
+  # Compute default target.
+  if (use_ios_simulator) {
+    ios_sdk_name = "iphonesimulator"
+    ios_sdk_platform = "iPhoneSimulator"
+  } else {
+    ios_sdk_name = "iphoneos"
+    ios_sdk_platform = "iPhoneOS"
+  }
+
+  ios_sdk_info_args = []
+  if (!use_system_xcode) {
+    ios_sdk_info_args += [
+      "--developer_dir",
+      hermetic_xcode_path,
+    ]
+  }
+  ios_sdk_info_args += [ ios_sdk_name ]
+  script_name = "//build/config/mac/sdk_info.py"
+  _ios_sdk_result = exec_script(script_name, ios_sdk_info_args, "scope")
+  ios_sdk_path = _ios_sdk_result.sdk_path
+  ios_sdk_version = _ios_sdk_result.sdk_version
+  ios_sdk_platform_path = _ios_sdk_result.sdk_platform_path
+  ios_sdk_build = _ios_sdk_result.sdk_build
+  xcode_version = _ios_sdk_result.xcode_version
+  xcode_build = _ios_sdk_result.xcode_build
+  machine_os_build = _ios_sdk_result.machine_os_build
+  if (use_ios_simulator) {
+    # This is weird, but Xcode sets DTPlatformBuild to an empty field for
+    # simulator builds.
+    ios_platform_build = ""
+  } else {
+    ios_platform_build = ios_sdk_build
+  }
+}
+
+if (ios_enable_code_signing && !use_ios_simulator) {
+  find_signing_identity_args = [
+    "--identity-description",
+    ios_code_signing_identity_description,
+  ]
+  if (!use_system_xcode) {
+    find_signing_identity_args += [
+      "--developer_dir",
+      hermetic_xcode_path,
+    ]
+  }
+
+  # If an identity is not provided, look for one on the host
+  if (ios_code_signing_identity == "") {
+    _ios_identities = exec_script("find_signing_identity.py",
+                                  find_signing_identity_args,
+                                  "list lines")
+    if (_ios_identities == []) {
+      print("Tried to prepare a device build without specifying a code signing")
+      print("identity and could not detect one automatically either.")
+      print("TIP: Simulator builds don't require code signing...")
+      assert(false)
+    } else {
+      _ios_identities_len = 0
+      foreach(_, _ios_identities) {
+        _ios_identities_len += 1
+      }
+
+      ios_code_signing_identity = _ios_identities[0]
+      if (_ios_identities_len != 1) {
+        print("Warning: Multiple codesigning identities match " +
+              "\"$ios_code_signing_identity_description\"")
+        foreach(_ios_identity, _ios_identities) {
+          _selected = ""
+          if (ios_code_signing_identity == _ios_identity) {
+            _selected = " (selected)"
+          }
+          print("Warning: - $_ios_identity$_selected")
+        }
+        print("Warning: Please use either ios_code_signing_identity or ")
+        print("Warning: ios_code_signing_identity_description variable to ")
+        print("Warning: control which identity is selected.")
+        print()
+      }
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/rules.gni
@@ -0,0 +1,1509 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/ios/ios_sdk.gni")
+import("//build/config/mac/base_rules.gni")
+import("//build/config/mac/symbols.gni")
+import("//build/toolchain/toolchain.gni")
+
+# Invokes lipo on multiple arch-specific binaries to create a fat binary.
+#
+# Arguments
+#
+#   arch_binary_target
+#     name of the target generating the arch-specific binaries, they must
+#     be named $target_out_dir/$toolchain_cpu/$arch_binary_output.
+#
+#   arch_binary_output
+#     (optional, defaults to the name of $arch_binary_target) base name of
+#     the arch-specific binary generated by arch_binary_target.
+#
+#   output_name
+#     (optional, defaults to $target_name) base name of the target output,
+#     the full path will be $target_out_dir/$output_name.
+#
+#   configs
+#     (optional) a list of configurations, this is used to check whether
+#     the binary should be stripped, when "enable_stripping" is true.
+#
+template("lipo_binary") {
+  assert(defined(invoker.arch_binary_target),
+         "arch_binary_target must be defined for $target_name")
+
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  # These two lists are parallel: entry N of _all_toolchains builds the
+  # binary for entry N of _all_target_cpu.
+  _all_target_cpu = [ current_cpu ] + additional_target_cpus
+  _all_toolchains = [ current_toolchain ] + additional_toolchains
+
+  _arch_binary_target = invoker.arch_binary_target
+  _arch_binary_output = get_label_info(_arch_binary_target, "name")
+  if (defined(invoker.arch_binary_output)) {
+    _arch_binary_output = invoker.arch_binary_output
+  }
+
+  action(_target_name) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "arch_binary_output",
+                             "arch_binary_target",
+                             "configs",
+                             "output_name",
+                           ])
+
+    script = "//build/toolchain/mac/linker_driver.py"
+
+    outputs = [
+      "$target_out_dir/$_output_name",
+    ]
+
+    # Walk the two parallel lists in lockstep (GN has no zip), collecting
+    # one input binary and one dependency per architecture/toolchain pair.
+    deps = []
+    _index = 0
+    inputs = []
+    foreach(_cpu, _all_target_cpu) {
+      _toolchain = _all_toolchains[_index]
+      _index = _index + 1
+
+      inputs +=
+          [ get_label_info("$_arch_binary_target($_toolchain)",
+                           "target_out_dir") + "/$_cpu/$_arch_binary_output" ]
+
+      deps += [ "$_arch_binary_target($_toolchain)" ]
+    }
+
+    args = []
+    if (!use_system_xcode) {
+      args += [
+        "--developer_dir",
+        hermetic_xcode_path,
+      ]
+    }
+    # linker_driver.py invokes "xcrun lipo -create" to merge the per-arch
+    # binaries into a single fat binary at the output path.
+    args += [
+              "xcrun",
+              "lipo",
+              "-create",
+              "-output",
+              rebase_path("$target_out_dir/$_output_name", root_build_dir),
+            ] + rebase_path(inputs, root_build_dir)
+
+    # The -Wcrl,* flags below are directives consumed by linker_driver.py
+    # itself (dSYM extraction, stripping), not passed to lipo.
+    if (enable_dsyms) {
+      _dsyms_output_dir = "$root_out_dir/$_output_name.dSYM"
+      outputs += [
+        "$_dsyms_output_dir/",
+        "$_dsyms_output_dir/Contents/Info.plist",
+        "$_dsyms_output_dir/Contents/Resources/DWARF/$_output_name",
+      ]
+      args += [ "-Wcrl,dsym," + rebase_path("$root_out_dir/.", root_build_dir) ]
+    }
+
+    if (enable_stripping) {
+      args += [ "-Wcrl,strip,-x,-S" ]
+      if (save_unstripped_output) {
+        outputs += [ "$root_out_dir/$_output_name.unstripped" ]
+        args += [ "-Wcrl,unstripped," +
+                  rebase_path("$root_out_dir/.", root_build_dir) ]
+      }
+    }
+  }
+}
+
+# Wrapper around create_bundle taking care of code signature settings.
+#
+# Arguments
+#
+#   product_type
+#       string, product type for the generated Xcode project.
+#
+#   bundle_deps
+#       (optional) list of additional dependencies
+#
+#   bundle_deps_filter
+#       (optional) list of dependencies to filter (for more information
+#       see "gn help bundle_deps_filter")
+#
+#   bundle_extension
+#       string, extension of the bundle, used to generate bundle name.
+#
+#   bundle_binary_target
+#       string, label of the target generating the bundle main binary.
+#
+#   bundle_binary_output
+#       (optional) string, base name of the binary generated by the
+#       bundle_binary_target target, defaults to the target name.
+#
+#   extra_system_frameworks
+#       (optional) list of system framework to copy to the bundle.
+#
+#   enable_code_signing
+#       (optional) boolean, control whether code signing is enabled or not,
+#       default to ios_enable_code_signing if not defined.
+#
+#   entitlements_path:
+#       (optional) path to the template to use to generate the application
+#       entitlements by performing variable substitutions, defaults to
+#       //build/config/ios/entitlements.plist.
+#
+#   entitlements_target:
+#       (optional) label of the target generating the application
+#       entitlements (must generate a single file as output); cannot be
+#       defined if entitlements_path is set.
+#
+template("create_signed_bundle") {
+  assert(defined(invoker.product_type),
+         "product_type must be defined for $target_name")
+  assert(defined(invoker.bundle_extension),
+         "bundle_extension must be defined for $target_name")
+  assert(defined(invoker.bundle_binary_target),
+         "bundle_binary_target must be defined for $target_name")
+
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _bundle_binary_target = invoker.bundle_binary_target
+  _bundle_binary_output = get_label_info(_bundle_binary_target, "name")
+  if (defined(invoker.bundle_binary_output)) {
+    _bundle_binary_output = invoker.bundle_binary_output
+  }
+
+  _bundle_extension = invoker.bundle_extension
+  _bundle_root_dir = "$root_out_dir/$_output_name$_bundle_extension"
+
+  # Resolve the entitlements file: either an explicit path (defaulting to
+  # //build/config/ios/entitlements.plist) or the single output of
+  # entitlements_target. The two options are mutually exclusive.
+  if (!defined(invoker.entitlements_target)) {
+    _entitlements_path = "//build/config/ios/entitlements.plist"
+    if (defined(invoker.entitlements_path)) {
+      _entitlements_path = invoker.entitlements_path
+    }
+  } else {
+    assert(!defined(invoker.entitlements_path),
+           "Cannot define both entitlements_path and entitlements_target " +
+               "for $target_name")
+
+    _entitlements_target_outputs =
+        get_target_outputs(invoker.entitlements_target)
+    _entitlements_path = _entitlements_target_outputs[0]
+  }
+
+  _enable_code_signing = ios_enable_code_signing
+  if (defined(invoker.enable_code_signing)) {
+    _enable_code_signing = invoker.enable_code_signing
+  }
+
+  create_bundle(_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "bundle_deps_filter",
+                             "data_deps",
+                             "deps",
+                             "product_type",
+                             "public_configs",
+                             "public_deps",
+                             "testonly",
+                             "visibility",
+                           ])
+
+    # iOS bundles are "shallow": resources and the executable live at the
+    # bundle root (unlike macOS bundles, which use a Contents/ hierarchy).
+    bundle_root_dir = _bundle_root_dir
+    bundle_resources_dir = _bundle_root_dir
+    bundle_executable_dir = _bundle_root_dir
+    bundle_plugins_dir = "$_bundle_root_dir/PlugIns"
+
+    if (!defined(public_deps)) {
+      public_deps = []
+    }
+    public_deps += [ _bundle_binary_target ]
+
+    if (defined(invoker.bundle_deps)) {
+      if (!defined(deps)) {
+        deps = []
+      }
+      deps += invoker.bundle_deps
+    }
+    if (defined(invoker.entitlements_target)) {
+      if (!defined(deps)) {
+        deps = []
+      }
+      deps += [ invoker.entitlements_target ]
+    }
+
+    code_signing_script = "//build/config/ios/codesign.py"
+    code_signing_sources = [
+      _entitlements_path,
+      get_label_info(_bundle_binary_target, "target_out_dir") +
+          "/$_bundle_binary_output",
+    ]
+    # The signing step always (re)writes the main binary into the bundle;
+    # the signature and the embedded provisioning profile are only produced
+    # when signing is enabled and (for the profile) when targeting a device.
+    code_signing_outputs = [ "$_bundle_root_dir/$_output_name" ]
+    if (_enable_code_signing) {
+      code_signing_outputs +=
+          [ "$_bundle_root_dir/_CodeSignature/CodeResources" ]
+    }
+    if (ios_code_signing_identity != "" && !use_ios_simulator) {
+      code_signing_outputs += [ "$_bundle_root_dir/embedded.mobileprovision" ]
+    }
+
+    if (defined(invoker.extra_system_frameworks)) {
+      foreach(_framework, invoker.extra_system_frameworks) {
+        code_signing_outputs += [ "$bundle_root_dir/Frameworks/" +
+                                  get_path_info(_framework, "file") ]
+      }
+    }
+
+    code_signing_args = []
+    if (!use_system_xcode) {
+      code_signing_args += [
+        "--developer_dir",
+        hermetic_xcode_path,
+      ]
+    }
+    code_signing_args += [
+      "code-sign-bundle",
+      "-t=" + ios_sdk_name,
+      "-i=" + ios_code_signing_identity,
+      "-e=" + rebase_path(_entitlements_path, root_build_dir),
+      "-b=" + rebase_path("$target_out_dir/$_output_name", root_build_dir),
+      rebase_path(bundle_root_dir, root_build_dir),
+    ]
+    if (!_enable_code_signing) {
+      code_signing_args += [ "--disable-code-signature" ]
+    }
+    if (defined(invoker.extra_system_frameworks)) {
+      # All frameworks in extra_system_frameworks are expected to be
+      # system frameworks whose paths are already absolute, so do not
+      # use rebase_path here.
+      foreach(_framework, invoker.extra_system_frameworks) {
+        code_signing_args += [ "-F=" + _framework ]
+      }
+    }
+  }
+}
+
+# Generates Info.plist files for Mac apps and frameworks.
+#
+# Arguments
+#
+#     info_plist:
+#         (optional) string, path to the Info.plist file that will be used for
+#         the bundle.
+#
+#     info_plist_target:
+#         (optional) string, if the info_plist is generated from an action,
+#         rather than a regular source file, specify the target name in lieu
+#         of info_plist. The two arguments are mutually exclusive.
+#
+#     executable_name:
+#         string, name of the generated target used for the product
+#         and executable name as specified in the output Info.plist.
+#
+#     extra_substitutions:
+#         (optional) string array, 'key=value' pairs for extra fields which are
+#         specified in a source Info.plist template.
+template("ios_info_plist") {
+  assert(defined(invoker.info_plist) != defined(invoker.info_plist_target),
+         "Only one of info_plist or info_plist_target may be specified in " +
+             target_name)
+
+  if (defined(invoker.info_plist)) {
+    _info_plist = invoker.info_plist
+  } else {
+    # Use the (single) output of the generating target as the plist source.
+    _info_plist_target_output = get_target_outputs(invoker.info_plist_target)
+    _info_plist = _info_plist_target_output[0]
+  }
+
+  info_plist(target_name) {
+    # iOS ships Info.plist in binary plist form.
+    format = "binary1"
+    extra_substitutions = []
+    if (defined(invoker.extra_substitutions)) {
+      extra_substitutions = invoker.extra_substitutions
+    }
+    # SDK/platform substitution values all come from
+    # //build/config/ios/ios_sdk.gni.
+    extra_substitutions += [
+      "IOS_BUNDLE_ID_PREFIX=$ios_app_bundle_id_prefix",
+      "IOS_DEPLOYMENT_TARGET=$ios_deployment_target",
+      "IOS_PLATFORM_BUILD=$ios_platform_build",
+      "IOS_PLATFORM_NAME=$ios_sdk_name",
+      "IOS_PLATFORM_VERSION=$ios_sdk_version",
+      "IOS_SDK_BUILD=$ios_sdk_build",
+      "IOS_SDK_NAME=$ios_sdk_name$ios_sdk_version",
+      "IOS_SUPPORTED_PLATFORM=$ios_sdk_platform",
+    ]
+    # Both templates are fed to the underlying info_plist rule (defined in
+    # //build/config/mac/base_rules.gni): the shared BuildInfo.plist plus
+    # the caller-supplied plist.
+    plist_templates = [
+      "//build/config/ios/BuildInfo.plist",
+      _info_plist,
+    ]
+    if (defined(invoker.info_plist_target)) {
+      deps = [
+        invoker.info_plist_target,
+      ]
+    }
+    forward_variables_from(invoker,
+                           [
+                             "executable_name",
+                             "output_name",
+                             "visibility",
+                           ])
+  }
+}
+
+# Template to build an application bundle for iOS.
+#
+# This should be used instead of the "executable" built-in target type on
+# iOS. As the template forwards the generation of the application executable
+# to an "executable" target, all arguments supported by "executable" targets
+# are also supported by this template.
+#
+# Arguments
+#
+#   output_name:
+#       (optional) string, name of the generated application, if omitted,
+#       defaults to the target_name.
+#
+#   extra_substitutions:
+#       (optional) list of string in "key=value" format, each value will
+#       be used as an additional variable substitution rule when generating
+#       the application Info.plist
+#
+#   info_plist:
+#       (optional) string, path to the Info.plist file that will be used for
+#       the bundle.
+#
+#   info_plist_target:
+#       (optional) string, if the info_plist is generated from an action,
+#       rather than a regular source file, specify the target name in lieu
+#       of info_plist. The two arguments are mutually exclusive.
+#
+#   entitlements_path:
+#       (optional) path to the template to use to generate the application
+#       entitlements by performing variable substitutions, defaults to
+#       //build/config/ios/entitlements.plist.
+#
+#   entitlements_target:
+#       (optional) label of the target generating the application
+#       entitlements (must generate a single file as output); cannot be
+#       defined if entitlements_path is set.
+#
+#   bundle_extension:
+#       (optional) bundle extension including the dot, default to ".app".
+#
+#   product_type
+#       (optional) string, product type for the generated Xcode project,
+#       default to "com.apple.product-type.application". Should generally
+#       not be overridden.
+#
+#   enable_code_signing
+#       (optional) boolean, control whether code signing is enabled or not,
+#       default to ios_enable_code_signing if not defined.
+#
+# For more information, see "gn help executable".
+template("ios_app_bundle") {
+  _output_name = target_name
+  _target_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _arch_executable_source = _target_name + "_arch_executable_sources"
+  _arch_executable_target = _target_name + "_arch_executable"
+  _lipo_executable_target = _target_name + "_executable"
+
+  # Holds the compiled sources; the per-arch executable below depends on
+  # this instead of listing sources directly (note that "sources" is not
+  # excluded from the forward here but is excluded for the executable).
+  source_set(_arch_executable_source) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "bundle_deps",
+                             "bundle_deps_filter",
+                             "bundle_extension",
+                             "enable_code_signing",
+                             "entitlements_path",
+                             "entitlements_target",
+                             "extra_substitutions",
+                             "extra_system_frameworks",
+                             "info_plist",
+                             "info_plist_target",
+                             "output_name",
+                             "product_type",
+                             "visibility",
+                           ])
+
+    visibility = [ ":$_arch_executable_target" ]
+  }
+
+  # Name and output path of the entitlements-generation target; computed
+  # relative to $default_toolchain so every toolchain references the same
+  # generated .xcent file.
+  if (current_toolchain == default_toolchain || use_ios_simulator) {
+    _generate_entitlements_target = _target_name + "_gen_entitlements"
+    _generate_entitlements_output =
+        get_label_info(":$_generate_entitlements_target($default_toolchain)",
+                       "target_out_dir") + "/$_output_name.xcent"
+  }
+
+  # Links one binary per toolchain/cpu; the default toolchain's lipo step
+  # later merges them into the fat binary placed in the bundle.
+  executable(_arch_executable_target) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "bundle_deps",
+                             "bundle_deps_filter",
+                             "bundle_extension",
+                             "enable_code_signing",
+                             "entitlements_path",
+                             "entitlements_target",
+                             "extra_substitutions",
+                             "extra_system_frameworks",
+                             "info_plist",
+                             "info_plist_target",
+                             "output_name",
+                             "product_type",
+                             "sources",
+                             "visibility",
+                           ])
+
+    visibility = [ ":$_lipo_executable_target($default_toolchain)" ]
+    if (current_toolchain != default_toolchain) {
+      visibility += [ ":$_target_name" ]
+    }
+
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ ":$_arch_executable_source" ]
+
+    if (!defined(libs)) {
+      libs = []
+    }
+    libs += [ "UIKit.framework" ]
+
+    if (!defined(ldflags)) {
+      ldflags = []
+    }
+    # Resolve embedded frameworks via an @executable_path/Frameworks rpath
+    # and select Objective-C ABI version 2.
+    ldflags += [
+      "-Xlinker",
+      "-rpath",
+      "-Xlinker",
+      "@executable_path/Frameworks",
+      "-Xlinker",
+      "-objc_abi_version",
+      "-Xlinker",
+      "2",
+    ]
+
+    if (use_ios_simulator) {
+      deps += [ ":$_generate_entitlements_target($default_toolchain)" ]
+
+      if (!defined(inputs)) {
+        inputs = []
+      }
+      inputs += [ _generate_entitlements_output ]
+
+      if (!defined(ldflags)) {
+        ldflags = []
+      }
+      # For simulator builds, embed the generated entitlements directly in
+      # the binary's __TEXT,__entitlements section via -sectcreate.
+      ldflags += [
+        "-Xlinker",
+        "-sectcreate",
+        "-Xlinker",
+        "__TEXT",
+        "-Xlinker",
+        "__entitlements",
+        "-Xlinker",
+        rebase_path(_generate_entitlements_output, root_build_dir),
+      ]
+    }
+
+    output_name = _output_name
+    output_prefix_override = true
+    # Per-cpu output directory matching the layout lipo_binary expects
+    # ($target_out_dir/$toolchain_cpu/$arch_binary_output).
+    output_dir = "$target_out_dir/$current_cpu"
+  }
+
+  if (current_toolchain != default_toolchain) {
+    # For fat builds, only the default toolchain will generate an application
+    # bundle. For the other toolchains, the template is only used for building
+    # the arch-specific binary, thus the default target is just a group().
+
+    group(_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "visibility",
+                               "testonly",
+                             ])
+      public_deps = [
+        ":$_arch_executable_target",
+      ]
+    }
+  } else {
+    # Merge the per-arch binaries into a single (possibly fat) binary.
+    lipo_binary(_lipo_executable_target) {
+      forward_variables_from(invoker,
+                             [
+                               "configs",
+                               "testonly",
+                             ])
+
+      visibility = [ ":$_target_name" ]
+      output_name = _output_name
+      arch_binary_target = ":$_arch_executable_target"
+      arch_binary_output = _output_name
+    }
+
+    _generate_info_plist = target_name + "_generate_info_plist"
+    ios_info_plist(_generate_info_plist) {
+      forward_variables_from(invoker,
+                             [
+                               "extra_substitutions",
+                               "info_plist",
+                               "info_plist_target",
+                             ])
+
+      executable_name = _output_name
+    }
+
+    # NOTE(review): this check is redundant — we are already in the
+    # else-branch of "current_toolchain != default_toolchain" above.
+    if (current_toolchain == default_toolchain) {
+      # Same entitlements resolution logic as create_signed_bundle: an
+      # explicit path (with a default) or the single output of a target.
+      if (!defined(invoker.entitlements_target)) {
+        _entitlements_path = "//build/config/ios/entitlements.plist"
+        if (defined(invoker.entitlements_path)) {
+          _entitlements_path = invoker.entitlements_path
+        }
+      } else {
+        assert(!defined(invoker.entitlements_path),
+               "Cannot define both entitlements_path and entitlements_target" +
+                   "for $_target_name")
+
+        _entitlements_target_outputs =
+            get_target_outputs(invoker.entitlements_target)
+        _entitlements_path = _entitlements_target_outputs[0]
+      }
+
+      # Produce the .xcent file by substituting Info.plist values into the
+      # entitlements template (used for simulator section-embedding above).
+      action(_generate_entitlements_target) {
+        _gen_info_plist_outputs = get_target_outputs(":$_generate_info_plist")
+        _info_plist_path = _gen_info_plist_outputs[0]
+
+        script = "//build/config/ios/codesign.py"
+        deps = [
+          ":$_generate_info_plist",
+        ]
+        if (defined(invoker.entitlements_target)) {
+          deps += [ invoker.entitlements_target ]
+        }
+        sources = [
+          _entitlements_path,
+          _info_plist_path,
+        ]
+        outputs = [
+          _generate_entitlements_output,
+        ]
+
+        args = []
+        if (!use_system_xcode) {
+          args += [
+            "--developer_dir",
+            hermetic_xcode_path,
+          ]
+        }
+        args += [
+                  "generate-entitlements",
+                  "-e=" + rebase_path(_entitlements_path, root_build_dir),
+                  "-p=" + rebase_path(_info_plist_path, root_build_dir),
+                ] + rebase_path(outputs, root_build_dir)
+      }
+    }
+
+    # Copy the generated Info.plist to the bundle root.
+    _bundle_data_info_plist = target_name + "_bundle_data_info_plist"
+    bundle_data(_bundle_data_info_plist) {
+      forward_variables_from(invoker, [ "testonly" ])
+
+      sources = get_target_outputs(":$_generate_info_plist")
+      outputs = [
+        "{{bundle_root_dir}}/Info.plist",
+      ]
+      public_deps = [
+        ":$_generate_info_plist",
+      ]
+    }
+
+    _app_product_type = "com.apple.product-type.application"
+    _product_type = _app_product_type
+    if (defined(invoker.product_type)) {
+      _product_type = invoker.product_type
+    }
+
+    _app_bundle_extension = ".app"
+    _bundle_extension = _app_bundle_extension
+    if (defined(invoker.bundle_extension)) {
+      _bundle_extension = invoker.bundle_extension
+    }
+
+    # Only write PkgInfo for real application, not application extension (they
+    # have the same product type but a different extension).
+    _write_pkg_info = _product_type == _app_product_type &&
+                      _bundle_extension == _app_bundle_extension
+
+    if (_write_pkg_info) {
+      _create_pkg_info = target_name + "_pkg_info"
+      action(_create_pkg_info) {
+        forward_variables_from(invoker, [ "testonly" ])
+        script = "//build/config/mac/write_pkg_info.py"
+        sources = get_target_outputs(":$_generate_info_plist")
+        outputs = [
+          # Cannot name the output PkgInfo as the name will not be unique if
+          # multiple ios_app_bundle are defined in the same BUILD.gn file. The
+          # file is renamed in the bundle_data outputs to the correct name.
+          "$target_gen_dir/$target_name",
+        ]
+        args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
+               [ "--output" ] + rebase_path(outputs, root_build_dir)
+        deps = [
+          ":$_generate_info_plist",
+        ]
+      }
+
+      _bundle_data_pkg_info = target_name + "_bundle_data_pkg_info"
+      bundle_data(_bundle_data_pkg_info) {
+        forward_variables_from(invoker, [ "testonly" ])
+        sources = get_target_outputs(":$_create_pkg_info")
+        outputs = [
+          "{{bundle_resources_dir}}/PkgInfo",
+        ]
+        public_deps = [
+          ":$_create_pkg_info",
+        ]
+      }
+    }
+
+    # Assemble and (optionally) sign the final .app bundle.
+    create_signed_bundle(_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "bundle_deps",
+                               "bundle_deps_filter",
+                               "data_deps",
+                               "deps",
+                               "enable_code_signing",
+                               "entitlements_path",
+                               "entitlements_target",
+                               "extra_system_frameworks",
+                               "public_configs",
+                               "public_deps",
+                               "testonly",
+                               "visibility",
+                             ])
+
+      output_name = _output_name
+      bundle_binary_target = ":$_lipo_executable_target"
+      bundle_binary_output = _output_name
+      bundle_extension = _bundle_extension
+      product_type = _product_type
+
+      if (!defined(bundle_deps)) {
+        bundle_deps = []
+      }
+      bundle_deps += [ ":$_bundle_data_info_plist" ]
+      if (_write_pkg_info) {
+        bundle_deps += [ ":$_bundle_data_pkg_info" ]
+      }
+
+      # Simulator runs need the iossim runner available at run time.
+      if (use_ios_simulator) {
+        if (!defined(data_deps)) {
+          data_deps = []
+        }
+        data_deps += [ "//testing/iossim" ]
+      }
+    }
+  }
+}
+
+# ios_app_bundle targets start with the standard executable configs, since
+# the template forwards configs to the underlying executable target.
+set_defaults("ios_app_bundle") {
+  configs = default_executable_configs
+}
+
+# Template to build an application extension bundle for iOS.
+#
+# This should be used instead of the "executable" built-in target type on
+# iOS. As the template forwards the generation of the application executable
+# to an "executable" target, all arguments supported by "executable" targets
+# are also supported by this template.
+#
+# Arguments
+#
+#   output_name:
+#       (optional) string, name of the generated application, if omitted,
+#       defaults to the target_name.
+#
+#   extra_substitutions:
+#       (optional) list of string in "key=value" format, each value will
+#       be used as an additional variable substitution rule when generating
+#       the application Info.plist
+#
+#   info_plist:
+#       (optional) string, path to the Info.plist file that will be used for
+#       the bundle.
+#
+#   info_plist_target:
+#       (optional) string, if the info_plist is generated from an action,
+#       rather than a regular source file, specify the target name in lieu
+#       of info_plist. The two arguments are mutually exclusive.
+#
+# For more information, see "gn help executable".
+# Thin wrapper over ios_app_bundle that forces the .appex bundle extension
+# and the app-extension product type, and adds the extension-specific
+# linker flags.
+template("ios_appex_bundle") {
+  ios_app_bundle(target_name) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "bundle_extension",
+                             "product_type",
+                           ])
+    bundle_extension = ".appex"
+    product_type = "com.apple.product-type.app-extension"
+
+    # Add linker flags required for an application extension (determined by
+    # inspecting the link command-line when using Xcode 9.0+).
+    # "-e _NSExtensionMain" sets the entry point (extensions have no main).
+    if (!defined(ldflags)) {
+      ldflags = []
+    }
+    ldflags += [
+      "-e",
+      "_NSExtensionMain",
+      "-fapplication-extension",
+    ]
+  }
+}
+
+# Application extensions link like executables, so use the same defaults.
+set_defaults("ios_appex_bundle") {
+  configs = default_executable_configs
+}
+
+# Compile a xib or storyboard file and add it to a bundle_data so that it is
+# available at runtime in the bundle.
+#
+# Arguments
+#
+#   source:
+#       string, path of the xib or storyboard to compile.
+#
+# Forwards all variables to the bundle_data target.
+template("bundle_data_ib_file") {
+  assert(defined(invoker.source), "source needs to be defined for $target_name")
+
+  _source_extension = get_path_info(invoker.source, "extension")
+  assert(_source_extension == "xib" || _source_extension == "storyboard",
+         "source must be a .xib or .storyboard for $target_name")
+
+  _target_name = target_name
+  # ibtool compiles .xib files to .nib and .storyboard files to .storyboardc.
+  if (_source_extension == "xib") {
+    _compile_ib_file = target_name + "_compile_xib"
+    _output_extension = "nib"
+  } else {
+    _compile_ib_file = target_name + "_compile_storyboard"
+    _output_extension = "storyboardc"
+  }
+
+  compile_ib_files(_compile_ib_file) {
+    sources = [
+      invoker.source,
+    ]
+    output_extension = _output_extension
+    visibility = [ ":$_target_name" ]
+    # Compile for both iPhone and iPad form factors at the configured
+    # minimum deployment target.
+    ibtool_flags = [
+      "--minimum-deployment-target",
+      ios_deployment_target,
+      "--auto-activate-custom-fonts",
+      "--target-device",
+      "iphone",
+      "--target-device",
+      "ipad",
+    ]
+  }
+
+  # Package the compiled output into the bundle's resources directory.
+  bundle_data(_target_name) {
+    forward_variables_from(invoker, "*", [ "source" ])
+
+    if (!defined(public_deps)) {
+      public_deps = []
+    }
+    public_deps += [ ":$_compile_ib_file" ]
+
+    sources = get_target_outputs(":$_compile_ib_file")
+
+    outputs = [
+      "{{bundle_resources_dir}}/{{source_file_part}}",
+    ]
+  }
+}
+
+# Compile a strings file and add it to a bundle_data so that it is available
+# at runtime in the bundle.
+#
+# Arguments
+#
+#   source:
+#       string, path of the strings file to compile.
+#
+#   output:
+#       string, path of the compiled file in the final bundle.
+#
+# Forwards all variables to the bundle_data target.
+template("bundle_data_strings") {
+  assert(defined(invoker.source), "source needs to be defined for $target_name")
+  assert(defined(invoker.output), "output needs to be defined for $target_name")
+
+  _source_extension = get_path_info(invoker.source, "extension")
+  assert(_source_extension == "strings",
+         "source must be a .strings for $target_name")
+
+  _target_name = target_name
+  _convert_target = target_name + "_compile_strings"
+
+  # Convert the .strings file to binary plist format; the converted file
+  # keeps its original file name under a per-target gen directory.
+  convert_plist(_convert_target) {
+    visibility = [ ":$_target_name" ]
+    source = invoker.source
+    output =
+        "$target_gen_dir/$_target_name/" + get_path_info(invoker.source, "file")
+    format = "binary1"
+  }
+
+  # Package the converted file into the bundle at the caller-supplied path.
+  bundle_data(_target_name) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "source",
+                             "output",
+                           ])
+
+    if (!defined(public_deps)) {
+      public_deps = []
+    }
+    public_deps += [ ":$_convert_target" ]
+
+    sources = get_target_outputs(":$_convert_target")
+
+    outputs = [
+      invoker.output,
+    ]
+  }
+}
+
+# Template to package a shared library into an iOS framework bundle.
+#
+# By default, the bundle target this template generates does not link the
+# resulting framework into anything that depends on it. If a dependency wants
+# a link-time (as well as build-time) dependency on the framework bundle,
+# depend against "$target_name+link". If only the build-time dependency is
+# required (e.g., for copying into another bundle), then use "$target_name".
+#
+# Arguments
+#
+#     output_name:
+#         (optional) string, name of the generated framework without the
+#         .framework suffix. If omitted, defaults to target_name.
+#
+#     public_headers:
+#         (optional) list of paths to header file that needs to be copied
+#         into the framework bundle Headers subdirectory. If omitted or
+#         empty then the Headers subdirectory is not created.
+#
+#     sources
+#         (optional) list of files. Needs to be defined and non-empty if
+#         public_headers is defined and non-empty.
+#
+#   enable_code_signing
+#       (optional) boolean, control whether code signing is enabled or not,
+#       default to ios_enable_code_signing if not defined.
+#
+# This template provides two targets for the resulting framework bundle. The
+# link-time behavior varies depending on which of the two targets below is
+# added as a dependency:
+#   - $target_name only adds a build-time dependency. Targets that depend on
+#     it will not link against the framework.
+#   - $target_name+link adds a build-time and link-time dependency. Targets
+#     that depend on it will link against the framework.
+#
+# The build-time-only dependency is used for when a target needs to use the
+# framework either only for resources, or because the target loads it at run-
+# time, via dlopen() or NSBundle. The link-time dependency will cause the
+# dependee to have the framework loaded by dyld at launch.
+#
+# Example of build-time only dependency:
+#
+#     framework_bundle("CoreTeleportation") {
+#       sources = [ ... ]
+#     }
+#
+#     bundle_data("core_teleportation_bundle_data") {
+#       deps = [ ":CoreTeleportation" ]
+#       sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+#       outputs = [ "{{bundle_root_dir}}/Frameworks/{{source_file_part}}" ]
+#     }
+#
+#     app_bundle("GoatTeleporter") {
+#       sources = [ ... ]
+#       deps = [
+#         ":core_teleportation_bundle_data",
+#       ]
+#     }
+#
+# The GoatTeleporter.app will not directly link against
+# CoreTeleportation.framework, but it will be included in the bundle's
+# Frameworks directory.
+#
+# Example of link-time dependency:
+#
+#     framework_bundle("CoreTeleportation") {
+#       sources = [ ... ]
+#       ldflags = [
+#         "-install_name",
+#         "@executable_path/../Frameworks/$target_name.framework"
+#       ]
+#     }
+#
+#     bundle_data("core_teleportation_bundle_data") {
+#       deps = [ ":CoreTeleportation+link" ]
+#       sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+#       outputs = [ "{{bundle_root_dir}}/Frameworks/{{source_file_part}}" ]
+#     }
+#
+#     app_bundle("GoatTeleporter") {
+#       sources = [ ... ]
+#       deps = [
+#         ":core_teleportation_bundle_data",
+#       ]
+#     }
+#
+# Note that the framework is still copied to the app's bundle, but dyld will
+# load this library when the app is launched because it uses the "+link"
+# target as a dependency. This also requires that the framework set its
+# install_name so that dyld can locate it.
+#
+# See "gn help shared_library" for more information on arguments supported
+# by shared library target.
+template("ios_framework_bundle") {
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _has_public_headers =
+      defined(invoker.public_headers) && invoker.public_headers != []
+
+  # Public configs are not propagated across toolchain (see crbug.com/675224)
+  # so some configs have to be defined for both default_toolchain and all others
+  # toolchains when performing a fat build. Use "get_label_info" to construct
+  # the path since they need to be relative to the default_toolchain.
+
+  _default_toolchain_root_out_dir =
+      get_label_info("$_target_name($default_toolchain)", "root_out_dir")
+  _default_toolchain_target_gen_dir =
+      get_label_info("$_target_name($default_toolchain)", "target_gen_dir")
+
+  if (_has_public_headers) {
+    _framework_headers_target = _target_name + "_framework_headers"
+    _framework_headers_config = _target_name + "_framework_headers_config"
+    config(_framework_headers_config) {
+      # The link settings are inherited from the framework_bundle config.
+      cflags = [
+        "-F",
+        rebase_path("$_default_toolchain_root_out_dir/.", root_build_dir),
+      ]
+    }
+
+    _headers_map_config = _target_name + "_headers_map"
+    _header_map_filename =
+        "$_default_toolchain_target_gen_dir/$_output_name.headers.hmap"
+    config(_headers_map_config) {
+      visibility = [ ":$_target_name" ]
+      include_dirs = [ _header_map_filename ]
+    }
+  }
+
+  _arch_shared_library_source = _target_name + "_arch_shared_library_sources"
+  _arch_shared_library_target = _target_name + "_arch_shared_library"
+  _lipo_shared_library_target = _target_name + "_shared_library"
+  _link_target_name = _target_name + "+link"
+
+  _framework_public_config = _target_name + "_public_config"
+  config(_framework_public_config) {
+    # TODO(sdefresne): should we have a framework_dirs similar to lib_dirs
+    # and include_dirs to avoid duplicate values on the command-line?
+    visibility = [ ":$_target_name" ]
+    ldflags = [
+      "-F",
+      rebase_path("$_default_toolchain_root_out_dir/.", root_build_dir),
+    ]
+    lib_dirs = [ root_out_dir ]
+    libs = [ "$_output_name.framework" ]
+  }
+
+  source_set(_arch_shared_library_source) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "bundle_deps",
+                             "bundle_deps_filter",
+                             "data_deps",
+                             "enable_code_signing",
+                             "info_plist",
+                             "info_plist_target",
+                             "output_name",
+                             "visibility",
+                           ])
+
+    visibility = [ ":$_arch_shared_library_target" ]
+
+    if (_has_public_headers) {
+      configs += [
+        ":$_framework_headers_config",
+        ":$_headers_map_config",
+      ]
+
+      if (!defined(deps)) {
+        deps = []
+      }
+      deps += [ ":$_framework_headers_target($default_toolchain)" ]
+    }
+  }
+
+  shared_library(_arch_shared_library_target) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "bundle_deps",
+                             "bundle_deps_filter",
+                             "data_deps",
+                             "enable_code_signing",
+                             "info_plist",
+                             "info_plist_target",
+                             "output_name",
+                             "sources",
+                             "visibility",
+                           ])
+
+    visibility = [ ":$_lipo_shared_library_target($default_toolchain)" ]
+    if (current_toolchain != default_toolchain) {
+      visibility += [ ":$_target_name" ]
+    }
+
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ ":$_arch_shared_library_source" ]
+
+    if (!defined(ldflags)) {
+      ldflags = []
+    }
+    ldflags += [
+      "-Xlinker",
+      "-install_name",
+      "-Xlinker",
+      "@rpath/$_output_name.framework/$_output_name",
+      "-Xlinker",
+      "-objc_abi_version",
+      "-Xlinker",
+      "2",
+    ]
+
+    output_extension = ""
+    output_name = _output_name
+    output_prefix_override = true
+    output_dir = "$target_out_dir/$current_cpu"
+  }
+
+  if (current_toolchain != default_toolchain) {
+    # For fat builds, only the default toolchain will generate a framework
+    # bundle. For the other toolchains, the template is only used for building
+    # the arch-specific binary, thus the default target is just a group().
+
+    group(_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "visibility",
+                               "testonly",
+                             ])
+      public_deps = [
+        ":$_arch_shared_library_target",
+      ]
+    }
+
+    group(_link_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "public_configs",
+                               "visibility",
+                               "testonly",
+                             ])
+      public_deps = [
+        ":$_link_target_name($default_toolchain)",
+      ]
+
+      if (_has_public_headers) {
+        if (!defined(public_configs)) {
+          public_configs = []
+        }
+        public_configs += [ ":$_framework_headers_config" ]
+      }
+      if (!defined(all_dependent_configs)) {
+        all_dependent_configs = []
+      }
+      all_dependent_configs += [ ":$_framework_public_config" ]
+    }
+
+    if (defined(invoker.bundle_deps)) {
+      assert(invoker.bundle_deps != [], "mark bundle_deps as used")
+    }
+  } else {
+    if (_has_public_headers) {
+      _public_headers = invoker.public_headers
+      _framework_root = "$root_out_dir/$_output_name.framework"
+
+      _compile_headers_map_target = _target_name + "_compile_headers_map"
+      action(_compile_headers_map_target) {
+        visibility = [ ":$_framework_headers_target" ]
+        forward_variables_from(invoker,
+                               [
+                                 "deps",
+                                 "public_deps",
+                                 "testonly",
+                               ])
+        script = "//build/config/ios/write_framework_hmap.py"
+        outputs = [
+          _header_map_filename,
+        ]
+
+        # The header map generation only wants the list of headers, not all
+        # of the sources, so filter any non-header source files from
+        # "sources". This is less error prone than having the developer
+        # duplicate the list of all headers in addition to "sources".
+        set_sources_assignment_filter([
+                                        "*.c",
+                                        "*.cc",
+                                        "*.cpp",
+                                        "*.m",
+                                        "*.mm",
+                                      ])
+        sources = invoker.sources
+        set_sources_assignment_filter([])
+
+        args = [
+                 rebase_path(_header_map_filename),
+                 rebase_path(_framework_root, root_build_dir),
+               ] + rebase_path(sources, root_build_dir)
+      }
+
+      _create_module_map_target = _target_name + "_module_map"
+      action(_create_module_map_target) {
+        visibility = [ ":$_framework_headers_target" ]
+        script = "//build/config/ios/write_framework_modulemap.py"
+        outputs = [
+          "$_framework_root/Modules/module.modulemap",
+        ]
+        args = [ rebase_path("$_framework_root", root_build_dir) ]
+      }
+
+      _copy_public_headers_target = _target_name + "_copy_public_headers"
+      copy(_copy_public_headers_target) {
+        visibility = [ ":$_framework_headers_target" ]
+        sources = _public_headers
+        outputs = [
+          "$_framework_root/Headers/{{source_file_part}}",
+        ]
+      }
+
+      group(_framework_headers_target) {
+        forward_variables_from(invoker, [ "testonly" ])
+        deps = [
+          ":$_compile_headers_map_target",
+          ":$_copy_public_headers_target",
+          ":$_create_module_map_target",
+        ]
+      }
+    }
+
+    lipo_binary(_lipo_shared_library_target) {
+      forward_variables_from(invoker,
+                             [
+                               "configs",
+                               "testonly",
+                             ])
+
+      visibility = [ ":$_target_name" ]
+      output_name = _output_name
+      arch_binary_target = ":$_arch_shared_library_target"
+      arch_binary_output = _output_name
+    }
+
+    _info_plist_target = _target_name + "_info_plist"
+    _info_plist_bundle = _target_name + "_info_plist_bundle"
+    ios_info_plist(_info_plist_target) {
+      visibility = [ ":$_info_plist_bundle" ]
+      executable_name = _output_name
+      forward_variables_from(invoker,
+                             [
+                               "extra_substitutions",
+                               "info_plist",
+                               "info_plist_target",
+                             ])
+    }
+
+    bundle_data(_info_plist_bundle) {
+      visibility = [ ":$_target_name" ]
+      forward_variables_from(invoker, [ "testonly" ])
+      sources = get_target_outputs(":$_info_plist_target")
+      outputs = [
+        "{{bundle_root_dir}}/Info.plist",
+      ]
+      public_deps = [
+        ":$_info_plist_target",
+      ]
+    }
+
+    create_signed_bundle(_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "bundle_deps",
+                               "bundle_deps_filter",
+                               "data_deps",
+                               "deps",
+                               "enable_code_signing",
+                               "public_configs",
+                               "public_deps",
+                               "testonly",
+                               "visibility",
+                             ])
+
+      product_type = "com.apple.product-type.framework"
+      bundle_extension = ".framework"
+
+      output_name = _output_name
+      bundle_binary_target = ":$_lipo_shared_library_target"
+      bundle_binary_output = _output_name
+
+      if (!defined(deps)) {
+        deps = []
+      }
+      deps += [ ":$_info_plist_bundle" ]
+    }
+
+    group(_link_target_name) {
+      forward_variables_from(invoker,
+                             [
+                               "public_configs",
+                               "public_deps",
+                               "testonly",
+                               "visibility",
+                             ])
+      if (!defined(public_deps)) {
+        public_deps = []
+      }
+      public_deps += [ ":$_target_name" ]
+
+      if (_has_public_headers) {
+        if (!defined(public_configs)) {
+          public_configs = []
+        }
+        public_configs += [ ":$_framework_headers_config" ]
+      }
+      if (!defined(all_dependent_configs)) {
+        all_dependent_configs = []
+      }
+      all_dependent_configs += [ ":$_framework_public_config" ]
+    }
+
+    bundle_data(_target_name + "+bundle") {
+      forward_variables_from(invoker,
+                             [
+                               "testonly",
+                               "visibility",
+                             ])
+      public_deps = [
+        ":$_target_name",
+      ]
+      sources = [
+        "$root_out_dir/$_output_name.framework",
+      ]
+      outputs = [
+        "{{bundle_resources_dir}}/Frameworks/$_output_name.framework",
+      ]
+    }
+  }
+}
+
+set_defaults("ios_framework_bundle") {
+  configs = default_shared_library_configs
+}
+
+# For Chrome on iOS we want to run XCTests for all our build configurations
+# (Debug, Release, ...). In addition, the symbols visibility is configured to
+# private by default. To simplify testing with those constraints, our tests are
+# compiled in the TEST_HOST target instead of the .xctest bundle.
+template("ios_xctest_test") {
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _xctest_target = _target_name + "_module"
+  _xctest_output = _output_name + "_module"
+
+  _host_target = _target_name
+  _host_output = _output_name
+
+  _xctest_arch_loadable_module_target = _xctest_target + "_arch_loadable_module"
+  _xctest_lipo_loadable_module_target = _xctest_target + "_loadable_module"
+
+  loadable_module(_xctest_arch_loadable_module_target) {
+    visibility = [ ":$_xctest_lipo_loadable_module_target($default_toolchain)" ]
+    if (current_toolchain != default_toolchain) {
+      visibility += [ ":$_xctest_target" ]
+    }
+
+    sources = [
+      "//build/config/ios/xctest_shell.mm",
+    ]
+    configs += [ "//build/config/ios:xctest_config" ]
+
+    output_dir = "$target_out_dir/$current_cpu"
+    output_name = _xctest_output
+    output_prefix_override = true
+    output_extension = ""
+  }
+
+  if (current_toolchain != default_toolchain) {
+    # For fat builds, only the default toolchain will generate a test bundle.
+    # For the other toolchains, the template is only used for building the
+    # arch-specific binary, thus the default target is just a group().
+    group(_xctest_target) {
+      forward_variables_from(invoker,
+                             [
+                               "visibility",
+                               "testonly",
+                             ])
+      public_deps = [
+        ":$_xctest_arch_loadable_module_target",
+      ]
+    }
+  } else {
+    _xctest_info_plist_target = _xctest_target + "_info_plist"
+    _xctest_info_plist_bundle = _xctest_target + "_info_plist_bundle"
+    ios_info_plist(_xctest_info_plist_target) {
+      visibility = [ ":$_xctest_info_plist_bundle" ]
+      info_plist = "//build/config/ios/Module-Info.plist"
+      executable_name = _host_output
+      if (ios_automatically_manage_certs) {
+        # Use the same bundle identifier for EarlGrey tests as for unit tests
+        # when managing certificates as the number of free certs is limited.
+        extra_substitutions = [
+          "EXECUTABLE_NAME=gtest.${ios_generic_test_bundle_id_suffix}",
+          "MODULE_NAME=${ios_generic_test_bundle_id_suffix}-module",
+        ]
+      } else {
+        extra_substitutions = [ "MODULE_NAME=$_xctest_output" ]
+      }
+    }
+
+    bundle_data(_xctest_info_plist_bundle) {
+      visibility = [ ":$_xctest_target" ]
+      public_deps = [
+        ":$_xctest_info_plist_target",
+      ]
+      sources = get_target_outputs(":$_xctest_info_plist_target")
+      outputs = [
+        "{{bundle_root_dir}}/Info.plist",
+      ]
+    }
+
+    lipo_binary(_xctest_lipo_loadable_module_target) {
+      forward_variables_from(invoker,
+                             [
+                               "configs",
+                               "testonly",
+                             ])
+
+      visibility = [ ":$_xctest_target" ]
+      output_name = _xctest_output
+      arch_binary_target = ":$_xctest_arch_loadable_module_target"
+      arch_binary_output = _xctest_output
+    }
+
+    _xctest_bundle = _xctest_target + "_bundle"
+    create_signed_bundle(_xctest_target) {
+      forward_variables_from(invoker, [ "enable_code_signing" ])
+      visibility = [ ":$_xctest_bundle" ]
+
+      product_type = "com.apple.product-type.bundle.unit-test"
+      bundle_extension = ".xctest"
+
+      output_name = _xctest_output
+      bundle_binary_target = ":$_xctest_lipo_loadable_module_target"
+      bundle_binary_output = _xctest_output
+
+      deps = [
+        ":$_xctest_info_plist_bundle",
+      ]
+    }
+
+    bundle_data(_xctest_bundle) {
+      visibility = [ ":$_host_target" ]
+      public_deps = [
+        ":$_xctest_target",
+      ]
+      sources = [
+        "$root_out_dir/$_xctest_output.xctest",
+      ]
+      outputs = [
+        "{{bundle_plugins_dir}}/$_xctest_output.xctest",
+      ]
+    }
+  }
+
+  ios_app_bundle(_host_target) {
+    forward_variables_from(invoker, "*", [ "testonly" ])
+
+    testonly = true
+    output_name = _host_output
+    configs += [ "//build/config/ios:xctest_config" ]
+
+    if (!defined(invoker.info_plist) && !defined(invoker.info_plist_target)) {
+      info_plist = "//build/config/ios/Host-Info.plist"
+      if (ios_automatically_manage_certs) {
+        # Use the same bundle identifier for EarlGrey tests as for unit tests
+        # when managing certificates as the number of free certs is limited.
+        if (!defined(extra_substitutions)) {
+          extra_substitutions = []
+        }
+        extra_substitutions +=
+            [ "EXECUTABLE_NAME=gtest.${ios_generic_test_bundle_id_suffix}" ]
+      }
+    }
+
+    # Xcode needs these two frameworks installed (and signed) in the application
+    # for the XCTest to run, so install them using extra_system_frameworks.
+    _ios_platform_library = "$ios_sdk_platform_path/Developer/Library"
+    extra_system_frameworks = [
+      "$_ios_platform_library/Frameworks/XCTest.framework",
+      "$_ios_platform_library/PrivateFrameworks/IDEBundleInjection.framework",
+    ]
+
+    if (current_toolchain == default_toolchain) {
+      if (!defined(bundle_deps)) {
+        bundle_deps = []
+      }
+      bundle_deps += [ ":$_xctest_bundle" ]
+    }
+
+    if (!defined(ldflags)) {
+      ldflags = []
+    }
+    ldflags += [
+      "-Xlinker",
+      "-rpath",
+      "-Xlinker",
+      "@executable_path/Frameworks",
+      "-Xlinker",
+      "-rpath",
+      "-Xlinker",
+      "@loader_path/Frameworks",
+    ]
+  }
+}
+
+set_defaults("ios_xctest_test") {
+  configs = default_executable_configs
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/write_framework_hmap.py
@@ -0,0 +1,97 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import struct
+import sys
+
+def Main(args):
+  # Writes a Clang header map for |framework| covering the given headers.
+  if len(args) < 4:
+    sys.stderr.write("Usage: %s output.hmap Foo.framework header1.h...\n" %
+                     args[0])
+    return 1
+  (out, framework, all_headers) = args[1], args[2], args[3:]
+
+  framework_name = os.path.basename(framework).split('.')[0]
+  all_headers = [os.path.abspath(h) for h in all_headers]  # list on py2 & py3
+  filelist = {}
+  for header in all_headers:
+    filename = os.path.basename(header)
+    filelist[filename] = header
+    filelist[os.path.join(framework_name, filename)] = header
+  WriteHmap(out, filelist)
+  return 0
+
+
+def NextGreaterPowerOf2(x):
+  return 2**(x).bit_length()
+
+
+def WriteHmap(output_name, filelist):
+  """Generates a header map based on |filelist|.
+
+  Per Mark Mentovai:
+    A header map is structured essentially as a hash table, keyed by names used
+    in #includes, and providing pathnames to the actual files.
+
+  The implementation below and the comment above comes from inspecting:
+    http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
+  while also looking at the implementation in clang in:
+    https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
+  """
+  magic = 1751998832
+  version = 1
+  _reserved = 0
+  count = len(filelist)
+  capacity = NextGreaterPowerOf2(count)
+  strings_offset = 24 + (12 * capacity)
+  max_value_length = max(len(path) for path in filelist.values())
+
+  out = open(output_name, 'wb')
+  out.write(struct.pack('<LHHLLLL', magic, version, _reserved, strings_offset,
+                        count, capacity, max_value_length))
+
+  # Create empty hashmap buckets.
+  buckets = [None] * capacity
+  for file, path in filelist.items():
+    key = 0
+    for c in file:
+      key += ord(c.lower()) * 13
+
+    # Fill next empty bucket; capacity is a power of two so the mask wraps.
+    while buckets[key & (capacity - 1)] is not None:
+      key = key + 1
+    buckets[key & (capacity - 1)] = (file, path)
+
+  next_offset = 1
+  for bucket in buckets:
+    if bucket is None:
+      out.write(struct.pack('<LLL', 0, 0, 0))
+    else:
+      (file, path) = bucket
+      key_offset = next_offset
+      prefix_offset = key_offset + len(file) + 1
+      suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
+      next_offset = suffix_offset + len(os.path.basename(path)) + 1
+      out.write(struct.pack('<LLL', key_offset, prefix_offset, suffix_offset))
+
+  # Pad byte since next offset starts at 1.
+  out.write(struct.pack('<x'))
+
+  for bucket in buckets:
+    if bucket is not None:
+      (file, path) = bucket
+      out.write(struct.pack('<%ds' % len(file), file))
+      out.write(struct.pack('<s', '\0'))
+      base = os.path.dirname(path) + os.sep
+      out.write(struct.pack('<%ds' % len(base), base))
+      out.write(struct.pack('<s', '\0'))
+      path = os.path.basename(path)
+      out.write(struct.pack('<%ds' % len(path), path))
+      out.write(struct.pack('<s', '\0'))
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/write_framework_modulemap.py
@@ -0,0 +1,26 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+def Main(framework):
+  # Find the name of the binary based on the part before the ".framework".
+  binary = os.path.basename(framework).split('.')[0]
+  module_path = os.path.join(framework, 'Modules')
+  if not os.path.exists(module_path):
+    os.mkdir(module_path)
+  module_template = 'framework module %s {\n' \
+                    '  umbrella header "%s.h"\n' \
+                    '\n' \
+                    '  export *\n' \
+                    '  module * { export * }\n' \
+                    '}\n' % (binary, binary)
+
+  # Use a context manager so the file is closed even if write() raises.
+  with open(os.path.join(module_path, 'module.modulemap'), 'w') as module_file:
+    module_file.write(module_template)
+
+if __name__ == '__main__':
+  Main(sys.argv[1])
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ios/xctest_shell.mm
@@ -0,0 +1,19 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <UIKit/UIKit.h>
+#import <XCTest/XCTest.h>
+
+// For Chrome on iOS we want to run EarlGrey tests (that are XCTests) for all
+// our build configurations (Debug, Release, ...). In addition, the symbols
+// visibility is configured to private by default. To simplify testing with
+// those constraints, our tests are compiled in the TEST_HOST target instead
+// of the .xctest bundle that all link against this single test (just there to
+// ensure that the bundle is not empty).
+
+@interface XCTestShellEmptyClass : NSObject
+@end
+
+@implementation XCTestShellEmptyClass
+@end
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/linux/BUILD.gn
@@ -0,0 +1,95 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/ui.gni")
+
+group("linux") {
+  visibility = [ "//:optimize_gn_gen" ]
+}
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# Linux-only. This is not applied to Android, but is applied to ChromeOS.
+config("compiler") {
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Linux-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  # Set here because OS_CHROMEOS cannot be autodetected in build_config.h like
+  # OS_LINUX and the like.
+  if (is_chromeos) {
+    defines = [ "OS_CHROMEOS" ]
+  }
+}
+
+config("x11") {
+  libs = [
+    "X11",
+    "X11-xcb",
+    "xcb",
+    "Xcomposite",
+    "Xcursor",
+    "Xdamage",
+    "Xext",
+    "Xfixes",
+    "Xi",
+    "Xrender",
+    "Xtst",
+  ]
+}
+
+config("xcomposite") {
+  libs = [ "Xcomposite" ]
+}
+
+config("xext") {
+  libs = [ "Xext" ]
+}
+
+config("xrandr") {
+  libs = [ "Xrandr" ]
+}
+
+config("xscrnsaver") {
+  libs = [ "Xss" ]
+}
+
+config("xfixes") {
+  libs = [ "Xfixes" ]
+}
+
+config("libcap") {
+  libs = [ "cap" ]
+}
+
+config("xi") {
+  libs = [ "Xi" ]
+}
+
+config("xtst") {
+  libs = [ "Xtst" ]
+}
+
+config("libresolv") {
+  libs = [ "resolv" ]
+}
+
+if (use_glib) {
+  pkg_config("glib") {
+    packages = [
+      "glib-2.0",
+      "gmodule-2.0",
+      "gobject-2.0",
+      "gthread-2.0",
+    ]
+    defines = [
+      "GLIB_VERSION_MAX_ALLOWED=GLIB_VERSION_2_32",
+      "GLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_26",
+    ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/linux/atk/BUILD.gn
@@ -0,0 +1,47 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/ui.gni")
+
+# CrOS doesn't install GTK, gconf or any gnome packages.
+assert(!is_chromeos)
+
+# These packages should _only_ be expected when building for a target.
+# If these extra checks are not run, gconf is required when building host
+# tools for a CrOS build.
+assert(current_toolchain == default_toolchain)
+
+if (use_atk) {
+  assert(use_glib, "use_atk=true requires that use_glib=true")
+}
+
+pkg_config("atk_base") {
+  packages = [ "atk" ]
+  atk_lib_dir = exec_script(pkg_config_script,
+                            pkg_config_args + [
+                                  "--libdir",
+                                  "atk",
+                                ],
+                            "string")
+  defines = [ "ATK_LIB_DIR=\"$atk_lib_dir\"" ]
+}
+
+# gn orders flags on a target before flags from configs. The default config
+# adds -Wall, and these flags have to be after -Wall -- so they need to
+# come from a config and can't be on the target directly.
+config("atk") {
+  configs = [ ":atk_base" ]
+
+  cflags = [
+    # glib uses the pre-c++11 typedef-as-static_assert hack.
+    "-Wno-unused-local-typedef",
+
+    # G_DEFINE_TYPE automatically generates a *get_instance_private
+    # inline function after glib 2.37. That function is often unused, so
+    # suppress the resulting -Wunused-function warning.
+    "-Wno-unused-function",
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/linux/dbus/BUILD.gn
@@ -0,0 +1,14 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/linux/pkg_config.gni")
+
+assert(use_dbus)
+
+# Note: if your target also depends on //dbus, you don't need to add this
+# config (it will get added automatically if you depend on //dbus).
+pkg_config("dbus") {
+  packages = [ "dbus-1" ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/linux/gconf/BUILD.gn
@@ -0,0 +1,19 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/linux/pkg_config.gni")
+
+# CrOS doesn't install GTK, gconf or any gnome packages.
+assert(!is_chromeos && use_gconf)
+
+# These packages should _only_ be expected when building for a target.
+# If these extra checks are not run, gconf is required when building host
+# tools for a CrOS build.
+assert(current_toolchain == default_toolchain)
+
+pkg_config("gconf") {
+  packages = [ "gconf-2.0" ]
+  defines = [ "USE_GCONF" ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/linux/gtk/BUILD.gn
@@ -0,0 +1,39 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/gtk/gtk.gni")
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux, "This file should only be referenced on Linux")
+
+# The target in this file will automatically reference GTK2 or GTK3 depending
+# on the state of the build flag. Some builds reference both 2 and 3, and some
+# builds reference neither, so both need to be available but in different
+# directories so pkg-config is only run when necessary.
+
+# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
+# parts that explicitly need GTK are whitelisted on this target.
+group("gtk") {
+  visibility = [
+    "//chrome/test:interactive_ui_tests",
+    "//gpu/gles2_conform_support:gles2_conform_test_windowless",
+    "//remoting/host",
+    "//remoting/host/linux",
+    "//remoting/host/it2me:common",
+    "//remoting/host/it2me:remote_assistance_host",
+    "//remoting/host:remoting_me2me_host_static",
+    "//remoting/test:it2me_standalone_host_main",
+    "//webrtc/examples:peerconnection_client",
+  ]
+
+  if (use_gtk3) {
+    public_deps = [
+      "//build/config/linux/gtk3",
+    ]
+  } else {
+    public_deps = [
+      "//build/config/linux/gtk2",
+    ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/linux/gtk/gtk.gni
@@ -0,0 +1,12 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Include this file if you need to know at build time whether we're compiling
+# against GTK 2 or 3. But in most cases you can just depend on
+# //build/config/linux/gtk and it will switch for you.
+
+declare_args() {
+  # Whether to compile against GTKv3 instead of GTKv2.
+  use_gtk3 = true
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/linux/gtk2/BUILD.gn
@@ -0,0 +1,44 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux, "This file should only be referenced on Linux")
+
+# Depend on //build/config/linux/gtk2 to use GTKv2. Depend on
+# //build/config/linux/gtk to get GTK 2 or 3 depending on the build flags.
+#
+# GN doesn't check visibility for configs so we give this an obviously internal
+# name to discourage random targets from accidentally depending on this and
+# bypassing the GTK target's visibility.
+pkg_config("gtk2_internal_config") {
+  # Gtk requires gmodule, but it does not list it as a dependency in some
+  # misconfigured systems.
+  packages = [
+    "gmodule-2.0",
+    "gtk+-2.0",
+    "gthread-2.0",
+  ]
+}
+
+# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
+# parts that explicitly need GTK2 are whitelisted on this target.
+group("gtk2") {
+  visibility = [
+    "//gpu/gles2_conform_support:gles2_conform_test_windowless",
+    "//build/config/linux/gtk",
+    "//chrome/browser/ui/libgtkui:*",
+  ]
+  public_configs = [ ":gtk2_internal_config" ]
+}
+
+# Depend on "gtkprint" to get this.
+pkg_config("gtkprint2_internal_config") {
+  packages = [ "gtk+-unix-print-2.0" ]
+}
+
+group("gtkprint2") {
+  visibility = [ "//chrome/browser/ui/libgtkui:libgtk2ui" ]
+  public_configs = [ ":gtkprint2_internal_config" ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/linux/gtk3/BUILD.gn
@@ -0,0 +1,43 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux, "This file should only be referenced on Linux")
+
+# Depend on //build/config/linux/gtk3 to use GTKv3. Depend on
+# //build/config/linux/gtk to get GTK 2 or 3 depending on the build flags.
+#
+# GN doesn't check visibility for configs so we give this an obviously internal
+# name to discourage random targets from accidentally depending on this and
+# bypassing the GTK target's visibility.
+pkg_config("gtk3_internal_config") {
+  # Gtk requires gmodule, but it does not list it as a dependency in some
+  # misconfigured systems.
+  packages = [
+    "gmodule-2.0",
+    "gtk+-3.0",
+    "gthread-2.0",
+  ]
+}
+
+# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
+# parts that explicitly need GTK3 are whitelisted on this target.
+group("gtk3") {
+  visibility = [
+    "//build/config/linux/gtk",
+    "//chrome/browser/ui/libgtkui:*",
+  ]
+  public_configs = [ ":gtk3_internal_config" ]
+}
+
+# Depend on "gtkprint3" to get this.
+pkg_config("gtkprint3_internal_config") {
+  packages = [ "gtk+-unix-print-3.0" ]
+}
+
+group("gtkprint3") {
+  visibility = [ "//chrome/browser/ui/libgtkui:libgtk3ui" ]
+  public_configs = [ ":gtkprint3_internal_config" ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/linux/libffi/BUILD.gn
@@ -0,0 +1,9 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+pkg_config("libffi") {
+  packages = [ "libffi" ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/linux/pangocairo/BUILD.gn
@@ -0,0 +1,16 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+pkg_config("pangocairo") {
+  packages = [ "pangocairo" ]
+
+  # We don't want pkgconfig for pangocairo to explicitly request FreeType to get
+  # linked, because we control which FreeType to link to.
+  extra_args = [
+    "-v",
+    "freetype",
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/linux/pkg-config.py
@@ -0,0 +1,219 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import subprocess
+import sys
+import re
+from optparse import OptionParser
+
+# This script runs pkg-config, optionally filtering out some results, and
+# returns the result.
+#
+# The result will be [ <includes>, <cflags>, <libs>, <lib_dirs>, <ldflags> ]
+# where each member is itself a list of strings.
+#
+# You can filter out matches using "-v <regexp>" where all results from
+# pkgconfig matching the given regular expression will be ignored. You can
+# specify more than one regular expression by specifying "-v" more than once.
+#
+# You can specify a sysroot using "-s <sysroot>" where sysroot is the absolute
+# system path to the sysroot used for compiling. This script will attempt to
+# generate correct paths for the sysroot.
+#
+# When using a sysroot, you must also specify the architecture via
+# "-a <arch>" where arch is either "x86" or "x64".
+#
+# CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
+# and one of <systemroot>/usr/lib/pkgconfig or <systemroot>/usr/lib64/pkgconfig
+# depending on whether the systemroot is for a 32 or 64 bit architecture. They
+# specify the 'lib' or 'lib64' of the pkgconfig path by defining the
+# 'system_libdir' variable in the args.gn file. pkg_config.gni communicates this
+# variable to this script with the "--system_libdir <system_libdir>" flag. If no
+# flag is provided, then pkgconfig files are assumed to come from
+# <systemroot>/usr/lib/pkgconfig.
+#
+# Additionally, you can specify the option --atleast-version. This will skip
+# the normal outputting of a dictionary and instead print true or false,
+# depending on the return value of pkg-config for the given package.
+
+
+def SetConfigPath(options):
+  """Set the PKG_CONFIG_LIBDIR environment variable.
+
+  This takes into account any sysroot and architecture specification from the
+  options on the given command line.
+  """
+
+  sysroot = options.sysroot
+  assert sysroot
+
+  # Compute the library path name based on the architecture.
+  arch = options.arch
+  if sysroot and not arch:
+    print "You must specify an architecture via -a if using a sysroot."
+    sys.exit(1)
+
+  libdir = sysroot + '/usr/' + options.system_libdir + '/pkgconfig'
+  libdir += ':' + sysroot + '/usr/share/pkgconfig'
+  os.environ['PKG_CONFIG_LIBDIR'] = libdir
+  return libdir
+
+
+def GetPkgConfigPrefixToStrip(args):
+  """Returns the prefix from pkg-config where packages are installed.
+
+  This returned prefix is the one that should be stripped from the beginning of
+  directory names to take into account sysroots.
+  """
+  # Some sysroots, like the Chromium OS ones, may generate paths that are not
+  # relative to the sysroot. For example,
+  # /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all
+  # paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr)
+  # instead of relative to /path/to/chroot/build/x86-generic (i.e prefix=/usr).
+  # To support this correctly, it's necessary to extract the prefix to strip
+  # from pkg-config's |prefix| variable. check_output() appends a trailing
+  # newline, so strip it before testing; drop a trailing '/usr' if present.
+  prefix = subprocess.check_output(["pkg-config", "--variable=prefix"] + args,
+      env=os.environ).strip()
+  if prefix[-4:] == '/usr':
+    return prefix[:-4]
+
+
+def MatchesAnyRegexp(flag, list_of_regexps):
+  """Returns true if the first argument matches any regular expression in the
+  given list."""
+  for regexp in list_of_regexps:
+    if regexp.search(flag) != None:
+      return True
+  return False
+
+
+def RewritePath(path, strip_prefix, sysroot):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(strip_prefix):
+      path = path[len(strip_prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+def main():
+  # If this is run on non-Linux platforms, just return nothing and indicate
+  # success. This allows us to "kind of emulate" a Linux build from other
+  # platforms.
+  if "linux" not in sys.platform:
+    print "[[],[],[],[],[]]"
+    return 0
+
+  parser = OptionParser()
+  parser.add_option('-d', '--debug', action='store_true')
+  parser.add_option('-p', action='store', dest='pkg_config', type='string',
+                    default='pkg-config')
+  parser.add_option('-v', action='append', dest='strip_out', type='string')
+  parser.add_option('-s', action='store', dest='sysroot', type='string')
+  parser.add_option('-a', action='store', dest='arch', type='string')
+  parser.add_option('--system_libdir', action='store', dest='system_libdir',
+                    type='string', default='lib')
+  parser.add_option('--atleast-version', action='store',
+                    dest='atleast_version', type='string')
+  parser.add_option('--libdir', action='store_true', dest='libdir')
+  (options, args) = parser.parse_args()
+
+  # Make a list of regular expressions to strip out.
+  strip_out = []
+  if options.strip_out != None:
+    for regexp in options.strip_out:
+      strip_out.append(re.compile(regexp))
+
+  if options.sysroot:
+    libdir = SetConfigPath(options)
+    if options.debug:
+      sys.stderr.write('PKG_CONFIG_LIBDIR=%s\n' % libdir)
+    prefix = GetPkgConfigPrefixToStrip(args)
+  else:
+    prefix = ''
+
+  if options.atleast_version:
+    # When asking for the return value, just run pkg-config and print the return
+    # value, no need to do other work.
+    if not subprocess.call([options.pkg_config,
+                            "--atleast-version=" + options.atleast_version] +
+                            args):
+      print "true"
+    else:
+      print "false"
+    return 0
+
+  if options.libdir:
+    cmd = [options.pkg_config, "--variable=libdir"] + args
+    if options.debug:
+      sys.stderr.write('Running: %s\n' % cmd)
+    try:
+      libdir = subprocess.check_output(cmd)
+    except (OSError, subprocess.CalledProcessError):
+      print "Error from pkg-config."
+      return 1
+    sys.stdout.write(libdir.strip())
+    return 0
+
+  cmd = [options.pkg_config, "--cflags", "--libs"] + args
+  if options.debug:
+    sys.stderr.write('Running: %s\n' % ' '.join(cmd))
+
+  try:
+    flag_string = subprocess.check_output(cmd)
+  except (OSError, subprocess.CalledProcessError):
+    sys.stderr.write('Could not run pkg-config.\n')
+    return 1
+
+  # For now just split on spaces to get the args out. This will break if
+  # pkgconfig returns quoted things with spaces in them, but that doesn't seem
+  # to happen in practice.
+  all_flags = flag_string.strip().split(' ')
+
+
+  sysroot = options.sysroot
+  if not sysroot:
+    sysroot = ''
+
+  includes = []
+  cflags = []
+  libs = []
+  lib_dirs = []
+  ldflags = []
+
+  for flag in all_flags[:]:
+    if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out):
+      continue
+
+    if flag[:2] == '-l':
+      libs.append(RewritePath(flag[2:], prefix, sysroot))
+    elif flag[:2] == '-L':
+      lib_dirs.append(RewritePath(flag[2:], prefix, sysroot))
+    elif flag[:2] == '-I':
+      includes.append(RewritePath(flag[2:], prefix, sysroot))
+    elif flag[:3] == '-Wl':
+      ldflags.append(flag)
+    elif flag == '-pthread':
+      # Many libs specify "-pthread" which we don't need since we always include
+      # this anyway. Removing it here prevents a bunch of duplicate inclusions
+      # on the command line.
+      pass
+    else:
+      cflags.append(flag)
+
+  # Output a GN array, the first one is the cflags, the second are the libs. The
+  # JSON formatter prints GN compatible lists when everything is a list of
+  # strings.
+  print json.dumps([includes, cflags, libs, lib_dirs, ldflags])
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/linux/pkg_config.gni
@@ -0,0 +1,117 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+# Defines a config specifying the result of running pkg-config for the given
+# packages. Put the package names you want to query in the "packages" variable
+# inside the template invocation.
+#
+# You can also add defines via the "defines" variable. This can be useful to
+# add this to the config to pass defines that the library expects to get by
+# users of its headers.
+#
+# Example:
+#   pkg_config("mything") {
+#     packages = [ "mything1", "mything2" ]
+#     defines = [ "ENABLE_AWESOME" ]
+#   }
+#
+# You can also use "extra args" to filter out results (see pkg-config.py):
+#   extra_args = [ "-v", "foo" ]
+# To ignore libs and ldflags (only cflags/defines will be set, which is useful
+# when doing manual dynamic linking), set:
+#   ignore_libs = true
+
+declare_args() {
+  # A pkg-config wrapper to call instead of trying to find and call the right
+  # pkg-config directly. Wrappers like this are common in cross-compilation
+  # environments.
+  # Leaving it blank defaults to searching PATH for 'pkg-config' and relying on
+  # the sysroot mechanism to find the right .pc files.
+  pkg_config = ""
+
+  # An optional pkg-config wrapper to use for tools built on the host.
+  host_pkg_config = ""
+
+  # CrOS systemroots place pkgconfig files at <systemroot>/usr/share/pkgconfig
+  # and one of <systemroot>/usr/lib/pkgconfig or <systemroot>/usr/lib64/pkgconfig
+  # depending on whether the systemroot is for a 32 or 64 bit architecture.
+  #
+  # When built under GYP, CrOS board builds specify the 'system_libdir' variable
+  # as part of the GYP_DEFINES provided by the CrOS emerge build or simple
+  # chrome build scheme. This variable permits controlling this for GN builds
+  # in similar fashion by setting the `system_libdir` variable in the build's
+  # args.gn file to 'lib' or 'lib64' as appropriate for the target architecture.
+  system_libdir = "lib"
+}
+
+pkg_config_script = "//build/config/linux/pkg-config.py"
+
+# Define the args we pass to the pkg-config script for other build files that
+# need to invoke it manually.
+if (sysroot != "") {
+  # Pass the sysroot if we're using one (it requires the CPU arch also).
+  pkg_config_args = [
+    "-s",
+    rebase_path(sysroot),
+    "-a",
+    current_cpu,
+  ]
+} else if (pkg_config != "") {
+  pkg_config_args = [
+    "-p",
+    pkg_config,
+  ]
+} else {
+  pkg_config_args = []
+}
+
+# Only use the custom libdir when building with the target sysroot.
+if (target_sysroot != "" && sysroot == target_sysroot) {
+  pkg_config_args += [
+    "--system_libdir",
+    system_libdir,
+  ]
+}
+
+if (host_pkg_config != "") {
+  host_pkg_config_args = [
+    "-p",
+    host_pkg_config,
+  ]
+} else {
+  host_pkg_config_args = pkg_config_args
+}
+
+template("pkg_config") {
+  assert(defined(invoker.packages),
+         "Variable |packages| must be defined to be a list in pkg_config.")
+  config(target_name) {
+    if (host_toolchain == current_toolchain) {
+      args = host_pkg_config_args + invoker.packages
+    } else {
+      args = pkg_config_args + invoker.packages
+    }
+    if (defined(invoker.extra_args)) {
+      args += invoker.extra_args
+    }
+
+    pkgresult = exec_script(pkg_config_script, args, "value")
+    include_dirs = pkgresult[0]
+    cflags = pkgresult[1]
+
+    if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) {
+      libs = pkgresult[2]
+      lib_dirs = pkgresult[3]
+      ldflags = pkgresult[4]
+    }
+
+    forward_variables_from(invoker,
+                           [
+                             "defines",
+                             "visibility",
+                           ])
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/locales.gni
@@ -0,0 +1,187 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android doesn't ship all locales in order to save space (but webview does).
+# http://crbug.com/369218
+if (is_android) {
+  android_chrome_omitted_locales = [
+    "bn",
+    "et",
+    "gu",
+    "kn",
+    "ml",
+    "mr",
+    "ms",
+    "ta",
+    "te",
+  ]
+}
+
+# Chrome on iOS only ships with a subset of the locales supported by other
+# version of Chrome as the corresponding locales are not supported by the
+# operating system (but for simplicity, the corresponding .pak files are
+# still generated).
+if (is_ios) {
+  ios_unsupported_locales = [
+    "am",
+    "bn",
+    "et",
+    "fil",
+    "gu",
+    "kn",
+    "lv",
+    "ml",
+    "mr",
+    "sl",
+    "sw",
+    "ta",
+    "te",
+  ]
+}
+
+# Note: keep in sync with below.
+locales = [
+  "am",
+  "ar",
+  "bg",
+  "bn",
+  "ca",
+  "cs",
+  "da",
+  "de",
+  "el",
+  "en-GB",
+  "en-US",
+  "es",
+  "et",
+  "fa",
+  "fi",
+  "fil",
+  "fr",
+  "gu",
+  "he",
+  "hi",
+  "hr",
+  "hu",
+  "id",
+  "it",
+  "ja",
+  "kn",
+  "ko",
+  "lt",
+  "lv",
+  "ml",
+  "mr",
+  "ms",
+  "nb",
+  "nl",
+  "pl",
+  "pt-PT",
+  "ro",
+  "ru",
+  "sk",
+  "sl",
+  "sr",
+  "sv",
+  "sw",
+  "ta",
+  "te",
+  "th",
+  "tr",
+  "uk",
+  "vi",
+  "zh-CN",
+  "zh-TW",
+]
+
+# Chrome on iOS uses different names for "es-419" and "pt-BR" (called
+# respectively "es-MX" and "pt" on iOS).
+if (!is_ios) {
+  locales += [
+    "es-419",
+    "pt-BR",
+  ]
+} else {
+  locales += [
+    "es-MX",
+    "pt",
+  ]
+
+  ios_packed_locales = locales - ios_unsupported_locales
+}
+
+locales_with_fake_bidi = locales + [ "fake-bidi" ]
+
+# Same as the locales list but in the format Mac expects for output files:
+# it uses underscores instead of hyphens, and "en" instead of "en-US".
+locales_as_mac_outputs = [
+  "am",
+  "ar",
+  "bg",
+  "bn",
+  "ca",
+  "cs",
+  "da",
+  "de",
+  "el",
+  "en_GB",
+  "en",
+  "es",
+  "et",
+  "fa",
+  "fi",
+  "fil",
+  "fr",
+  "gu",
+  "he",
+  "hi",
+  "hr",
+  "hu",
+  "id",
+  "it",
+  "ja",
+  "kn",
+  "ko",
+  "lt",
+  "lv",
+  "ml",
+  "mr",
+  "ms",
+  "nb",
+  "nl",
+  "pl",
+  "pt_PT",
+  "ro",
+  "ru",
+  "sk",
+  "sl",
+  "sr",
+  "sv",
+  "sw",
+  "ta",
+  "te",
+  "th",
+  "tr",
+  "uk",
+  "vi",
+  "zh_CN",
+  "zh_TW",
+]
+
+# Chrome on iOS uses different names for "es-419" and "pt-BR" (called
+# respectively "es-MX" and "pt" on iOS).
+if (!is_ios) {
+  locales_as_mac_outputs += [
+    "es_419",
+    "pt_BR",
+  ]
+} else {
+  locales_as_mac_outputs += [
+    "es_MX",
+    "pt",
+  ]
+
+  ios_packed_locales_as_mac_outputs =
+      locales_as_mac_outputs - ios_unsupported_locales
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/BUILD.gn
@@ -0,0 +1,104 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+import("//build/config/mac/mac_sdk.gni")
+import("//build/config/mac/symbols.gni")
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic.
+config("compiler") {
+  # These flags are shared between the C compiler and linker.
+  common_mac_flags = []
+
+  # CPU architecture.
+  if (current_cpu == "x64") {
+    common_mac_flags += [
+      "-arch",
+      "x86_64",
+    ]
+  } else if (current_cpu == "x86") {
+    common_mac_flags += [
+      "-arch",
+      "i386",
+    ]
+  }
+
+  # This is here so that all files get recompiled after an Xcode update.
+  # (defines are passed via the command line, and build system rebuild things
+  # when their commandline changes). Nothing should ever read this define.
+  defines = [ "CR_XCODE_VERSION=$xcode_version" ]
+
+  asmflags = common_mac_flags
+  cflags = common_mac_flags
+
+  # Without this, the constructors and destructors of a C++ object inside
+  # an Objective C struct won't be called, which is very bad.
+  cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
+
+  cflags_c = [ "-std=c99" ]
+  cflags_objc = cflags_c
+
+  ldflags = common_mac_flags
+
+  if (save_unstripped_output) {
+    ldflags += [ "-Wcrl,unstripped," + rebase_path(root_out_dir) ]
+  }
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Mac-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  common_flags = [
+    "-isysroot",
+    rebase_path(sysroot, root_build_dir),
+    "-mmacosx-version-min=$mac_deployment_target",
+  ]
+
+  asmflags = common_flags
+  cflags = common_flags
+  ldflags = common_flags
+
+  # Prevent Mac OS X AssertMacros.h (included by system header) from defining
+  # macros that collide with common names, like 'check', 'require', and
+  # 'verify'.
+  # http://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/AssertMacros.h
+  defines = [ "__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE=0" ]
+}
+
+# On Mac, this is used for everything except static libraries.
+config("mac_dynamic_flags") {
+  ldflags = [ "-Wl,-ObjC" ]  # Always load Objective-C categories and classes.
+
+  if (is_component_build) {
+    ldflags += [
+      # Path for loading shared libraries for unbundled binaries.
+      "-Wl,-rpath,@loader_path/.",
+
+      # Path for loading shared libraries for bundled binaries. Get back from
+      # Binary.app/Contents/MacOS.
+      "-Wl,-rpath,@loader_path/../../..",
+    ]
+  }
+}
+
+# On Mac, this is used only for executables.
+config("mac_executable_flags") {
+  # Remove this when targeting >=10.7 since it is the default in that config.
+  ldflags = [ "-Wl,-pie" ]  # Position independent.
+}
+
+# The ldflags referenced below are handled by
+# //build/toolchain/mac/linker_driver.py.
+# Remove this config if a target wishes to change the arguments passed to the
+# strip command during linking. This config by default strips all symbols
+# from a binary, but some targets may wish to specify a saves file to preserve
+# specific symbols.
+config("strip_all") {
+  if (enable_stripping) {
+    ldflags = [ "-Wcrl,strip,-x,-S" ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/BuildInfo.plist
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+  <key>BuildMachineOSBuild</key>
+  <string>${BUILD_MACHINE_OS_BUILD}</string>
+  <key>DTCompiler</key>
+  <string>${GCC_VERSION}</string>
+  <key>DTSDKBuild</key>
+  <string>${MAC_SDK_BUILD}</string>
+  <key>DTSDKName</key>
+  <string>${MAC_SDK_NAME}</string>
+  <key>DTXcode</key>
+  <string>${XCODE_VERSION}</string>
+  <key>DTXcodeBuild</key>
+  <string>${XCODE_BUILD}</string>
+  <key>CFBundleShortVersionString</key>
+  <string>${VERSION}</string>
+  <key>CFBundleVersion</key>
+  <string>${VERSION_BUILD}</string>
+  <key>CFBundleIdentifier</key>
+  <string>org.chromium.${PRODUCT_NAME:rfc1034identifier}</string>
+  <key>SCM_REVISION</key>
+  <string>${COMMIT_HASH}</string>
+</dict>
+</plist>
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/OWNERS
@@ -0,0 +1,4 @@
+rsesek@chromium.org
+sdefresne@chromium.org
+
+# COMPONENT: Build
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/base_rules.gni
@@ -0,0 +1,253 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains rules that are shared between Mac and iOS.
+
+import("//build/toolchain/toolchain.gni")
+import("//build/config/mac/symbols.gni")
+
+if (is_mac) {
+  import("//build/config/mac/mac_sdk.gni")
+} else if (is_ios) {
+  import("//build/config/ios/ios_sdk.gni")
+}
+
+# Convert plist file to given format.
+#
+# Arguments
+#
+#   source:
+#     string, path to the plist file to convert
+#
+#   output:
+#     string, path to the converted plist, must be under $root_build_dir
+#
+#   format:
+#     string, the format to `plutil -convert` the plist to.
+template("convert_plist") {
+  assert(defined(invoker.source), "source must be defined for $target_name")
+  assert(defined(invoker.output), "output must be defined for $target_name")
+  assert(defined(invoker.format), "format must be defined for $target_name")
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "visibility",
+                             "testonly",
+                             "deps",
+                           ])
+
+    script = "//build/config/mac/xcrun.py"
+    sources = [
+      invoker.source,
+    ]
+    outputs = [
+      invoker.output,
+    ]
+    args = []
+    if (!use_system_xcode) {
+      args += [
+        "--developer_dir",
+        hermetic_xcode_path,
+      ]
+    }
+    args += [
+      "plutil",
+      "-convert",
+      invoker.format,
+      "-o",
+      rebase_path(invoker.output, root_build_dir),
+      rebase_path(invoker.source, root_build_dir),
+    ]
+  }
+}
+
+# Template to merge multiple plist files and perform variable substitutions.
+#
+# Arguments
+#
+#     plist_templates:
+#         string array, paths to plist files which will be used for the bundle.
+#
+#     format:
+#         string, the format to `plutil -convert` the plist to when
+#         generating the output.
+#
+#     substitutions:
+#         string array, 'key=value' pairs used to replace ${key} by value
+#         when generating the output plist file.
+#
+#     output_name:
+#         string, name of the generated plist file.
+template("compile_plist") {
+  assert(defined(invoker.plist_templates),
+         "A list of template plist files must be specified for $target_name")
+  assert(defined(invoker.format),
+         "The plist format must be specified for $target_name")
+  assert(defined(invoker.substitutions),
+         "A list of key=value pairs must be specified for $target_name")
+  assert(defined(invoker.output_name),
+         "The name of the output file must be specified for $target_name")
+
+  _output_name = invoker.output_name
+  _merged_name = get_path_info(_output_name, "dir") + "/" +
+                 get_path_info(_output_name, "name") + "_merged." +
+                 get_path_info(_output_name, "extension")
+
+  _merge_target = target_name + "_merge"
+
+  action(_merge_target) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "testonly",
+                           ])
+
+    script = "//build/config/mac/plist_util.py"
+    sources = invoker.plist_templates
+    outputs = [
+      _merged_name,
+    ]
+    args = [
+             "merge",
+             "-f=" + invoker.format,
+             "-o=" + rebase_path(_merged_name, root_build_dir),
+           ] + rebase_path(invoker.plist_templates, root_build_dir)
+  }
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+    script = "//build/config/mac/plist_util.py"
+    sources = [
+      _merged_name,
+    ]
+    outputs = [
+      _output_name,
+    ]
+    args = [
+      "substitute",
+      "-f=" + invoker.format,
+      "-o=" + rebase_path(_output_name, root_build_dir),
+      "-t=" + rebase_path(_merged_name, root_build_dir),
+    ]
+    foreach(_substitution, invoker.substitutions) {
+      args += [ "-s=$_substitution" ]
+    }
+    deps = [
+      ":$_merge_target",
+    ]
+  }
+}
+
+# The base template used to generate Info.plist files for iOS and Mac apps and
+# frameworks.
+#
+# Arguments
+#
+#     plist_templates:
+#         string array, paths to plist files which will be used for the bundle.
+#
+#     executable_name:
+#         string, name of the generated target used for the product
+#         and executable name as specified in the output Info.plist.
+#
+#     format:
+#         string, the format to `plutil -convert` the plist to when
+#         generating the output.
+#
+#     extra_substitutions:
+#         (optional) string array, 'key=value' pairs for extra fields which are
+#         specified in a source Info.plist template.
+#
+#     output_name:
+#         (optional) string, name of the generated plist file, default to
+#         "$target_gen_dir/$target_name.plist".
+template("info_plist") {
+  assert(defined(invoker.executable_name),
+         "The executable_name must be specified for $target_name")
+  executable_name = invoker.executable_name
+
+  compile_plist(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "plist_templates",
+                             "testonly",
+                             "deps",
+                             "visibility",
+                             "format",
+                           ])
+
+    if (defined(invoker.output_name)) {
+      output_name = invoker.output_name
+    } else {
+      output_name = "$target_gen_dir/$target_name.plist"
+    }
+
+    substitutions = [
+      "BUILD_MACHINE_OS_BUILD=$machine_os_build",
+      "EXECUTABLE_NAME=$executable_name",
+      "GCC_VERSION=com.apple.compilers.llvm.clang.1_0",
+      "PRODUCT_NAME=$executable_name",
+      "XCODE_BUILD=$xcode_build",
+      "XCODE_VERSION=$xcode_version",
+    ]
+    if (defined(invoker.extra_substitutions)) {
+      substitutions += invoker.extra_substitutions
+    }
+  }
+}
+
+# Template to compile .xib and .storyboard files.
+#
+# Arguments
+#
+#     sources:
+#         list of string, sources to compile
+#
+#     ibtool_flags:
+#         (optional) list of string, additional flags to pass to the ibtool
+template("compile_ib_files") {
+  action_foreach(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+    assert(defined(invoker.sources),
+           "sources must be specified for $target_name")
+    assert(defined(invoker.output_extension),
+           "output_extension must be specified for $target_name")
+
+    ibtool_flags = []
+    if (defined(invoker.ibtool_flags)) {
+      ibtool_flags = invoker.ibtool_flags
+    }
+
+    _output_extension = invoker.output_extension
+
+    script = "//build/config/mac/compile_ib_files.py"
+    sources = invoker.sources
+    outputs = [
+      "$target_gen_dir/$target_name/{{source_name_part}}.$_output_extension",
+    ]
+    args = [
+      "--input",
+      "{{source}}",
+      "--output",
+      rebase_path(
+          "$target_gen_dir/$target_name/{{source_name_part}}.$_output_extension"),
+    ]
+    if (!use_system_xcode) {
+      args += [
+        "--developer_dir",
+        hermetic_xcode_path,
+      ]
+    }
+    args += ibtool_flags
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/compile_ib_files.py
@@ -0,0 +1,57 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import argparse
+import logging
+import os
+import re
+import subprocess
+import sys
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='A script to compile xib and storyboard.',
+      fromfile_prefix_chars='@')
+  parser.add_argument('-o', '--output', required=True,
+                      help='Path to output bundle.')
+  parser.add_argument('-i', '--input', required=True,
+                      help='Path to input xib or storyboard.')
+  parser.add_argument('--developer_dir', required=False,
+                      help='Path to Xcode.')
+  args, unknown_args = parser.parse_known_args()
+
+  if args.developer_dir:
+    os.environ['DEVELOPER_DIR'] = args.developer_dir
+
+  ibtool_args = [
+      'xcrun', 'ibtool',
+      '--errors', '--warnings', '--notices',
+      '--output-format', 'human-readable-text'
+  ]
+  ibtool_args += unknown_args
+  ibtool_args += [
+      '--compile',
+      os.path.abspath(args.output),
+      os.path.abspath(args.input)
+  ]
+
+  ibtool_section_re = re.compile(r'/\*.*\*/')
+  ibtool_re = re.compile(r'.*note:.*is clipping its content')
+  ibtoolout = subprocess.Popen(ibtool_args, stdout=subprocess.PIPE)
+  current_section_header = None
+  for line in ibtoolout.stdout:
+    if ibtool_section_re.match(line):
+      current_section_header = line
+    elif not ibtool_re.match(line):
+      if current_section_header:
+        sys.stdout.write(current_section_header)
+        current_section_header = None
+      sys.stdout.write(line)
+  return ibtoolout.wait()
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/mac_sdk.gni
@@ -0,0 +1,105 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/toolchain/toolchain.gni")
+
+# See https://bugs.chromium.org/p/webrtc/issues/detail?id=5453.
+# We can drop the rtc_require_mac_10_7_deployment flag when Chromium
+# also requires a 10.7 deployment target.
+import("//build_overrides/build.gni")
+
+declare_args() {
+  # Minimum supported version of the Mac SDK.
+  mac_sdk_min = mac_sdk_min_build_override
+
+  # Minimum supported version of OSX.
+  mac_deployment_target = "10.9"
+
+  # Path to a specific version of the Mac SDK, not including a slash at the end.
+  # If empty, the path to the lowest version greater than or equal to
+  # mac_sdk_min is used.
+  mac_sdk_path = ""
+
+  # The SDK name as accepted by xcodebuild.
+  mac_sdk_name = "macosx"
+}
+
+# Check that the version of macOS SDK used is the one requested when building
+# a version of Chrome shipped to the users. Disable the check if building for
+# iOS as the version macOS SDK used is not relevant for the tool build for the
+# host (they are not shipped) --- this is required as Chrome on iOS is usually
+# build with the latest version of Xcode that may not ship with the version of
+# the macOS SDK used to build Chrome on mac.
+# TODO(crbug.com/635745): the check for target_os should be replaced by a
+# check that current_toolchain is default_toolchain, and the file should
+# assert that current_os is "mac" once this file is no longer included by
+# iOS toolchains.
+_verify_sdk = is_chrome_branded && is_official_build && target_os != "ios"
+
+find_sdk_args = [ "--print_sdk_path" ]
+if (!use_system_xcode) {
+  find_sdk_args += [
+    "--developer_dir",
+    hermetic_xcode_path,
+  ]
+}
+if (_verify_sdk) {
+  find_sdk_args += [
+    "--verify",
+    mac_sdk_min,
+    "--sdk_path=" + mac_sdk_path,
+  ]
+} else {
+  find_sdk_args += [ mac_sdk_min ]
+}
+
+# The tool will print the SDK path on the first line, and the version on the
+# second line.
+find_sdk_lines =
+    exec_script("//build/mac/find_sdk.py", find_sdk_args, "list lines")
+mac_sdk_version = find_sdk_lines[1]
+if (mac_sdk_path == "") {
+  mac_sdk_path = find_sdk_lines[0]
+}
+
+script_name = "//build/config/mac/sdk_info.py"
+sdk_info_args = []
+if (!use_system_xcode) {
+  sdk_info_args += [
+    "--developer_dir",
+    hermetic_xcode_path,
+  ]
+}
+sdk_info_args += [ mac_sdk_name ]
+
+_mac_sdk_result = exec_script(script_name, sdk_info_args, "scope")
+xcode_version = _mac_sdk_result.xcode_version
+xcode_build = _mac_sdk_result.xcode_build
+machine_os_build = _mac_sdk_result.machine_os_build
+
+if (mac_sdk_version != mac_sdk_min_build_override &&
+    exec_script("//build/check_return_value.py",
+                [
+                  "test",
+                  xcode_version,
+                  "-ge",
+                  "0730",
+                ],
+                "value") != 1) {
+  print(
+      "********************************************************************************")
+  print(
+      " WARNING: The Mac OS X SDK is incompatible with the version of Xcode. To fix,")
+  print(
+      "          either upgrade Xcode to the latest version or install the Mac OS X")
+  print(
+      "          $mac_sdk_min_build_override SDK. For more information, see https://crbug.com/620127.")
+  print()
+  print(" Current SDK Version:   $mac_sdk_version")
+  print(" Current Xcode Version: $xcode_version ($xcode_build)")
+  print(
+      "********************************************************************************")
+  assert(false, "SDK is incompatible with Xcode")
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/package_framework.py
@@ -0,0 +1,60 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import errno
+import os
+import shutil
+import sys
+
+def Main():
+  parser = argparse.ArgumentParser(description='Create Mac Framework symlinks')
+  parser.add_argument('--framework', action='store', type=str, required=True)
+  parser.add_argument('--version', action='store', type=str)
+  parser.add_argument('--contents', action='store', type=str, nargs='+')
+  parser.add_argument('--stamp', action='store', type=str, required=True)
+  args = parser.parse_args()
+
+  VERSIONS = 'Versions'
+  CURRENT = 'Current'
+
+  # Ensure the Foo.framework/Versions/A/ directory exists and create the
+  # Foo.framework/Versions/Current symlink to it.
+  if args.version:
+    try:
+      os.makedirs(os.path.join(args.framework, VERSIONS, args.version), 0744)
+    except OSError as e:
+      if e.errno != errno.EEXIST:
+        raise e
+    _Relink(os.path.join(args.version),
+            os.path.join(args.framework, VERSIONS, CURRENT))
+
+  # Establish the top-level symlinks in the framework bundle. The dest of
+  # the symlinks may not exist yet.
+  if args.contents:
+    for item in args.contents:
+      _Relink(os.path.join(VERSIONS, CURRENT, item),
+              os.path.join(args.framework, item))
+
+  # Write out a stamp file.
+  if args.stamp:
+    with open(args.stamp, 'w') as f:
+      f.write(str(args))
+
+  return 0
+
+
+def _Relink(dest, link):
+  """Creates a symlink to |dest| named |link|. If |link| already exists,
+  it is overwritten."""
+  try:
+    os.remove(link)
+  except OSError as e:
+    if e.errno != errno.ENOENT:
+      shutil.rmtree(link)
+  os.symlink(dest, link)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/plist_util.py
@@ -0,0 +1,254 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import plistlib
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import shlex
+
+
+# Xcode substitutes variables like ${PRODUCT_NAME} when compiling Info.plist.
+# It also supports modifiers like :identifier or :rfc1034identifier.
+# SUBST_RE matches a variable substitution pattern with an optional modifier,
+# while IDENT_RE matches all characters that are not valid in an "identifier"
+# value (used when applying the modifier).
+SUBST_RE = re.compile(r'\$\{(?P<id>[^}]*?)(?P<modifier>:[^}]*)?\}')
+IDENT_RE = re.compile(r'[_/\s]')
+
+
+def InterpolateList(values, substitutions):
+  """Interpolates variable references into |value| using |substitutions|.
+
+  Inputs:
+    values: a list of values
+    substitutions: a mapping of variable names to values
+
+  Returns:
+    A new list of values with all variable references ${VARIABLE} replaced
+    by their value in |substitutions| or None if any of the variables has no
+    substitution.
+  """
+  result = []
+  for value in values:
+    interpolated = InterpolateValue(value, substitutions)
+    if interpolated is None:
+      return None
+    result.append(interpolated)
+  return result
+
+
+def InterpolateString(value, substitutions):
+  """Interpolates variable references into |value| using |substitutions|.
+
+  Inputs:
+    value: a string
+    substitutions: a mapping of variable names to values
+
+  Returns:
+    A new string with all variables references ${VARIABLES} replaced by their
+    value in |substitutions| or None if any of the variable has no substitution.
+  """
+  result = value
+  for match in reversed(list(SUBST_RE.finditer(value))):
+    variable = match.group('id')
+    if variable not in substitutions:
+      return None
+    # Some values need to be identifier and thus the variables references may
+    # contains :modifier attributes to indicate how they should be converted
+    # to identifiers ("identifier" replaces all invalid characters by '_' and
+    # "rfc1034identifier" replaces them by "-" to make valid URI too).
+    modifier = match.group('modifier')
+    if modifier == ':identifier':
+      interpolated = IDENT_RE.sub('_', substitutions[variable])
+    elif modifier == ':rfc1034identifier':
+      interpolated = IDENT_RE.sub('-', substitutions[variable])
+    else:
+      interpolated = substitutions[variable]
+    result = result[:match.start()] + interpolated + result[match.end():]
+  return result
+
+
+def InterpolateValue(value, substitutions):
+  """Interpolates variable references into |value| using |substitutions|.
+
+  Inputs:
+    value: a value, can be a dictionary, list, string or other
+    substitutions: a mapping of variable names to values
+
+  Returns:
+    A new value with all variables references ${VARIABLES} replaced by their
+    value in |substitutions| or None if any of the variable has no substitution.
+  """
+  if isinstance(value, dict):
+    return Interpolate(value, substitutions)
+  if isinstance(value, list):
+    return InterpolateList(value, substitutions)
+  if isinstance(value, basestring):
+    return InterpolateString(value, substitutions)
+  return value
+
+
+def Interpolate(plist, substitutions):
+  """Interpolates variable references into |value| using |substitutions|.
+
+  Inputs:
+    plist: a dictionary representing a Property List (.plist) file
+    substitutions: a mapping of variable names to values
+
+  Returns:
+    A new plist with all variables references ${VARIABLES} replaced by their
+    value in |substitutions|. All values that contains references with no
+    substitutions will be removed and the corresponding key will be cleared
+    from the plist (not recursively).
+  """
+  result = {}
+  for key in plist:
+    value = InterpolateValue(plist[key], substitutions)
+    if value is not None:
+      result[key] = value
+  return result
+
+
+def LoadPList(path):
+  """Loads Plist at |path| and returns it as a dictionary."""
+  fd, name = tempfile.mkstemp()
+  try:
+    subprocess.check_call(['plutil', '-convert', 'xml1', '-o', name, path])
+    with os.fdopen(fd, 'r') as f:
+      return plistlib.readPlist(f)
+  finally:
+    os.unlink(name)
+
+
+def SavePList(path, format, data):
+  """Saves |data| as a Plist to |path| in the specified |format|."""
+  fd, name = tempfile.mkstemp()
+  try:
+    with os.fdopen(fd, 'w') as f:
+      plistlib.writePlist(data, f)
+    subprocess.check_call(['plutil', '-convert', format, '-o', path, name])
+  finally:
+    os.unlink(name)
+
+
+def MergePList(plist1, plist2):
+  """Merges |plist1| with |plist2| recursively.
+
+  Creates a new dictionary representing a Property List (.plist) files by
+  merging the two dictionary |plist1| and |plist2| recursively (only for
+  dictionary values). List value will be concatenated.
+
+  Args:
+    plist1: a dictionary representing a Property List (.plist) file
+    plist2: a dictionary representing a Property List (.plist) file
+
+  Returns:
+    A new dictionary representing a Property List (.plist) file by merging
+    |plist1| with |plist2|. If any value is a dictionary, they are merged
+    recursively, otherwise |plist2| value is used. If values are list, they
+    are concatenated.
+  """
+  if not isinstance(plist1, dict) or not isinstance(plist2, dict):
+    if plist2 is not None:
+      return plist2
+    else:
+      return plist1
+  result = {}
+  for key in set(plist1) | set(plist2):
+    if key in plist2:
+      value = plist2[key]
+    else:
+      value = plist1[key]
+    if isinstance(value, dict):
+      value = MergePList(plist1.get(key, None), plist2.get(key, None))
+    if isinstance(value, list):
+      value = plist1.get(key, []) + plist2.get(key, [])
+    result[key] = value
+  return result
+
+
+class Action(object):
+  """Class implementing one action supported by the script."""
+
+  @classmethod
+  def Register(cls, subparsers):
+    parser = subparsers.add_parser(cls.name, help=cls.help)
+    parser.set_defaults(func=cls._Execute)
+    cls._Register(parser)
+
+
+class MergeAction(Action):
+  """Class to merge multiple plist files."""
+
+  name = 'merge'
+  help = 'merge multiple plist files'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument(
+        '-o', '--output', required=True,
+        help='path to the output plist file')
+    parser.add_argument(
+        '-f', '--format', required=True, choices=('xml1', 'binary1', 'json'),
+        help='format of the plist file to generate')
+    parser.add_argument(
+          'path', nargs="+",
+          help='path to plist files to merge')
+
+  @staticmethod
+  def _Execute(args):
+    data = {}
+    for filename in args.path:
+      data = MergePList(data, LoadPList(filename))
+    SavePList(args.output, args.format, data)
+
+
+class SubstituteAction(Action):
+  """Class implementing the variable substitution in a plist file."""
+
+  name = 'substitute'
+  help = 'perform pattern substitution in a plist file'
+
+  @staticmethod
+  def _Register(parser):
+    parser.add_argument(
+        '-o', '--output', required=True,
+        help='path to the output plist file')
+    parser.add_argument(
+        '-t', '--template', required=True,
+        help='path to the template file')
+    parser.add_argument(
+        '-s', '--substitution', action='append', default=[],
+        help='substitution rule in the format key=value')
+    parser.add_argument(
+        '-f', '--format', required=True, choices=('xml1', 'binary1', 'json'),
+        help='format of the plist file to generate')
+
+  @staticmethod
+  def _Execute(args):
+    substitutions = {}
+    for substitution in args.substitution:
+      key, value = substitution.split('=', 1)
+      substitutions[key] = value
+    data = Interpolate(LoadPList(args.template), substitutions)
+    SavePList(args.output, args.format, data)
+
+
+def Main():
+  parser = argparse.ArgumentParser(description='manipulate plist files')
+  subparsers = parser.add_subparsers()
+
+  for action in [MergeAction, SubstituteAction]:
+    action.Register(subparsers)
+
+  args = parser.parse_args()
+  args.func(args)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/prepare_framework_version.py
@@ -0,0 +1,42 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import shutil
+import sys
+
+# Ensures that the current version matches the last-produced version, which is
+# stored in the version_file. If it does not, then the framework_root_dir is
+# obliterated.
+# Usage: python prepare_framework_version.py out/obj/version_file \
+#                                            out/Framework.framework \
+#                                            'A'
+
+def PrepareFrameworkVersion(version_file, framework_root_dir, version):
+  # Test what the current framework version is. Stop if it is up-to-date.
+  try:
+    with open(version_file, 'r') as f:
+      current_version = f.read()
+      if current_version == version:
+        return
+  except IOError:
+    pass
+
+  # The framework version has changed, so clobber the framework.
+  if os.path.exists(framework_root_dir):
+    shutil.rmtree(framework_root_dir)
+
+  # Write out the new framework version file, making sure its containing
+  # directory exists.
+  dirname = os.path.dirname(version_file)
+  if not os.path.isdir(dirname):
+    os.makedirs(dirname, 0700)
+
+  with open(version_file, 'w+') as f:
+    f.write(version)
+
+
+if __name__ == '__main__':
+  PrepareFrameworkVersion(sys.argv[1], sys.argv[2], sys.argv[3])
+  sys.exit(0)
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/rules.gni
@@ -0,0 +1,692 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/mac/base_rules.gni")
+
+# Generates Info.plist files for Mac apps and frameworks.
+#
+# Arguments
+#
+#     info_plist:
+#         (optional) string, path to the Info.plist file that will be used for
+#         the bundle.
+#
+#     info_plist_target:
+#         (optional) string, if the info_plist is generated from an action,
+#         rather than a regular source file, specify the target name in lieu
+#         of info_plist. The two arguments are mutually exclusive.
+#
+#     executable_name:
+#         string, name of the generated target used for the product
+#         and executable name as specified in the output Info.plist.
+#
+#     extra_substitutions:
+#         (optional) string array, 'key=value' pairs for extra fields which are
+#         specified in a source Info.plist template.
+template("mac_info_plist") {
+  assert(defined(invoker.info_plist) != defined(invoker.info_plist_target),
+         "Only one of info_plist or info_plist_target may be specified in " +
+             target_name)
+
+  if (defined(invoker.info_plist)) {
+    _info_plist = invoker.info_plist
+  } else {
+    _info_plist_target_output = get_target_outputs(invoker.info_plist_target)
+    _info_plist = _info_plist_target_output[0]
+  }
+
+  info_plist(target_name) {
+    format = "xml1"
+    extra_substitutions = []
+    if (defined(invoker.extra_substitutions)) {
+      extra_substitutions = invoker.extra_substitutions
+    }
+    extra_substitutions += [
+      "MAC_SDK_BUILD=$mac_sdk_version",
+      "MAC_SDK_NAME=$mac_sdk_name$mac_sdk_version",
+    ]
+    plist_templates = [
+      "//build/config/mac/BuildInfo.plist",
+      _info_plist,
+    ]
+    if (defined(invoker.info_plist_target)) {
+      deps = [
+        invoker.info_plist_target,
+      ]
+    }
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "executable_name",
+                           ])
+  }
+}
+
+# Template to compile and package Mac XIB files as bundle data.
+#
+# Arguments
+#
+#     sources:
+#         list of string, sources to compile
+#
+#     output_path:
+#         (optional) string, the path to use for the outputs list in the
+#         bundle_data step. If unspecified, defaults to bundle_resources_dir.
+template("mac_xib_bundle_data") {
+  _target_name = target_name
+  _compile_target_name = _target_name + "_compile_ibtool"
+
+  compile_ib_files(_compile_target_name) {
+    forward_variables_from(invoker, [ "testonly" ])
+    visibility = [ ":$_target_name" ]
+    sources = invoker.sources
+    output_extension = "nib"
+    ibtool_flags = [
+      "--minimum-deployment-target",
+      mac_deployment_target,
+
+      # TODO(rsesek): Enable this once all the bots are on Xcode 7+.
+      # "--target-device",
+      # "mac",
+    ]
+  }
+
+  bundle_data(_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "visibility",
+                           ])
+
+    public_deps = [
+      ":$_compile_target_name",
+    ]
+    sources = get_target_outputs(":$_compile_target_name")
+
+    _output_path = "{{bundle_resources_dir}}"
+    if (defined(invoker.output_path)) {
+      _output_path = invoker.output_path
+    }
+
+    outputs = [
+      "$_output_path/{{source_file_part}}",
+    ]
+  }
+}
+
+# Template to package a shared library into a Mac framework bundle.
+#
+# By default, the bundle target this template generates does not link the
+# resulting framework into anything that depends on it. If a dependency wants
+# a link-time (as well as build-time) dependency on the framework bundle,
+# depend against "$target_name+link". If only the build-time dependency is
+# required (e.g., for copying into another bundle), then use "$target_name".
+#
+# Arguments
+#
+#     info_plist:
+#         (optional) string, path to the Info.plist file that will be used for
+#         the bundle.
+#
+#     info_plist_target:
+#         (optional) string, if the info_plist is generated from an action,
+#         rather than a regular source file, specify the target name in lieu
+#         of info_plist. The two arguments are mutually exclusive.
+#
+#     output_name:
+#         (optional) string, name of the generated framework without the
+#         .framework suffix. If omitted, defaults to target_name.
+#
+#     framework_version:
+#         (optional) string, version of the framework. Typically this is a
+#         single letter, like "A". If omitted, the Versions/ subdirectory
+#         structure will not be created, and build output will go directly
+#         into the framework subdirectory.
+#
+#     framework_contents:
+#         (optional) list of string, top-level items in the framework. For
+#         frameworks with a framework_version, this is the list of symlinks to
+#         create in the .framework directory that link into Versions/Current/.
+#
+#     extra_substitutions:
+#         (optional) string array, 'key=value' pairs for extra fields which are
+#         specified in a source Info.plist template.
+#
+# This template provides two targets for the resulting framework bundle. The
+# link-time behavior varies depending on which of the two targets below is
+# added as a dependency:
+#   - $target_name only adds a build-time dependency. Targets that depend on
+#     it will not link against the framework.
+#   - $target_name+link adds a build-time and link-time dependency. Targets
+#     that depend on it will link against the framework.
+#
+# The build-time-only dependency is used for when a target needs to use the
+# framework either only for resources, or because the target loads it at run-
+# time, via dlopen() or NSBundle. The link-time dependency will cause the
+# dependee to have the framework loaded by dyld at launch.
+#
+# Example of build-time only dependency:
+#
+#     mac_framework_bundle("CoreTeleportation") {
+#       sources = [ ... ]
+#     }
+#
+#     bundle_data("core_teleportation_bundle_data") {
+#       deps = [ ":CoreTeleportation" ]
+#       sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+#       outputs = [ "{{bundle_root_dir}}/Frameworks/{{source_file_part}}" ]
+#     }
+#
+#     app_bundle("GoatTeleporter") {
+#       sources = [ ... ]
+#       deps = [
+#         ":core_teleportation_bundle_data",
+#       ]
+#     }
+#
+# The GoatTeleporter.app will not directly link against
+# CoreTeleportation.framework, but it will be included in the bundle's
+# Frameworks directory.
+#
+# Example of link-time dependency:
+#
+#     mac_framework_bundle("CoreTeleportation") {
+#       sources = [ ... ]
+#       ldflags = [
+#         "-install_name",
+#         "@executable_path/../Frameworks/$target_name.framework"
+#       ]
+#     }
+#
+#     bundle_data("core_teleportation_bundle_data") {
+#       deps = [ ":CoreTeleportation+link" ]
+#       sources = [ "$root_out_dir/CoreTeleportation.framework" ]
+#       outputs = [ "{{bundle_root_dir}}/Frameworks/{{source_file_part}}" ]
+#     }
+#
+#     app_bundle("GoatTeleporter") {
+#       sources = [ ... ]
+#       deps = [
+#         ":core_teleportation_bundle_data",
+#       ]
+#     }
+#
+# Note that the framework is still copied to the app's bundle, but dyld will
+# load this library when the app is launched because it uses the "+link"
+# target as a dependency. This also requires that the framework set its
+# install_name so that dyld can locate it.
+#
+# See "gn help shared_library" for more information on arguments supported
+# by shared library target.
+template("mac_framework_bundle") {
+  assert(defined(invoker.deps),
+         "Dependencies must be specified for $target_name")
+  assert(!defined(invoker.framework_contents) ||
+             defined(invoker.framework_version),
+         "framework_contents requires a versioned framework")
+
+  _info_plist_target = target_name + "_info_plist"
+
+  mac_info_plist(_info_plist_target) {
+    executable_name = target_name
+    if (defined(invoker.output_name)) {
+      executable_name = invoker.output_name
+    }
+    forward_variables_from(invoker,
+                           [
+                             "extra_substitutions",
+                             "info_plist",
+                             "info_plist_target",
+                             "testonly",
+                           ])
+  }
+
+  _info_plist_bundle_data = _info_plist_target + "_bundle_data"
+
+  bundle_data(_info_plist_bundle_data) {
+    forward_variables_from(invoker, [ "testonly" ])
+    sources = get_target_outputs(":$_info_plist_target")
+    outputs = [
+      "{{bundle_resources_dir}}/Info.plist",
+    ]
+    public_deps = [
+      ":$_info_plist_target",
+    ]
+  }
+
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  # Create a file to track the build dependency on the framework_version and
+  # framework_contents variables.
+  _framework_toc = []
+  if (defined(invoker.framework_version)) {
+    _framework_toc += [
+      "Version=" + invoker.framework_version,
+      _output_name,
+    ]
+    _framework_contents = [ _output_name ]
+  }
+  if (defined(invoker.framework_contents)) {
+    _framework_toc += invoker.framework_contents
+    _framework_contents += invoker.framework_contents
+  }
+  _framework_toc_file = "$target_out_dir/${target_name}.toc"
+  write_file(_framework_toc_file, _framework_toc)
+
+  # Create local variables for referencing different parts of the bundle.
+  _framework_target = _target_name
+  _framework_name = _output_name + ".framework"
+  _framework_base_dir = "$root_out_dir/$_framework_name"
+  if (defined(invoker.framework_version) && invoker.framework_version != "") {
+    _framework_version = invoker.framework_version
+    _framework_root_dir = _framework_base_dir + "/Versions/$_framework_version"
+  } else {
+    _framework_root_dir = _framework_base_dir
+  }
+
+  # Clean the entire framework if the framework_version changes.
+  _version_arg = "''"
+  if (defined(invoker.framework_version)) {
+    _version_arg = _framework_version
+  }
+  _version_file = "$target_out_dir/${target_name}_version"
+  exec_script("//build/config/mac/prepare_framework_version.py",
+              [
+                rebase_path(_version_file),
+                rebase_path(_framework_base_dir),
+                _version_arg,
+              ])
+
+  # Create the symlinks.
+  _framework_package_target = target_name + "_package"
+  action(_framework_package_target) {
+    script = "//build/config/mac/package_framework.py"
+
+    # The TOC file never needs to be read, since its contents are the values
+    # of GN variables. It is only used to trigger this rule when the values
+    # change.
+    inputs = [
+      _framework_toc_file,
+    ]
+
+    _stamp_file = "$target_out_dir/run_${_framework_package_target}.stamp"
+    outputs = [
+      _stamp_file,
+    ]
+
+    visibility = [ ":$_framework_target" ]
+
+    args = [
+      "--framework",
+      rebase_path(_framework_base_dir, root_build_dir),
+      "--stamp",
+      rebase_path(_stamp_file, root_build_dir),
+    ]
+
+    if (defined(invoker.framework_version)) {
+      args += [
+                "--version",
+                invoker.framework_version,
+                "--contents",
+              ] + _framework_contents
+      # It is not possible to list _framework_contents as outputs, since
+      # ninja does not properly stat symbolic links.
+      # https://github.com/ninja-build/ninja/issues/1186
+    }
+  }
+
+  _link_shared_library_target = target_name + "_shared_library"
+  _shared_library_bundle_data = target_name + "_shared_library_bundle_data"
+
+  shared_library(_link_shared_library_target) {
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "assert_no_deps",
+                             "bundle_deps",
+                             "code_signing_enabled",
+                             "data_deps",
+                             "info_plist",
+                             "info_plist_target",
+                             "output_name",
+                             "visibility",
+                           ])
+    visibility = [ ":$_shared_library_bundle_data" ]
+    output_name = _output_name
+    output_prefix_override = true
+    output_extension = ""
+    output_dir = "$target_out_dir/$_link_shared_library_target"
+  }
+
+  bundle_data(_shared_library_bundle_data) {
+    visibility = [ ":$_framework_target" ]
+    forward_variables_from(invoker, [ "testonly" ])
+    sources = [
+      "$target_out_dir/$_link_shared_library_target/$_output_name",
+    ]
+    outputs = [
+      "{{bundle_executable_dir}}/$_output_name",
+    ]
+    public_deps = [
+      ":$_link_shared_library_target",
+    ]
+  }
+
+  _framework_public_config = _target_name + "_public_config"
+  config(_framework_public_config) {
+    # TODO(sdefresne): should we have a framework_dirs similar to lib_dirs
+    # and include_dirs to avoid duplicate values on the command-line.
+    visibility = [ ":$_framework_target" ]
+    ldflags = [
+      "-F",
+      rebase_path("$root_out_dir/.", root_build_dir),
+    ]
+    lib_dirs = [ root_out_dir ]
+    libs = [ _framework_name ]
+  }
+
+  create_bundle(_framework_target) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                           ])
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+      visibility += [ ":$_target_name+link" ]
+    }
+
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ ":$_info_plist_bundle_data" ]
+
+    if (defined(invoker.bundle_deps)) {
+      deps += invoker.bundle_deps
+    }
+
+    if (!defined(public_deps)) {
+      public_deps = []
+    }
+    public_deps += [
+      ":$_framework_package_target",
+      ":$_shared_library_bundle_data",
+    ]
+
+    bundle_root_dir = _framework_root_dir
+    bundle_resources_dir = "$bundle_root_dir/Resources"
+    bundle_executable_dir = "$bundle_root_dir"
+  }
+
+  group(_target_name + "+link") {
+    forward_variables_from(invoker,
+                           [
+                             "public_configs",
+                             "testonly",
+                             "visibility",
+                           ])
+    public_deps = [
+      ":$_target_name",
+    ]
+    if (!defined(public_configs)) {
+      public_configs = []
+    }
+    public_configs += [ ":$_framework_public_config" ]
+  }
+}
+
+set_defaults("mac_framework_bundle") {
+  configs = default_shared_library_configs
+}
+
+# Template to create a Mac executable application bundle.
+#
+# Arguments
+#
+#     package_type:
+#         (optional) string, the product package type to create. Options are:
+#             "app" to create a .app bundle (default)
+#             "xpc" to create an .xpc service bundle
+#
+#     info_plist:
+#         (optional) string, path to the Info.plist file that will be used for
+#         the bundle.
+#
+#     info_plist_target:
+#         (optional) string, if the info_plist is generated from an action,
+#         rather than a regular source file, specify the target name in lieu
+#         of info_plist. The two arguments are mutually exclusive.
+#
+#     output_name:
+#         (optional) string, name of the generated app without the
+#         .app suffix. If omitted, defaults to target_name.
+#
+#     extra_configs:
+#         (optional) list of label, additional configs to apply to the
+#         executable target.
+#
+#     remove_configs:
+#         (optional) list of label, default configs to remove from the target.
+#
+#     extra_substitutions:
+#         (optional) string array, 'key=value' pairs for extra fields which are
+#         specified in a source Info.plist template.
+template("mac_app_bundle") {
+  _target_name = target_name
+  _output_name = target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  _package_type = "app"
+  if (defined(invoker.package_type)) {
+    _package_type = invoker.package_type
+  }
+
+  if (_package_type == "app") {
+    _output_extension = "app"
+    _product_type = "com.apple.product-type.application"
+    _write_pkg_info = true
+  } else if (_package_type == "xpc") {
+    _output_extension = "xpc"
+    _product_type = "com.apple.product-type.xpc-service"
+    _write_pkg_info = false
+  } else {
+    assert(false, "Unsupported package_type: " + _package_type)
+  }
+
+  _executable_target = target_name + "_executable"
+  _executable_bundle_data = _executable_target + "_bundle_data"
+
+  _info_plist_target = target_name + "_info_plist"
+
+  mac_info_plist(_info_plist_target) {
+    executable_name = _output_name
+    forward_variables_from(invoker,
+                           [
+                             "extra_substitutions",
+                             "info_plist",
+                             "info_plist_target",
+                             "testonly",
+                           ])
+  }
+
+  if (_write_pkg_info) {
+    _pkg_info_target = target_name + "_pkg_info"
+
+    action(_pkg_info_target) {
+      forward_variables_from(invoker, [ "testonly" ])
+      script = "//build/config/mac/write_pkg_info.py"
+      sources = get_target_outputs(":$_info_plist_target")
+      outputs = [
+        "$target_gen_dir/$_pkg_info_target",
+      ]
+      args = [ "--plist" ] + rebase_path(sources, root_build_dir) +
+             [ "--output" ] + rebase_path(outputs, root_build_dir)
+      deps = [
+        ":$_info_plist_target",
+      ]
+    }
+  }
+
+  executable(_executable_target) {
+    visibility = [ ":$_executable_bundle_data" ]
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "assert_no_deps",
+                             "data_deps",
+                             "info_plist",
+                             "output_name",
+                             "visibility",
+                           ])
+    if (defined(extra_configs)) {
+      configs += extra_configs
+    }
+    if (defined(remove_configs)) {
+      configs -= remove_configs
+    }
+    output_name = _output_name
+    output_dir = "$target_out_dir/$_executable_target"
+  }
+
+  bundle_data(_executable_bundle_data) {
+    visibility = [ ":$_target_name" ]
+    forward_variables_from(invoker, [ "testonly" ])
+    sources = [
+      "$target_out_dir/$_executable_target/$_output_name",
+    ]
+    outputs = [
+      "{{bundle_executable_dir}}/$_output_name",
+    ]
+    public_deps = [
+      ":$_executable_target",
+    ]
+  }
+
+  _info_plist_bundle_data = _info_plist_target + "_bundle_data"
+
+  bundle_data(_info_plist_bundle_data) {
+    forward_variables_from(invoker, [ "testonly" ])
+    visibility = [ ":$_target_name" ]
+    sources = get_target_outputs(":$_info_plist_target")
+    outputs = [
+      "{{bundle_root_dir}}/Info.plist",
+    ]
+    public_deps = [
+      ":$_info_plist_target",
+    ]
+  }
+
+  if (_write_pkg_info) {
+    _pkg_info_bundle_data = _pkg_info_target + "_bundle_data"
+
+    bundle_data(_pkg_info_bundle_data) {
+      forward_variables_from(invoker, [ "testonly" ])
+      visibility = [ ":$_target_name" ]
+      sources = get_target_outputs(":$_pkg_info_target")
+      outputs = [
+        "{{bundle_root_dir}}/PkgInfo",
+      ]
+      public_deps = [
+        ":$_pkg_info_target",
+      ]
+    }
+  }
+
+  create_bundle(_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [
+      ":$_executable_bundle_data",
+      ":$_info_plist_bundle_data",
+    ]
+    if (_write_pkg_info) {
+      deps += [ ":$_pkg_info_bundle_data" ]
+    }
+    product_type = _product_type
+    bundle_root_dir =
+        "$root_out_dir/${_output_name}.${_output_extension}/Contents"
+    bundle_resources_dir = "$bundle_root_dir/Resources"
+    bundle_executable_dir = "$bundle_root_dir/MacOS"
+  }
+}
+
+# Template to package a loadable_module into a .plugin bundle.
+#
+# This takes no extra arguments that differ from a loadable_module.
+template("mac_plugin_bundle") {
+  assert(defined(invoker.deps),
+         "Dependencies must be specified for $target_name")
+
+  _target_name = target_name
+  _loadable_module_target = _target_name + "_loadable_module"
+  _loadable_module_bundle_data = _loadable_module_target + "_bundle_data"
+
+  _output_name = _target_name
+  if (defined(invoker.output_name)) {
+    _output_name = invoker.output_name
+  }
+
+  loadable_module(_loadable_module_target) {
+    visibility = [ ":$_loadable_module_bundle_data" ]
+    forward_variables_from(invoker,
+                           "*",
+                           [
+                             "assert_no_deps",
+                             "data_deps",
+                             "output_name",
+                             "visibility",
+                           ])
+    output_dir = "$target_out_dir"
+    output_name = _output_name
+  }
+
+  bundle_data(_loadable_module_bundle_data) {
+    forward_variables_from(invoker, [ "testonly" ])
+    visibility = [ ":$_target_name" ]
+    sources = [
+      "$target_out_dir/${_output_name}.so",
+    ]
+    outputs = [
+      "{{bundle_executable_dir}}/$_output_name",
+    ]
+    public_deps = [
+      ":$_loadable_module_target",
+    ]
+  }
+
+  create_bundle(_target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "public_deps",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(deps)) {
+      deps = []
+    }
+    deps += [ ":$_loadable_module_bundle_data" ]
+
+    bundle_root_dir = "$root_out_dir/$_output_name.plugin/Contents"
+    bundle_executable_dir = "$bundle_root_dir/MacOS"
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/sdk_info.py
@@ -0,0 +1,73 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+
+# This script prints information about the build system, the operating
+# system and the iOS or Mac SDK (depending on the platform "iphonesimulator",
+# "iphoneos" or "macosx" generally).
+#
+# In the GYP build, this is done inside GYP itself based on the SDKROOT
+# variable.
+
+def FormatVersion(version):
+  """Converts Xcode version to a format required for Info.plist."""
+  version = version.replace('.', '')
+  version = version + '0' * (3 - len(version))
+  return version.zfill(4)
+
+
+def FillXcodeVersion(settings):
+  """Fills the Xcode version and build number into |settings|."""
+  lines = subprocess.check_output(['xcodebuild', '-version']).splitlines()
+  settings['xcode_version'] = FormatVersion(lines[0].split()[-1])
+  settings['xcode_build'] = lines[-1].split()[-1]
+
+
+def FillMachineOSBuild(settings):
+  """Fills OS build number into |settings|."""
+  settings['machine_os_build'] = subprocess.check_output(
+      ['sw_vers', '-buildVersion']).strip()
+
+
+def FillSDKPathAndVersion(settings, platform, xcode_version):
+  """Fills the SDK path and version for |platform| into |settings|."""
+  settings['sdk_path'] = subprocess.check_output([
+      'xcrun', '-sdk', platform, '--show-sdk-path']).strip()
+  settings['sdk_version'] = subprocess.check_output([
+      'xcrun', '-sdk', platform, '--show-sdk-version']).strip()
+  settings['sdk_platform_path'] = subprocess.check_output([
+      'xcrun', '-sdk', platform, '--show-sdk-platform-path']).strip()
+  # TODO: unconditionally use --show-sdk-build-version once Xcode 7.2 or
+  # higher is required to build Chrome for iOS or OS X.
+  if xcode_version >= '0720':
+    settings['sdk_build'] = subprocess.check_output([
+        'xcrun', '-sdk', platform, '--show-sdk-build-version']).strip()
+  else:
+    settings['sdk_build'] = settings['sdk_version']
+
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser()
+  parser.add_argument("--developer_dir", required=False)
+  args, unknownargs = parser.parse_known_args()
+  if args.developer_dir:
+    os.environ['DEVELOPER_DIR'] = args.developer_dir
+
+  if len(unknownargs) != 1:
+    sys.stderr.write(
+        'usage: %s [iphoneos|iphonesimulator|macosx]\n' %
+        os.path.basename(sys.argv[0]))
+    sys.exit(1)
+
+  settings = {}
+  FillMachineOSBuild(settings)
+  FillXcodeVersion(settings)
+  FillSDKPathAndVersion(settings, unknownargs[0], settings['xcode_version'])
+
+  for key in sorted(settings):
+    print '%s="%s"' % (key, settings[key])
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/symbols.gni
@@ -0,0 +1,30 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+# This file declares arguments and configs that control whether dSYM debug
+# info is produced and whether build products are stripped.
+
+declare_args() {
+  # Produce dSYM files for targets that are configured to do so. dSYM
+  # generation is controlled globally as it is a linker output (produced via
+  # the //build/toolchain/mac/linker_driver.py). Enabling this will result in
+  # all shared library, loadable module, and executable targets having a dSYM
+  # generated.
+  enable_dsyms = is_official_build || using_sanitizer
+
+  # Strip symbols from linked targets by default. If this is enabled, the
+  # //build/config/mac:strip_all config will be applied to all linked targets.
+  # If custom stripping parameters are required, remove that config from a
+  # linked target and apply custom -Wcrl,strip flags. See
+  # //build/toolchain/mac/linker_driver.py for more information.
+  enable_stripping = is_official_build
+}
+
+# Save unstripped copies of targets with a ".unstripped" suffix. This is
+# useful to preserve the original output when enable_stripping=true but
+# we're not actually generating real dSYMs.
+save_unstripped_output = enable_stripping && !enable_dsyms
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/write_pkg_info.py
@@ -0,0 +1,47 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import plist_util
+import sys
+
+# This script creates a PkgInfo file for an OS X .app bundle's plist.
+# Usage: python write_pkg_info.py --plist Foo.app/Contents/Info.plist \
+#           --output Foo.app/Contents/PkgInfo
+
+def Main():
+  parser = argparse.ArgumentParser(
+      description='A script to write PkgInfo files for .app bundles.')
+  parser.add_argument('--plist', required=True,
+                      help='Path to the Info.plist for the .app.')
+  parser.add_argument('--output', required=True,
+                      help='Path to the desired output file.')
+  args = parser.parse_args()
+
+  # Remove the output if it exists already.
+  if os.path.exists(args.output):
+    os.unlink(args.output)
+
+  plist = plist_util.LoadPList(args.plist)
+  package_type = plist['CFBundlePackageType']
+  if package_type != 'APPL':
+    raise ValueError('Expected CFBundlePackageType to be %s, got %s' % \
+        ('APPL', package_type))
+
+  # The format of PkgInfo is eight characters, representing the bundle type
+  # and bundle signature, each four characters. If that is missing, four
+  # '?' characters are used instead.
+  signature_code = plist.get('CFBundleSignature', '????')
+  if len(signature_code) != 4:
+    raise ValueError('CFBundleSignature should be exactly four characters, ' +
+        'got %s' % signature_code)
+
+  with open(args.output, 'w') as fp:
+    fp.write('%s%s' % (package_type, signature_code))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mac/xcrun.py
@@ -0,0 +1,28 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser(
+      description='A script to execute a command via xcrun.')
+  parser.add_argument('--stamp', action='store', type=str,
+      help='Write a stamp file to this path on success.')
+  parser.add_argument('--developer_dir', required=False,
+                      help='Path to Xcode.')
+  args, unknown_args = parser.parse_known_args()
+
+  if args.developer_dir:
+    os.environ['DEVELOPER_DIR'] = args.developer_dir
+
+  rv = subprocess.check_call(['xcrun'] + unknown_args)
+  if rv == 0 and args.stamp:
+    if os.path.exists(args.stamp):
+      os.unlink(args.stamp)
+    open(args.stamp, 'w+').close()
+
+  sys.exit(rv)
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/mips.gni
@@ -0,0 +1,58 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/v8_target_cpu.gni")
+
+# These are primarily relevant in current_cpu == "mips*" contexts, where
+# MIPS code is being compiled.  But they can also be relevant in the
+# other contexts when the code will change its behavior based on the
+# cpu it wants to generate code for.
+if (current_cpu == "mipsel" || v8_current_cpu == "mipsel") {
+  declare_args() {
+    # MIPS arch variant. Possible values are:
+    #   "r1"
+    #   "r2"
+    #   "r6"
+    mips_arch_variant = "r1"
+
+    # MIPS DSP ASE revision. Possible values are:
+    #   0: unavailable
+    #   1: revision 1
+    #   2: revision 2
+    mips_dsp_rev = 0
+
+    # MIPS SIMD Arch compilation flag.
+    mips_use_msa = false
+
+    # MIPS floating-point ABI. Possible values are:
+    #   "hard": sets the GCC -mhard-float option.
+    #   "soft": sets the GCC -msoft-float option.
+    mips_float_abi = "hard"
+
+    # MIPS32 floating-point register width. Possible values are:
+    #   "fp32": sets the GCC -mfp32 option.
+    #   "fp64": sets the GCC -mfp64 option.
+    #   "fpxx": sets the GCC -mfpxx option.
+    mips_fpu_mode = "fp32"
+  }
+} else if (current_cpu == "mips64el" || v8_current_cpu == "mips64el") {
+  # MIPS arch variant. Possible values are:
+  #   "r2"
+  #   "r6"
+  if (current_os == "android" || target_os == "android") {
+    declare_args() {
+      mips_arch_variant = "r6"
+
+      # MIPS SIMD Arch compilation flag.
+      mips_use_msa = true
+    }
+  } else {
+    declare_args() {
+      mips_arch_variant = "r2"
+
+      # MIPS SIMD Arch compilation flag.
+      mips_use_msa = false
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/nacl/BUILD.gn
@@ -0,0 +1,143 @@
+# Copyright (c) 2014 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/nacl/config.gni")
+
+# Native Client Definitions
+config("nacl_defines") {
+  if (is_linux || is_android || is_nacl) {
+    defines = [
+      "_POSIX_C_SOURCE=199506",
+      "_XOPEN_SOURCE=600",
+      "_GNU_SOURCE=1",
+      "__STDC_LIMIT_MACROS=1",
+    ]
+  } else if (is_win) {
+    defines = [ "__STDC_LIMIT_MACROS=1" ]
+  }
+
+  if (current_cpu == "pnacl" && !is_nacl_nonsfi) {
+    # TODO: Remove the following definition once NACL_BUILD_ARCH and
+    # NACL_BUILD_SUBARCH are defined by the PNaCl toolchain.
+    defines += [ "NACL_BUILD_ARCH=pnacl" ]
+  }
+}
+
+config("nexe_defines") {
+  defines = [
+    "DYNAMIC_ANNOTATIONS_ENABLED=1",
+    "DYNAMIC_ANNOTATIONS_PREFIX=NACL_",
+  ]
+}
+
+config("nacl_warnings") {
+  if (is_win) {
+    # Some NaCl code uses forward declarations of static const variables,
+    # with initialized definitions later on.  (The alternative would be
+    # many, many more forward declarations of everything used in that
+    # const variable's initializer before the definition.)  The Windows
+    # compiler is too stupid to notice that there is an initializer later
+    # in the file, and warns about the forward declaration.
+    cflags = [ "/wd4132" ]
+  }
+}
+
+# The base target that all targets in the NaCl build should depend on.
+# This allows configs to be modified for everything in the NaCl build, even when
+# the NaCl build is composed into the Chrome build.  (GN has no functionality to
+# add flags to everything in //native_client, having a base target works around
+# that limitation.)
+source_set("nacl_base") {
+  public_configs = [
+    ":nacl_defines",
+    ":nacl_warnings",
+  ]
+  if (current_os == "nacl") {
+    public_configs += [ ":nexe_defines" ]
+  }
+}
+
+config("compiler") {
+  configs = []
+  cflags = []
+  ldflags = []
+  libs = []
+
+  if (is_clang && current_cpu != "pnacl") {
+    # -no-integrated-as is the default in nacl-clang for historical
+    # compatibility with inline assembly code and so forth.  But there
+    # are no such cases in Chromium code, and -integrated-as is nicer in
+    # general.  Moreover, the IRT must be built using LLVM's assembler
+    # on x86-64 to preserve sandbox base address hiding.  Use it
+    # everywhere for consistency (and possibly quicker builds).
+    cflags += [ "-integrated-as" ]
+  }
+  if (is_nacl_nonsfi) {
+    cflags += [ "--pnacl-allow-translate" ]
+    ldflags += [
+      "--pnacl-allow-translate",
+      "--pnacl-allow-native",
+      "-Wl,--noirt",
+      "-Wt,--noirt",
+      "-Wt,--noirtshim",
+
+      # The clang driver automatically injects -lpthread when using libc++, but
+      # the toolchain doesn't have it yet.  To get around this, use
+      # -nodefaultlibs and make each executable target depend on
+      # "//native_client/src/nonsfi/irt:nacl_sys_private".
+      "-nodefaultlibs",
+    ]
+    libs += [
+      "c++",
+      "m",
+      "c",
+      "pnaclmm",
+    ]
+    include_dirs = [ "//native_client/src/public/linux_syscalls" ]
+  }
+
+  asmflags = cflags
+}
+
+config("compiler_codegen") {
+  cflags = []
+
+  if (is_nacl_irt) {
+    cflags += [
+      # A debugger should be able to unwind IRT call frames.  This is
+      # the default behavior on x86-64 and when compiling C++ with
+      # exceptions enabled; the change is for the benefit of x86-32 C.
+      # The frame pointer is unnecessary when unwind tables are used.
+      "-fasynchronous-unwind-tables",
+      "-fomit-frame-pointer",
+    ]
+
+    if (current_cpu == "x86") {
+      # The x86-32 IRT needs to be callable with an under-aligned
+      # stack; so we disable SSE instructions, which can fault on
+      # misaligned addresses.  See
+      # https://code.google.com/p/nativeclient/issues/detail?id=3935
+      cflags += [
+        "-mstackrealign",
+        "-mno-sse",
+      ]
+    }
+  }
+
+  asmflags = cflags
+}
+
+config("irt_optimize") {
+  cflags = [
+    # Optimize for space, keep the IRT nexe small.
+    "-Os",
+
+    # These are omitted from non-IRT libraries to keep the libraries
+    # themselves small.
+    "-ffunction-sections",
+    "-fdata-sections",
+  ]
+
+  ldflags = [ "-Wl,--gc-sections" ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/nacl/config.gni
@@ -0,0 +1,52 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+
+declare_args() {
+  # Native Client supports both Newlib and Glibc C libraries where Newlib
+  # is assumed to be the default one; use this to determine whether Glibc
+  # is being used instead.
+  is_nacl_glibc = false
+}
+
+is_nacl_irt = false
+is_nacl_nonsfi = false
+
+if (enable_nacl) {
+  nacl_toolchain_dir = "//native_client/toolchain/${host_os}_x86"
+
+  if (is_nacl_glibc) {
+    if (current_cpu == "x86" || current_cpu == "x64") {
+      nacl_toolchain_package = "nacl_x86_glibc"
+    } else if (current_cpu == "arm") {
+      nacl_toolchain_package = "nacl_arm_glibc"
+    }
+  } else {
+    nacl_toolchain_package = "pnacl_newlib"
+  }
+
+  if (current_cpu == "pnacl") {
+    _nacl_tuple = "pnacl"
+  } else if (current_cpu == "x86" || current_cpu == "x64") {
+    _nacl_tuple = "x86_64-nacl"
+  } else if (current_cpu == "arm") {
+    _nacl_tuple = "arm-nacl"
+  } else if (current_cpu == "mipsel") {
+    _nacl_tuple = "mipsel-nacl"
+  }
+
+  nacl_toolchain_bindir = "${nacl_toolchain_dir}/${nacl_toolchain_package}/bin"
+  nacl_toolchain_tooldir =
+      "${nacl_toolchain_dir}/${nacl_toolchain_package}/${_nacl_tuple}"
+  nacl_toolprefix = "${nacl_toolchain_bindir}/${_nacl_tuple}-"
+
+  nacl_irt_toolchain = "//build/toolchain/nacl:irt_" + target_cpu
+  is_nacl_irt = current_toolchain == nacl_irt_toolchain
+
+  # Non-SFI mode is a lightweight sandbox used by Chrome OS for running ARC
+  # applications.
+  nacl_nonsfi_toolchain = "//build/toolchain/nacl:newlib_pnacl_nonsfi"
+  is_nacl_nonsfi = current_toolchain == nacl_nonsfi_toolchain
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/nacl/rules.gni
@@ -0,0 +1,185 @@
+# Copyright 2015 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/features.gni")
+import("//build/config/nacl/config.gni")
+
+# Generate a nmf file
+#
+# Native Client Manifest (nmf) is a JSON file that tells the browser where to
+# download and load Native Client application files and libraries.
+#
+# Variables:
+#   executables: .nexe/.pexe/.bc executables to generate nmf for
+#   lib_prefix: path to prepend to shared libraries in the nmf
+#   nmf: the name and the path of the output file
+#   nmfflags: additional flags for the nmf generator
+#   stage_dependencies: directory for staging libraries
+template("generate_nmf") {
+  assert(defined(invoker.executables), "Must define executables")
+  assert(defined(invoker.nmf), "Must define nmf")
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "data_deps",
+                             "executables",
+                             "lib_prefix",
+                             "nmf",
+                             "nmfflags",
+                             "public_deps",
+                             "stage_dependencies",
+                             "testonly",
+                             "visibility",
+                           ])
+    if (!defined(nmfflags)) {
+      nmfflags = []
+    }
+
+    # TODO(phosek): Remove this conditional once
+    # https://bugs.chromium.org/p/nativeclient/issues/detail?id=4339 is
+    # resolved.
+    if (current_cpu == "pnacl") {
+      objdump = rebase_path("${nacl_toolchain_bindir}/x86_64-nacl-objdump")
+    } else {
+      objdump = rebase_path("${nacl_toolprefix}objdump")
+    }
+    if (host_os == "win") {
+      objdump += ".exe"
+    }
+
+    script = "//native_client_sdk/src/tools/create_nmf.py"
+    inputs = [
+      objdump,
+    ]
+    sources = executables
+    outputs = [
+      nmf,
+    ]
+    if (is_nacl_glibc) {
+      if (defined(stage_dependencies)) {
+        nmfflags += [ "--stage-dependencies=" +
+                      rebase_path(stage_dependencies, root_build_dir) ]
+        lib_path = stage_dependencies
+      } else {
+        lib_path = root_build_dir
+      }
+      if (defined(lib_prefix)) {
+        nmfflags += [ "--lib-prefix=" + lib_prefix ]
+        lib_path += "/${lib_prefix}"
+      }
+
+      # Starts empty so the code below can use += everywhere.
+      data = []
+
+      nmfflags += [ "--library-path=" + rebase_path(root_out_dir) ]
+
+      # NOTE: There is no explicit dependency for the lib directory
+      # (lib32 and lib64 for x86/x64) created in the product directory.
+      # They are created as a side-effect of nmf creation.
+      if (current_cpu != "x86" && current_cpu != "x64") {
+        nmfflags +=
+            [ "--library-path=" + rebase_path("${nacl_toolchain_tooldir}/lib") ]
+        if (current_cpu == "arm") {
+          data += [ "${lib_path}/libarm/" ]
+        } else {
+          data += [ "${lib_path}/lib/" ]
+        }
+      } else {
+        # For x86-32, the lib/ directory is called lib32/ instead.
+        if (current_cpu == "x86") {
+          nmfflags += [ "--library-path=" +
+                        rebase_path("${nacl_toolchain_tooldir}/lib32") ]
+          data += [ "${lib_path}/lib32/" ]
+        }
+
+        # x86-32 Windows needs to build both x86-32 and x86-64 NaCl
+        # binaries into the same nmf covering both architectures.  That
+        # gets handled at a higher level (see the nacl_test_data template),
+        # so a single generate_nmf invocation gets both x86-32 and x86-64
+        # nexes listed in executables.
+        if (current_cpu == "x64" || target_os == "win") {
+          # For x86-64, the lib/ directory is called lib64/ instead
+          # when copied by create_nmf.py.
+          glibc_tc = "//build/toolchain/nacl:glibc"
+          assert(current_toolchain == "${glibc_tc}_${current_cpu}")
+          if (current_cpu == "x64") {
+            x64_out_dir = root_out_dir
+          } else {
+            x64_out_dir = get_label_info(":${target_name}(${glibc_tc}_x64)",
+                                         "root_out_dir")
+          }
+          nmfflags += [
+            "--library-path=" + rebase_path(x64_out_dir),
+            "--library-path=" + rebase_path("${nacl_toolchain_tooldir}/lib"),
+          ]
+          data += [ "${lib_path}/lib64/" ]
+        }
+      }
+    }
+    args = [
+             "--no-default-libpath",
+             "--objdump=" + objdump,
+             "--output=" + rebase_path(nmf, root_build_dir),
+           ] + nmfflags + rebase_path(sources, root_build_dir)
+    if (is_nacl_glibc && current_cpu == "arm") {
+      deps += [ "//native_client/src/untrusted/elf_loader:elf_loader" ]
+    }
+  }
+}
+
+# Generate a nmf file for Non-SFI tests
+#
+# Non-SFI tests use a different manifest format from regular Native Client and
+# as such requires a different generator.
+#
+# Variables:
+#   executable: Non-SFI .nexe executable to generate nmf for
+#   nmf: the name and the path of the output file
+#   nmfflags: additional flags for the nmf generator
+template("generate_nonsfi_test_nmf") {
+  assert(defined(invoker.executable), "Must define executable")
+  assert(defined(invoker.nmf), "Must define nmf")
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "data_deps",
+                             "executable",
+                             "nmf",
+                             "testonly",
+                             "public_deps",
+                             "visibility",
+                           ])
+
+    script = "//ppapi/tests/create_nonsfi_test_nmf.py"
+    sources = [
+      executable,
+    ]
+    outputs = [
+      nmf,
+    ]
+
+    # NOTE: We use target_cpu rather than current_cpu on purpose because
+    # current_cpu is always going to be pnacl for Non-SFI, but the Non-SFI
+    # .nexe executable is always translated to run on the target machine.
+    if (target_cpu == "x86") {
+      arch = "x86-32"
+    } else if (target_cpu == "x64") {
+      arch = "x86-64"
+    } else {
+      arch = target_cpu
+    }
+    args = [
+      "--program=" + rebase_path(executable, root_build_dir),
+      "--arch=${arch}",
+      "--output=" + rebase_path(nmf, root_build_dir),
+    ]
+    if (defined(invoker.nmfflags)) {
+      args += invoker.nmfflags
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/pch.gni
@@ -0,0 +1,12 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/goma.gni")
+
+declare_args() {
+  # Precompiled header file support is available by default, but is
+  # disabled for distributed build system use (like goma) and for
+  # official builds.
+  enable_precompiled_headers = !is_official_build && !use_goma
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/posix/BUILD.gn
@@ -0,0 +1,43 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/toolchain.gni")
+
+assert(is_posix)
+
+group("posix") {
+  visibility = [ "//:optimize_gn_gen" ]
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Posix-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  if (!is_mac && !is_ios && sysroot != "") {
+    # Pass the sysroot to all C compiler variants, the assembler, and linker.
+    cflags = [ "--sysroot=" + rebase_path(sysroot, root_build_dir) ]
+    asmflags = cflags
+    ldflags = cflags
+
+    # Need to get some linker flags out of the sysroot.
+    ld_paths = exec_script("sysroot_ld_path.py",
+                           [
+                             rebase_path("//build/linux/sysroot_ld_path.sh",
+                                         root_build_dir),
+                             rebase_path(sysroot),
+                           ],
+                           "list lines")
+    foreach(ld_path, ld_paths) {
+      ld_path = rebase_path(ld_path, root_build_dir)
+      ldflags += [
+        "-L" + ld_path,
+        "-Wl,-rpath-link=" + ld_path,
+      ]
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/posix/sysroot_ld_path.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file takes two arguments, the relative location of the shell script that
+# does the checking, and the name of the sysroot.
+
+# TODO(brettw) the build/linux/sysroot_ld_path.sh script should be rewritten in
+# Python in this file.
+
+import subprocess
+import sys
+
+if len(sys.argv) != 3:
+  print "Need two arguments"
+  sys.exit(1)
+
+result = subprocess.check_output([sys.argv[1], sys.argv[2]]).strip()
+result = result.replace(" ", "\n")
+if result != "":
+  print result
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/sanitizers/BUILD.gn
@@ -0,0 +1,579 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build_overrides/build.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/chromecast_build.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/toolchain.gni")
+
+if (is_ios) {
+  import("//build/config/ios/ios_sdk.gni")
+}
+
+# Contains the dependencies needed for sanitizers to link into
+# executables and shared_libraries. Unconditionally depend upon
+# "//build/config:exe_and_shlib_deps" to pull in this target.
+group("deps") {
+  visibility = [ "//build/config:exe_and_shlib_deps" ]
+  if (using_sanitizer) {
+    public_configs = [
+      ":sanitizer_options_link_helper",
+
+      # Even when a target removes default_sanitizer_flags, it may be depending
+      # on a library that did not remove default_sanitizer_flags. Thus, we need
+      # to add the ldflags here as well as in default_sanitizer_flags.
+      ":default_sanitizer_ldflags",
+    ]
+    deps = [
+      ":options_sources",
+    ]
+    if (is_win) {
+      exe = ".exe"
+    } else {
+      exe = ""
+    }
+    data = [
+      "//tools/valgrind/asan/",
+      "$clang_base_path/bin/llvm-symbolizer${exe}",
+    ]
+    if (is_linux) {
+      # llvm-symbolizer needs this.
+      data += [ "$clang_base_path/lib/libstdc++.so.6" ]
+    }
+
+    if (use_prebuilt_instrumented_libraries ||
+        use_locally_built_instrumented_libraries) {
+      deps += [ "//third_party/instrumented_libraries:deps" ]
+    }
+
+    # ASAN is supported on iOS but the runtime library depends on the compiler
+    # used (Chromium version of clang versus Xcode version of clang). Only copy
+    # the ASAN runtime on iOS if building with Chromium clang.
+    if (is_win || is_mac || (is_ios && !use_xcode_clang)) {
+      data_deps = [
+        ":copy_asan_runtime",
+      ]
+    }
+    if (is_mac || (is_ios && !use_xcode_clang)) {
+      public_deps = [
+        ":asan_runtime_bundle_data",
+      ]
+    }
+  }
+}
+
+if ((is_mac || is_win || (is_ios && !use_xcode_clang)) && using_sanitizer) {
+  if (is_mac) {
+    _clang_rt_dso_path = "darwin/libclang_rt.asan_osx_dynamic.dylib"
+  } else if (is_ios) {
+    _clang_rt_dso_path = "darwin/libclang_rt.asan_iossim_dynamic.dylib"
+  } else if (is_win && target_cpu == "x86") {
+    _clang_rt_dso_path = "windows/clang_rt.asan_dynamic-i386.dll"
+  } else if (is_win && target_cpu == "x64") {
+    _clang_rt_dso_path = "windows/clang_rt.asan_dynamic-x86_64.dll"
+  }
+
+  _clang_rt_dso_full_path =
+      "$clang_base_path/lib/clang/$clang_version/lib/$_clang_rt_dso_path"
+
+  if (!is_ios) {
+    copy("copy_asan_runtime") {
+      sources = [
+        _clang_rt_dso_full_path,
+      ]
+      outputs = [
+        "$root_out_dir/{{source_file_part}}",
+      ]
+    }
+  } else {
+    # On iOS, the runtime library needs to be code signed (ad-hoc signature)
+    # starting with Xcode 8, so use an action instead of a copy on iOS.
+    action("copy_asan_runtime") {
+      script = "//build/config/ios/codesign.py"
+      sources = [
+        _clang_rt_dso_full_path,
+      ]
+      outputs = [
+        "$root_out_dir/" + get_path_info(sources[0], "file"),
+      ]
+      args = [
+        "code-sign-file",
+        "--identity=" + ios_code_signing_identity,
+        "--output=" + rebase_path(outputs[0], root_build_dir),
+        rebase_path(sources[0], root_build_dir),
+      ]
+    }
+  }
+
+  if (is_mac || is_ios) {
+    bundle_data("asan_runtime_bundle_data") {
+      sources = get_target_outputs(":copy_asan_runtime")
+      outputs = [
+        "{{bundle_executable_dir}}/{{source_file_part}}",
+      ]
+      public_deps = [
+        ":copy_asan_runtime",
+      ]
+    }
+  }
+}
+
+config("sanitizer_options_link_helper") {
+  if (is_mac || is_ios) {
+    ldflags = [ "-Wl,-U,_sanitizer_options_link_helper" ]
+  } else if (!is_win) {
+    ldflags = [ "-Wl,-u_sanitizer_options_link_helper" ]
+  }
+}
+
+static_library("options_sources") {
+  # This is a static_library instead of a source_set, as it shouldn't be
+  # unconditionally linked into targets.
+  visibility = [
+    ":deps",
+    "//:gn_visibility",
+  ]
+  sources = [
+    "//build/sanitizers/sanitizer_options.cc",
+  ]
+
+  # Don't compile this target with any sanitizer code. It can be called from
+  # the sanitizer runtimes, so instrumenting these functions could cause
+  # recursive calls into the runtime if there is an error.
+  configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ]
+
+  if (is_asan) {
+    if (!defined(asan_suppressions_file)) {
+      asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc"
+    }
+    sources += [ asan_suppressions_file ]
+  }
+
+  if (is_lsan) {
+    if (!defined(lsan_suppressions_file)) {
+      lsan_suppressions_file = "//build/sanitizers/lsan_suppressions.cc"
+    }
+    sources += [ lsan_suppressions_file ]
+  }
+
+  if (is_tsan) {
+    if (!defined(tsan_suppressions_file)) {
+      tsan_suppressions_file = "//build/sanitizers/tsan_suppressions.cc"
+    }
+    sources += [ tsan_suppressions_file ]
+  }
+}
+
+# Applies linker flags necessary when either :deps or :default_sanitizer_flags
+# are used.
+config("default_sanitizer_ldflags") {
+  visibility = [
+    ":default_sanitizer_flags",
+    ":deps",
+  ]
+
+  if (is_posix) {
+    ldflags = []
+    if (is_asan) {
+      ldflags += [ "-fsanitize=address" ]
+      if (is_mac) {
+        # https://crbug.com/708707
+        ldflags += [ "-fno-sanitize-address-use-after-scope" ]
+      } else {
+        ldflags += [ "-fsanitize-address-use-after-scope" ]
+      }
+    }
+    if (is_lsan) {
+      ldflags += [ "-fsanitize=leak" ]
+    }
+    if (is_tsan) {
+      ldflags += [ "-fsanitize=thread" ]
+    }
+    if (is_msan) {
+      ldflags += [ "-fsanitize=memory" ]
+    }
+    if (is_ubsan || is_ubsan_security) {
+      ldflags += [ "-fsanitize=undefined" ]
+    }
+    if (is_ubsan_null) {
+      ldflags += [ "-fsanitize=null" ]
+    }
+    if (is_ubsan_vptr) {
+      ldflags += [ "-fsanitize=vptr" ]
+    }
+
+    if (use_sanitizer_coverage) {
+      ldflags += [ "-fsanitize-coverage=$sanitizer_coverage_flags" ]
+    }
+
+    if (is_cfi && !is_nacl) {
+      ldflags += [ "-fsanitize=cfi-vcall" ]
+      if (use_cfi_cast) {
+        ldflags += [
+          "-fsanitize=cfi-derived-cast",
+          "-fsanitize=cfi-unrelated-cast",
+        ]
+      }
+      if (use_cfi_diag) {
+        ldflags += [
+          "-fno-sanitize-trap=cfi",
+          "-fsanitize-recover=cfi",
+        ]
+      }
+    }
+  } else if (is_win && is_asan) {
+    # Windows directly calls link.exe instead of the compiler driver when
+    # linking.  Hence, pass the runtime libraries instead of -fsanitize=address.
+    # In the static-library build, libraries are different for executables
+    # and dlls, see link_executable and link_shared_library below.
+    # This here handles only the component build.
+    if (target_cpu == "x64") {
+      # Windows 64-bit. TODO(etienneb): Remove the assert when this is ready.
+      if (is_component_build) {
+        assert(false, "win/asan does not work in 64-bit yet")
+        libs = [
+          "clang_rt.asan_dynamic-x86_64.lib",
+          "clang_rt.asan_dynamic_runtime_thunk-x86_64.lib",
+        ]
+      }
+    } else {
+      assert(target_cpu == "x86", "WinASan unsupported architecture")
+      if (is_component_build) {
+        libs = [
+          "clang_rt.asan_dynamic-i386.lib",
+          "clang_rt.asan_dynamic_runtime_thunk-i386.lib",
+        ]
+      }
+    }
+  }
+}
+
+config("common_sanitizer_flags") {
+  cflags = []
+
+  # Sanitizers need line table info for stack traces. They don't need type info
+  # or variable info, so we can leave that out to speed up the build (unless
+  # it's explicitly asked for by setting |sanitizer_keep_symbols| to true).
+  if (using_sanitizer) {
+    assert(is_clang, "sanitizers only supported with clang")
+    if (!sanitizer_keep_symbols) {
+      cflags += [ "-gline-tables-only" ]
+    }
+
+    cflags += [
+      # Column info in debug data confuses Visual Studio's debugger, so don't
+      # use this by default.  However, clusterfuzz needs it for good attribution
+      # of reports to CLs, so turn it on there.
+      "-gcolumn-info",
+    ]
+  }
+
+  # Common options for AddressSanitizer, LeakSanitizer, ThreadSanitizer,
+  # MemorySanitizer and non-official CFI builds.
+  if (using_sanitizer || (is_cfi && !is_official_build)) {
+    if (is_posix) {
+      cflags += [ "-fno-omit-frame-pointer" ]
+    } else {
+      cflags += [ "/Oy-" ]
+    }
+  }
+}
+
+# TODO(thomasanderson): Move this out of build/config/sanitizers.
+config("libcxx_flags") {
+  if (use_custom_libcxx) {
+    prefix = "//buildtools/third_party"
+    include = "trunk/include"
+    cflags_cc = [
+      "-nostdinc++",
+      "-isystem" + rebase_path("$prefix/libc++/$include", root_build_dir),
+      "-isystem" + rebase_path("$prefix/libc++abi/$include", root_build_dir),
+    ]
+  }
+}
+
+config("asan_flags") {
+  cflags = []
+  if (is_asan) {
+    cflags += [ "-fsanitize=address" ]
+    if (!is_mac) {
+      cflags += [ "-fsanitize-address-use-after-scope" ]
+    } else {
+      # https://crbug.com/708707
+      cflags += [ "-fno-sanitize-address-use-after-scope" ]
+    }
+    if (!asan_globals) {
+      cflags += [
+        "-mllvm",
+        "-asan-globals=0",
+      ]
+    }
+    if (is_win) {
+      if (!defined(asan_win_blacklist_path)) {
+        asan_win_blacklist_path =
+            rebase_path("//tools/memory/asan/blacklist_win.txt", root_build_dir)
+      }
+      cflags += [ "-fsanitize-blacklist=$asan_win_blacklist_path" ]
+    } else {
+      # TODO(rnk): Remove this as discussed in http://crbug.com/427202.
+      if (!defined(asan_blacklist_path)) {
+        asan_blacklist_path =
+            rebase_path("//tools/memory/asan/blacklist.txt", root_build_dir)
+      }
+      cflags += [ "-fsanitize-blacklist=$asan_blacklist_path" ]
+    }
+  }
+}
+
+config("link_executable") {
+  if (is_asan && is_win && !is_component_build) {
+    if (target_cpu == "x64") {
+      # Windows 64-bit. TODO(etienneb): Remove the assert when this is ready.
+      assert(false, "win/asan does not work in 64-bit yet")
+      libs = [ "clang_rt.asan-x86_64.lib" ]
+      ldflags = [ "-wholearchive:clang_rt.asan-x86_64.lib" ]
+    } else {
+      assert(target_cpu == "x86", "WinASan unsupported architecture")
+      libs = [ "clang_rt.asan-i386.lib" ]
+      ldflags = [ "-wholearchive:clang_rt.asan-i386.lib" ]
+    }
+  }
+}
+
+config("link_shared_library") {
+  if (is_asan && is_win && !is_component_build) {
+    if (target_cpu == "x64") {
+      # Windows 64-bit. TODO(etienneb): Remove the assert when this is ready.
+      assert(false, "win/asan does not work in 64-bit yet")
+      libs = [ "clang_rt.asan_dll_thunk-x86_64.lib" ]
+    } else {
+      assert(target_cpu == "x86", "WinASan unsupported architecture")
+      libs = [ "clang_rt.asan_dll_thunk-i386.lib" ]
+    }
+  }
+}
+
+config("cfi_flags") {
+  cflags = []
+  if (is_cfi && !is_nacl) {
+    if (!defined(cfi_blacklist_path)) {
+      cfi_blacklist_path =
+          rebase_path("//tools/cfi/blacklist.txt", root_build_dir)
+    }
+    cflags += [
+      "-fsanitize=cfi-vcall",
+      "-fsanitize-blacklist=$cfi_blacklist_path",
+    ]
+
+    if (use_cfi_cast) {
+      cflags += [
+        "-fsanitize=cfi-derived-cast",
+        "-fsanitize=cfi-unrelated-cast",
+      ]
+    }
+
+    if (use_cfi_icall) {
+      cflags += [ "-fsanitize=cfi-icall" ]
+    }
+
+    if (use_cfi_diag) {
+      cflags += [
+        "-fno-sanitize-trap=cfi",
+        "-fsanitize-recover=cfi",
+        "-fno-inline-functions",
+        "-fno-inline",
+        "-fno-omit-frame-pointer",
+        "-O1",
+      ]
+    } else {
+      defines = [ "CFI_ENFORCEMENT" ]
+    }
+  }
+}
+
+config("coverage_flags") {
+  cflags = []
+
+  if (use_sanitizer_coverage) {
+    cflags += [
+      "-fsanitize-coverage=$sanitizer_coverage_flags",
+      "-mllvm",
+      "-sanitizer-coverage-prune-blocks=1",
+    ]
+    if (current_cpu == "arm") {
+      # http://crbug.com/517105
+      cflags += [
+        "-mllvm",
+        "-sanitizer-coverage-block-threshold=0",
+      ]
+    }
+    defines = [ "SANITIZER_COVERAGE" ]
+  }
+}
+
+config("lsan_flags") {
+  if (is_lsan) {
+    cflags = [ "-fsanitize=leak" ]
+  }
+}
+
+config("msan_flags") {
+  if (is_msan) {
+    assert(is_linux, "msan only supported on linux x86_64")
+    if (!defined(msan_blacklist_path)) {
+      msan_blacklist_path =
+          rebase_path("//tools/msan/blacklist.txt", root_build_dir)
+    }
+    cflags = [
+      "-fsanitize=memory",
+      "-fsanitize-memory-track-origins=$msan_track_origins",
+      "-fsanitize-blacklist=$msan_blacklist_path",
+    ]
+  }
+}
+
+config("tsan_flags") {
+  if (is_tsan) {
+    assert(is_linux, "tsan only supported on linux x86_64")
+    if (!defined(tsan_blacklist_path)) {
+      tsan_blacklist_path =
+          rebase_path("//tools/memory/tsan_v2/ignores.txt", root_build_dir)
+    }
+    cflags = [
+      "-fsanitize=thread",
+      "-fsanitize-blacklist=$tsan_blacklist_path",
+    ]
+  }
+}
+
+config("ubsan_flags") {
+  cflags = []
+  if (is_ubsan) {
+    if (!defined(ubsan_blacklist_path)) {
+      ubsan_blacklist_path =
+          rebase_path("//tools/ubsan/blacklist.txt", root_build_dir)
+    }
+    cflags += [
+      # Yasm dies with an "Illegal instruction" error when bounds checking is
+      # enabled. See http://crbug.com/489901
+      # "-fsanitize=bounds",
+      "-fsanitize=float-divide-by-zero",
+      "-fsanitize=integer-divide-by-zero",
+      "-fsanitize=null",
+      "-fsanitize=object-size",
+      "-fsanitize=return",
+      "-fsanitize=returns-nonnull-attribute",
+      "-fsanitize=shift-exponent",
+      "-fsanitize=signed-integer-overflow",
+      "-fsanitize=unreachable",
+      "-fsanitize=vla-bound",
+      "-fsanitize-blacklist=$ubsan_blacklist_path",
+    ]
+
+    # Chromecast ubsan builds fail to compile with these
+    # experimental flags, so only add them to non-chromecast ubsan builds.
+    if (!is_chromecast) {
+      cflags += [
+        # Employ the experimental PBQP register allocator to avoid slow
+        # compilation on files with too many basic blocks.
+        # See http://crbug.com/426271.
+        "-mllvm",
+        "-regalloc=pbqp",
+
+        # Speculatively use coalescing to slightly improve the code generated
+        # by PBQP regallocator. May increase compile time.
+        "-mllvm",
+        "-pbqp-coalescing",
+      ]
+    }
+  }
+}
+
+config("ubsan_no_recover") {
+  if (is_ubsan_no_recover) {
+    cflags = [ "-fno-sanitize-recover=undefined" ]
+  }
+}
+
+config("ubsan_security_flags") {
+  if (is_ubsan_security) {
+    if (!defined(ubsan_security_blacklist_path)) {
+      ubsan_security_blacklist_path =
+          rebase_path("//tools/ubsan/security_blacklist.txt", root_build_dir)
+    }
+    cflags = [
+      "-fsanitize=signed-integer-overflow,shift,vptr,function,vla-bound",
+      "-fsanitize-blacklist=$ubsan_security_blacklist_path",
+    ]
+  }
+}
+
+config("ubsan_null_flags") {
+  if (is_ubsan_null) {
+    cflags = [ "-fsanitize=null" ]
+  }
+}
+
+config("ubsan_vptr_flags") {
+  if (is_ubsan_vptr) {
+    if (!defined(ubsan_vptr_blacklist_path)) {
+      ubsan_vptr_blacklist_path =
+          rebase_path("//tools/ubsan/vptr_blacklist.txt", root_build_dir)
+    }
+    cflags = [
+      "-fsanitize=vptr",
+      "-fsanitize-blacklist=$ubsan_vptr_blacklist_path",
+    ]
+  }
+}
+
+config("fuzzing_build_mode") {
+  if (use_libfuzzer || use_afl) {
+    defines = [ "FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION" ]
+  }
+}
+
+all_sanitizer_configs = [
+  ":common_sanitizer_flags",
+  ":libcxx_flags",
+  ":coverage_flags",
+  ":default_sanitizer_ldflags",
+  ":asan_flags",
+  ":cfi_flags",
+  ":lsan_flags",
+  ":msan_flags",
+  ":tsan_flags",
+  ":ubsan_flags",
+  ":ubsan_no_recover",
+  ":ubsan_null_flags",
+  ":ubsan_security_flags",
+  ":ubsan_vptr_flags",
+  ":fuzzing_build_mode",
+]
+
+# This config is applied by default to all targets. It sets the compiler flags
+# for sanitizer usage, or, if no sanitizer is set, does nothing.
+#
+# This needs to be in a separate config so that targets can opt out of
+# sanitizers (by removing the config) if they desire. Even if a target
+# removes this config, executables & shared libraries should still depend on
+# :deps if any of their dependencies have not opted out of sanitizers.
+# Keep this list in sync with default_sanitizer_flags_but_ubsan_vptr.
+config("default_sanitizer_flags") {
+  configs = all_sanitizer_configs
+}
+
+# This config is equivalent to default_sanitizer_flags, but excludes ubsan_vptr.
+# This allows to selectively disable ubsan_vptr, when needed. In particular,
+# if some third_party code is required to be compiled without rtti, which
+# is a requirement for ubsan_vptr.
+config("default_sanitizer_flags_but_ubsan_vptr") {
+  configs = all_sanitizer_configs - [ ":ubsan_vptr_flags" ]
+}
+
+config("default_sanitizer_flags_but_coverage") {
+  configs = all_sanitizer_configs - [ ":coverage_flags" ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/sanitizers/OWNERS
@@ -0,0 +1,2 @@
+mmoroz@chromium.org
+ochang@chromium.org
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/sanitizers/sanitizers.gni
@@ -0,0 +1,203 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # Compile for Address Sanitizer to find memory bugs.
+  is_asan = false
+
+  # Compile for Leak Sanitizer to find leaks.
+  is_lsan = false
+
+  # Compile for Memory Sanitizer to find uninitialized reads.
+  is_msan = false
+
+  # Compile for Thread Sanitizer to find threading bugs.
+  is_tsan = false
+
+  # Compile for Undefined Behaviour Sanitizer to find various types of
+  # undefined behaviour (excludes vptr checks).
+  is_ubsan = false
+
+  # Halt the program if a problem is detected.
+  is_ubsan_no_recover = false
+
+  # Compile for Undefined Behaviour Sanitizer's null pointer checks.
+  is_ubsan_null = false
+
+  # Compile for Undefined Behaviour Sanitizer's vptr checks.
+  is_ubsan_vptr = false
+
+  # Track where uninitialized memory originates from. From fastest to slowest:
+  # 0 - no tracking, 1 - track only the initial allocation site, 2 - track the
+  # chain of stores leading from allocation site to use site.
+  msan_track_origins = 2
+
+  # Use dynamic libraries instrumented by one of the sanitizers instead of the
+  # standard system libraries. Set this flag to download prebuilt binaries from
+  # GCS.
+  use_prebuilt_instrumented_libraries = false
+
+  # Use dynamic libraries instrumented by one of the sanitizers instead of the
+  # standard system libraries. Set this flag to build the libraries from source.
+  use_locally_built_instrumented_libraries = false
+
+  # Enable building with SyzyAsan which can find certain types of memory
+  # errors. Only works on Windows. See
+  # https://github.com/google/syzygy/wiki/SyzyASanHowTo
+  is_syzyasan = false
+
+  # Compile with Control Flow Integrity to protect virtual calls and casts.
+  # See http://clang.llvm.org/docs/ControlFlowIntegrity.html
+  #
+  # TODO(pcc): Remove this flag if/when CFI is enabled in all official builds.
+  is_cfi = target_os == "linux" && !is_chromeos && target_cpu == "x64" &&
+           is_official_build && allow_posix_link_time_opt
+
+  # Enable checks for bad casts: derived cast and unrelated cast.
+  # TODO(krasin): remove this, when we're ready to add these checks by default.
+  # https://crbug.com/626794
+  use_cfi_cast = false
+
+  # Enable checks for indirect function calls via a function pointer.
+  # TODO(pcc): remove this when we're ready to add these checks by default.
+  # https://crbug.com/701919
+  use_cfi_icall = false
+
+  # By default, Control Flow Integrity will crash the program if it detects a
+  # violation. Set this to true to print detailed diagnostics instead.
+  use_cfi_diag = false
+
+  # Compile for fuzzing with LLVM LibFuzzer.
+  # See http://www.chromium.org/developers/testing/libfuzzer
+  use_libfuzzer = false
+
+  # Compile for fuzzing with AFL.
+  use_afl = false
+
+  # Enables core ubsan security features. Will later be removed once it matches
+  # is_ubsan.
+  is_ubsan_security = false
+
+  # Compile for fuzzing with Dr. Fuzz
+  # See http://www.chromium.org/developers/testing/dr-fuzz
+  use_drfuzz = false
+
+  # Helper variable for testing builds with disabled libfuzzer.
+  # Not for client use.
+  disable_libfuzzer = false
+
+  # Value for -fsanitize-coverage flag. Setting this causes
+  # use_sanitizer_coverage to be enabled.
+  # Default value when unset and use_afl=true or use_libfuzzer=true:
+  #     trace-pc-guard
+  # Default value when unset and use_sanitizer_coverage=true:
+  #     trace-pc-guard,indirect-calls
+  sanitizer_coverage_flags = ""
+
+  # Keep symbol level when building with sanitizers. When sanitizers are
+  # enabled, the default is to compile with the minimum debug info level
+  # necessary, overriding any other symbol level arguments that may be set.
+  # Setting this to true prevents this.
+  sanitizer_keep_symbols = false
+}
+
+# Disable sanitizers for non-default toolchains.
+if (current_toolchain != default_toolchain) {
+  is_asan = false
+  is_cfi = false
+  is_lsan = false
+  is_msan = false
+  is_syzyasan = false
+  is_tsan = false
+  is_ubsan = false
+  is_ubsan_null = false
+  is_ubsan_no_recover = false
+  is_ubsan_security = false
+  is_ubsan_vptr = false
+  msan_track_origins = 0
+  sanitizer_coverage_flags = ""
+  use_afl = false
+  use_cfi_diag = false
+  use_custom_libcxx = false
+  use_drfuzz = false
+  use_libfuzzer = false
+  use_prebuilt_instrumented_libraries = false
+  use_locally_built_instrumented_libraries = false
+  use_sanitizer_coverage = false
+}
+
+# Args that are in turn dependent on other args must be in a separate
+# declare_args block. User overrides are only applied at the end of a
+# declare_args block.
+declare_args() {
+  # Use libc++ (buildtools/third_party/libc++ and
+  # buildtools/third_party/libc++abi) instead of stdlibc++ as standard library.
+  # This is intended to be used for instrumented builds.
+  use_custom_libcxx =
+      (is_asan && is_linux && !is_chromeos) || is_tsan || is_msan || is_ubsan ||
+      is_ubsan_security || use_libfuzzer || use_afl
+
+  # Enable -fsanitize-coverage.
+  use_sanitizer_coverage =
+      use_libfuzzer || use_afl || sanitizer_coverage_flags != ""
+
+  # Detect overflow/underflow for global objects.
+  #
+  # Mac: http://crbug.com/352073
+  asan_globals = !is_mac
+}
+
+if ((use_afl || use_libfuzzer) && sanitizer_coverage_flags == "") {
+  sanitizer_coverage_flags = "trace-pc-guard"
+} else if (use_sanitizer_coverage && sanitizer_coverage_flags == "") {
+  sanitizer_coverage_flags = "trace-pc-guard,indirect-calls"
+}
+
+using_sanitizer =
+    is_asan || is_lsan || is_tsan || is_msan || is_ubsan || is_ubsan_null ||
+    is_ubsan_vptr || is_ubsan_security || use_sanitizer_coverage
+
+assert(!using_sanitizer || is_clang,
+       "Sanitizers (is_*san) require setting is_clang = true in 'gn args'")
+
+prebuilt_instrumented_libraries_available =
+    is_msan && (msan_track_origins == 0 || msan_track_origins == 2)
+
+if (use_libfuzzer && is_linux) {
+  if (is_asan) {
+    # We do leak checking with libFuzzer on Linux. Set is_lsan for code that
+    # relies on LEAK_SANITIZER define to avoid false positives.
+    is_lsan = true
+  }
+  if (is_msan) {
+    use_prebuilt_instrumented_libraries = true
+  }
+}
+
+# MSan only links Chrome properly in release builds (brettw -- 9/1/2015). The
+# same is possibly true for the other non-ASan sanitizers. But regardless of
+# whether it links, one would normally never run a sanitizer in debug mode.
+# Running in debug mode probably indicates you forgot to set the "is_debug =
+# false" flag in the build args. ASan seems to run fine in debug mode.
+#
+# If you find a use-case where you want to compile a sanitizer in debug mode
+# and have verified it works, ask brettw and we can consider removing it from
+# this condition. We may also be able to find another way to enable your case
+# without having people accidentally get broken builds by compiling an
+# unsupported or inadvisable configuration.
+#
+# For one-off testing, just comment this assertion out.
+assert(!is_debug || !(is_msan || is_ubsan || is_ubsan_null || is_ubsan_vptr),
+       "Sanitizers should generally be used in release (set is_debug=false).")
+
+assert(!is_msan || (is_linux && current_cpu == "x64"),
+       "MSan currently only works on 64-bit Linux and ChromeOS builds.")
+
+# ASAN build on Windows is not working in debug mode. Intercepting memory
+# allocation functions is hard on Windows and not yet implemented in LLVM.
+assert(!is_win || !is_debug || !is_asan,
+       "ASan on Windows doesn't work in debug (set is_debug=false).")
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/sysroot.gni
@@ -0,0 +1,91 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This header file defines the "sysroot" variable which is the absolute path
+# of the sysroot. If no sysroot applies, the variable will be an empty string.
+
+import("//build/config/chrome_build.gni")
+
+declare_args() {
+  # The absolute path of the sysroot that is applied when compiling using
+  # the target toolchain.
+  target_sysroot = ""
+
+  # The absolute path to directory containing linux sysroot images
+  target_sysroot_dir = "//build/linux"
+
+  use_sysroot = current_cpu != "s390x" && current_cpu != "s390" &&
+                current_cpu != "ppc64" && current_cpu != "ppc"
+}
+
+if (current_os == target_os && current_cpu == target_cpu &&
+    target_sysroot != "") {
+  sysroot = target_sysroot
+} else if (is_android) {
+  import("//build/config/android/config.gni")
+  if (current_cpu == "x86") {
+    sysroot = "$android_ndk_root/$x86_android_sysroot_subdir"
+  } else if (current_cpu == "arm") {
+    sysroot = "$android_ndk_root/$arm_android_sysroot_subdir"
+  } else if (current_cpu == "mipsel") {
+    sysroot = "$android_ndk_root/$mips_android_sysroot_subdir"
+  } else if (current_cpu == "x64") {
+    sysroot = "$android_ndk_root/$x86_64_android_sysroot_subdir"
+  } else if (current_cpu == "arm64") {
+    sysroot = "$android_ndk_root/$arm64_android_sysroot_subdir"
+  } else if (current_cpu == "mips64el") {
+    sysroot = "$android_ndk_root/$mips64_android_sysroot_subdir"
+  } else {
+    assert(false, "No android sysroot for cpu: $current_cpu")
+  }
+} else if (is_linux && use_sysroot) {
+  # By default build against a sysroot image downloaded from Cloud Storage
+  # during gclient runhooks.
+  if (current_cpu == "x64") {
+    sysroot = "$target_sysroot_dir/debian_jessie_amd64-sysroot"
+  } else if (current_cpu == "x86") {
+    sysroot = "$target_sysroot_dir/debian_jessie_i386-sysroot"
+  } else if (current_cpu == "mipsel") {
+    sysroot = "$target_sysroot_dir/debian_jessie_mips-sysroot"
+  } else if (current_cpu == "arm") {
+    sysroot = "$target_sysroot_dir/debian_jessie_arm-sysroot"
+  } else if (current_cpu == "arm64") {
+    sysroot = "$target_sysroot_dir/debian_jessie_arm64-sysroot"
+  } else {
+    assert(false, "No linux sysroot for cpu: $current_cpu")
+  }
+
+  if (sysroot != "") {
+    _script_arch = current_cpu
+    if (_script_arch == "x86") {
+      _script_arch = "i386"
+    } else if (_script_arch == "x64") {
+      _script_arch = "amd64"
+    }
+    assert(
+        exec_script("//build/dir_exists.py",
+                    [ rebase_path(sysroot) ],
+                    "string") == "True",
+        "Missing sysroot ($sysroot). To fix, run: build/linux/sysroot_scripts/install-sysroot.py --arch=$_script_arch")
+  }
+} else if (is_mac) {
+  import("//build/config/mac/mac_sdk.gni")
+  sysroot = mac_sdk_path
+} else if (is_ios) {
+  import("//build/config/ios/ios_sdk.gni")
+  sysroot = ios_sdk_path
+} else if (is_fuchsia) {
+  import("//build/config/fuchsia/config.gni")
+  if (current_cpu == "arm64") {
+    sysroot = fuchsia_sdk + "/sysroot/aarch64-fuchsia"
+  } else if (current_cpu == "x64") {
+    sysroot = fuchsia_sdk + "/sysroot/x86_64-fuchsia"
+  } else {
+    sysroot = ""
+  }
+  sysroot_stamp = rebase_path("$sysroot/.stamp")
+  sysroot_version = read_file(sysroot_stamp, "trim string")
+} else {
+  sysroot = ""
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/ui.gni
@@ -0,0 +1,72 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================
+#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# These flags are effectively global. Your feature flag should go near the
+# code it controls. Most of these items are here now because they control
+# legacy global #defines passed to the compiler (now replaced with generated
+# buildflag headers -- see //build/buildflag_header.gni).
+#
+# These flags are ui-related so should eventually be moved to various places
+# in //ui/*.
+#
+# There is more advice on where to put build flags in the "Build flag" section
+# of //build/config/BUILDCONFIG.gn.
+
+import("//build/config/chromecast_build.gni")
+
+declare_args() {
+  # Indicates if Ash is enabled. Ash is the Aura Shell which provides a
+  # desktop-like environment for Aura. Requires use_aura = true
+  use_ash = is_chromeos && !is_chromecast
+
+  # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux
+  # that does not require X11. Enabling this feature disables use of glib, x11,
+  # Pango, and Cairo. Default to false on non-Chromecast builds.
+  use_ozone = is_chromeos || (is_chromecast && !is_android)
+
+  # Indicates if Aura is enabled. Aura is a low-level windowing library, sort
+  # of a replacement for GDI or GTK.
+  use_aura = is_win || is_linux
+
+  # Whether we should use glib, a low level C utility library.
+  use_glib = is_linux
+}
+
+declare_args() {
+  # True means the UI is built using the "views" framework.
+  toolkit_views =
+      (is_mac || is_win || is_chromeos || use_aura) && !is_chromecast
+}
+
+# Additional dependent variables -----------------------------------------------
+#
+# These variables depend on other variables and can't be set externally.
+
+# Indicates if the UI toolkit depends on X11.
+use_x11 = is_linux && !use_ozone
+
+# Turn off glib if Ozone is enabled.
+if (use_ozone) {
+  use_glib = false
+}
+
+if (is_linux && !use_ozone) {
+  use_cairo = true
+  use_pango = true
+} else {
+  use_cairo = false
+  use_pango = false
+}
+
+# Whether to use atk, the Accessibility ToolKit library
+use_atk = is_desktop_linux && use_x11
+# =============================================
+#   PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# See comment at the top.
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/v8_target_cpu.gni
@@ -0,0 +1,61 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+  # This arg is used when we want to tell the JIT-generating v8 code
+  # that we want to have it generate for an architecture that is different
+  # than the architecture that v8 will actually run on; we then run the
+  # code under an emulator. For example, we might run v8 on x86, but
+  # generate arm code and run that under emulation.
+  #
+  # This arg is defined here rather than in the v8 project because we want
+  # some of the common architecture-specific args (like arm_float_abi or
+  # mips_arch_variant) to be set to their defaults either if the current_cpu
+  # applies *or* if the v8_current_cpu applies.
+  #
+  # As described below, you can also specify the v8_target_cpu to use
+  # indirectly by specifying a `custom_toolchain` that contains v8_$cpu in the
+  # name after the normal toolchain.
+  #
+  # For example, `gn gen --args="custom_toolchain=...:clang_x64_v8_arm64"`
+  # is equivalent to setting --args=`v8_target_cpu="arm64"`. Setting
+  # `custom_toolchain` is more verbose but makes the toolchain that is
+  # (effectively) being used explicit.
+  #
+  # v8_target_cpu can only be used to target one architecture in a build,
+  # so if you wish to build multiple copies of v8 that are targeting
+  # different architectures, you will need to do something more
+  # complicated involving multiple toolchains along the lines of
+  # custom_toolchain, above.
+  v8_target_cpu = ""
+}
+
+if (v8_target_cpu == "") {
+  if (current_toolchain == "//build/toolchain/linux:clang_x64_v8_arm64") {
+    v8_target_cpu = "arm64"
+  } else if (current_toolchain == "//build/toolchain/linux:clang_x86_v8_arm") {
+    v8_target_cpu = "arm"
+  } else if (current_toolchain ==
+             "//build/toolchain/linux:clang_x86_v8_mips64el") {
+    v8_target_cpu = "mips64el"
+  } else if (current_toolchain ==
+             "//build/toolchain/linux:clang_x86_v8_mipsel") {
+    v8_target_cpu = "mipsel"
+  } else if (is_msan) {
+    # If we're running under a sanitizer, if we configure v8 to generate
+    # code that will be run under a simulator, then the generated code
+    # also gets the benefits of the sanitizer.
+    v8_target_cpu = "arm64"
+  } else {
+    v8_target_cpu = target_cpu
+  }
+}
+
+declare_args() {
+  # This argument is declared here so that it can be overridden in toolchains.
+  # It should never be explicitly set by the user.
+  v8_current_cpu = v8_target_cpu
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/win/BUILD.gn
@@ -0,0 +1,438 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/toolchain.gni")
+
+assert(is_win)
+
+declare_args() {
+  # Set this to true to enable static analysis through Visual Studio's
+  # /analyze. This dramatically slows compiles and reports thousands of
+  # warnings, so normally this is done on a build machine and only the new
+  # warnings are examined.
+  use_vs_code_analysis = false
+
+  # Turn this on to have the linker output extra timing information.
+  win_linker_timing = false
+}
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# Windows-only.
+config("compiler") {
+  if (current_cpu == "x86") {
+    asmflags = [
+      # When /safeseh is specified, the linker will only produce an image if it
+      # can also produce a table of the image's safe exception handlers. This
+      # table specifies for the operating system which exception handlers are
+      # valid for the image. Note that /SAFESEH isn't accepted on the command
+      # line, only /safeseh. This is only accepted by ml.exe, not ml64.exe.
+      "/safeseh",
+    ]
+  }
+
+  cflags = [
+    "/Gy",  # Enable function-level linking.
+    "/FS",  # Preserve previous PDB behavior.
+    "/bigobj",  # Some of our files are bigger than the regular limits.
+  ]
+
+  # Force C/C++ mode for the given GN detected file type. This is necessary
+  # for precompiled headers where the same source file is compiled in both
+  # modes.
+  cflags_c = [ "/TC" ]
+  cflags_cc = [ "/TP" ]
+
+  cflags += [
+    # Tell the compiler to crash on failures. This is undocumented
+    # and unsupported but very handy.
+    "/d2FastFail",
+
+    # Work around crbug.com/526851, bug in VS 2015 RTM compiler.
+    "/Zc:sizedDealloc-",
+  ]
+
+  # Building with Clang on Windows is a work in progress and very
+  # experimental. See crbug.com/82385.
+  if (is_clang) {
+    cflags += [ "-fmsc-version=1900" ]
+
+    if (current_cpu == "x86") {
+      cflags += [ "-m32" ]
+    } else {
+      cflags += [ "-m64" ]
+    }
+
+    if (exec_script("//build/win/use_ansi_codes.py", [], "trim string") ==
+        "True") {
+      cflags += [
+        # cmd.exe doesn't understand ANSI escape codes by default,
+        # so only enable them if something emulating them is around.
+        "-fansi-escape-codes",
+      ]
+    }
+
+    # Clang runtime libraries, such as the sanitizer runtimes, live here.
+    lib_dirs = [ "$clang_base_path/lib/clang/$clang_version/lib/windows" ]
+  }
+
+  # /PROFILE ensures that the PDB file contains FIXUP information (growing the
+  # PDB file by about 5%) but does not otherwise alter the output binary. This
+  # information is used by the Syzygy optimization tool when decomposing the
+  # release image. It is enabled for syzyasan builds and opportunistically for
+  # other builds where it is not prohibited (not supported when incrementally
+  # linking, or using /debug:fastlink).
+  if (is_syzyasan) {
+    assert(!is_win_fastlink)
+    ldflags = [ "/PROFILE" ]
+  } else {
+    if (!is_debug && !is_component_build) {
+      if (is_win_fastlink) {
+        # /PROFILE implies the following linker flags. Therefore if we are
+        # skipping /PROFILE because it is incompatible with /DEBUG:FASTLINK
+        # we should explicitly add these flags in order to avoid unintended
+        # consequences such as larger binaries.
+        ldflags = [
+          "/OPT:REF",
+          "/OPT:ICF",
+          "/INCREMENTAL:NO",
+          "/FIXED:NO",
+        ]
+      } else {
+        ldflags = [ "/PROFILE" ]
+      }
+    }
+  }
+
+  # arflags apply only to static_libraries. The normal linker configs are only
+  # set for executable and shared library targets so arflags must be set
+  # elsewhere. Since this is relatively contained, we just apply them in this
+  # more general config and they will only have an effect on static libraries.
+  arflags = [
+    # "No public symbols found; archive member will be inaccessible." This
+    # means that one or more object files in the library can never be
+    # pulled in to targets that link to this library. It's just a warning that
+    # the source file is a no-op.
+    "/ignore:4221",
+  ]
+}
+
+config("vs_code_analysis") {
+  if (use_vs_code_analysis && !is_clang) {
+    # When use_vs_code_analysis is specified add the /analyze switch to enable
+    # static analysis. Specifying /analyze:WX- says that /analyze warnings
+    # should not be treated as errors.
+    cflags = [ "/analyze:WX-" ]
+
+    # Also, disable various noisy warnings that have low value.
+    cflags += [
+      "/wd6011",  # Dereferencing NULL pointer
+
+      # C6285 is ~16% of raw warnings and has low value
+      "/wd6285",  # non-zero constant || non-zero constant
+      "/wd6308",  # realloc might return null pointer
+
+      # Possible infinite loop: use of the constant
+      # EXCEPTION_CONTINUE_EXECUTION in the exception-filter
+      "/wd6312",
+
+      "/wd6322",  # Empty _except block
+      "/wd6330",  # 'char' used instead of 'unsigned char' for istype() call
+
+      # C6334 is ~80% of raw warnings and has low value
+      "/wd6334",  # sizeof applied to an expression with an operator
+      "/wd6326",  # Potential comparison of constant with constant
+      "/wd6340",  # Sign mismatch in function parameter
+      "/wd28159",  # Consider using 'GetTickCount64'
+      "/wd28196",  # The precondition is not satisfied
+      "/wd28204",  # Inconsistent SAL annotations
+      "/wd28251",  # Inconsistent SAL annotations
+      "/wd28252",  # Inconsistent SAL annotations
+      "/wd28253",  # Inconsistent SAL annotations
+      "/wd28278",  # Function appears with no prototype in scope
+      "/wd28285",  # syntax error in SAL annotation (in algorithm)
+      "/wd28301",  # Inconsistent SAL annotations
+      "/wd28182",  # Dereferencing NULL pointer
+    ]
+  }
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Windows-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+  cflags = []
+
+  # Defines that set up the CRT.
+  defines = [
+    "__STD_C",
+    "_CRT_RAND_S",
+    "_CRT_SECURE_NO_DEPRECATE",
+    "_HAS_EXCEPTIONS=0",
+    "_SCL_SECURE_NO_DEPRECATE",
+  ]
+
+  # Defines that set up the Windows SDK.
+  defines += [
+    "_ATL_NO_OPENGL",
+    "_WINDOWS",
+    "CERT_CHAIN_PARA_HAS_EXTRA_FIELDS",
+    "PSAPI_VERSION=1",
+    "WIN32",
+    "_SECURE_ATL",
+  ]
+
+  if (!use_vs_code_analysis) {
+    # This is required for ATL to use XP-safe versions of its functions.
+    # However it is prohibited when using /analyze
+    defines += [ "_USING_V110_SDK71_" ]
+  }
+}
+
+# Sets the default Windows build version. This is separated because some
+# targets need to manually override it for their compiles.
+config("winver") {
+  defines = [
+    "NTDDI_VERSION=0x0A000000",
+    "_WIN32_WINNT=0x0A00",
+    "WINVER=0x0A00",
+  ]
+}
+
+# Linker flags for Windows SDK setup, this is applied only to EXEs and DLLs.
+config("sdk_link") {
+  if (current_cpu == "x64") {
+    ldflags = [ "/MACHINE:X64" ]
+    lib_dirs = [
+      "$windows_sdk_path\Lib\winv6.3\um\x64",
+      "$visual_studio_path\VC\lib\amd64",
+      "$visual_studio_path\VC\atlmfc\lib\amd64",
+    ]
+  } else {
+    ldflags = [
+      "/MACHINE:X86",
+      "/SAFESEH",  # Not compatible with x64 so use only for x86.
+      "/largeaddressaware",
+    ]
+    lib_dirs = [
+      "$windows_sdk_path\Lib\winv6.3\um\x86",
+      "$visual_studio_path\VC\lib",
+      "$visual_studio_path\VC\atlmfc\lib",
+    ]
+  }
+}
+
+# This default linker setup is provided separately from the SDK setup so
+# targets who want different library configurations can remove this and specify
+# their own.
+config("common_linker_setup") {
+  ldflags = [
+    "/fastfail",
+    "/FIXED:NO",
+    "/ignore:4199",
+    "/ignore:4221",
+    "/NXCOMPAT",
+  ]
+
+  # ASLR makes debugging with windbg difficult because Chrome.exe and
+  # Chrome.dll share the same base name. As result, windbg will name the
+  # Chrome.dll module like chrome_<base address>, where <base address>
+  # typically changes with each launch. This in turn means that breakpoints in
+  # Chrome.dll don't stick from one launch to the next. For this reason, we
+  # turn ASLR off in debug builds.
+  if (is_debug) {
+    ldflags += [ "/DYNAMICBASE:NO" ]
+  } else {
+    ldflags += [ "/DYNAMICBASE" ]
+  }
+
+  if (win_linker_timing) {
+    ldflags += [
+      "/time",
+      "/verbose:incr",
+    ]
+  }
+}
+
+config("cfi_linker") {
+  # Control Flow Guard (CFG)
+  # https://msdn.microsoft.com/en-us/library/windows/desktop/mt637065.aspx
+  # /DYNAMICBASE (ASLR) is turned off in debug builds, therefore CFG can’t be
+  # turned on either.
+  # TODO(thakis): Turn this on with lld once supported, https://crbug.com/693709
+  if (!is_debug && !use_lld) {
+    # Turn on CFG in msvc linker, regardless of compiler used.
+    ldflags = [ "/guard:cf" ]
+  }
+}
+
+# CRT --------------------------------------------------------------------------
+
+# Configures how the runtime library (CRT) is going to be used.
+# See https://msdn.microsoft.com/en-us/library/2kzt1wy3.aspx for a reference of
+# what each value does.
+config("default_crt") {
+  if (is_component_build) {
+    # Component mode: dynamic CRT. Since the library is shared, it requires
+    # exceptions or will give errors about things not matching, so keep
+    # exceptions on.
+    configs = [ ":dynamic_crt" ]
+  } else {
+    if (current_os != "win") {
+      # WindowsRT: use the dynamic CRT.
+      configs = [ ":dynamic_crt" ]
+    } else {
+      # Desktop Windows: static CRT.
+      configs = [ ":static_crt" ]
+    }
+  }
+}
+
+# Use this to force the debug CRT for when building perf-critical build tools
+# that need to be fully optimized even in debug builds, for those times when the
+# debug CRT is part of the bottleneck. This also avoids *implicitly* defining
+# _DEBUG.
+config("release_crt") {
+  if (is_component_build) {
+    cflags = [ "/MD" ]
+  } else {
+    cflags = [ "/MT" ]
+  }
+}
+
+config("dynamic_crt") {
+  if (is_debug) {
+    # This pulls in the DLL debug CRT and defines _DEBUG
+    cflags = [ "/MDd" ]
+  } else {
+    cflags = [ "/MD" ]
+  }
+}
+
+config("static_crt") {
+  if (is_debug) {
+    # This pulls in the static debug CRT and defines _DEBUG
+    cflags = [ "/MTd" ]
+  } else {
+    cflags = [ "/MT" ]
+  }
+}
+
+# Subsystem --------------------------------------------------------------------
+
+# This is appended to the subsystem to specify a minimum version.
+if (current_cpu == "x64") {
+  # The number after the comma is the minimum required OS version.
+  # 5.02 = Windows Server 2003.
+  subsystem_version_suffix = ",5.02"
+} else {
+  # 5.01 = Windows XP.
+  subsystem_version_suffix = ",5.01"
+}
+
+config("console") {
+  ldflags = [ "/SUBSYSTEM:CONSOLE$subsystem_version_suffix" ]
+}
+config("windowed") {
+  ldflags = [ "/SUBSYSTEM:WINDOWS$subsystem_version_suffix" ]
+}
+
+# Incremental linking ----------------------------------------------------------
+
+incremental_linking_on_switch = [ "/INCREMENTAL" ]
+incremental_linking_off_switch = [ "/INCREMENTAL:NO" ]
+
+# Disable incremental linking for syzyasan, enable for debug builds and all
+# component builds - any builds where performance is not job one.
+if ((is_debug || is_component_build) && !is_syzyasan) {
+  default_incremental_linking_switch = incremental_linking_on_switch
+} else {
+  default_incremental_linking_switch = incremental_linking_off_switch
+}
+
+# Applies incremental linking or not depending on the current configuration.
+config("default_incremental_linking") {
+  ldflags = default_incremental_linking_switch
+}
+
+# Explicitly on or off incremental linking
+config("incremental_linking") {
+  ldflags = incremental_linking_on_switch
+}
+config("no_incremental_linking") {
+  ldflags = incremental_linking_off_switch
+}
+
+# Some large modules can't handle incremental linking in some situations. This
+# config should be applied to large modules to turn off incremental linking
+# when it won't work.
+config("default_large_module_incremental_linking") {
+  if (symbol_level > 0 && (current_cpu == "x86" || !is_component_build)) {
+    # When symbols are on, things get so large that the tools fail due to the
+    # size of the .ilk files.
+    ldflags = incremental_linking_off_switch
+  } else {
+    # Otherwise just do the default incremental linking for this build type.
+    ldflags = default_incremental_linking_switch
+  }
+}
+
+# Character set ----------------------------------------------------------------
+
+# Not including this config means "ansi" (8-bit system codepage).
+config("unicode") {
+  defines = [
+    "_UNICODE",
+    "UNICODE",
+  ]
+}
+
+# Lean and mean ----------------------------------------------------------------
+
+# Some third party code might not compile with WIN32_LEAN_AND_MEAN so we have
+# to have a separate config for it. Remove this config from your target to
+# get the "bloaty and accommodating" version of windows.h.
+config("lean_and_mean") {
+  defines = [ "WIN32_LEAN_AND_MEAN" ]
+}
+
+# Nominmax --------------------------------------------------------------------
+
+# Some third party code defines NOMINMAX before including windows.h, which
+# then causes warnings when it's been previously defined on the command line.
+# For such targets, this config can be removed.
+
+config("nominmax") {
+  defines = [ "NOMINMAX" ]
+}
+
+# Target WinRT ----------------------------------------------------------------
+
+# When targeting Windows Runtime, certain compiler/linker flags are necessary.
+
+config("target_winrt") {
+  defines = [
+    "WINRT",
+    "WINAPI_FAMILY=WINAPI_FAMILY_PC_APP",
+  ]
+  cflags_cc = [
+    "/ZW",
+    "/EHsc",
+  ]
+}
+
+# Internal stuff --------------------------------------------------------------
+
+# Config used by the MIDL template to disable warnings.
+config("midl_warnings") {
+  if (is_clang) {
+    # MIDL generates code like "#endif !_MIDL_USE_GUIDDEF_".
+    cflags = [ "-Wno-extra-tokens" ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/win/console_app.gni
@@ -0,0 +1,18 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+  # If true, builds as a console app (rather than a windowed app), which allows
+  # logging to be printed to the user. This will cause a terminal window to pop
+  # up when the executable is not run from the command line, so should only be
+  # used for development. Only has an effect on Windows builds.
+  win_console_app = false
+}
+
+if (is_win && is_asan) {
+  # AddressSanitizer build should be a console app since it writes to stderr.
+  win_console_app = true
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/win/manifest.gni
@@ -0,0 +1,191 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# HOW MANIFESTS WORK IN THE GN BUILD
+#
+# Use the windows_manifest template to declare a manifest generation step.
+# This will combine all listed .manifest files and generate a resource file
+# referencing the resulting manifest. To link this manifest, just depend on
+# the manifest target from your executable or shared library.
+#
+# This will define an empty placeholder target on non-Windows platforms so
+# the manifest declarations and dependencies do not need to be inside of OS
+# conditionals.
+#
+# Manifests uses different resource IDs for EXE and DLL targets. You will need
+# to specify this in the manifest target declaration and only use that manifest
+# target from the correct type of binary target.
+#
+# A binary can depend on only one manifest target, but the manifest target
+# can depend on many individual .manifest files which will be merged. As a
+# result, only executables and shared libraries should depend on manifest
+# targets. If you want to add a manifest to a component, put the dependency
+# behind a "if (is_component_build)" conditional.
+#
+# Generally you will just want the defaults for the Chrome build. In this case
+# the binary should just depend on one of the targets in //build/win/. There
+# are also individual manifest files in that directory you can reference via
+# the *_manifest variables defined below to pick and choose only some defaults.
+# You might combine these with a custom manifest file to get specific behavior.
+
+# Reference this manifest as a source from windows_manifest targets to get
+# the default Chrome OS compatibility list.
+default_compatibility_manifest = "//build/win/compatibility.manifest"
+
+# Reference this manifest as a source from windows_manifest targets to get
+# the default Chrome common controls compatibility.
+common_controls_manifest = "//build/win/common_controls.manifest"
+
+# Reference this manifest to request that Windows not perform any elevation
+# when running your program. Otherwise, it might do some autodetection and
+# request elevated privileges from the user. This is normally what you want.
+as_invoker_manifest = "//build/win/as_invoker.manifest"
+
+# An alternative to as_invoker_manifest when you want the application to always
+# elevate.
+require_administrator_manifest = "//build/win/require_administrator.manifest"
+
+# Construct a target to combine the given manifest files into a .rc file.
+#
+# Variables for the windows_manifest template:
+#
+#   sources: (required)
+#     List of source .manifest files to add.
+#
+#   type: "dll" or "exe" (required)
+#     Indicates the type of target that this manifest will be used for.
+#     DLLs and EXEs have different manifest resource IDs.
+#
+#   deps: (optional)
+#   visibility: (optional)
+#     Normal meaning.
+#
+# Example:
+#
+#   windows_manifest("doom_melon_manifest") {
+#     sources = [
+#       "doom_melon.manifest",   # Custom values in here.
+#       default_compatibility_manifest,  # Want the normal OS compat list.
+#     ]
+#     type = "exe"
+#   }
+#
+#   executable("doom_melon") {
+#     deps = [ ":doom_melon_manifest" ]
+#     ...
+#   }
+
+if (is_win) {
+  # This is the environment file that tool_wrapper.py will use for the current
+  # toolchain. It is placed in root_build_dir by the toolchain setup. This
+  # variable is the path relative to the root_build_dir which is what
+  # tool_wrapper.py expects as an argument.
+  _environment_file = "environment.$current_cpu"
+
+  template("windows_manifest") {
+    manifest_action_name = "${target_name}__gen_manifest"
+    rc_action_name = "${target_name}__gen_rc"
+    source_set_name = target_name
+
+    output_manifest = "$target_gen_dir/$source_set_name.manifest"
+    rcfile = "$output_manifest.rc"
+
+    # Make the final .manifest file.
+    action(manifest_action_name) {
+      visibility = [
+        ":$source_set_name",
+        ":$rc_action_name",
+      ]
+
+      script = "//build/toolchain/win/tool_wrapper.py"
+
+      assert(defined(invoker.sources),
+             "\"sources\" must be defined for a windows_manifest target")
+      inputs = invoker.sources
+
+      outputs = [
+        output_manifest,
+      ]
+
+      args = [
+        "manifest-wrapper",
+        _environment_file,
+        "mt.exe",
+        "-nologo",
+        "-manifest",
+      ]
+      args += rebase_path(invoker.sources, root_build_dir)
+      args += [ "-out:" + rebase_path(output_manifest, root_build_dir) ]
+
+      # Apply any dependencies from the invoker to this target, since those
+      # dependencies may have created the input manifest files.
+      forward_variables_from(invoker, [ "deps" ])
+    }
+
+    # Make the .rc file that references the final manifest file.
+    #
+    # This could easily be combined into one step, but this current separation
+    # of .manifest and .rc matches GYP and allows us to re-use tool_wrapper.py.
+    action(rc_action_name) {
+      visibility = [ ":$source_set_name" ]
+
+      script = "//build/toolchain/win/tool_wrapper.py"
+
+      outputs = [
+        rcfile,
+      ]
+
+      # EXEs have a resource ID of 1 for their manifest, DLLs use 2.
+      assert(defined(invoker.type),
+             "\"type\" must be defined for a windows_manifest")
+      if (invoker.type == "exe") {
+        manifest_resource_id = "1"
+      } else if (invoker.type == "dll") {
+        manifest_resource_id = "2"
+      } else {
+        assert(false, "Bad value of \"type\", Must be \"exe\" or \"dll\"")
+      }
+
+      args = [
+        "manifest-to-rc",
+        "$_environment_file",
+        rebase_path(output_manifest),
+        rebase_path(rcfile, root_build_dir),
+        manifest_resource_id,
+      ]
+
+      # Although generating this file doesn't technically depend on the
+      # generated manifest, this dependency causes the .rc timestamp to be
+      # updated every time the manifest is updated. Otherwise, updating the
+      # manifest will not cause a recompilation of the .rc file.
+      deps = [
+        ":$manifest_action_name",
+      ]
+    }
+
+    # This source set only exists to compile and link the resource file.
+    source_set(source_set_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      sources = [
+        rcfile,
+      ]
+      deps = [
+        ":$manifest_action_name",
+        ":$rc_action_name",
+      ]
+    }
+  }
+} else {
+  # Make a no-op group on non-Windows platforms so windows_manifest
+  # instantiations don't need to be inside windows blocks.
+  template("windows_manifest") {
+    group(target_name) {
+      # Prevent unused variable warnings on non-Windows platforms.
+      assert(invoker.type == "exe" || invoker.type == "dll")
+      assert(invoker.sources != "")
+      assert(!defined(invoker.deps) || invoker.deps != "")
+      assert(!defined(invoker.visibility) || invoker.visibility != "")
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/win/visual_studio_version.gni
@@ -0,0 +1,39 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Path to Visual Studio. If empty, the default is used which is to use the
+  # automatic toolchain in depot_tools. If set, you must also set the
+  # visual_studio_version and wdk_path.
+  visual_studio_path = ""
+
+  # Version of Visual Studio pointed to by the visual_studio_path.
+  # Currently always "2015".
+  visual_studio_version = ""
+
+  # Directory of the Windows driver kit. If visual_studio_path is empty, this
+  # will be auto-filled.
+  wdk_path = ""
+
+  # Full path to the Windows SDK, not including a backslash at the end.
+  # This value is the default location, override if you have a different
+  # installation location.
+  windows_sdk_path = "C:\Program Files (x86)\Windows Kits\10"
+}
+
+if (visual_studio_path == "") {
+  toolchain_data =
+      exec_script("../../vs_toolchain.py", [ "get_toolchain_dir" ], "scope")
+  visual_studio_path = toolchain_data.vs_path
+  windows_sdk_path = toolchain_data.sdk_path
+  visual_studio_version = toolchain_data.vs_version
+  wdk_path = toolchain_data.wdk_dir
+  visual_studio_runtime_dirs = toolchain_data.runtime_dirs
+} else {
+  assert(visual_studio_version != "",
+         "You must set the visual_studio_version if you set the path")
+  assert(wdk_path != "",
+         "You must set the wdk_path if you set the visual studio path")
+  visual_studio_runtime_dirs = []
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/config/zip.gni
@@ -0,0 +1,55 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a zip archive of the inputs.
+#
+# inputs (required)
+#     List of input files relative to the current directory.
+#
+# output (required)
+#     File name to write.
+#
+# base_dir (optional)
+#     If provided, the archive paths will be relative to this directory.
+#
+# deps, public_deps, data_deps, testonly, visibility (optional)
+#     Normal meaning.
+template("zip") {
+  action(target_name) {
+    script = "//build/android/gn/zip.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = invoker.inputs
+    outputs = [
+      invoker.output,
+    ]
+
+    assert(defined(invoker.inputs))
+    rebase_inputs = rebase_path(invoker.inputs, root_build_dir)
+
+    assert(defined(invoker.output))
+    rebase_output = rebase_path(invoker.output, root_build_dir)
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--inputs=$rebase_inputs",
+      "--output=$rebase_output",
+    ]
+    if (defined(invoker.base_dir)) {
+      args += [
+        "--base-dir",
+        rebase_path(invoker.base_dir, root_build_dir),
+      ]
+    }
+
+    forward_variables_from(invoker,
+                           [
+                             "testonly",
+                             "deps",
+                             "public_deps",
+                             "data_deps",
+                             "visibility",
+                           ])
+  }
+}
--- a/media/webrtc/trunk/build/copy_test_data_ios.py
+++ b/media/webrtc/trunk/build/copy_test_data_ios.py
@@ -8,23 +8,26 @@
 import optparse
 import os
 import shutil
 import sys
 
 class WrongNumberOfArgumentsException(Exception):
   pass
 
+def EscapePath(path):
+  """Returns a path with spaces escaped."""
+  return path.replace(" ", "\\ ")
+
 def ListFilesForPath(path):
   """Returns a list of all the files under a given path."""
   output = []
-  # Ignore dotfiles and dot directories.
-  # TODO(rohitrao): This will fail to exclude cases where the initial argument
-  # is a relative path that starts with a dot.
-  if os.path.basename(path).startswith('.'):
+  # Ignore revision control metadata directories.
+  if (os.path.basename(path).startswith('.git') or
+      os.path.basename(path).startswith('.svn')):
     return output
 
   # Files get returned without modification.
   if not os.path.isdir(path):
     output.append(path)
     return output
 
   # Directories get recursively expanded.
@@ -32,23 +35,20 @@ def ListFilesForPath(path):
   for item in contents:
     full_path = os.path.join(path, item)
     output.extend(ListFilesForPath(full_path))
   return output
 
 def CalcInputs(inputs):
   """Computes the full list of input files for a set of command-line arguments.
   """
-  # |inputs| is a list of strings, each of which may contain muliple paths
-  # separated by spaces.
+  # |inputs| is a list of paths, which may be directories.
   output = []
   for input in inputs:
-    tokens = input.split()
-    for token in tokens:
-      output.extend(ListFilesForPath(token))
+    output.extend(ListFilesForPath(input))
   return output
 
 def CopyFiles(relative_filenames, output_basedir):
   """Copies files to the given output directory."""
   for file in relative_filenames:
     relative_dirname = os.path.dirname(file)
     output_dir = os.path.join(output_basedir, relative_dirname)
     output_filename = os.path.join(output_basedir, file)
@@ -72,24 +72,25 @@ def DoMain(argv):
   parser.add_option('--inputs', action='store_true', dest='list_inputs')
   parser.add_option('--outputs', action='store_true', dest='list_outputs')
   options, arglist = parser.parse_args(argv)
 
   if len(arglist) == 0:
     raise WrongNumberOfArgumentsException('<input_files> required.')
 
   files_to_copy = CalcInputs(arglist)
+  escaped_files = [EscapePath(x) for x in CalcInputs(arglist)]
   if options.list_inputs:
-    return '\n'.join(files_to_copy)
+    return '\n'.join(escaped_files)
 
   if not options.output_dir:
     raise WrongNumberOfArgumentsException('-o required.')
 
   if options.list_outputs:
-    outputs = [os.path.join(options.output_dir, x) for x in files_to_copy]
+    outputs = [os.path.join(options.output_dir, x) for x in escaped_files]
     return '\n'.join(outputs)
 
   CopyFiles(files_to_copy, options.output_dir)
   return
 
 def main(argv):
   try:
     result = DoMain(argv[1:])
--- a/media/webrtc/trunk/build/cp.py
+++ b/media/webrtc/trunk/build/cp.py
@@ -4,19 +4,20 @@
 # found in the LICENSE file.
 
 """Copy a file.
 
 This module works much like the cp posix command - it takes 2 arguments:
 (src, dst) and copies the file with path |src| to |dst|.
 """
 
+import os
 import shutil
 import sys
 
 
 def Main(src, dst):
   # Use copy instead of copyfile to ensure the executable bit is copied.
-  return shutil.copy(src, dst)
+  return shutil.copy(src, os.path.normpath(dst))
 
 
 if __name__ == '__main__':
   sys.exit(Main(sys.argv[1], sys.argv[2]))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/detect_host_arch.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Outputs host CPU architecture in format recognized by gyp."""
+
+import platform
+import re
+import sys
+
+
+def HostArch():
+  """Returns the host architecture with a predictable string."""
+  host_arch = platform.machine()
+
+  # Convert machine type to format recognized by gyp.
+  if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
+    host_arch = 'ia32'
+  elif host_arch in ['x86_64', 'amd64']:
+    host_arch = 'x64'
+  elif host_arch.startswith('arm'):
+    host_arch = 'arm'
+  elif host_arch.startswith('mips'):
+    host_arch = 'mips'
+  elif host_arch.startswith('ppc'):
+    host_arch = 'ppc'
+  elif host_arch.startswith('s390'):
+    host_arch = 's390'
+
+
+  # platform.machine is based on running kernel. It's possible to use 64-bit
+  # kernel with 32-bit userland, e.g. to give linker slightly more memory.
+  # Distinguish between different userland bitness by querying
+  # the python binary.
+  if host_arch == 'x64' and platform.architecture()[0] == '32bit':
+    host_arch = 'ia32'
+
+  return host_arch
+
+def DoMain(_):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return HostArch()
+
+if __name__ == '__main__':
+  print DoMain([])
--- a/media/webrtc/trunk/build/dir_exists.py
+++ b/media/webrtc/trunk/build/dir_exists.py
@@ -3,13 +3,21 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 """Writes True if the argument is a directory."""
 
 import os.path
 import sys
 
 def main():
-  sys.stdout.write(str(os.path.isdir(sys.argv[1])))
+  sys.stdout.write(_is_dir(sys.argv[1]))
   return 0
 
+def _is_dir(dir_name):
+  return str(os.path.isdir(dir_name))
+
+def DoMain(args):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return _is_dir(args[0])
+
 if __name__ == '__main__':
   sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/docs/mac_hermetic_toolchain.md
@@ -0,0 +1,44 @@
+# Mac and iOS hermetic toolchain instructions
+
+The following is a short explanation of why we use the hermetic toolchain
+and instructions on how to roll a new toolchain.
+
+## How to roll a new hermetic toolchain.
+
+1. Download a new version of Xcode, and confirm either mac or ios builds
+   properly with this new version.
+
+2. Run the following command:
+
+   ```
+   src/build/package_mac_toolchain.py /path/to/Xcode.app/ [ios|mac]
+   ```
+
+   The script will create a subset of the toolchain necessary for a build, and
+   upload them to be used by hermetic builds.
+
+   If for some reason this toolchain version has already been uploaded, the
+   script will ask if we should create sub revision.  This can be necessary when
+   the package script has been updated to compress additional files.
+
+3. Create a CL with updated [MAC|IOS]_TOOLCHAIN_VERSION and _SUB_REVISION in
+   src/build/mac_toolchain.py with the version created by the previous command.
+
+4. Run the CL through the trybots to confirm the roll works.
+
+## Why we use a hermetic toolchain.
+
+Building Chrome Mac currently requires many binaries that come bundled with
+Xcode, as well as the macOS and iphoneOS SDK [also bundled with Xcode].  Note that
+Chrome ships its own version of clang [compiler], but is dependent on Xcode
+for these other binaries.
+
+Chrome should be built against the latest SDK available, but historically,
+updating the SDK has been nontrivially difficult.  Additionally, bot system
+installs can range from Xcode 5 on some bots, to the latest and
+greatest.  Using a hermetic toolchain has two main benefits:
+
+1. Build Chrome with a well-defined toolchain [rather than whatever happens to
+be installed on the machine].
+
+2. Easily roll/update the toolchain.
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/dotfile_settings.gni
@@ -0,0 +1,34 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains variables that can be imported into a repo's dotfile (.gn)
+# to make it easier to roll new versions of //build in.
+
+build_dotfile_settings = {
+  exec_script_whitelist = [
+    "//build/config/android/config.gni",
+    "//build/config/android/internal_rules.gni",
+    "//build/config/android/rules.gni",
+    "//build/config/compiler/BUILD.gn",
+    "//build/config/gcc/gcc_version.gni",
+    "//build/config/host_byteorder.gni",
+    "//build/config/ios/ios_sdk.gni",
+    "//build/config/linux/BUILD.gn",
+    "//build/config/linux/pkg_config.gni",
+    "//build/config/linux/atk/BUILD.gn",
+    "//build/config/mac/mac_sdk.gni",
+    "//build/config/mac/rules.gni",
+    "//build/config/posix/BUILD.gn",
+    "//build/config/sysroot.gni",
+    "//build/config/win/BUILD.gn",
+    "//build/config/win/visual_studio_version.gni",
+    "//build/toolchain/concurrent_links.gni",
+    "//build/toolchain/mac/BUILD.gn",
+    "//build/toolchain/nacl/BUILD.gn",
+    "//build/toolchain/toolchain.gni",
+    "//build/toolchain/win/BUILD.gn",
+    "//build/util/branding.gni",
+    "//build/util/version.gni",
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/download_gold_plugin.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to download LLVM gold plugin from google storage."""
+
+import find_depot_tools
+import json
+import os
+import shutil
+import subprocess
+import sys
+import zipfile
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+
+
+DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
+GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py')
+
+LLVM_BUILD_PATH = os.path.join(CHROME_SRC, 'third_party', 'llvm-build',
+                               'Release+Asserts')
+CLANG_UPDATE_PY = os.path.join(CHROME_SRC, 'tools', 'clang', 'scripts',
+                               'update.py')
+CLANG_REVISION = os.popen(CLANG_UPDATE_PY + ' --print-revision').read().rstrip()
+
+CLANG_BUCKET = 'gs://chromium-browser-clang/Linux_x64'
+
+def main():
+  targz_name = 'llvmgold-%s.tgz' % CLANG_REVISION
+  remote_path = '%s/%s' % (CLANG_BUCKET, targz_name)
+
+  os.chdir(LLVM_BUILD_PATH)
+
+  subprocess.check_call(['python', GSUTIL_PATH,
+                         'cp', remote_path, targz_name])
+  subprocess.check_call(['tar', 'xzf', targz_name])
+  os.remove(targz_name)
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
--- a/media/webrtc/trunk/build/download_nacl_toolchains.py
+++ b/media/webrtc/trunk/build/download_nacl_toolchains.py
@@ -1,64 +1,61 @@
 #!/usr/bin/env python
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Shim to run nacl toolchain download script only if there is a nacl dir."""
 
 import os
+import shutil
 import sys
 
 
 def Main(args):
   # Exit early if disable_nacl=1.
   if 'disable_nacl=1' in os.environ.get('GYP_DEFINES', ''):
     return 0
+  if 'OS=android' in os.environ.get('GYP_DEFINES', ''):
+    return 0
   script_dir = os.path.dirname(os.path.abspath(__file__))
   src_dir = os.path.dirname(script_dir)
   nacl_dir = os.path.join(src_dir, 'native_client')
   nacl_build_dir = os.path.join(nacl_dir, 'build')
-  download_script = os.path.join(nacl_build_dir, 'download_toolchains.py')
-  if not os.path.exists(download_script):
-    print "Can't find '%s'" % download_script
+  package_version_dir = os.path.join(nacl_build_dir, 'package_version')
+  package_version = os.path.join(package_version_dir, 'package_version.py')
+  if not os.path.exists(package_version):
+    print "Can't find '%s'" % package_version
     print 'Presumably you are intentionally building without NativeClient.'
     print 'Skipping NativeClient toolchain download.'
     sys.exit(0)
-  sys.path.insert(0, nacl_build_dir)
-  import download_toolchains
+  sys.path.insert(0, package_version_dir)
+  import package_version
 
-  # TODO (robertm): Finish getting PNaCl ready for prime time.
   # BUG:
   # We remove this --optional-pnacl argument, and instead replace it with
-  # --no-pnacl for most cases.  However, if the bot name is the pnacl_sdk
+  # --no-pnacl for most cases.  However, if the bot name is an sdk
   # bot then we will go ahead and download it.  This prevents increasing the
   # gclient sync time for developers, or standard Chrome bots.
   if '--optional-pnacl' in args:
     args.remove('--optional-pnacl')
-    # By default we don't use PNaCl toolchain yet, unless on ARM, where
-    # there is no other toolchain to build untrusted code at the moment.
-    # So analyze if we're building for ARM, or on SDK buildbot.
-    # TODO(olonho): we need to invent more reliable way to get build
-    # configuration info, to know if we're building for ARM.
     use_pnacl = False
-    if 'target_arch=arm' in os.environ.get('GYP_DEFINES', ''):
-      use_pnacl = True
     buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '')
-    if buildbot_name.find('pnacl') >= 0 and  buildbot_name.find('sdk') >= 0:
+    if 'pnacl' in buildbot_name and 'sdk' in buildbot_name:
       use_pnacl = True
     if use_pnacl:
       print '\n*** DOWNLOADING PNACL TOOLCHAIN ***\n'
     else:
-      args.append('--no-pnacl')
+      args = ['--exclude', 'pnacl_newlib'] + args
 
-  # Append the name of the file to use as a version and hash source.
-  # NOTE:  While not recommended, it is possible to redirect this file to
-  # a chrome location to avoid branching NaCl if just a toolchain needs
-  # to be bumped.
-  args.append(os.path.join(nacl_dir,'TOOL_REVISIONS'))
+  # Only download the ARM gcc toolchain if we are building for ARM
+  # TODO(olonho): we need to invent more reliable way to get build
+  # configuration info, to know if we're building for ARM.
+  if 'target_arch=arm' not in os.environ.get('GYP_DEFINES', ''):
+      args = ['--exclude', 'nacl_arm_newlib'] + args
 
-  download_toolchains.main(args)
+  package_version.main(args)
+
   return 0
 
 
 if __name__ == '__main__':
   sys.exit(Main(sys.argv[1:]))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/download_translation_unit_tool.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to download Clang translation_unit tool from google storage."""
+
+import find_depot_tools
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tarfile
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+
+
+DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
+GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py')
+
+LLVM_BUILD_PATH = os.path.join(CHROME_SRC, 'third_party', 'llvm-build',
+                               'Release+Asserts')
+CLANG_UPDATE_PY = os.path.join(CHROME_SRC, 'tools', 'clang', 'scripts',
+                               'update.py')
+CLANG_REVISION = os.popen(CLANG_UPDATE_PY + ' --print-revision').read().rstrip()
+
+CLANG_BUCKET = 'gs://chromium-browser-clang'
+
+
+def main():
+  targz_name = 'translation_unit-%s.tgz' % CLANG_REVISION
+
+  if sys.platform == 'win32' or sys.platform == 'cygwin':
+    cds_full_url = CLANG_BUCKET + '/Win/' + targz_name
+  elif sys.platform == 'darwin':
+    cds_full_url = CLANG_BUCKET + '/Mac/' + targz_name
+  else:
+    assert sys.platform.startswith('linux')
+    cds_full_url = CLANG_BUCKET + '/Linux_x64/' + targz_name
+
+  os.chdir(LLVM_BUILD_PATH)
+
+  subprocess.check_call(['python', GSUTIL_PATH,
+                         'cp', cds_full_url, targz_name])
+  tarfile.open(name=targz_name, mode='r:gz').extractall(path=LLVM_BUILD_PATH)
+
+  os.remove(targz_name)
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/env_dump.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script can either source a file and dump the environment changes done by
+# it, or just simply dump the current environment as JSON into a file.
+
+import json
+import optparse
+import os
+import pipes
+import subprocess
+import sys
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('-f', '--output-json',
+                    help='File to dump the environment as JSON into.')
+  parser.add_option(
+      '-d', '--dump-mode', action='store_true',
+      help='Dump the environment to sys.stdout and exit immediately.')
+
+  parser.disable_interspersed_args()
+  options, args = parser.parse_args()
+  if options.dump_mode:
+    if args or options.output_json:
+      parser.error('Cannot specify args or --output-json with --dump-mode.')
+    json.dump(dict(os.environ), sys.stdout)
+  else:
+    if not options.output_json:
+      parser.error('Requires --output-json option.')
+
+    envsetup_cmd = ' '.join(map(pipes.quote, args))
+    full_cmd = [
+        'bash', '-c',
+        '. %s > /dev/null; %s -d' % (envsetup_cmd, os.path.abspath(__file__))
+    ]
+    try:
+      output = subprocess.check_output(full_cmd)
+    except Exception as e:
+      sys.exit('Error running %s and dumping environment.' % envsetup_cmd)
+
+    env_diff = {}
+    new_env = json.loads(output)
+    for k, val in new_env.items():
+      if k == '_' or (k in os.environ and os.environ[k] == val):
+        continue
+      env_diff[k] = val
+    with open(options.output_json, 'w') as f:
+      json.dump(env_diff, f)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/experimental/install-build-deps.py
@@ -0,0 +1,434 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import operator
+import os
+import platform
+import re
+import subprocess
+import sys
+
+
+SUPPORTED_UBUNTU_VERSIONS = (
+  {'number': '14.04', 'codename': 'trusty'},
+  {'number': '14.10', 'codename': 'utopic'},
+  {'number': '15.04', 'codename': 'vivid'},
+  {'number': '15.10', 'codename': 'wily'},
+)
+
+
+# Packages needed for chromeos only.
+_packages_chromeos_dev = (
+  'libbluetooth-dev',
+  'libxkbcommon-dev',
+  'realpath',
+)
+
+
+# Packages needed for development.
+_packages_dev = (
+  'bison',
+  'cdbs',
+  'curl',
+  'devscripts',
+  'dpkg-dev',
+  'elfutils',
+  'fakeroot',
+  'flex',
+  'fonts-ipafont',
+  'fonts-thai-tlwg',
+  'g++',
+  'git-core',
+  'git-svn',
+  'gperf',
+  'language-pack-da',
+  'language-pack-fr',
+  'language-pack-he',
+  'language-pack-zh-hant',
+  'libapache2-mod-php5',
+  'libasound2-dev',
+  'libav-tools',
+  'libbrlapi-dev',
+  'libbz2-dev',
+  'libcairo2-dev',
+  'libcap-dev',
+  'libcups2-dev',
+  'libcurl4-gnutls-dev',
+  'libdrm-dev',
+  'libelf-dev',
+  'libgconf2-dev',
+  'libglib2.0-dev',
+  'libglu1-mesa-dev',
+  'libgnome-keyring-dev',
+  'libgtk2.0-dev',
+  'libkrb5-dev',
+  'libnspr4-dev',
+  'libnss3-dev',
+  'libpam0g-dev',
+  'libpci-dev',
+  'libpulse-dev',
+  'libsctp-dev',
+  'libspeechd-dev',
+  'libsqlite3-dev',
+  'libssl-dev',
+  'libudev-dev',
+  'libwww-perl',
+  'libxslt1-dev',
+  'libxss-dev',
+  'libxt-dev',
+  'libxtst-dev',
+  'openbox',
+  'patch',
+  'perl',
+  'php5-cgi',
+  'pkg-config',
+  'python',
+  'python-cherrypy3',
+  'python-crypto',
+  'python-dev',
+  'python-numpy',
+  'python-opencv',
+  'python-openssl',
+  'python-psutil',
+  'python-yaml',
+  'rpm',
+  'ruby',
+  'subversion',
+  'ttf-dejavu-core',
+  'ttf-indic-fonts',
+  'ttf-kochi-gothic',
+  'ttf-kochi-mincho',
+  'wdiff',
+  'zip',
+)
+
+
+# Run-time libraries required by chromeos only.
+_packages_chromeos_lib = (
+  'libbz2-1.0',
+  'libpulse0',
+)
+
+
+# Full list of required run-time libraries.
+_packages_lib = (
+  'libasound2',
+  'libatk1.0-0',
+  'libc6',
+  'libcairo2',
+  'libcap2',
+  'libcups2',
+  'libexpat1',
+  'libfontconfig1',
+  'libfreetype6',
+  'libglib2.0-0',
+  'libgnome-keyring0',
+  'libgtk2.0-0',
+  'libpam0g',
+  'libpango1.0-0',
+  'libpci3',
+  'libpcre3',
+  'libpixman-1-0',
+  'libpng12-0',
+  'libspeechd2',
+  'libsqlite3-0',
+  'libstdc++6',
+  'libx11-6',
+  'libx11-xcb1',
+  'libxau6',
+  'libxcb1',
+  'libxcomposite1',
+  'libxcursor1',
+  'libxdamage1',
+  'libxdmcp6',
+  'libxext6',
+  'libxfixes3',
+  'libxi6',
+  'libxinerama1',
+  'libxrandr2',
+  'libxrender1',
+  'libxtst6',
+  'zlib1g',
+)
+
+
+# Debugging symbols for all of the run-time libraries.
+_packages_dbg = (
+  'libatk1.0-dbg',
+  'libc6-dbg',
+  'libcairo2-dbg',
+  'libfontconfig1-dbg',
+  'libglib2.0-0-dbg',
+  'libgtk2.0-0-dbg',
+  'libpango1.0-0-dbg',
+  'libpcre3-dbg',
+  'libpixman-1-0-dbg',
+  'libsqlite3-0-dbg',
+  'libx11-6-dbg',
+  'libx11-xcb1-dbg',
+  'libxau6-dbg',
+  'libxcb1-dbg',
+  'libxcomposite1-dbg',
+  'libxcursor1-dbg',
+  'libxdamage1-dbg',
+  'libxdmcp6-dbg',
+  'libxext6-dbg',
+  'libxfixes3-dbg',
+  'libxi6-dbg',
+  'libxinerama1-dbg',
+  'libxrandr2-dbg',
+  'libxrender1-dbg',
+  'libxtst6-dbg',
+  'zlib1g-dbg',
+)
+
+
+# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf.
+_packages_lib32 = (
+  'linux-libc-dev:i386',
+)
+
+
+# arm cross toolchain packages needed to build chrome on armhf.
+_packages_arm = (
+  'g++-arm-linux-gnueabihf',
+  'libc6-dev-armhf-cross',
+  'linux-libc-dev-armhf-cross',
+)
+
+
+# Packages to build NaCl, its toolchains, and its ports.
+_packages_naclports = (
+  'ant',
+  'autoconf',
+  'bison',
+  'cmake',
+  'gawk',
+  'intltool',
+  'xsltproc',
+  'xutils-dev',
+)
+_packages_nacl = (
+  'g++-mingw-w64-i686',
+  'lib32ncurses5-dev',
+  'lib32z1-dev',
+  'libasound2:i386',
+  'libcap2:i386',
+  'libelf-dev:i386',
+  'libfontconfig1:i386',
+  'libgconf-2-4:i386',
+  'libglib2.0-0:i386',
+  'libgpm2:i386',
+  'libgtk2.0-0:i386',
+  'libncurses5:i386',
+  'libnss3:i386',
+  'libpango1.0-0:i386',
+  'libssl1.0.0:i386',
+  'libtinfo-dev',
+  'libtinfo-dev:i386',
+  'libtool',
+  'libxcomposite1:i386',
+  'libxcursor1:i386',
+  'libxdamage1:i386',
+  'libxi6:i386',
+  'libxrandr2:i386',
+  'libxss1:i386',
+  'libxtst6:i386',
+  'texinfo',
+  'xvfb',
+)
+
+
+def is_userland_64_bit():
+  return platform.architecture()[0] == '64bit'
+
+
+def package_exists(pkg):
+  return pkg in subprocess.check_output(['apt-cache', 'pkgnames']).splitlines()
+
+
+def lsb_release_short_codename():
+  return subprocess.check_output(
+      ['lsb_release', '--codename', '--short']).strip()
+
+
+def write_error(message):
+  sys.stderr.write('ERROR: %s\n' % message)
+  sys.stderr.flush()
+
+
+def nonfatal_get_output(*popenargs, **kwargs):
+  process = subprocess.Popen(
+      stdout=subprocess.PIPE, stderr=subprocess.PIPE, *popenargs, **kwargs)
+  stdout, stderr = process.communicate()
+  retcode = process.poll()
+  return retcode, stdout, stderr
+
+
+def compute_dynamic_package_lists():
+  global _packages_arm
+  global _packages_dbg
+  global _packages_dev
+  global _packages_lib
+  global _packages_lib32
+  global _packages_nacl
+
+  if is_userland_64_bit():
+    # 64-bit systems need a minimum set of 32-bit compat packages
+    # for the pre-built NaCl binaries.
+    _packages_dev += (
+      'lib32gcc1',
+      'lib32stdc++6',
+      'libc6-i386',
+    )
+
+    # When cross building for arm/Android on 64-bit systems the host binaries
+    # that are part of v8 need to be compiled with -m32 which means
+    # that basic multilib support is needed.
+    # gcc-multilib conflicts with the arm cross compiler (at least in trusty)
+    # but g++-X.Y-multilib gives us the 32-bit support that we need. Find out
+    # the appropriate value of X and Y by seeing what version the current
+    # distribution's g++-multilib package depends on.
+    output = subprocess.check_output(['apt-cache', 'depends', 'g++-multilib'])
+    multilib_package = re.search(r'g\+\+-[0-9.]+-multilib', output).group()
+    _packages_lib32 += (multilib_package,)
+
+  lsb_codename = lsb_release_short_codename()
+
+  # Find the proper version of libstdc++6-4.x-dbg.
+  if lsb_codename == 'trusty':
+    _packages_dbg += ('libstdc++6-4.8-dbg',)
+  else:
+    _packages_dbg += ('libstdc++6-4.9-dbg',)
+
+  # Work around for dependency issue Ubuntu/Trusty: http://crbug.com/435056 .
+  if lsb_codename == 'trusty':
+    _packages_arm += (
+      'g++-4.8-multilib-arm-linux-gnueabihf',
+      'gcc-4.8-multilib-arm-linux-gnueabihf',
+    )
+
+  # Find the proper version of libgbm-dev. We can't just install libgbm-dev as
+  # it depends on mesa, and only one version of mesa can exists on the system.
+  # Hence we must match the same version or this entire script will fail.
+  mesa_variant = ''
+  for variant in ('-lts-trusty', '-lts-utopic'):
+    rc, stdout, stderr = nonfatal_get_output(
+        ['dpkg-query', '-Wf\'{Status}\'', 'libgl1-mesa-glx' + variant])
+    if 'ok installed' in stdout:
+      mesa_variant = variant
+  _packages_dev += (
+    'libgbm-dev' + mesa_variant,
+    'libgl1-mesa-dev' + mesa_variant,
+    'libgles2-mesa-dev' + mesa_variant,
+    'mesa-common-dev' + mesa_variant,
+  )
+
+  if package_exists('ttf-mscorefonts-installer'):
+    _packages_dev += ('ttf-mscorefonts-installer',)
+  else:
+    _packages_dev += ('msttcorefonts',)
+
+  if package_exists('libnspr4-dbg'):
+    _packages_dbg += ('libnspr4-dbg', 'libnss3-dbg')
+    _packages_lib += ('libnspr4', 'libnss3')
+  else:
+    _packages_dbg += ('libnspr4-0d-dbg', 'libnss3-1d-dbg')
+    _packages_lib += ('libnspr4-0d', 'libnss3-1d')
+
+  if package_exists('libjpeg-dev'):
+    _packages_dev += ('libjpeg-dev',)
+  else:
+    _packages_dev += ('libjpeg62-dev',)
+
+  if package_exists('libudev1'):
+    _packages_dev += ('libudev1',)
+    _packages_nacl += ('libudev1:i386',)
+  else:
+    _packages_dev += ('libudev0',)
+    _packages_nacl += ('libudev0:i386',)
+
+  if package_exists('libbrlapi0.6'):
+    _packages_dev += ('libbrlapi0.6',)
+  else:
+    _packages_dev += ('libbrlapi0.5',)
+
+  if package_exists('apache2-bin'):
+    _packages_dev += ('apache2-bin',)
+  else:
+    _packages_dev += ('apache2.2-bin',)
+
+  if package_exists('xfonts-mathml'):
+    _packages_dev += ('xfonts-mathml',)
+
+  # Some packages are only needed if the distribution actually supports
+  # installing them.
+  if package_exists('appmenu-gtk'):
+    _packages_lib += ('appmenu-gtk',)
+
+  _packages_dev += _packages_chromeos_dev
+  _packages_lib += _packages_chromeos_lib
+  _packages_nacl += _packages_naclports
+
+
+def quick_check(packages):
+  rc, stdout, stderr = nonfatal_get_output([
+      'dpkg-query', '-W', '-f', '${PackageSpec}:${Status}\n'] + list(packages))
+  if rc == 0 and not stderr:
+    return 0
+  print stderr
+  return 1
+
+
+def main(argv):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--quick-check', action='store_true',
+                      help='quickly try to determine if dependencies are '
+                           'installed (this avoids interactive prompts and '
+                           'sudo commands so might not be 100% accurate)')
+  parser.add_argument('--unsupported', action='store_true',
+                      help='attempt installation even on unsupported systems')
+  args = parser.parse_args(argv)
+
+  lsb_codename = lsb_release_short_codename()
+  if not args.unsupported and not args.quick_check:
+    if lsb_codename not in map(
+        operator.itemgetter('codename'), SUPPORTED_UBUNTU_VERSIONS):
+      supported_ubuntus = ['%(number)s (%(codename)s)' % v
+                           for v in SUPPORTED_UBUNTU_VERSIONS]
+      write_error('Only Ubuntu %s are currently supported.' %
+                  ', '.join(supported_ubuntus))
+      return 1
+
+    if platform.machine() not in ('i686', 'x86_64'):
+      write_error('Only x86 architectures are currently supported.')
+      return 1
+
+  if os.geteuid() != 0 and not args.quick_check:
+    print 'Running as non-root user.'
+    print 'You might have to enter your password one or more times'
+    print 'for \'sudo\'.'
+    print
+
+  compute_dynamic_package_lists()
+
+  packages = (_packages_dev + _packages_lib + _packages_dbg + _packages_lib32 +
+              _packages_arm + _packages_nacl)
+  def packages_key(pkg):
+    s = pkg.rsplit(':', 1)
+    if len(s) == 1:
+      return (s, '')
+    return s
+  packages = sorted(set(packages), key=packages_key)
+
+  if args.quick_check:
+    return quick_check(packages)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
--- a/media/webrtc/trunk/build/extract_from_cab.py
+++ b/media/webrtc/trunk/build/extract_from_cab.py
@@ -7,17 +7,17 @@
 
 import os
 import shutil
 import subprocess
 import sys
 import tempfile
 
 def run_quiet(*args):
-  """Run 'expand' supressing noisy output. Returns returncode from process."""
+  """Run 'expand' suppressing noisy output. Returns returncode from process."""
   popen = subprocess.Popen(args, stdout=subprocess.PIPE)
   out, _ = popen.communicate()
   if popen.returncode:
     # expand emits errors to stdout, so if we fail, then print that out.
     print out
   return popen.returncode
 
 def main():
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/find_depot_tools.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Small utility function to find depot_tools and add it to the python path.
+
+Will throw an ImportError exception if depot_tools can't be found since it
+imports breakpad.
+
+This can also be used as a standalone script to print out the depot_tools
+directory location.
+"""
+
+import os
+import sys
+
+
+def IsRealDepotTools(path):
+  expanded_path = os.path.expanduser(path)
+  return os.path.isfile(os.path.join(expanded_path, 'gclient.py'))
+
+
+def add_depot_tools_to_path():
+  """Search for depot_tools and add it to sys.path."""
+  # First look if depot_tools is already in PYTHONPATH.
+  for i in sys.path:
+    if i.rstrip(os.sep).endswith('depot_tools') and IsRealDepotTools(i):
+      return i
+  # Then look if depot_tools is in PATH, common case.
+  for i in os.environ['PATH'].split(os.pathsep):
+    if IsRealDepotTools(i):
+      sys.path.append(i.rstrip(os.sep))
+      return i
+  # Rare case, it's not even in PATH, look upward up to root.
+  root_dir = os.path.dirname(os.path.abspath(__file__))
+  previous_dir = os.path.abspath(__file__)
+  while root_dir and root_dir != previous_dir:
+    i = os.path.join(root_dir, 'depot_tools')
+    if IsRealDepotTools(i):
+      sys.path.append(i)
+      return i
+    previous_dir = root_dir
+    root_dir = os.path.dirname(root_dir)
+  print >> sys.stderr, 'Failed to find depot_tools'
+  return None
+
+DEPOT_TOOLS_PATH = add_depot_tools_to_path()
+
+# pylint: disable=W0611
+import breakpad
+
+
+def main():
+  if DEPOT_TOOLS_PATH is None:
+    return 1
+  print DEPOT_TOOLS_PATH
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/find_isolated_tests.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Scans build output directory for .isolated files, calculates their SHA1
+hashes, stores final list in JSON document and then removes *.isolated files
+found (to ensure no stale *.isolated stay around on the next build).
+
+Used to figure out what tests were build in isolated mode to trigger these
+tests to run on swarming.
+
+For more info see:
+https://sites.google.com/a/chromium.org/dev/developers/testing/isolated-testing
+"""
+
+import glob
+import hashlib
+import json
+import optparse
+import os
+import re
+import sys
+
+
+def hash_file(filepath):
+  """Calculates the hash of a file without reading it all in memory at once."""
+  digest = hashlib.sha1()
+  with open(filepath, 'rb') as f:
+    while True:
+      chunk = f.read(1024*1024)
+      if not chunk:
+        break
+      digest.update(chunk)
+  return digest.hexdigest()
+
+
+def main():
+  parser = optparse.OptionParser(
+      usage='%prog --build-dir <path> --output-json <path>',
+      description=sys.modules[__name__].__doc__)
+  parser.add_option(
+      '--build-dir',
+      help='Path to a directory to search for *.isolated files.')
+  parser.add_option(
+      '--output-json',
+      help='File to dump JSON results into.')
+
+  options, _ = parser.parse_args()
+  if not options.build_dir:
+    parser.error('--build-dir option is required')
+  if not options.output_json:
+    parser.error('--output-json option is required')
+
+  result = {}
+
+  # Get the file hash values and output the pair.
+  pattern = os.path.join(options.build_dir, '*.isolated')
+  for filepath in sorted(glob.glob(pattern)):
+    test_name = os.path.splitext(os.path.basename(filepath))[0]
+    if re.match(r'^.+?\.\d$', test_name):
+      # It's a split .isolated file, e.g. foo.0.isolated. Ignore these.
+      continue
+
+    # TODO(csharp): Remove deletion once the isolate tracked dependencies are
+    # inputs for the isolated files.
+    sha1_hash = hash_file(filepath)
+    os.remove(filepath)
+    result[test_name] = sha1_hash
+
+  with open(options.output_json, 'wb') as f:
+    json.dump(result, f)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/fix_gn_headers.py
@@ -0,0 +1,218 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Fix header files missing in GN.
+
+This script takes the missing header files from check_gn_headers.py, and
+tries to fix them by adding them to the GN files.
+Manual cleaning up is likely required afterwards.
+"""
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+
+
+def GitGrep(pattern):
+  p = subprocess.Popen(
+      ['git', 'grep', '-En', pattern, '--', '*.gn', '*.gni'],
+      stdout=subprocess.PIPE)
+  out, _ = p.communicate()
+  return out, p.returncode
+
+
+def ValidMatches(basename, cc, grep_lines):
+  """Filter out 'git grep' matches with header files already."""
+  matches = []
+  for line in grep_lines:
+    gnfile, linenr, contents = line.split(':')
+    linenr = int(linenr)
+    new = re.sub(cc, basename, contents)
+    lines = open(gnfile).read().splitlines()
+    assert contents in lines[linenr - 1]
+    # Skip if it's already there. It could be before or after the match.
+    if lines[linenr] == new:
+      continue
+    if lines[linenr - 2] == new:
+      continue
+    print '    ', gnfile, linenr, new
+    matches.append((gnfile, linenr, new))
+  return matches
+
+
+def AddHeadersNextToCC(headers, skip_ambiguous=True):
+  """Add header files next to the corresponding .cc files in GN files.
+
+  When skip_ambiguous is True, skip if multiple .cc files are found.
+  Returns unhandled headers.
+
+  Manual cleaning up is likely required, especially if not skip_ambiguous.
+  """
+  edits = {}
+  unhandled = []
+  for filename in headers:
+    filename = filename.strip()
+    if not (filename.endswith('.h') or filename.endswith('.hh')):
+      continue
+    basename = os.path.basename(filename)
+    print filename
+    cc = r'\b' + os.path.splitext(basename)[0] + r'\.(cc|cpp|mm)\b'
+    out, returncode = GitGrep('(/|")' + cc + '"')
+    if returncode != 0 or not out:
+      unhandled.append(filename)
+      continue
+
+    matches = ValidMatches(basename, cc, out.splitlines())
+
+    if len(matches) == 0:
+      continue
+    if len(matches) > 1:
+      print '\n[WARNING] Ambiguous matching for', filename
+      for i in enumerate(matches, 1):
+        print '%d: %s' % (i[0], i[1])
+      print
+      if skip_ambiguous:
+        continue
+
+      picked = raw_input('Pick the matches ("2,3" for multiple): ')
+      try:
+        matches = [matches[int(i) - 1] for i in picked.split(',')]
+      except (ValueError, IndexError):
+        continue
+
+    for match in matches:
+      gnfile, linenr, new = match
+      print '  ', gnfile, linenr, new
+      edits.setdefault(gnfile, {})[linenr] = new
+
+  for gnfile in edits:
+    lines = open(gnfile).read().splitlines()
+    for l in sorted(edits[gnfile].keys(), reverse=True):
+      lines.insert(l, edits[gnfile][l])
+    open(gnfile, 'w').write('\n'.join(lines) + '\n')
+
+  return unhandled
+
+
+def AddHeadersToSources(headers, skip_ambiguous=True):
+  """Add header files to the sources list in the first GN file.
+
+  The target GN file is the first one up the parent directories.
+  This usually does the wrong thing for _test files if the test and the main
+  target are in the same .gn file.
+  When skip_ambiguous is True, skip if multiple sources arrays are found.
+
+  "git cl format" afterwards is required. Manually cleaning up duplicated items
+  is likely required.
+  """
+  for filename in headers:
+    filename = filename.strip()
+    print filename
+    dirname = os.path.dirname(filename)
+    while not os.path.exists(os.path.join(dirname, 'BUILD.gn')):
+      dirname = os.path.dirname(dirname)
+    rel = filename[len(dirname) + 1:]
+    gnfile = os.path.join(dirname, 'BUILD.gn')
+
+    lines = open(gnfile).read().splitlines()
+    matched = [i for i, l in enumerate(lines) if ' sources = [' in l]
+    if skip_ambiguous and len(matched) > 1:
+      print '[WARNING] Multiple sources in', gnfile
+      continue
+
+    if len(matched) < 1:
+      continue
+    print '  ', gnfile, rel
+    index = matched[0]
+    lines.insert(index + 1, '"%s",' % rel)
+    open(gnfile, 'w').write('\n'.join(lines) + '\n')
+
+
+def RemoveHeader(headers, skip_ambiguous=True):
+  """Remove non-existing headers in GN files.
+
+  When skip_ambiguous is True, skip if multiple matches are found.
+  """
+  edits = {}
+  unhandled = []
+  for filename in headers:
+    filename = filename.strip()
+    if not (filename.endswith('.h') or filename.endswith('.hh')):
+      continue
+    basename = os.path.basename(filename)
+    print filename
+    out, returncode = GitGrep('(/|")' + basename + '"')
+    if returncode != 0 or not out:
+      unhandled.append(filename)
+      print '  Not found'
+      continue
+
+    grep_lines = out.splitlines()
+    matches = []
+    for line in grep_lines:
+      gnfile, linenr, contents = line.split(':')
+      print '    ', gnfile, linenr, contents
+      linenr = int(linenr)
+      lines = open(gnfile).read().splitlines()
+      assert contents in lines[linenr - 1]
+      matches.append((gnfile, linenr, contents))
+
+    if len(matches) == 0:
+      continue
+    if len(matches) > 1:
+      print '\n[WARNING] Ambiguous matching for', filename
+      for i in enumerate(matches, 1):
+        print '%d: %s' % (i[0], i[1])
+      print
+      if skip_ambiguous:
+        continue
+
+      picked = raw_input('Pick the matches ("2,3" for multiple): ')
+      try:
+        matches = [matches[int(i) - 1] for i in picked.split(',')]
+      except (ValueError, IndexError):
+        continue
+
+    for match in matches:
+      gnfile, linenr, contents = match
+      print '  ', gnfile, linenr, contents
+      edits.setdefault(gnfile, set()).add(linenr)
+
+  for gnfile in edits:
+    lines = open(gnfile).read().splitlines()
+    for l in sorted(edits[gnfile], reverse=True):
+      lines.pop(l - 1)
+    open(gnfile, 'w').write('\n'.join(lines) + '\n')
+
+  return unhandled
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('input_file', help="missing or non-existing headers, "
+                      "output of check_gn_headers.py")
+  parser.add_argument('--prefix',
+                      help="only handle path name with this prefix")
+  parser.add_argument('--remove', action='store_true',
+                      help="treat input_file as non-existing headers")
+
+  args, _extras = parser.parse_known_args()
+
+  headers = open(args.input_file).readlines()
+
+  if args.prefix:
+    headers = [i for i in headers if i.startswith(args.prefix)]
+
+  if args.remove:
+    RemoveHeader(headers, False)
+  else:
+    unhandled = AddHeadersNextToCC(headers)
+    AddHeadersToSources(unhandled)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/fuchsia/update_sdk.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Updates the Fuchsia SDK to the given revision. Should be used in a 'hooks_os'
+entry so that it only runs when .gclient's target_os includes 'fuchsia'."""
+
+import os
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+
+REPOSITORY_ROOT = os.path.abspath(os.path.join(
+    os.path.dirname(__file__), '..', '..'))
+sys.path.append(os.path.join(REPOSITORY_ROOT, 'build'))
+
+import find_depot_tools
+
+
+def EnsureDirExists(path):
+  if not os.path.exists(path):
+    print 'Creating directory %s' % path
+    os.makedirs(path)
+
+
+def main():
+  if len(sys.argv) != 2:
+    print >>sys.stderr, 'usage: %s <sdk_hash>' % sys.argv[0]
+    return 1
+
+  sdk_hash = sys.argv[1]
+  output_dir = os.path.join(REPOSITORY_ROOT, 'third_party', 'fuchsia-sdk')
+
+  hash_filename = os.path.join(output_dir, '.hash')
+  if os.path.exists(hash_filename):
+    with open(hash_filename, 'r') as f:
+      if f.read().strip() == sdk_hash:
+        # Nothing to do.
+        return 0
+
+  print 'Downloading SDK %s...' % sdk_hash
+
+  if os.path.isdir(output_dir):
+    shutil.rmtree(output_dir)
+
+  bucket = 'gs://fuchsia-build/fuchsia/sdk/linux64/'
+  with tempfile.NamedTemporaryFile() as f:
+    cmd = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py'),
+           'cp', bucket + sdk_hash, f.name]
+    subprocess.check_call(cmd)
+    f.seek(0)
+    EnsureDirExists(output_dir)
+    tarfile.open(mode='r:gz', fileobj=f).extractall(path=output_dir)
+
+  with open(hash_filename, 'w') as f:
+    f.write(sdk_hash)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
--- a/media/webrtc/trunk/build/gdb-add-index
+++ b/media/webrtc/trunk/build/gdb-add-index
@@ -1,47 +1,184 @@
 #!/bin/bash
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 #
 # Saves the gdb index for a given binary and its shared library dependencies.
+#
+# This will run gdb index in parallel on a number of binaries using SIGUSR1
+# as the communication mechanism to simulate a semaphore. Because of the
+# nature of this technique, using "set -e" is very difficult. The SIGUSR1
+# terminates a "wait" with an error which we need to interpret.
+#
+# When modifying this code, most of the real logic is in the index_one_file
+# function. The rest is cleanup + semaphore plumbing.
 
-set -e
+function usage_exit {
+  echo "Usage: $0 [-f] [-r] [-n] <paths-to-binaries>..."
+  echo "  -f forces replacement of an existing index."
+  echo "  -r removes the index section."
+  echo "  -n don't extract the dependencies of each binary with lld."
+  echo "       e.g., $0 -n out/Debug/lib.unstripped/lib*"
+  echo
+  echo "  Set TOOLCHAIN_PREFIX to use a non-default set of binutils."
+  exit 1
+}
+
+# Cleanup temp directory and ensure all child jobs are dead-dead.
+function on_exit {
+  trap "" EXIT USR1  # Avoid reentrancy.
+
+  local jobs=$(jobs -p)
+  if [ -n "$jobs" ]; then
+    echo -n "Killing outstanding index jobs..."
+    kill -KILL $(jobs -p)
+    wait
+    echo "done"
+  fi
+
+  if [ -d "$directory" ]; then
+    echo -n "Removing temp directory $directory..."
+    rm -rf "$directory"
+    echo done
+  fi
+}
+
+# Add index to one binary.
+function index_one_file {
+  local file=$1
+  local basename=$(basename "$file")
+  local should_index_this_file="${should_index}"
+
+  local readelf_out=$(${TOOLCHAIN_PREFIX}readelf -S "$file")
+  if [[ $readelf_out =~ "gdb_index" ]]; then
+    if $remove_index; then
+      ${TOOLCHAIN_PREFIX}objcopy --remove-section .gdb_index "$file"
+      echo "Removed index from $basename."
+    else
+      echo "Skipped $basename -- already contains index."
+      should_index_this_file=false
+    fi
+  fi
+
+  if $should_index_this_file; then
+    local start=$(date +"%s%N")
+    echo "Adding index to $basename..."
 
-if [[ ! $# == 1 ]]; then
-  echo "Usage: $0 path-to-binary"
-  exit 1
+    ${TOOLCHAIN_PREFIX}gdb -batch "$file" -ex "save gdb-index $directory" \
+      -ex "quit"
+    local index_file="$directory/$basename.gdb-index"
+    if [ -f "$index_file" ]; then
+      ${TOOLCHAIN_PREFIX}objcopy --add-section .gdb_index="$index_file" \
+        --set-section-flags .gdb_index=readonly "$file" "$file"
+      local finish=$(date +"%s%N")
+      local elapsed=$(((finish - start) / 1000000))
+      echo "   ...$basename indexed. [${elapsed}ms]"
+    else
+      echo "   ...$basename unindexable."
+    fi
+  fi
+}
+
+# Functions that when combined, concurrently index all files in FILES_TO_INDEX
+# array. The global FILES_TO_INDEX is declared in the main body of the script.
+function async_index {
+  # Start a background subshell to run the index command.
+  {
+    index_one_file $1
+    kill -SIGUSR1 $$  # $$ resolves to the parent script.
+    exit 129  # See comment above wait loop at bottom.
+  } &
+}
+
+cur_file_num=0
+function index_next {
+  if ((cur_file_num >= ${#files_to_index[@]})); then
+    return
+  fi
+
+  async_index "${files_to_index[cur_file_num]}"
+  ((cur_file_num += 1)) || true
+}
+
+########
+### Main body of the script.
+
+remove_index=false
+should_index=true
+should_index_deps=true
+files_to_index=()
+while (($# > 0)); do
+  case "$1" in
+    -h)
+      usage_exit
+      ;;
+    -f)
+      remove_index=true
+      ;;
+    -r)
+      remove_index=true
+      should_index=false
+      ;;
+    -n)
+      should_index_deps=false
+      ;;
+    -*)
+      echo "Invalid option: $1" >&2
+      usage_exit
+      ;;
+    *)
+      if [[ ! -f "$1" ]]; then
+        echo "Path $1 does not exist."
+        exit 1
+      fi
+      files_to_index+=("$1")
+      ;;
+  esac
+  shift
+done
+
+if ((${#files_to_index[@]} == 0)); then
+  usage_exit
 fi
 
-FILENAME="$1"
-if [[ ! -f "$FILENAME" ]]; then
-  echo "Path $FILENAME does not exist."
-  exit 1
+dependencies=()
+if $should_index_deps; then
+  for file in "${files_to_index[@]}"; do
+      # Append the shared library dependencies of this file that
+      # have the same dirname. The dirname is a signal that these
+      # shared libraries were part of the same build as the binary.
+      dependencies+=( \
+        $(ldd "$file" 2>/dev/null \
+          | grep $(dirname "$file") \
+          | sed "s/.*[ \t]\(.*\) (.*/\1/") \
+      )
+  done
 fi
+files_to_index+=("${dependencies[@]}")
+
+# Ensure we clean up on exit.
+trap on_exit EXIT INT
 
 # We're good to go! Create temp directory for index files.
-DIRECTORY=$(mktemp -d)
-echo "Made temp directory $DIRECTORY."
+directory=$(mktemp -d)
+echo "Made temp directory $directory."
 
-# Always remove directory on exit.
-trap "{ echo -n Removing temp directory $DIRECTORY...;
-  rm -rf $DIRECTORY; echo done; }" EXIT
-
-# Grab all the chromium shared library files.
-so_files=$(ldd "$FILENAME" 2>/dev/null \
-  | grep $(dirname "$FILENAME") \
-  | sed "s/.*[ \t]\(.*\) (.*/\1/")
+# Start concurrent indexing.
+trap index_next USR1
 
-# Add index to binary and the shared library dependencies.
-for file in "$FILENAME" $so_files; do
-  basename=$(basename "$file")
-  echo -n "Adding index to $basename..."
-  readelf_out=$(readelf -S "$file")
-  if [[ $readelf_out =~ "gdb_index" ]]; then
-    echo "already contains index. Skipped."
-  else
-    gdb -batch "$file" -ex "save gdb-index $DIRECTORY" -ex "quit"
-    objcopy --add-section .gdb_index="$DIRECTORY"/$basename.gdb-index \
-      --set-section-flags .gdb_index=readonly "$file" "$file"
-    echo "done."
-  fi
+# 4 is an arbitrary default. When changing, remember we are likely IO bound
+# so basing this off the number of cores is not sensible.
+index_tasks=${INDEX_TASKS:-4}
+for ((i = 0; i < index_tasks; i++)); do
+  index_next
 done
+
+# Do a wait loop. Bash waits that terminate due to a trap have an exit
+# code > 128. We also ensure that our subshell's "normal" exit occurs with
+# an exit code > 128. This allows us to consider a > 128 exit code as
+# an indication that the loop should continue. Unfortunately, it also means
+# we cannot use set -e since technically the "wait" is failing.
+wait
+while (($? > 128)); do
+  wait
+done
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/get_landmines.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This file emits the list of reasons why a particular build needs to be clobbered
+(or a list of 'landmines').
+"""
+
+import sys
+
+import landmine_utils
+
+
+distributor = landmine_utils.distributor
+gyp_defines = landmine_utils.gyp_defines
+gyp_msvs_version = landmine_utils.gyp_msvs_version
+platform = landmine_utils.platform
+
+
+def print_landmines():
+  """
+  ALL LANDMINES ARE EMITTED FROM HERE.
+  """
+  # DO NOT add landmines as part of a regular CL. Landmines are a last-effort
+  # bandaid fix if a CL that got landed has a build dependency bug and all bots
+  # need to be cleaned up. If you're writing a new CL that causes build
+  # dependency problems, fix the dependency problems instead of adding a
+  # landmine.
+
+  if distributor() == 'goma' and platform() == 'win32':
+    print 'Need to clobber winja goma due to backend cwd cache fix.'
+  if platform() == 'android':
+    print 'Clobber: to handle new way of suppressing findbugs failures.'
+    print 'Clobber to fix gyp not rename package name (crbug.com/457038)'
+    print 'Clobber to recalculate reversed dependency (crbug.com/639042)'
+  if platform() == 'win':
+    print 'Compile on cc_unittests fails due to symbols removed in r185063.'
+  if platform() == 'linux':
+    print 'Builders switching from make to ninja will clobber on this.'
+  if platform() == 'mac':
+    print 'Switching from bundle to unbundled dylib (issue 14743002).'
+  if platform() in ('win', 'mac'):
+    print ('Improper dependency for create_nmf.py broke in r240802, '
+           'fixed in r240860.')
+  if platform() == 'win':
+    print 'Switch to VS2015 Update 3, 14393 SDK'
+  print 'Need to clobber everything due to an IDL change in r154579 (blink)'
+  print 'Need to clobber everything due to gen file moves in r175513 (Blink)'
+  if (platform() != 'ios'):
+    print 'Clobber to get rid of obselete test plugin after r248358'
+    print 'Clobber to rebuild GN files for V8'
+  print 'Clobber to get rid of stale generated mojom.h files'
+  print 'Need to clobber everything due to build_nexe change in nacl r13424'
+  print '[chromium-dev] PSA: clobber build needed for IDR_INSPECTOR_* compil...'
+  print 'blink_resources.grd changed: crbug.com/400860'
+  print 'ninja dependency cycle: crbug.com/408192'
+  print 'Clobber to fix missing NaCl gyp dependencies (crbug.com/427427).'
+  print 'Another clobber for missing NaCl gyp deps (crbug.com/427427).'
+  print 'Clobber to fix GN not picking up increased ID range (crbug.com/444902)'
+  print 'Remove NaCl toolchains from the output dir (crbug.com/456902)'
+  if platform() == 'ios':
+    print 'Clobber iOS to workaround Xcode deps bug (crbug.com/485435)'
+  if platform() == 'win':
+    print 'Clobber to delete stale generated files (crbug.com/510086)'
+  if platform() == 'android' and gyp_defines().get('target_arch') == 'arm64':
+    print 'Clobber to support new location/infra for chrome_sync_shell_apk'
+  if platform() == 'mac':
+    print 'Clobber to get rid of evil libsqlite3.dylib (crbug.com/526208)'
+  if platform() == 'mac':
+    print 'Clobber to remove libsystem.dylib. See crbug.com/620075'
+  if platform() == 'mac':
+    print 'Clobber to get past mojo gen build error (crbug.com/679607)'
+  if platform() == 'win':
+    print 'Clobber Windows to fix strange PCH-not-rebuilt errors.'
+
+def main():
+  print_landmines()
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/get_syzygy_binaries.py
@@ -0,0 +1,523 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A utility script for downloading versioned Syzygy binaries."""
+
+import hashlib
+import errno
+import json
+import logging
+import optparse
+import os
+import re
+import shutil
+import stat
+import sys
+import subprocess
+import tempfile
+import time
+import zipfile
+
+
+_LOGGER = logging.getLogger(os.path.basename(__file__))
+
+# The relative path where official builds are archived in their GS bucket.
+_SYZYGY_ARCHIVE_PATH = ('/builds/official/%(revision)s')
+
+# A JSON file containing the state of the download directory. If this file and
+# directory state do not agree, then the binaries will be downloaded and
+# installed again.
+_STATE = '.state'
+
+# This matches an integer (an SVN revision number) or a SHA1 value (a GIT hash).
+# The archive exclusively uses lowercase GIT hashes.
+_REVISION_RE = re.compile('^(?:\d+|[a-f0-9]{40})$')
+
+# This matches an MD5 hash.
+_MD5_RE = re.compile('^[a-f0-9]{32}$')
+
+# List of resources to be downloaded and installed. These are tuples with the
+# following format:
+# (basename, logging name, relative installation path, extraction filter)
+_RESOURCES = [
+  ('benchmark.zip', 'benchmark', '', None),
+  ('binaries.zip', 'binaries', 'exe', None),
+  ('symbols.zip', 'symbols', 'exe',
+      lambda x: x.filename.endswith('.dll.pdb'))]
+
+
+# Name of the MS DIA dll that we need to copy to the binaries directory.
+_DIA_DLL_NAME = "msdia140.dll"
+
+
+def _LoadState(output_dir):
+  """Loads the contents of the state file for a given |output_dir|, returning
+  None if it doesn't exist.
+  """
+  path = os.path.join(output_dir, _STATE)
+  if not os.path.exists(path):
+    _LOGGER.debug('No state file found.')
+    return None
+  with open(path, 'rb') as f:
+    _LOGGER.debug('Reading state file: %s', path)
+    try:
+      return json.load(f)
+    except ValueError:
+      _LOGGER.debug('Invalid state file.')
+      return None
+
+
+def _SaveState(output_dir, state, dry_run=False):
+  """Saves the |state| dictionary to the given |output_dir| as a JSON file."""
+  path = os.path.join(output_dir, _STATE)
+  _LOGGER.debug('Writing state file: %s', path)
+  if dry_run:
+    return
+  with open(path, 'wb') as f:
+    f.write(json.dumps(state, sort_keys=True, indent=2))
+
+
+def _Md5(path):
+  """Returns the MD5 hash of the file at |path|, which must exist."""
+  return hashlib.md5(open(path, 'rb').read()).hexdigest()
+
+
+def _StateIsValid(state):
+  """Returns true if the given state structure is valid."""
+  if not isinstance(state, dict):
+    _LOGGER.debug('State must be a dict.')
+    return False
+  r = state.get('revision', None)
+  if not isinstance(r, basestring) or not _REVISION_RE.match(r):
+    _LOGGER.debug('State contains an invalid revision.')
+    return False
+  c = state.get('contents', None)
+  if not isinstance(c, dict):
+    _LOGGER.debug('State must contain a contents dict.')
+    return False
+  for (relpath, md5) in c.iteritems():
+    if not isinstance(relpath, basestring) or len(relpath) == 0:
+      _LOGGER.debug('State contents dict contains an invalid path.')
+      return False
+    if not isinstance(md5, basestring) or not _MD5_RE.match(md5):
+      _LOGGER.debug('State contents dict contains an invalid MD5 digest.')
+      return False
+  return True
+
+
+def _BuildActualState(stored, revision, output_dir):
+  """Builds the actual state using the provided |stored| state as a template.
+  Only examines files listed in the stored state, causing the script to ignore
+  files that have been added to the directories locally. |stored| must be a
+  valid state dictionary.
+  """
+  contents = {}
+  state = { 'revision': revision, 'contents': contents }
+  for relpath, md5 in stored['contents'].iteritems():
+    abspath = os.path.abspath(os.path.join(output_dir, relpath))
+    if os.path.isfile(abspath):
+      m = _Md5(abspath)
+      contents[relpath] = m
+
+  return state
+
+
+def _StatesAreConsistent(stored, actual):
+  """Validates whether two state dictionaries are consistent. Both must be valid
+  state dictionaries. Additional entries in |actual| are ignored.
+  """
+  if stored['revision'] != actual['revision']:
+    _LOGGER.debug('Mismatched revision number.')
+    return False
+  cont_stored = stored['contents']
+  cont_actual = actual['contents']
+  for relpath, md5 in cont_stored.iteritems():
+    if relpath not in cont_actual:
+      _LOGGER.debug('Missing content: %s', relpath)
+      return False
+    if md5 != cont_actual[relpath]:
+      _LOGGER.debug('Modified content: %s', relpath)
+      return False
+  return True
+
+
+def _GetCurrentState(revision, output_dir):
+  """Loads the current state and checks to see if it is consistent. Returns
+  a tuple (state, bool). The returned state will always be valid, even if an
+  invalid state is present on disk.
+  """
+  stored = _LoadState(output_dir)
+  if not _StateIsValid(stored):
+    _LOGGER.debug('State is invalid.')
+    # Return a valid but empty state.
+    return ({'revision': '0', 'contents': {}}, False)
+  actual = _BuildActualState(stored, revision, output_dir)
+  # If the script has been modified consider the state invalid.
+  path = os.path.join(output_dir, _STATE)
+  if os.path.getmtime(__file__) > os.path.getmtime(path):
+    return (stored, False)
+  # Otherwise, explicitly validate the state.
+  if not _StatesAreConsistent(stored, actual):
+    return (stored, False)
+  return (stored, True)
+
+
+def _DirIsEmpty(path):
+  """Returns true if the given directory is empty, false otherwise."""
+  for root, dirs, files in os.walk(path):
+    return not dirs and not files
+
+
+def _RmTreeHandleReadOnly(func, path, exc):
+  """An error handling function for use with shutil.rmtree. This will
+  detect failures to remove read-only files, and will change their properties
+  prior to removing them. This is necessary on Windows as os.remove will return
+  an access error for read-only files, and git repos contain read-only
+  pack/index files.
+  """
+  excvalue = exc[1]
+  if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES:
+    _LOGGER.debug('Removing read-only path: %s', path)
+    os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
+    func(path)
+  else:
+    raise
+
+
+def _RmTree(path):
+  """A wrapper of shutil.rmtree that handles read-only files."""
+  shutil.rmtree(path, ignore_errors=False, onerror=_RmTreeHandleReadOnly)
+
+
+def _CleanState(output_dir, state, dry_run=False):
+  """Cleans up files/directories in |output_dir| that are referenced by
+  the given |state|. Raises an error if there are local changes. Returns a
+  dictionary of files that were deleted.
+  """
+  _LOGGER.debug('Deleting files from previous installation.')
+  deleted = {}
+
+  # Generate a list of files to delete, relative to |output_dir|.
+  contents = state['contents']
+  files = sorted(contents.keys())
+
+  # Try to delete the files. Keep track of directories to delete as well.
+  dirs = {}
+  for relpath in files:
+    fullpath = os.path.join(output_dir, relpath)
+    fulldir = os.path.dirname(fullpath)
+    dirs[fulldir] = True
+    if os.path.exists(fullpath):
+      # If somehow the file has become a directory complain about it.
+      if os.path.isdir(fullpath):
+        raise Exception('Directory exists where file expected: %s' % fullpath)
+
+      # Double check that the file doesn't have local changes. If it does
+      # then refuse to delete it.
+      if relpath in contents:
+        stored_md5 = contents[relpath]
+        actual_md5 = _Md5(fullpath)
+        if actual_md5 != stored_md5:
+          raise Exception('File has local changes: %s' % fullpath)
+
+      # The file is unchanged so it can safely be deleted.
+      _LOGGER.debug('Deleting file "%s".', fullpath)
+      deleted[relpath] = True
+      if not dry_run:
+        os.unlink(fullpath)
+
+  # Sort directories from longest name to shortest. This lets us remove empty
+  # directories from the most nested paths first.
+  dirs = sorted(dirs.keys(), key=lambda x: len(x), reverse=True)
+  for p in dirs:
+    if os.path.exists(p) and _DirIsEmpty(p):
+      _LOGGER.debug('Deleting empty directory "%s".', p)
+      if not dry_run:
+        _RmTree(p)
+
+  return deleted
+
+
+def _FindGsUtil():
+  """Looks for depot_tools and returns the absolute path to gsutil.py."""
+  for path in os.environ['PATH'].split(os.pathsep):
+    path = os.path.abspath(path)
+    git_cl = os.path.join(path, 'git_cl.py')
+    gs_util = os.path.join(path, 'gsutil.py')
+    if os.path.exists(git_cl) and os.path.exists(gs_util):
+      return gs_util
+  return None
+
+
+def _GsUtil(*cmd):
+  """Runs the given command in gsutil with exponential backoff and retries."""
+  gs_util = _FindGsUtil()
+  cmd = [sys.executable, gs_util] + list(cmd)
+
+  retries = 3
+  timeout = 4  # Seconds.
+  while True:
+    _LOGGER.debug('Running %s', cmd)
+    prog = subprocess.Popen(cmd, shell=False)
+    prog.communicate()
+
+    # Stop retrying on success.
+    if prog.returncode == 0:
+      return
+
+    # Raise a permanent failure if retries have been exhausted.
+    if retries == 0:
+      raise RuntimeError('Command "%s" returned %d.' % (cmd, prog.returncode))
+
+    _LOGGER.debug('Sleeping %d seconds and trying again.', timeout)
+    time.sleep(timeout)
+    retries -= 1
+    timeout *= 2
+
+
+def _Download(resource):
+  """Downloads the given GS resource to a temporary file, returning its path."""
+  tmp = tempfile.mkstemp(suffix='syzygy_archive')
+  os.close(tmp[0])
+  url = 'gs://syzygy-archive' + resource
+  _GsUtil('cp', url, tmp[1])
+  return tmp[1]
+
+
+def _MaybeCopyDIABinaries(options, contents):
+  """Try to copy the DIA DLL to the binaries exe directory."""
+  toolchain_data_file = os.path.join(os.path.dirname(__file__),
+                                     'win_toolchain.json')
+  if not os.path.exists(toolchain_data_file):
+    _LOGGER.debug('Toolchain JSON data file doesn\'t exist, skipping.')
+    return
+  with open(toolchain_data_file) as temp_f:
+    toolchain_data = json.load(temp_f)
+  if not os.path.isdir(toolchain_data['path']):
+    _LOGGER.error('The toolchain JSON file is invalid.')
+    return
+  dia_sdk_binaries_dir = os.path.join(toolchain_data['path'], 'DIA SDK', 'bin')
+  dia_dll = os.path.join(dia_sdk_binaries_dir, _DIA_DLL_NAME)
+  if not os.path.exists(dia_dll):
+    _LOGGER.debug('%s is missing, skipping.')
+    return
+  dia_dll_dest = os.path.join(options.output_dir, 'exe', _DIA_DLL_NAME)
+  _LOGGER.debug('Copying %s to %s.' % (dia_dll, dia_dll_dest))
+  if not options.dry_run:
+    shutil.copy(dia_dll, dia_dll_dest)
+    contents[os.path.relpath(dia_dll_dest, options.output_dir)] = (
+        _Md5(dia_dll_dest))
+
+
+def _InstallBinaries(options, deleted={}):
+  """Installs Syzygy binaries. This assumes that the output directory has
+  already been cleaned, as it will refuse to overwrite existing files."""
+  contents = {}
+  state = { 'revision': options.revision, 'contents': contents }
+  archive_path = _SYZYGY_ARCHIVE_PATH % { 'revision': options.revision }
+  if options.resources:
+    resources = [(resource, resource, '', None)
+                 for resource in options.resources]
+  else:
+    resources = _RESOURCES
+  for (base, name, subdir, filt) in resources:
+    # Create the output directory if it doesn't exist.
+    fulldir = os.path.join(options.output_dir, subdir)
+    if os.path.isfile(fulldir):
+      raise Exception('File exists where a directory needs to be created: %s' %
+                      fulldir)
+    if not os.path.exists(fulldir):
+      _LOGGER.debug('Creating directory: %s', fulldir)
+      if not options.dry_run:
+        os.makedirs(fulldir)
+
+    # Download and read the archive.
+    resource = archive_path + '/' + base
+    _LOGGER.debug('Retrieving %s archive at "%s".', name, resource)
+    path = _Download(resource)
+
+    _LOGGER.debug('Unzipping %s archive.', name)
+    with open(path, 'rb') as data:
+      archive = zipfile.ZipFile(data)
+      for entry in archive.infolist():
+        if not filt or filt(entry):
+          fullpath = os.path.normpath(os.path.join(fulldir, entry.filename))
+          relpath = os.path.relpath(fullpath, options.output_dir)
+          if os.path.exists(fullpath):
+            # If in a dry-run take into account the fact that the file *would*
+            # have been deleted.
+            if options.dry_run and relpath in deleted:
+              pass
+            else:
+              raise Exception('Path already exists: %s' % fullpath)
+
+          # Extract the file and update the state dictionary.
+          _LOGGER.debug('Extracting "%s".', fullpath)
+          if not options.dry_run:
+            archive.extract(entry.filename, fulldir)
+            md5 = _Md5(fullpath)
+            contents[relpath] = md5
+            if sys.platform == 'cygwin':
+              os.chmod(fullpath, os.stat(fullpath).st_mode | stat.S_IXUSR)
+
+    _LOGGER.debug('Removing temporary file "%s".', path)
+    os.remove(path)
+
+  if options.copy_dia_binaries:
+    # Try to copy the DIA binaries to the binaries directory.
+    _MaybeCopyDIABinaries(options, contents)
+
+  return state
+
+
+def _ParseCommandLine():
+  """Parses the command-line and returns an options structure."""
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--dry-run', action='store_true', default=False,
+      help='If true then will simply list actions that would be performed.')
+  option_parser.add_option('--force', action='store_true', default=False,
+      help='Force an installation even if the binaries are up to date.')
+  option_parser.add_option('--no-cleanup', action='store_true', default=False,
+      help='Allow installation on non-Windows platforms, and skip the forced '
+           'cleanup step.')
+  option_parser.add_option('--output-dir', type='string',
+      help='The path where the binaries will be replaced. Existing binaries '
+           'will only be overwritten if not up to date.')
+  option_parser.add_option('--overwrite', action='store_true', default=False,
+      help='If specified then the installation will happily delete and rewrite '
+           'the entire output directory, blasting any local changes.')
+  option_parser.add_option('--revision', type='string',
+      help='The SVN revision or GIT hash associated with the required version.')
+  option_parser.add_option('--revision-file', type='string',
+      help='A text file containing an SVN revision or GIT hash.')
+  option_parser.add_option('--resource', type='string', action='append',
+      dest='resources', help='A resource to be downloaded.')
+  option_parser.add_option('--verbose', dest='log_level', action='store_const',
+      default=logging.INFO, const=logging.DEBUG,
+      help='Enables verbose logging.')
+  option_parser.add_option('--quiet', dest='log_level', action='store_const',
+      default=logging.INFO, const=logging.ERROR,
+      help='Disables all output except for errors.')
+  option_parser.add_option('--copy-dia-binaries', action='store_true',
+      default=False, help='If true then the DIA dll will get copied into the '
+                          'binaries directory if it\'s available.')
+  options, args = option_parser.parse_args()
+  if args:
+    option_parser.error('Unexpected arguments: %s' % args)
+  if not options.output_dir:
+    option_parser.error('Must specify --output-dir.')
+  if not options.revision and not options.revision_file:
+    option_parser.error('Must specify one of --revision or --revision-file.')
+  if options.revision and options.revision_file:
+    option_parser.error('Must not specify both --revision and --revision-file.')
+
+  # Configure logging.
+  logging.basicConfig(level=options.log_level)
+
+  # If a revision file has been specified then read it.
+  if options.revision_file:
+    options.revision = open(options.revision_file, 'rb').read().strip()
+    _LOGGER.debug('Parsed revision "%s" from file "%s".',
+                 options.revision, options.revision_file)
+
+  # Ensure that the specified SVN revision or GIT hash is valid.
+  if not _REVISION_RE.match(options.revision):
+    option_parser.error('Must specify a valid SVN or GIT revision.')
+
+  # This just makes output prettier to read.
+  options.output_dir = os.path.normpath(options.output_dir)
+
+  return options
+
+
+def _RemoveOrphanedFiles(options):
+  """This is run on non-Windows systems to remove orphaned files that may have
+  been downloaded by a previous version of this script.
+  """
+  # Reconfigure logging to output info messages. This will allow inspection of
+  # cleanup status on non-Windows buildbots.
+  _LOGGER.setLevel(logging.INFO)
+
+  output_dir = os.path.abspath(options.output_dir)
+
+  # We only want to clean up the folder in 'src/third_party/syzygy', and we
+  # expect to be called with that as an output directory. This is an attempt to
+  # not start deleting random things if the script is run from an alternate
+  # location, or not called from the gclient hooks.
+  expected_syzygy_dir = os.path.abspath(os.path.join(
+      os.path.dirname(__file__), '..', 'third_party', 'syzygy'))
+  expected_output_dir = os.path.join(expected_syzygy_dir, 'binaries')
+  if expected_output_dir != output_dir:
+    _LOGGER.info('Unexpected output directory, skipping cleanup.')
+    return
+
+  if not os.path.isdir(expected_syzygy_dir):
+    _LOGGER.info('Output directory does not exist, skipping cleanup.')
+    return
+
+  def OnError(function, path, excinfo):
+    """Logs error encountered by shutil.rmtree."""
+    _LOGGER.error('Error when running %s(%s)', function, path, exc_info=excinfo)
+
+  _LOGGER.info('Removing orphaned files from %s', expected_syzygy_dir)
+  if not options.dry_run:
+    shutil.rmtree(expected_syzygy_dir, True, OnError)
+
+
+def main():
+  options = _ParseCommandLine()
+
+  if options.dry_run:
+    _LOGGER.debug('Performing a dry-run.')
+
+  # We only care about Windows platforms, as the Syzygy binaries aren't used
+  # elsewhere. However, there was a short period of time where this script
+  # wasn't gated on OS types, and those OSes downloaded and installed binaries.
+  # This will cleanup orphaned files on those operating systems.
+  if sys.platform not in ('win32', 'cygwin'):
+    if options.no_cleanup:
+      _LOGGER.debug('Skipping usual cleanup for non-Windows platforms.')
+    else:
+      return _RemoveOrphanedFiles(options)
+
+  # Load the current installation state, and validate it against the
+  # requested installation.
+  state, is_consistent = _GetCurrentState(options.revision, options.output_dir)
+
+  # Decide whether or not an install is necessary.
+  if options.force:
+    _LOGGER.debug('Forcing reinstall of binaries.')
+  elif is_consistent:
+    # Avoid doing any work if the contents of the directory are consistent.
+    _LOGGER.debug('State unchanged, no reinstall necessary.')
+    return
+
+  # Under normal logging this is the only message that will be reported.
+  _LOGGER.info('Installing revision %s Syzygy binaries.',
+               options.revision[0:12])
+
+  # Clean up the old state to begin with.
+  deleted = []
+  if options.overwrite:
+    if os.path.exists(options.output_dir):
+      # If overwrite was specified then take a heavy-handed approach.
+      _LOGGER.debug('Deleting entire installation directory.')
+      if not options.dry_run:
+        _RmTree(options.output_dir)
+  else:
+    # Otherwise only delete things that the previous installation put in place,
+    # and take care to preserve any local changes.
+    deleted = _CleanState(options.output_dir, state, options.dry_run)
+
+  # Install the new binaries. In a dry-run this will actually download the
+  # archives, but it won't write anything to disk.
+  state = _InstallBinaries(options, deleted)
+
+  # Build and save the state for the directory.
+  _SaveState(options.output_dir, state, options.dry_run)
+
+
+if __name__ == '__main__':
+  main()
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/git-hooks/OWNERS
@@ -0,0 +1,3 @@
+set noparent
+szager@chromium.org
+cmp@chromium.org
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/git-hooks/pre-commit
@@ -0,0 +1,60 @@
+#!/bin/sh
+
+submodule_diff() {
+  if test -n "$2"; then
+    git diff-tree -r --ignore-submodules=dirty "$1" "$2" | grep -e '^:160000' -e '^:...... 160000' | xargs
+  else
+    git diff-index --cached --ignore-submodules=dirty "$1" | grep -e '^:160000' -e '^:...... 160000' | xargs
+  fi
+}
+
+if git rev-parse --verify --quiet --no-revs MERGE_HEAD; then
+  merge_base=$(git merge-base HEAD MERGE_HEAD)
+  if test -z "$(submodule_diff $merge_base HEAD)"; then
+    # Most up-to-date submodules are in MERGE_HEAD.
+    head_ref=MERGE_HEAD
+  else
+    # Most up-to-date submodules are in HEAD.
+    head_ref=HEAD
+  fi
+else
+  # No merge in progress. Submodules must match HEAD.
+  head_ref=HEAD
+fi
+
+submods=$(submodule_diff $head_ref)
+if test "$submods"; then
+  echo "You are trying to commit changes to the following submodules:" 1>&2
+  echo 1>&2
+  echo $submods | cut -d ' ' -f 6 | sed 's/^/  /g' 1>&2
+  cat <<EOF 1>&2
+
+Submodule commits are not allowed.  Please run:
+
+  git status --ignore-submodules=dirty
+
+and/or:
+
+  git diff-index --cached --ignore-submodules=dirty HEAD
+
+... to see what's in your index.
+
+If you're really and truly trying to roll the version of a submodule, you should
+commit the new version to DEPS, instead.
+EOF
+  exit 1
+fi
+
+gitmodules_diff() {
+  git diff-index --cached "$1" .gitmodules
+}
+
+if [ "$(git ls-files .gitmodules)" ] && [ "$(gitmodules_diff $head_ref)" ]; then
+  cat <<EOF 1>&2
+You are trying to commit a change to .gitmodules.  That is not allowed.
+To make changes to submodule names/paths, edit DEPS.
+EOF
+  exit 1
+fi
+
+exit 0
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/gn_helpers.py
@@ -0,0 +1,351 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions useful when writing scripts that integrate with GN.
+
+The main functions are ToGNString and FromGNString which convert between
+serialized GN variables and Python variables.
+
+To use in a random python file in the build:
+
+  import os
+  import sys
+
+  sys.path.append(os.path.join(os.path.dirname(__file__),
+                               os.pardir, os.pardir, "build"))
+  import gn_helpers
+
+Where the sequence of parameters to join is the relative path from your source
+file to the build directory."""
+
+class GNException(Exception):
+  pass
+
+
+def ToGNString(value, allow_dicts = True):
+  """Returns a stringified GN equivalent of the Python value.
+
+  allow_dicts indicates if this function will allow converting dictionaries
+  to GN scopes. This is only possible at the top level, you can't nest a
+  GN scope in a list, so this should be set to False for recursive calls."""
+  if isinstance(value, basestring):
+    if value.find('\n') >= 0:
+      raise GNException("Trying to print a string with a newline in it.")
+    return '"' + \
+        value.replace('\\', '\\\\').replace('"', '\\"').replace('$', '\\$') + \
+        '"'
+
+  if isinstance(value, unicode):
+    return ToGNString(value.encode('utf-8'))
+
+  if isinstance(value, bool):
+    if value:
+      return "true"
+    return "false"
+
+  if isinstance(value, list):
+    return '[ %s ]' % ', '.join(ToGNString(v) for v in value)
+
+  if isinstance(value, dict):
+    if not allow_dicts:
+      raise GNException("Attempting to recursively print a dictionary.")
+    result = ""
+    for key in sorted(value):
+      if not isinstance(key, basestring):
+        raise GNException("Dictionary key is not a string.")
+      result += "%s = %s\n" % (key, ToGNString(value[key], False))
+    return result
+
+  if isinstance(value, int):
+    return str(value)
+
+  raise GNException("Unsupported type when printing to GN.")
+
+
+def FromGNString(input_string):
+  """Converts the input string from a GN serialized value to Python values.
+
+  For details on supported types see GNValueParser.Parse() below.
+
+  If your GN script did:
+    something = [ "file1", "file2" ]
+    args = [ "--values=$something" ]
+  The command line would look something like:
+    --values="[ \"file1\", \"file2\" ]"
+  Which when interpreted as a command line gives the value:
+    [ "file1", "file2" ]
+
+  You can parse this into a Python list using GN rules with:
+    input_values = FromGNString(options.values)
+  Although the Python 'ast' module will parse many forms of such input, it
+  will not handle GN escaping properly, nor GN booleans. You should use this
+  function instead.
+
+
+  A NOTE ON STRING HANDLING:
+
+  If you just pass a string on the command line to your Python script, or use
+  string interpolation on a string variable, the strings will not be quoted:
+    str = "asdf"
+    args = [ str, "--value=$str" ]
+  Will yield the command line:
+    asdf --value=asdf
+  The unquoted asdf string will not be valid input to this function, which
+  accepts only quoted strings like GN scripts. In such cases, you can just use
+  the Python string literal directly.
+
+  The main use cases for this is for other types, in particular lists. When
+  using string interpolation on a list (as in the top example) the embedded
+  strings will be quoted and escaped according to GN rules so the list can be
+  re-parsed to get the same result."""
+  parser = GNValueParser(input_string)
+  return parser.Parse()
+
+
+def FromGNArgs(input_string):
+  """Converts a string with a bunch of gn arg assignments into a Python dict.
+
+  Given a whitespace-separated list of
+
+    <ident> = (integer | string | boolean | <list of the former>)
+
+  gn assignments, this returns a Python dict, i.e.:
+
+    FromGNArgs("foo=true\nbar=1\n") -> { 'foo': True, 'bar': 1 }.
+
+  Only simple types and lists supported; variables, structs, calls
+  and other, more complicated things are not.
+
+  This routine is meant to handle only the simple sorts of values that
+  arise in parsing --args.
+  """
+  parser = GNValueParser(input_string)
+  return parser.ParseArgs()
+
+
+def UnescapeGNString(value):
+  """Given a string with GN escaping, returns the unescaped string.
+
+  Be careful not to feed with input from a Python parsing function like
+  'ast' because it will do Python unescaping, which will be incorrect when
+  fed into the GN unescaper."""
+  result = ''
+  i = 0
+  while i < len(value):
+    if value[i] == '\\':
+      if i < len(value) - 1:
+        next_char = value[i + 1]
+        if next_char in ('$', '"', '\\'):
+          # These are the escaped characters GN supports.
+          result += next_char
+          i += 1
+        else:
+          # Any other backslash is a literal.
+          result += '\\'
+    else:
+      result += value[i]
+    i += 1
+  return result
+
+
+def _IsDigitOrMinus(char):
+  return char in "-0123456789"
+
+
+class GNValueParser(object):
+  """Duplicates GN parsing of values and converts to Python types.
+
+  Normally you would use the wrapper function FromGNString() above.
+
+  If you expect input as a specific type, you can also call one of the Parse*
+  functions directly. All functions throw GNException on invalid input. """
+  def __init__(self, string):
+    self.input = string
+    self.cur = 0
+
+  def IsDone(self):
+    return self.cur == len(self.input)
+
+  def ConsumeWhitespace(self):
+    while not self.IsDone() and self.input[self.cur] in ' \t\n':
+      self.cur += 1
+
+  def Parse(self):
+    """Converts a string representing a printed GN value to the Python type.
+
+    See additional usage notes on FromGNString above.
+
+    - GN booleans ('true', 'false') will be converted to Python booleans.
+
+    - GN numbers ('123') will be converted to Python numbers.
+
+    - GN strings (double-quoted as in '"asdf"') will be converted to Python
+      strings with GN escaping rules. GN string interpolation (embedded
+      variables preceded by $) are not supported and will be returned as
+      literals.
+
+    - GN lists ('[1, "asdf", 3]') will be converted to Python lists.
+
+    - GN scopes ('{ ... }') are not supported."""
+    result = self._ParseAllowTrailing()
+    self.ConsumeWhitespace()
+    if not self.IsDone():
+      raise GNException("Trailing input after parsing:\n  " +
+                        self.input[self.cur:])
+    return result
+
+  def ParseArgs(self):
+    """Converts a whitespace-separated list of ident=literals to a dict.
+
+    See additional usage notes on FromGNArgs, above.
+    """
+    d = {}
+
+    self.ConsumeWhitespace()
+    while not self.IsDone():
+      ident = self._ParseIdent()
+      self.ConsumeWhitespace()
+      if self.input[self.cur] != '=':
+        raise GNException("Unexpected token: " + self.input[self.cur:])
+      self.cur += 1
+      self.ConsumeWhitespace()
+      val = self._ParseAllowTrailing()
+      self.ConsumeWhitespace()
+      d[ident] = val
+
+    return d
+
+  def _ParseAllowTrailing(self):
+    """Internal version of Parse that doesn't check for trailing stuff."""
+    self.ConsumeWhitespace()
+    if self.IsDone():
+      raise GNException("Expected input to parse.")
+
+    next_char = self.input[self.cur]
+    if next_char == '[':
+      return self.ParseList()
+    elif _IsDigitOrMinus(next_char):
+      return self.ParseNumber()
+    elif next_char == '"':
+      return self.ParseString()
+    elif self._ConstantFollows('true'):
+      return True
+    elif self._ConstantFollows('false'):
+      return False
+    else:
+      raise GNException("Unexpected token: " + self.input[self.cur:])
+
+  def _ParseIdent(self):
+    ident = ''
+
+    next_char = self.input[self.cur]
+    if not next_char.isalpha() and not next_char=='_':
+      raise GNException("Expected an identifier: " + self.input[self.cur:])
+
+    ident += next_char
+    self.cur += 1
+
+    next_char = self.input[self.cur]
+    while next_char.isalpha() or next_char.isdigit() or next_char=='_':
+      ident += next_char
+      self.cur += 1
+      next_char = self.input[self.cur]
+
+    return ident
+
+  def ParseNumber(self):
+    self.ConsumeWhitespace()
+    if self.IsDone():
+      raise GNException('Expected number but got nothing.')
+
+    begin = self.cur
+
+    # The first character can include a negative sign.
+    if not self.IsDone() and _IsDigitOrMinus(self.input[self.cur]):
+      self.cur += 1
+    while not self.IsDone() and self.input[self.cur].isdigit():
+      self.cur += 1
+
+    number_string = self.input[begin:self.cur]
+    if not len(number_string) or number_string == '-':
+      raise GNException("Not a valid number.")
+    return int(number_string)
+
+  def ParseString(self):
+    self.ConsumeWhitespace()
+    if self.IsDone():
+      raise GNException('Expected string but got nothing.')
+
+    if self.input[self.cur] != '"':
+      raise GNException('Expected string beginning in a " but got:\n  ' +
+                        self.input[self.cur:])
+    self.cur += 1  # Skip over quote.
+
+    begin = self.cur
+    while not self.IsDone() and self.input[self.cur] != '"':
+      if self.input[self.cur] == '\\':
+        self.cur += 1  # Skip over the backslash.
+        if self.IsDone():
+          raise GNException("String ends in a backslash in:\n  " +
+                            self.input)
+      self.cur += 1
+
+    if self.IsDone():
+      raise GNException('Unterminated string:\n  ' + self.input[begin:])
+
+    end = self.cur
+    self.cur += 1  # Consume trailing ".
+
+    return UnescapeGNString(self.input[begin:end])
+
+  def ParseList(self):
+    self.ConsumeWhitespace()
+    if self.IsDone():
+      raise GNException('Expected list but got nothing.')
+
+    # Skip over opening '['.
+    if self.input[self.cur] != '[':
+      raise GNException("Expected [ for list but got:\n  " +
+                        self.input[self.cur:])
+    self.cur += 1
+    self.ConsumeWhitespace()
+    if self.IsDone():
+      raise GNException("Unterminated list:\n  " + self.input)
+
+    list_result = []
+    previous_had_trailing_comma = True
+    while not self.IsDone():
+      if self.input[self.cur] == ']':
+        self.cur += 1  # Skip over ']'.
+        return list_result
+
+      if not previous_had_trailing_comma:
+        raise GNException("List items not separated by comma.")
+
+      list_result += [ self._ParseAllowTrailing() ]
+      self.ConsumeWhitespace()
+      if self.IsDone():
+        break
+
+      # Consume comma if there is one.
+      previous_had_trailing_comma = self.input[self.cur] == ','
+      if previous_had_trailing_comma:
+        # Consume comma.
+        self.cur += 1
+        self.ConsumeWhitespace()
+
+    raise GNException("Unterminated list:\n  " + self.input)
+
+  def _ConstantFollows(self, constant):
+    """Returns true if the given constant follows immediately at the current
+    location in the input. If it does, the text is consumed and the function
+    returns true. Otherwise, returns false and the current position is
+    unchanged."""
+    end = self.cur + len(constant)
+    if end > len(self.input):
+      return False  # Not enough room.
+    if self.input[self.cur:end] == constant:
+      self.cur = end
+      return True
+    return False
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/gn_helpers_unittest.py
@@ -0,0 +1,117 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import gn_helpers
+import unittest
+
+class UnitTest(unittest.TestCase):
+  def test_ToGNString(self):
+    self.assertEqual(
+        gn_helpers.ToGNString([1, 'two', [ '"thr$\\', True, False, [] ]]),
+        '[ 1, "two", [ "\\"thr\\$\\\\", true, false, [  ] ] ]')
+
+  def test_UnescapeGNString(self):
+    # Backslash followed by a \, $, or " means the following character without
+    # the special meaning. Backslash followed by everything else is a literal.
+    self.assertEqual(
+        gn_helpers.UnescapeGNString('\\as\\$\\\\asd\\"'),
+        '\\as$\\asd"')
+
+  def test_FromGNString(self):
+    self.assertEqual(
+        gn_helpers.FromGNString('[1, -20, true, false,["as\\"", []]]'),
+        [ 1, -20, True, False, [ 'as"', [] ] ])
+
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('123 456')
+      parser.Parse()
+
+  def test_ParseBool(self):
+    parser = gn_helpers.GNValueParser('true')
+    self.assertEqual(parser.Parse(), True)
+
+    parser = gn_helpers.GNValueParser('false')
+    self.assertEqual(parser.Parse(), False)
+
+  def test_ParseNumber(self):
+    parser = gn_helpers.GNValueParser('123')
+    self.assertEqual(parser.ParseNumber(), 123)
+
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('')
+      parser.ParseNumber()
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('a123')
+      parser.ParseNumber()
+
+  def test_ParseString(self):
+    parser = gn_helpers.GNValueParser('"asdf"')
+    self.assertEqual(parser.ParseString(), 'asdf')
+
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('')  # Empty.
+      parser.ParseString()
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('asdf')  # Unquoted.
+      parser.ParseString()
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('"trailing')  # Unterminated.
+      parser.ParseString()
+
+  def test_ParseList(self):
+    parser = gn_helpers.GNValueParser('[1,]')  # Optional end comma OK.
+    self.assertEqual(parser.ParseList(), [ 1 ])
+
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('')  # Empty.
+      parser.ParseList()
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('asdf')  # No [].
+      parser.ParseList()
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('[1, 2')  # Unterminated
+      parser.ParseList()
+    with self.assertRaises(gn_helpers.GNException):
+      parser = gn_helpers.GNValueParser('[1 2]')  # No separating comma.
+      parser.ParseList()
+
+  def test_FromGNArgs(self):
+    # Booleans and numbers should work; whitespace is allowed.
+    self.assertEqual(gn_helpers.FromGNArgs('foo = true\nbar = 1\n'),
+                     {'foo': True, 'bar': 1})
+
+    # Whitespace is not required; strings should also work.
+    self.assertEqual(gn_helpers.FromGNArgs('foo="bar baz"'),
+                     {'foo': 'bar baz'})
+
+    # Lists should work.
+    self.assertEqual(gn_helpers.FromGNArgs('foo=[1, 2, 3]'),
+                     {'foo': [1, 2, 3]})
+
+    # Empty strings should return an empty dict.
+    self.assertEqual(gn_helpers.FromGNArgs(''), {})
+    self.assertEqual(gn_helpers.FromGNArgs(' \n '), {})
+
+    # Non-identifiers should raise an exception.
+    with self.assertRaises(gn_helpers.GNException):
+      gn_helpers.FromGNArgs('123 = true')
+
+    # References to other variables should raise an exception.
+    with self.assertRaises(gn_helpers.GNException):
+      gn_helpers.FromGNArgs('foo = bar')
+
+    # References to functions should raise an exception.
+    with self.assertRaises(gn_helpers.GNException):
+      gn_helpers.FromGNArgs('foo = exec_script("//build/baz.py")')
+
+    # Underscores in identifiers should work.
+    self.assertEqual(gn_helpers.FromGNArgs('_foo = true'),
+                     {'_foo': True})
+    self.assertEqual(gn_helpers.FromGNArgs('foo_bar = true'),
+                     {'foo_bar': True})
+    self.assertEqual(gn_helpers.FromGNArgs('foo_=true'),
+                     {'foo_': True})
+
+if __name__ == '__main__':
+  unittest.main()
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/gn_run_binary.py
@@ -0,0 +1,25 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script for GN to run an arbitrary binary. See compiled_action.gni.
+
+Run with:
+  python gn_run_binary.py <binary_name> [args ...]
+"""
+
+import subprocess
+import sys
+
+# This script is designed to run binaries produced by the current build. We
+# always prefix it with "./" to avoid picking up system versions that might
+# also be on the path.
+path = './' + sys.argv[1]
+
+# The rest of the arguments are passed directly to the executable.
+args = [path] + sys.argv[2:]
+
+ret = subprocess.call(args)
+if ret != 0:
+  print '%s failed with exit code %d' % (sys.argv[1], ret)
+sys.exit(ret)
--- a/media/webrtc/trunk/build/gyp_chromium
+++ b/media/webrtc/trunk/build/gyp_chromium
@@ -1,175 +1,12 @@
 #!/usr/bin/env python
-
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# This script is wrapper for Chromium that adds some support for how GYP
-# is invoked by Chromium beyond what can be done in the gclient hooks.
-
-import glob
-import os
-import shlex
-import subprocess
-import sys
-
-script_dir = os.path.dirname(os.path.realpath(__file__))
-chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
-
-sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
-import gyp
-
-# Add paths so that pymod_do_main(...) can import files.
-sys.path.insert(1, os.path.join(chrome_src, 'tools', 'grit'))
-sys.path.insert(1, os.path.join(chrome_src, 'chrome', 'tools', 'build'))
-sys.path.insert(1, os.path.join(chrome_src, 'native_client', 'build'))
-
-
-# On Windows, Psyco shortens warm runs of build/gyp_chromium by about
-# 20 seconds on a z600 machine with 12 GB of RAM, from 90 down to 70
-# seconds.  Conversely, memory usage of build/gyp_chromium with Psyco
-# maxes out at about 158 MB vs. 132 MB without it.
-#
-# Psyco uses native libraries, so we need to load a different
-# installation depending on which OS we are running under. It has not
-# been tested whether using Psyco on our Mac and Linux builds is worth
-# it (the GYP running time is a lot shorter, so the JIT startup cost
-# may not be worth it).
-if sys.platform == 'win32':
-  try:
-    sys.path.insert(0, os.path.join(chrome_src, 'third_party', 'psyco_win32'))
-    import psyco
-  except:
-    psyco = None
-else:
-  psyco = None
-
-def apply_gyp_environment(file_path=None):
-  """
-  Reads in a *.gyp_env file and applies the valid keys to os.environ.
-  """
-  if not file_path or not os.path.exists(file_path):
-    return
-  file_contents = open(file_path).read()
-  try:
-    file_data = eval(file_contents, {'__builtins__': None}, None)
-  except SyntaxError, e:
-    e.filename = os.path.abspath(file_path)
-    raise
-  supported_vars = ( 'CC',
-                     'CHROMIUM_GYP_FILE',
-                     'CHROMIUM_GYP_SYNTAX_CHECK',
-                     'CXX',
-                     'GYP_DEFINES',
-                     'GYP_GENERATOR_FLAGS',
-                     'GYP_GENERATOR_OUTPUT',
-                     'GYP_GENERATORS', )
-  for var in supported_vars:
-    val = file_data.get(var)
-    if val:
-      if var in os.environ:
-        print 'INFO: Environment value for "%s" overrides value in %s.' % (
-            var, os.path.abspath(file_path)
-        )
-      else:
-        os.environ[var] = val
-
-def additional_include_files(args=[]):
-  """
-  Returns a list of additional (.gypi) files to include, without
-  duplicating ones that are already specified on the command line.
-  """
-  # Determine the include files specified on the command line.
-  # This doesn't cover all the different option formats you can use,
-  # but it's mainly intended to avoid duplicating flags on the automatic
-  # makefile regeneration which only uses this format.
-  specified_includes = set()
-  for arg in args:
-    if arg.startswith('-I') and len(arg) > 2:
-      specified_includes.add(os.path.realpath(arg[2:]))
+# Simple launcher script for gyp_chromium.py.
+# TODO(sbc): This should probably be shell script but for historical
+# reasons (all the python code used to live in this script without a
+# .py extension, and was often run as 'python gyp_chromium') it is
+# currently still python.
 
-  result = []
-  def AddInclude(path):
-    if os.path.realpath(path) not in specified_includes:
-      result.append(path)
-
-  # Always include common.gypi.
-  AddInclude(os.path.join(script_dir, 'common.gypi'))
-
-  # Optionally add supplemental .gypi files if present.
-  supplements = glob.glob(os.path.join(chrome_src, '*', 'supplement.gypi'))
-  for supplement in supplements:
-    AddInclude(supplement)
-
-  return result
-
-if __name__ == '__main__':
-  args = sys.argv[1:]
-
-  # Use the Psyco JIT if available.
-  if psyco:
-    psyco.profile()
-    print "Enabled Psyco JIT."
-
-  # Fall back on hermetic python if we happen to get run under cygwin.
-  # TODO(bradnelson): take this out once this issue is fixed:
-  #    http://code.google.com/p/gyp/issues/detail?id=177
-  if sys.platform == 'cygwin':
-    python_dir = os.path.join(chrome_src, 'third_party', 'python_26')
-    env = os.environ.copy()
-    env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
-    p = subprocess.Popen(
-       [os.path.join(python_dir, 'python.exe')] + sys.argv,
-       env=env, shell=False)
-    p.communicate()
-    sys.exit(p.returncode)
-
-  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
-    # Update the environment based on chromium.gyp_env
-    gyp_env_path = os.path.join(os.path.dirname(chrome_src), 'chromium.gyp_env')
-    apply_gyp_environment(gyp_env_path)
-
-  # This could give false positives since it doesn't actually do real option
-  # parsing.  Oh well.
-  gyp_file_specified = False
-  for arg in args:
-    if arg.endswith('.gyp'):
-      gyp_file_specified = True
-      break
-
-  # If we didn't get a file, check an env var, and then fall back to
-  # assuming 'all.gyp' from the same directory as the script.
-  if not gyp_file_specified:
-    gyp_file = os.environ.get('CHROMIUM_GYP_FILE')
-    if gyp_file:
-      # Note that CHROMIUM_GYP_FILE values can't have backslashes as
-      # path separators even on Windows due to the use of shlex.split().
-      args.extend(shlex.split(gyp_file))
-    else:
-      args.append(os.path.join(script_dir, 'all.gyp'))
-
-  args.extend(['-I' + i for i in additional_include_files(args)])
-
-  # There shouldn't be a circular dependency relationship between .gyp files,
-  # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
-  # currently exist.  The check for circular dependencies is currently
-  # bypassed on other platforms, but is left enabled on the Mac, where a
-  # violation of the rule causes Xcode to misbehave badly.
-  # TODO(mark): Find and kill remaining circular dependencies, and remove this
-  # option.  http://crbug.com/35878.
-  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
-  # list.
-  if sys.platform not in ('darwin',):
-    args.append('--no-circular-check')
-
-  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
-  # to enfore syntax checking.
-  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
-  if syntax_check and int(syntax_check):
-    args.append('--check')
-
-  print 'Updating projects from gyp files...'
-  sys.stdout.flush()
-
-  # Off we go...
-  sys.exit(gyp.main(args))
+execfile(__file__ + '.py')
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/gyp_chromium.py
@@ -0,0 +1,68 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script is now only used by the closure_compilation builders."""
+
+import argparse
+import glob
+import gyp_environment
+import os
+import shlex
+import sys
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+
+sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+import gyp
+
+
+def ProcessGypDefinesItems(items):
+  """Converts a list of strings to a list of key-value pairs."""
+  result = []
+  for item in items:
+    tokens = item.split('=', 1)
+    # Some GYP variables have hyphens, which we don't support.
+    if len(tokens) == 2:
+      result += [(tokens[0], tokens[1])]
+    else:
+      # No value supplied, treat it as a boolean and set it. Note that we
+      # use the string '1' here so we have a consistent definition whether
+      # you do 'foo=1' or 'foo'.
+      result += [(tokens[0], '1')]
+  return result
+
+
+def GetSupplementalFiles():
+  return []
+
+
+def GetGypVars(_):
+  """Returns a dictionary of all GYP vars."""
+  # GYP defines from the environment.
+  env_items = ProcessGypDefinesItems(
+      shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+  # GYP defines from the command line.
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-D', dest='defines', action='append', default=[])
+  cmdline_input_items = parser.parse_known_args()[0].defines
+  cmdline_items = ProcessGypDefinesItems(cmdline_input_items)
+
+  return dict(env_items + cmdline_items)
+
+
+def main():
+  gyp_environment.SetEnvironment()
+
+  print 'Updating projects from gyp files...'
+  sys.stdout.flush()
+  sys.exit(gyp.main(sys.argv[1:] + [
+      '--check',
+      '--no-circular-check',
+      '-I', os.path.join(script_dir, 'common.gypi'),
+      '-D', 'gyp_output_dir=out']))
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/gyp_environment.py
@@ -0,0 +1,30 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Sets up various automatic gyp environment variables. These are used by
+gyp_chromium and landmines.py which run at different stages of runhooks. To
+make sure settings are consistent between them, all setup should happen here.
+"""
+
+import gyp_helper
+import os
+import sys
+import vs_toolchain
+
+def SetEnvironment():
+  """Sets defaults for GYP_* variables."""
+  gyp_helper.apply_chromium_gyp_env()
+
+  # Default to ninja on linux and windows, but only if no generator has
+  # explicitly been set.
+  # Also default to ninja on mac, but only when not building chrome/ios.
+  # . -f / --format has precedence over the env var, no need to check for it
+  # . set the env var only if it hasn't been set yet
+  # . chromium.gyp_env has been applied to os.environ at this point already
+  if sys.platform.startswith(('linux', 'win', 'freebsd', 'darwin')) and \
+      not os.environ.get('GYP_GENERATORS'):
+    os.environ['GYP_GENERATORS'] = 'ninja'
+
+  vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/gyp_helper.py
@@ -0,0 +1,68 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file helps gyp_chromium and landmines correctly set up the gyp
+# environment from chromium.gyp_env on disk
+
+import os
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.dirname(SCRIPT_DIR)
+
+
+def apply_gyp_environment_from_file(file_path):
+  """Reads in a *.gyp_env file and applies the valid keys to os.environ."""
+  if not os.path.exists(file_path):
+    return
+  with open(file_path, 'rU') as f:
+    file_contents = f.read()
+  try:
+    file_data = eval(file_contents, {'__builtins__': None}, None)
+  except SyntaxError, e:
+    e.filename = os.path.abspath(file_path)
+    raise
+  supported_vars = (
+      'CC',
+      'CC_wrapper',
+      'CC.host_wrapper',
+      'CHROMIUM_GYP_FILE',
+      'CHROMIUM_GYP_SYNTAX_CHECK',
+      'CXX',
+      'CXX_wrapper',
+      'CXX.host_wrapper',
+      'GYP_DEFINES',
+      'GYP_GENERATOR_FLAGS',
+      'GYP_CROSSCOMPILE',
+      'GYP_GENERATOR_OUTPUT',
+      'GYP_GENERATORS',
+      'GYP_INCLUDE_FIRST',
+      'GYP_INCLUDE_LAST',
+      'GYP_MSVS_VERSION',
+  )
+  for var in supported_vars:
+    file_val = file_data.get(var)
+    if file_val:
+      if var in os.environ:
+        behavior = 'replaces'
+        if var == 'GYP_DEFINES':
+          result = file_val + ' ' + os.environ[var]
+          behavior = 'merges with, and individual components override,'
+        else:
+          result = os.environ[var]
+        print 'INFO: Environment value for "%s" %s value in %s' % (
+            var, behavior, os.path.abspath(file_path)
+        )
+        string_padding = max(len(var), len(file_path), len('result'))
+        print '      %s: %s' % (var.rjust(string_padding), os.environ[var])
+        print '      %s: %s' % (file_path.rjust(string_padding), file_val)
+        os.environ[var] = result
+      else:
+        os.environ[var] = file_val
+
+
+def apply_chromium_gyp_env():
+  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
+    # Update the environment based on chromium.gyp_env
+    path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
+    apply_gyp_environment_from_file(path)
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/gypi_to_gn.py
@@ -0,0 +1,191 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Converts a given gypi file to a python scope and writes the result to stdout.
+
+USING THIS SCRIPT IN CHROMIUM
+
+Forking Python to run this script in the middle of GN is slow, especially on
+Windows, and it makes both the GYP and GN files harder to follow. You can't
+use "git grep" to find files in the GN build any more, and tracking everything
+in GYP down requires a level of indirection. Any calls will have to be removed
+and cleaned up once the GYP-to-GN transition is complete.
+
+As a result, we only use this script when the list of files is large and
+frequently-changing. In these cases, having one canonical list outweighs the
+downsides.
+
+As of this writing, the GN build is basically complete. It's likely that all
+large and frequently changing targets where this is appropriate use this
+mechanism already. And since we hope to turn down the GYP build soon, the time
+horizon is also relatively short. As a result, it is likely that no additional
+uses of this script should ever be added to the build. During this later part
+of the transition period, we should be focusing more and more on the absolute
+readability of the GN build.
+
+
+HOW TO USE
+
+It is assumed that the file contains a toplevel dictionary, and this script
+will return that dictionary as a GN "scope" (see example below). This script
+does not know anything about GYP and it will not expand variables or execute
+conditions.
+
+It will strip conditions blocks.
+
+A variables block at the top level will be flattened so that the variables
+appear in the root dictionary. This way they can be returned to the GN code.
+
+Say your_file.gypi looked like this:
+  {
+     'sources': [ 'a.cc', 'b.cc' ],
+     'defines': [ 'ENABLE_DOOM_MELON' ],
+  }
+
+You would call it like this:
+  gypi_values = exec_script("//build/gypi_to_gn.py",
+                            [ rebase_path("your_file.gypi") ],
+                            "scope",
+                            [ "your_file.gypi" ])
+
+Notes:
+ - The rebase_path call converts the gypi file from being relative to the
+   current build file to being system absolute for calling the script, which
+   will have a different current directory than this file.
+
+ - The "scope" parameter tells GN to interpret the result as a series of GN
+   variable assignments.
+
+ - The last file argument to exec_script tells GN that the given file is a
+   dependency of the build so Ninja can automatically re-run GN if the file
+   changes.
+
+Read the values into a target like this:
+  component("mycomponent") {
+    sources = gypi_values.sources
+    defines = gypi_values.defines
+  }
+
+Sometimes your .gypi file will include paths relative to a different
+directory than the current .gn file. In this case, you can rebase them to
+be relative to the current directory.
+  sources = rebase_path(gypi_values.sources, ".",
+                        "//path/gypi/input/values/are/relative/to")
+
+This script will tolerate a 'variables' in the toplevel dictionary or not. If
+the toplevel dictionary just contains one item called 'variables', it will be
+collapsed away and the result will be the contents of that dictionary. Some
+.gypi files are written with or without this, depending on how they expect to
+be embedded into a .gyp file.
+
+This script also has the ability to replace certain substrings in the input.
+Generally this is used to emulate GYP variable expansion. If you passed the
+argument "--replace=<(foo)=bar" then all instances of "<(foo)" in strings in
+the input will be replaced with "bar":
+
+  gypi_values = exec_script("//build/gypi_to_gn.py",
+                            [ rebase_path("your_file.gypi"),
+                              "--replace=<(foo)=bar"],
+                            "scope",
+                            [ "your_file.gypi" ])
+
+"""
+
+import gn_helpers
+from optparse import OptionParser
+import sys
+
+def LoadPythonDictionary(path):
+  file_string = open(path).read()
+  try:
+    file_data = eval(file_string, {'__builtins__': None}, None)
+  except SyntaxError, e:
+    e.filename = path
+    raise
+  except Exception, e:
+    raise Exception("Unexpected error while reading %s: %s" % (path, str(e)))
+
+  assert isinstance(file_data, dict), "%s does not eval to a dictionary" % path
+
+  # Flatten any variables to the top level.
+  if 'variables' in file_data:
+    file_data.update(file_data['variables'])
+    del file_data['variables']
+
+  # Strip all elements that this script can't process.
+  elements_to_strip = [
+    'conditions',
+    'target_conditions',
+    'targets',
+    'includes',
+    'actions',
+  ]
+  for element in elements_to_strip:
+    if element in file_data:
+      del file_data[element]
+
+  return file_data
+
+
+def ReplaceSubstrings(values, search_for, replace_with):
+  """Recursively replaces substrings in a value.
+
+  Replaces all substrings of the "search_for" with "replace_with" for all
+  strings occurring in "values". This is done by recursively iterating into
+  lists as well as the keys and values of dictionaries."""
+  if isinstance(values, str):
+    return values.replace(search_for, replace_with)
+
+  if isinstance(values, list):
+    return [ReplaceSubstrings(v, search_for, replace_with) for v in values]
+
+  if isinstance(values, dict):
+    # For dictionaries, do the search for both the key and values.
+    result = {}
+    for key, value in values.items():
+      new_key = ReplaceSubstrings(key, search_for, replace_with)
+      new_value = ReplaceSubstrings(value, search_for, replace_with)
+      result[new_key] = new_value
+    return result
+
+  # Assume everything else is unchanged.
+  return values
+
+def main():
+  parser = OptionParser()
+  parser.add_option("-r", "--replace", action="append",
+    help="Replaces substrings. If passed a=b, replaces all substrs a with b.")
+  (options, args) = parser.parse_args()
+
+  if len(args) != 1:
+    raise Exception("Need one argument which is the .gypi file to read.")
+
+  data = LoadPythonDictionary(args[0])
+  if options.replace:
+    # Do replacements for all specified patterns.
+    for replace in options.replace:
+      split = replace.split('=')
+      # Allow "foo=" to replace with nothing.
+      if len(split) == 1:
+        split.append('')
+      assert len(split) == 2, "Replacement must be of the form 'key=value'."
+      data = ReplaceSubstrings(data, split[0], split[1])
+
+  # Sometimes .gypi files use the GYP syntax with percents at the end of the
+  # variable name (to indicate not to overwrite a previously-defined value):
+  #   'foo%': 'bar',
+  # Convert these to regular variables.
+  for key in data:
+    if len(key) > 1 and key[len(key) - 1] == '%':
+      data[key[:-1]] = data[key]
+      del data[key]
+
+  print gn_helpers.ToGNString(data)
+
+if __name__ == '__main__':
+  try:
+    main()
+  except Exception, e:
+    print str(e)
+    sys.exit(1)
--- a/media/webrtc/trunk/build/install-build-deps-android.sh
+++ b/media/webrtc/trunk/build/install-build-deps-android.sh
@@ -1,101 +1,78 @@
-#!/bin/bash -e
+#!/bin/bash
 
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-# Script to install everything needed to build chromium on android that
-# requires sudo privileges.
-# See http://code.google.com/p/chromium/wiki/AndroidBuildInstructions
+# Script to install everything needed to build chromium on android, including
+# items requiring sudo privileges.
+# See https://www.chromium.org/developers/how-tos/android-build-instructions
 
-# This script installs the sun-java6 packages (bin, jre and jdk). Sun requires
-# a license agreement, so upon installation it will prompt the user. To get
-# past the curses-based dialog press TAB <ret> TAB <ret> to agree.
+args="$@"
 
 if ! uname -m | egrep -q "i686|x86_64"; then
   echo "Only x86 architectures are currently supported" >&2
   exit
 fi
 
-if [ "x$(id -u)" != x0 ]; then
-  echo "Running as non-root user."
-  echo "You might have to enter your password one or more times for 'sudo'."
-  echo
-fi
+# Exit if any commands fail.
+set -e
 
-# The temporary directory used to store output of update-java-alternatives
-TEMPDIR=$(mktemp -d)
-cleanup() {
-  local status=${?}
-  trap - EXIT
-  rm -rf "${TEMPDIR}"
-  exit ${status}
-}
-trap cleanup EXIT
+lsb_release=$(lsb_release --codename --short)
 
-sudo apt-get update
+# Install first the default Linux build deps.
+"$(dirname "${BASH_SOURCE[0]}")/install-build-deps.sh" \
+  --no-syms --lib32 --no-arm --no-chromeos-fonts --no-nacl --no-prompt "${args}"
 
 # Fix deps
 sudo apt-get -f install
 
-# Install deps
-# This step differs depending on what Ubuntu release we are running
-# on since the package names are different, and Sun's Java must
-# be installed manually on late-model versions.
+# common
+sudo apt-get -y install lib32z1 lighttpd python-pexpect xvfb x11-utils
 
-# common
-sudo apt-get -y install python-pexpect xvfb x11-utils
+# Some binaries in the Android SDK require 32-bit libraries on the host.
+# See https://developer.android.com/sdk/installing/index.html?pkg=tools
+sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386
 
-if /usr/bin/lsb_release -r -s | grep -q "12."; then
-  # Ubuntu 12.x
-  sudo apt-get -y install ant
+# Required by //components/cronet/tools/generate_javadoc.py
+# TODO(375324): Stop requiring ANT.
+sudo apt-get -y install ant
 
-  # Java can not be installed via ppa on Ubuntu 12.04+ so we'll
-  # simply check to see if it has been setup properly -- if not
-  # let the user know.
+# Required for apk-patch-size-estimator
+sudo apt-get -y install bsdiff
 
-  if ! java -version 2>&1 | grep -q "Java(TM)"; then
-    echo "****************************************************************"
-    echo "You need to install the Oracle Java SDK from http://goo.gl/uPRSq"
-    echo "and configure it as the default command-line Java environment."
-    echo "****************************************************************"
-    exit
-  fi
+# Do our own error handling for java.
+set +e
 
-else
-  # Ubuntu 10.x
-
-  sudo apt-get -y install ant1.8
+function IsJava8() {
+  # Arg is either "java" or "javac"
+  $1 -version 2>&1 | grep -q '1\.8'
+}
 
-  # Install sun-java6 stuff
-  sudo apt-get -y install sun-java6-bin sun-java6-jre sun-java6-jdk
+if ! (IsJava8 java && IsJava8 javac); then
+  sudo apt-get -y install openjdk-8-jre openjdk-8-jdk
+fi
 
-  # Switch version of Java to java-6-sun
-  # Sun's java is missing certain Java plugins (e.g. for firefox, mozilla).
-  # These are not required to build, and thus are treated only as warnings.
-  # Any errors in updating java alternatives which are not '*-javaplugin.so'
-  # will cause errors and stop the script from completing successfully.
-  if ! sudo update-java-alternatives -s java-6-sun \
-            >& "${TEMPDIR}"/update-java-alternatives.out
-  then
-    # Check that there are the expected javaplugin.so errors for the update
-    if grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out >& \
-           /dev/null
-    then
-      # Print as warnings all the javaplugin.so errors
-      echo 'WARNING: java-6-sun has no alternatives for the following plugins:'
-      grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
-    fi
-    # Check if there are any errors that are not javaplugin.so
-    if grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out \
-           >& /dev/null
-    then
-      # If there are non-javaplugin.so errors, treat as errors and exit
-      echo 'ERRORS: Failed to update alternatives for java-6-sun:'
-      grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
-      exit 1
-    fi
-  fi
+# There can be several reasons why java8 is not default despite being installed.
+# Just show an error and exit.
+if ! (IsJava8 java && IsJava8 javac); then
+  echo
+  echo "Automatic java installation failed."
+  echo '`java -version` reports:'
+  java -version
+  echo
+  echo '`javac -version` reports:'
+  javac -version
+  echo
+  echo "Please ensure that JDK 8 is installed and resolves first in your PATH."
+  echo -n '`which java` reports: '
+  which java
+  echo -n '`which javac` reports: '
+  which javac
+  echo
+  echo "You might also try running:"
+  echo "    sudo update-java-alternatives -s java-1.8.0-openjdk-amd64"
+  exit 1
 fi
 
 echo "install-build-deps-android.sh complete."
--- a/media/webrtc/trunk/build/install-build-deps.sh
+++ b/media/webrtc/trunk/build/install-build-deps.sh
@@ -1,144 +1,45 @@
 #!/bin/bash -e
 
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 # Script to install everything needed to build chromium (well, ideally, anyway)
-# See http://code.google.com/p/chromium/wiki/LinuxBuildInstructions
-# and http://code.google.com/p/chromium/wiki/LinuxBuild64Bit
+# See https://chromium.googlesource.com/chromium/src/+/master/docs/linux_build_instructions.md
 
 usage() {
   echo "Usage: $0 [--options]"
   echo "Options:"
   echo "--[no-]syms: enable or disable installation of debugging symbols"
-  echo "--[no-]lib32: enable or disable installation of 32 bit libraries"
+  echo "--lib32: enable installation of 32-bit libraries, e.g. for V8 snapshot"
+  echo "--[no-]arm: enable or disable installation of arm cross toolchain"
+  echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\
+       "fonts"
+  echo "--[no-]nacl: enable or disable installation of prerequisites for"\
+       "building standalone NaCl and all its toolchains"
   echo "--no-prompt: silently select standard options/defaults"
+  echo "--quick-check: quickly try to determine if dependencies are installed"
+  echo "               (this avoids interactive prompts and sudo commands,"
+  echo "               so might not be 100% accurate)"
+  echo "--unsupported: attempt installation even on unsupported systems"
   echo "Script will prompt interactively if options not given."
   exit 1
 }
 
-while test "$1" != ""
-do
-  case "$1" in
-  --syms)                   do_inst_syms=1;;
-  --no-syms)                do_inst_syms=0;;
-  --lib32)                  do_inst_lib32=1;;
-  --no-lib32)               do_inst_lib32=0;;
-  --no-prompt)              do_default=1
-                            do_quietly="-qq --assume-yes"
-    ;;
-  *) usage;;
-  esac
-  shift
-done
-
-if ! egrep -q \
-    'Ubuntu (10\.04|10\.10|11\.04|11\.10|12\.04|lucid|maverick|natty|oneiric|precise)' \
-    /etc/issue; then
-  echo "Only Ubuntu 10.04 (lucid) through 12.04 (precise) are currently" \
-      "supported" >&2
-  exit 1
-fi
-
-if ! uname -m | egrep -q "i686|x86_64"; then
-  echo "Only x86 architectures are currently supported" >&2
-  exit
-fi
-
-if [ "x$(id -u)" != x0 ]; then
-  echo "Running as non-root user."
-  echo "You might have to enter your password one or more times for 'sudo'."
-  echo
-fi
-
-# Packages needed for chromeos only
-chromeos_dev_list="libbluetooth-dev libpulse-dev"
-
-# Packages need for development
-dev_list="apache2.2-bin bison curl elfutils fakeroot flex g++ gperf
-          language-pack-fr libapache2-mod-php5 libasound2-dev libbz2-dev
-          libcairo2-dev libcups2-dev libcurl4-gnutls-dev libdbus-glib-1-dev
-          libelf-dev libgconf2-dev libgl1-mesa-dev libglib2.0-dev
-          libglu1-mesa-dev libgnome-keyring-dev libgtk2.0-dev
-          libkrb5-dev libnspr4-dev libnss3-dev libpam0g-dev libsctp-dev
-          libsqlite3-dev libssl-dev libudev-dev libwww-perl libxslt1-dev
-          libxss-dev libxt-dev libxtst-dev mesa-common-dev patch
-          perl php5-cgi pkg-config python python-cherrypy3 python-dev
-          python-psutil rpm ruby subversion ttf-dejavu-core ttf-indic-fonts
-          ttf-kochi-gothic ttf-kochi-mincho ttf-thai-tlwg wdiff git-core
-          $chromeos_dev_list"
-
-# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
-# NaCl binaries. These are always needed, regardless of whether or not we want
-# the full 32-bit "cross-compile" support (--lib32).
-if [ "$(uname -m)" = "x86_64" ]; then
-  dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6"
-fi
-
-# Run-time libraries required by chromeos only
-chromeos_lib_list="libpulse0 libbz2-1.0 libcurl4-gnutls-dev"
-
-# Full list of required run-time libraries
-lib_list="libatk1.0-0 libc6 libasound2 libcairo2 libcups2 libdbus-glib-1-2
-          libexpat1 libfontconfig1 libfreetype6 libglib2.0-0 libgnome-keyring0
-          libgtk2.0-0 libpam0g libpango1.0-0 libpcre3 libpixman-1-0
-          libpng12-0 libstdc++6 libsqlite3-0 libudev0 libx11-6 libxau6 libxcb1
-          libxcomposite1 libxcursor1 libxdamage1 libxdmcp6 libxext6 libxfixes3
-          libxi6 libxinerama1 libxrandr2 libxrender1 libxtst6 zlib1g
-          $chromeos_lib_list"
-
-# Debugging symbols for all of the run-time libraries
-dbg_list="libatk1.0-dbg libc6-dbg libcairo2-dbg libdbus-glib-1-2-dbg
-          libfontconfig1-dbg libglib2.0-0-dbg libgtk2.0-0-dbg
-          libpango1.0-0-dbg libpcre3-dbg libpixman-1-0-dbg
-          libsqlite3-0-dbg
-          libx11-6-dbg libxau6-dbg libxcb1-dbg libxcomposite1-dbg
-          libxcursor1-dbg libxdamage1-dbg libxdmcp6-dbg libxext6-dbg
-          libxfixes3-dbg libxi6-dbg libxinerama1-dbg libxrandr2-dbg
-          libxrender1-dbg libxtst6-dbg zlib1g-dbg"
-
-# Plugin lists needed for tests.
-plugin_list="flashplugin-installer"
-
-# Some package names have changed over time
-if apt-cache show ttf-mscorefonts-installer >/dev/null 2>&1; then
-  dev_list="${dev_list} ttf-mscorefonts-installer"
-else
-  dev_list="${dev_list} msttcorefonts"
-fi
-if apt-cache show libnspr4-dbg >/dev/null 2>&1; then
-  dbg_list="${dbg_list} libnspr4-dbg libnss3-dbg"
-  lib_list="${lib_list} libnspr4 libnss3"
-else
-  dbg_list="${dbg_list} libnspr4-0d-dbg libnss3-1d-dbg"
-  lib_list="${lib_list} libnspr4-0d libnss3-1d"
-fi
-if apt-cache show libjpeg-dev >/dev/null 2>&1; then
- dev_list="${dev_list} libjpeg-dev"
-else
- dev_list="${dev_list} libjpeg62-dev"
-fi
-
-# Some packages are only needed, if the distribution actually supports
-# installing them.
-if apt-cache show appmenu-gtk >/dev/null 2>&1; then
-  lib_list="$lib_list appmenu-gtk"
-fi
-
 # Waits for the user to press 'Y' or 'N'. Either uppercase of lowercase is
 # accepted. Returns 0 for 'Y' and 1 for 'N'. If an optional parameter has
 # been provided to yes_no(), the function also accepts RETURN as a user input.
 # The parameter specifies the exit code that should be returned in that case.
 # The function will echo the user's selection followed by a newline character.
 # Users can abort the function by pressing CTRL-C. This will call "exit 1".
 yes_no() {
   if [ 0 -ne "${do_default-0}" ] ; then
+    [ $1 -eq 0 ] && echo "Y" || echo "N"
     return $1
   fi
   local c
   while :; do
     c="$(trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT
          stty -echo iuclc -icanon 2>/dev/null
          dd count=1 bs=1 2>/dev/null | od -An -tx1)"
     case "$c" in
@@ -159,58 +60,575 @@ yes_no() {
       *)     # The user pressed an unrecognized key. As we are not echoing
              # any incorrect user input, alert the user by ringing the bell.
              (tput bel) 2>/dev/null
              ;;
     esac
   done
 }
 
-if test "$do_inst_syms" = ""
+# Checks whether a particular package is available in the repos.
+# USAGE: $ package_exists <package name>
+package_exists() {
+  [ ! -z "`apt-cache search --names-only "$1"`" ]
+}
+
+# These default to on because (some) bots need them and it keeps things
+# simple for the bot setup if all bots just run the script in its default
+# mode.  Developers who don't want stuff they don't need installed on their
+# own workstations can pass --no-arm --no-nacl when running the script.
+do_inst_arm=1
+do_inst_nacl=1
+
+while test "$1" != ""
+do
+  case "$1" in
+  --syms)                   do_inst_syms=1;;
+  --no-syms)                do_inst_syms=0;;
+  --lib32)                  do_inst_lib32=1;;
+  --arm)                    do_inst_arm=1;;
+  --no-arm)                 do_inst_arm=0;;
+  --chromeos-fonts)         do_inst_chromeos_fonts=1;;
+  --no-chromeos-fonts)      do_inst_chromeos_fonts=0;;
+  --nacl)                   do_inst_nacl=1;;
+  --no-nacl)                do_inst_nacl=0;;
+  --no-prompt)              do_default=1
+                            do_quietly="-qq --assume-yes"
+    ;;
+  --quick-check)            do_quick_check=1;;
+  --unsupported)            do_unsupported=1;;
+  *) usage;;
+  esac
+  shift
+done
+
+if test "$do_inst_arm" = "1"; then
+  do_inst_lib32=1
+fi
+
+# Check for lsb_release command in $PATH
+if ! which lsb_release > /dev/null; then
+  echo "ERROR: lsb_release not found in \$PATH" >&2
+  exit 1;
+fi
+
+distro_codename=$(lsb_release --codename --short)
+distro_id=$(lsb_release --id --short)
+supported_codenames="(trusty|xenial|yakkety)"
+supported_ids="(Debian)"
+if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then
+  if [[ ! $distro_codename =~ $supported_codenames &&
+        ! $distro_id =~ $supported_ids ]]; then
+    echo -e "ERROR: The only supported distros are\n" \
+      "\tUbuntu 14.04 (trusty)\n" \
+      "\tUbuntu 16.04 (xenial)\n" \
+      "\tUbuntu 16.10 (yakkety)\n" \
+      "\tDebian 8 (jessie) or later" >&2
+    exit 1
+  fi
+
+  if ! uname -m | egrep -q "i686|x86_64"; then
+    echo "Only x86 architectures are currently supported" >&2
+    exit
+  fi
+fi
+
+if [ "x$(id -u)" != x0 ] && [ 0 -eq "${do_quick_check-0}" ]; then
+  echo "Running as non-root user."
+  echo "You might have to enter your password one or more times for 'sudo'."
+  echo
+fi
+
+# Packages needed for chromeos only
+chromeos_dev_list="libbluetooth-dev libxkbcommon-dev realpath"
+
+# Packages needed for development
+dev_list="\
+  bison
+  cdbs
+  curl
+  dpkg-dev
+  elfutils
+  devscripts
+  fakeroot
+  flex
+  fonts-ipafont
+  fonts-thai-tlwg
+  g++
+  git-core
+  git-svn
+  gperf
+  libasound2-dev
+  libbrlapi-dev
+  libav-tools
+  libbz2-dev
+  libcairo2-dev
+  libcap-dev
+  libcups2-dev
+  libcurl4-gnutls-dev
+  libdrm-dev
+  libelf-dev
+  libffi-dev
+  libgconf2-dev
+  libglib2.0-dev
+  libglu1-mesa-dev
+  libgnome-keyring-dev
+  libgtk2.0-dev
+  libgtk-3-dev
+  libkrb5-dev
+  libnspr4-dev
+  libnss3-dev
+  libpam0g-dev
+  libpci-dev
+  libpulse-dev
+  libsctp-dev
+  libspeechd-dev
+  libsqlite3-dev
+  libssl-dev
+  libudev-dev
+  libwww-perl
+  libxslt1-dev
+  libxss-dev
+  libxt-dev
+  libxtst-dev
+  openbox
+  patch
+  perl
+  pkg-config
+  python
+  python-cherrypy3
+  python-crypto
+  python-dev
+  python-numpy
+  python-opencv
+  python-openssl
+  python-psutil
+  python-yaml
+  rpm
+  ruby
+  subversion
+  ttf-dejavu-core
+  wdiff
+  xcompmgr
+  zip
+  $chromeos_dev_list
+"
+
+# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
+# NaCl binaries.
+if file -L /sbin/init | grep -q 'ELF 64-bit'; then
+  dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6"
+fi
+
+# Run-time libraries required by chromeos only
+chromeos_lib_list="libpulse0 libbz2-1.0"
+
+# Full list of required run-time libraries
+lib_list="\
+  libatk1.0-0
+  libc6
+  libasound2
+  libcairo2
+  libcap2
+  libcups2
+  libexpat1
+  libffi6
+  libfontconfig1
+  libfreetype6
+  libglib2.0-0
+  libgnome-keyring0
+  libgtk2.0-0
+  libgtk-3-0
+  libpam0g
+  libpango1.0-0
+  libpci3
+  libpcre3
+  libpixman-1-0
+  libspeechd2
+  libstdc++6
+  libsqlite3-0
+  libx11-6
+  libx11-xcb1
+  libxau6
+  libxcb1
+  libxcomposite1
+  libxcursor1
+  libxdamage1
+  libxdmcp6
+  libxext6
+  libxfixes3
+  libxi6
+  libxinerama1
+  libxrandr2
+  libxrender1
+  libxtst6
+  zlib1g
+  $chromeos_lib_list
+"
+
+# Debugging symbols for all of the run-time libraries
+dbg_list="\
+  libatk1.0-dbg
+  libc6-dbg
+  libcairo2-dbg
+  libffi6-dbg
+  libfontconfig1-dbg
+  libglib2.0-0-dbg
+  libgtk2.0-0-dbg
+  libgtk-3-0-dbg
+  libpango1.0-0-dbg
+  libpcre3-dbg
+  libpixman-1-0-dbg
+  libsqlite3-0-dbg
+  libx11-6-dbg
+  libx11-xcb1-dbg
+  libxau6-dbg
+  libxcb1-dbg
+  libxcomposite1-dbg
+  libxcursor1-dbg
+  libxdamage1-dbg
+  libxdmcp6-dbg
+  libxext6-dbg
+  libxi6-dbg
+  libxinerama1-dbg
+  libxrandr2-dbg
+  libxrender1-dbg
+  libxtst6-dbg
+  zlib1g-dbg
+"
+
+if [[ ! $distro_codename =~ "yakkety" ]]; then
+  dbg_list="${dbg_list} libxfixes3-dbg"
+fi
+
+# Find the proper version of libstdc++6-4.x-dbg.
+if [ "x$distro_codename" = "xtrusty" ]; then
+  dbg_list="${dbg_list} libstdc++6-4.8-dbg"
+else
+  dbg_list="${dbg_list} libstdc++6-4.9-dbg"
+fi
+
+# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf
+lib32_list="linux-libc-dev:i386"
+
+# arm cross toolchain packages needed to build chrome on armhf
+EM_REPO="deb http://emdebian.org/tools/debian/ jessie main"
+EM_SOURCE=$(cat <<EOF
+# Repo added by Chromium $0
+${EM_REPO}
+# deb-src http://emdebian.org/tools/debian/ jessie main
+EOF
+)
+EM_ARCHIVE_KEY_FINGER="084C6C6F39159EDB67969AA87DE089671804772E"
+GPP_ARM_PACKAGE="g++-arm-linux-gnueabihf"
+case $distro_codename in
+  jessie)
+    eval $(apt-config shell APT_SOURCESDIR 'Dir::Etc::sourceparts/d')
+    CROSSTOOLS_LIST="${APT_SOURCESDIR}/crosstools.list"
+    arm_list="libc6-dev:armhf
+              linux-libc-dev:armhf"
+    if test "$do_inst_arm" = "1"; then
+      if $(dpkg-query -W ${GPP_ARM_PACKAGE} &>/dev/null); then
+        arm_list+=" ${GPP_ARM_PACKAGE}"
+      else
+        echo "The Debian Cross-toolchains repository is necessary to"
+        echo "cross-compile Chromium for arm."
+        echo -n "Do you want me to add it for you (y/N) "
+        if yes_no 1; then
+          gpg --keyserver pgp.mit.edu --recv-keys ${EM_ARCHIVE_KEY_FINGER}
+          gpg -a --export ${EM_ARCHIVE_KEY_FINGER} | sudo apt-key add -
+          if ! grep "^${EM_REPO}" "${CROSSTOOLS_LIST}" &>/dev/null; then
+            echo "${EM_SOURCE}" | sudo tee -a "${CROSSTOOLS_LIST}" >/dev/null
+          fi
+          arm_list+=" ${GPP_ARM_PACKAGE}"
+        fi
+      fi
+    fi
+    ;;
+  # All necessary ARM packages are available on the default repos on
+  # Debian 9 and later.
+  *)
+    arm_list="binutils-aarch64-linux-gnu
+              libc6-dev-armhf-cross
+              linux-libc-dev-armhf-cross
+              ${GPP_ARM_PACKAGE}"
+    ;;
+esac
+
+# Work around for dependency issue Ubuntu/Trusty: http://crbug.com/435056
+case $distro_codename in
+  trusty)
+    arm_list+=" g++-4.8-multilib-arm-linux-gnueabihf
+                gcc-4.8-multilib-arm-linux-gnueabihf"
+    ;;
+  xenial|yakkety)
+    arm_list+=" g++-5-multilib-arm-linux-gnueabihf
+                gcc-5-multilib-arm-linux-gnueabihf
+                gcc-arm-linux-gnueabihf"
+    ;;
+esac
+
+# Packages to build NaCl, its toolchains, and its ports.
+naclports_list="ant autoconf bison cmake gawk intltool xutils-dev xsltproc"
+nacl_list="\
+  g++-mingw-w64-i686
+  lib32z1-dev
+  libasound2:i386
+  libcap2:i386
+  libelf-dev:i386
+  libfontconfig1:i386
+  libgconf-2-4:i386
+  libglib2.0-0:i386
+  libgpm2:i386
+  libgtk2.0-0:i386
+  libgtk-3-0:i386
+  libncurses5:i386
+  lib32ncurses5-dev
+  libnss3:i386
+  libpango1.0-0:i386
+  libssl-dev:i386
+  libtinfo-dev
+  libtinfo-dev:i386
+  libtool
+  libxcomposite1:i386
+  libxcursor1:i386
+  libxdamage1:i386
+  libxi6:i386
+  libxrandr2:i386
+  libxss1:i386
+  libxtst6:i386
+  texinfo
+  xvfb
+  ${naclports_list}
+"
+
+if package_exists libssl1.0.0; then
+  nacl_list="${nacl_list} libssl1.0.0:i386"
+else
+  nacl_list="${nacl_list} libssl1.0.2:i386"
+fi
+
+# Find the proper version of packages that depend on mesa. Only one -lts variant
+# of mesa can be installed and everything that depends on it must match.
+
+# Query for the name and status of all mesa LTS variants, filter for only
+# installed packages, extract just the name, and eliminate duplicates (there can
+# be more than one with the same name in the case of multiarch). Expand into an
+# array.
+mesa_packages=($(dpkg-query -Wf'${package} ${status}\n' \
+                            libgl1-mesa-glx-lts-\* 2>/dev/null | \
+                 grep " ok installed" | cut -d " " -f 1 | sort -u))
+if [ "${#mesa_packages[@]}" -eq 0 ]; then
+  mesa_variant=""
+elif [ "${#mesa_packages[@]}" -eq 1 ]; then
+  # Strip the base package name and leave just "-lts-whatever"
+  mesa_variant="${mesa_packages[0]#libgl1-mesa-glx}"
+else
+  echo "ERROR: unable to determine which libgl1-mesa-glx variant is installed."
+  exit 1
+fi
+dev_list="${dev_list} libgbm-dev${mesa_variant}
+          libgles2-mesa-dev${mesa_variant} libgl1-mesa-dev${mesa_variant}
+          mesa-common-dev${mesa_variant}"
+nacl_list="${nacl_list} libgl1-mesa-glx${mesa_variant}:i386"
+
+# Some package names have changed over time
+if package_exists libpng12-0; then
+  lib_list="${lib_list} libpng12-0"
+else
+  lib_list="${lib_list} libpng16-16"
+fi
+if package_exists libnspr4-dbg; then
+  dbg_list="${dbg_list} libnspr4-dbg libnss3-dbg"
+  lib_list="${lib_list} libnspr4 libnss3"
+else
+  dbg_list="${dbg_list} libnspr4-0d-dbg libnss3-1d-dbg"
+  lib_list="${lib_list} libnspr4-0d libnss3-1d"
+fi
+if package_exists libjpeg-dev; then
+  dev_list="${dev_list} libjpeg-dev"
+else
+  dev_list="${dev_list} libjpeg62-dev"
+fi
+if package_exists libudev1; then
+  dev_list="${dev_list} libudev1"
+  nacl_list="${nacl_list} libudev1:i386"
+else
+  dev_list="${dev_list} libudev0"
+  nacl_list="${nacl_list} libudev0:i386"
+fi
+if package_exists libbrlapi0.6; then
+  dev_list="${dev_list} libbrlapi0.6"
+else
+  dev_list="${dev_list} libbrlapi0.5"
+fi
+if package_exists apache2-bin; then
+  dev_list="${dev_list} apache2-bin"
+else
+  dev_list="${dev_list} apache2.2-bin"
+fi
+if package_exists xfonts-mathml; then
+  dev_list="${dev_list} xfonts-mathml"
+fi
+if package_exists fonts-indic; then
+  dev_list="${dev_list} fonts-indic"
+else
+  dev_list="${dev_list} ttf-indic-fonts"
+fi
+if package_exists php7.0-cgi; then
+  dev_list="${dev_list} php7.0-cgi libapache2-mod-php7.0"
+else
+  dev_list="${dev_list} php5-cgi libapache2-mod-php5"
+fi
+# ttf-mscorefonts-installer is in the Debian contrib repo, which has
+# dependencies on non-free software.  Install it only if the user has already
+# enabled contrib.
+if package_exists ttf-mscorefonts-installer; then
+  dev_list="${dev_list} ttf-mscorefonts-installer"
+elif package_exists msttcorefonts; then
+  dev_list="${dev_list} msttcorefonts"
+fi
+# Ubuntu 16.04 has this package deleted.
+if package_exists ttf-kochi-gothic; then
+  dev_list="${dev_list} ttf-kochi-gothic"
+fi
+# Ubuntu 16.04 has this package deleted.
+if package_exists ttf-kochi-mincho; then
+  dev_list="${dev_list} ttf-kochi-mincho"
+fi
+
+# Some packages are only needed if the distribution actually supports
+# installing them.
+if package_exists appmenu-gtk; then
+  lib_list="$lib_list appmenu-gtk"
+fi
+
+# When cross building for arm/Android on 64-bit systems the host binaries
+# that are part of v8 need to be compiled with -m32 which means
+# that basic multilib support is needed.
+if file -L /sbin/init | grep -q 'ELF 64-bit'; then
+  # gcc-multilib conflicts with the arm cross compiler (at least in trusty) but
+  # g++-X.Y-multilib gives us the 32-bit support that we need. Find out the
+  # appropriate value of X and Y by seeing what version the current
+  # distribution's g++-multilib package depends on.
+  multilib_package=$(apt-cache depends g++-multilib --important | \
+      grep -E --color=never --only-matching '\bg\+\+-[0-9.]+-multilib\b')
+  lib32_list="$lib32_list $multilib_package"
+fi
+
+if test "$do_inst_syms" = "" && test 0 -eq ${do_quick_check-0}
 then
   echo "This script installs all tools and libraries needed to build Chromium."
   echo ""
   echo "For most of the libraries, it can also install debugging symbols, which"
   echo "will allow you to debug code in the system libraries. Most developers"
   echo "won't need these symbols."
   echo -n "Do you want me to install them for you (y/N) "
   if yes_no 1; then
     do_inst_syms=1
   fi
 fi
 if test "$do_inst_syms" = "1"; then
-  echo "Installing debugging symbols."
+  echo "Including debugging symbols."
+  # Many debug packages are not available in Debian stretch,
+  # so exclude the ones that are missing.
+  available_dbg_packages=""
+  for package in ${dbg_list}; do
+    if package_exists ${package}; then
+      available_dbg_packages="${available_dbg_packages} ${package}"
+    fi
+  done
+  dbg_list="${available_dbg_packages}"
 else
-  echo "Skipping installation of debugging symbols."
+  echo "Skipping debugging symbols."
   dbg_list=
 fi
 
+if test "$do_inst_lib32" = "1" ; then
+  echo "Including 32-bit libraries for ARM/Android."
+else
+  echo "Skipping 32-bit libraries for ARM/Android."
+  lib32_list=
+fi
+
+if test "$do_inst_arm" = "1" ; then
+  echo "Including ARM cross toolchain."
+else
+  echo "Skipping ARM cross toolchain."
+  arm_list=
+fi
+
+if test "$do_inst_nacl" = "1"; then
+  echo "Including NaCl, NaCl toolchain, NaCl ports dependencies."
+else
+  echo "Skipping NaCl, NaCl toolchain, NaCl ports dependencies."
+  nacl_list=
+fi
+
+# The `sort -r -s -t: -k2` sorts all the :i386 packages to the front, to avoid
+# confusing dpkg-query (crbug.com/446172).
+packages="$(
+  echo "${dev_list} ${lib_list} ${dbg_list} ${lib32_list} ${arm_list}"\
+       "${nacl_list}" | tr " " "\n" | sort -u | sort -r -s -t: -k2 | tr "\n" " "
+)"
+
+if [ 1 -eq "${do_quick_check-0}" ] ; then
+  if ! missing_packages="$(dpkg-query -W -f ' ' ${packages} 2>&1)"; then
+    # Distinguish between packages that actually aren't available to the
+    # system (i.e. not in any repo) and packages that just aren't known to
+    # dpkg (i.e. managed by apt).
+    missing_packages="$(echo "${missing_packages}" | awk '{print $NF}')"
+    not_installed=""
+    unknown=""
+    for p in ${missing_packages}; do
+      if apt-cache show ${p} > /dev/null 2>&1; then
+        not_installed="${p}\n${not_installed}"
+      else
+        unknown="${p}\n${unknown}"
+      fi
+    done
+    if [ -n "${not_installed}" ]; then
+      echo "WARNING: The following packages are not installed:"
+      echo -e "${not_installed}" | sed -e "s/^/  /"
+    fi
+    if [ -n "${unknown}" ]; then
+      echo "WARNING: The following packages are unknown to your system"
+      echo "(maybe missing a repo or need to 'sudo apt-get update'):"
+      echo -e "${unknown}" | sed -e "s/^/  /"
+    fi
+    exit 1
+  fi
+  exit 0
+fi
+
+if test "$do_inst_lib32" = "1" || test "$do_inst_nacl" = "1"; then
+  sudo dpkg --add-architecture i386
+fi
 sudo apt-get update
 
 # We initially run "apt-get" with the --reinstall option and parse its output.
 # This way, we can find all the packages that need to be newly installed
 # without accidentally promoting any packages from "auto" to "manual".
 # We then re-run "apt-get" with just the list of missing packages.
 echo "Finding missing packages..."
-packages="${dev_list} ${lib_list} ${dbg_list} ${plugin_list}"
 # Intentionally leaving $packages unquoted so it's more readable.
 echo "Packages required: " $packages
 echo
 new_list_cmd="sudo apt-get install --reinstall $(echo $packages)"
-if new_list="$(yes n | LANG=C $new_list_cmd)"; then
+if new_list="$(yes n | LANGUAGE=en LANG=C $new_list_cmd)"; then
   # We probably never hit this following line.
-  echo "No missing packages, and the packages are up-to-date."
+  echo "No missing packages, and the packages are up to date."
 elif [ $? -eq 1 ]; then
   # We expect apt-get to have exit status of 1.
   # This indicates that we cancelled the install with "yes n|".
   new_list=$(echo "$new_list" |
     sed -e '1,/The following NEW packages will be installed:/d;s/^  //;t;d')
   new_list=$(echo "$new_list" | sed 's/ *$//')
   if [ -z "$new_list" ] ; then
-    echo "No missing packages, and the packages are up-to-date."
+    echo "No missing packages, and the packages are up to date."
   else
     echo "Installing missing packages: $new_list."
     sudo apt-get install ${do_quietly-} ${new_list}
   fi
   echo
 else
   # An apt-get exit status of 100 indicates that a real error has occurred.
 
@@ -221,194 +639,49 @@ else
   echo "It produces the following output:"
   yes n | $new_list_cmd || true
   echo
   echo "You will have to install the above packages yourself."
   echo
   exit 100
 fi
 
-# Install 32bit backwards compatibility support for 64bit systems
-if [ "$(uname -m)" = "x86_64" ]; then
-  if test "$do_inst_lib32" = ""
-  then
-    echo "We no longer recommend that you use this script to install"
-    echo "32bit libraries on a 64bit system. Instead, consider using"
-    echo "the install-chroot.sh script to help you set up a 32bit"
-    echo "environment for building and testing 32bit versions of Chrome."
-    echo
-    echo "If you nonetheless want to try installing 32bit libraries"
-    echo "directly, you can do so by explicitly passing the --lib32"
-    echo "option to install-build-deps.sh."
-  fi
-  if test "$do_inst_lib32" != "1"
-  then
-    echo "Exiting without installing any 32bit libraries."
-    exit 0
-  fi
-
-  echo "N.B. the code for installing 32bit libraries on a 64bit"
-  echo "     system is no longer actively maintained and might"
-  echo "     not work with modern versions of Ubuntu or Debian."
+# Install the Chrome OS default fonts. This must go after running
+# apt-get, since install-chromeos-fonts depends on curl.
+if test "$do_inst_chromeos_fonts" != "0"; then
   echo
-
-  # Standard 32bit compatibility libraries
-  echo "First, installing the limited existing 32-bit support..."
-  cmp_list="ia32-libs lib32asound2-dev lib32stdc++6 lib32z1
-            lib32z1-dev libc6-dev-i386 libc6-i386 g++-multilib"
-  if [ -n "`apt-cache search lib32readline-gplv2-dev 2>/dev/null`" ]; then
-    cmp_list="${cmp_list} lib32readline-gplv2-dev"
-  else
-    cmp_list="${cmp_list} lib32readline5-dev"
+  echo "Installing Chrome OS fonts."
+  dir=`echo $0 | sed -r -e 's/\/[^/]+$//'`
+  if ! sudo $dir/linux/install-chromeos-fonts.py; then
+    echo "ERROR: The installation of the Chrome OS default fonts failed."
+    if [ `stat -f -c %T $dir` == "nfs" ]; then
+      echo "The reason is that your repo is installed on a remote file system."
+    else
+      echo "This is expected if your repo is installed on a remote file system."
+    fi
+    echo "It is recommended to install your repo on a local file system."
+    echo "You can skip the installation of the Chrome OS default founts with"
+    echo "the command line option: --no-chromeos-fonts."
+    exit 1
   fi
-  sudo apt-get install ${do_quietly-} $cmp_list
-
-  tmp=/tmp/install-32bit.$$
-  trap 'rm -rf "${tmp}"' EXIT INT TERM QUIT
-  mkdir -p "${tmp}/apt/lists/partial" "${tmp}/cache" "${tmp}/partial"
-  touch "${tmp}/status"
-
-  [ -r /etc/apt/apt.conf ] && cp /etc/apt/apt.conf "${tmp}/apt/"
-  cat >>"${tmp}/apt/apt.conf" <<EOF
-        Apt::Architecture "i386";
-        Dir::Cache "${tmp}/cache";
-        Dir::Cache::Archives "${tmp}/";
-        Dir::State::Lists "${tmp}/apt/lists/";
-        Dir::State::status "${tmp}/status";
-EOF
-
-  # Download 32bit packages
-  echo "Computing list of available 32bit packages..."
-  sudo apt-get -c="${tmp}/apt/apt.conf" update
-
-  echo "Downloading available 32bit packages..."
-  sudo apt-get -c="${tmp}/apt/apt.conf" \
-          --yes --download-only --force-yes --reinstall install \
-          ${lib_list} ${dbg_list}
-
-  # Open packages, remove everything that is not a library, move the
-  # library to a lib32 directory and package everything as a *.deb file.
-  echo "Repackaging and installing 32bit packages for use on 64bit systems..."
-  for i in ${lib_list} ${dbg_list}; do
-    orig="$(echo "${tmp}/${i}"_*_i386.deb)"
-    compat="$(echo "${orig}" |
-              sed -e 's,\(_[^_/]*_\)i386\(.deb\),-ia32\1amd64\2,')"
-    rm -rf "${tmp}/staging"
-    msg="$(fakeroot -u sh -exc '
-      # Unpack 32bit Debian archive
-      umask 022
-      mkdir -p "'"${tmp}"'/staging/dpkg/DEBIAN"
-      cd "'"${tmp}"'/staging"
-      ar x "'${orig}'"
-      tar zCfx dpkg data.tar.gz
-      tar zCfx dpkg/DEBIAN control.tar.gz
-
-      # Create a posix extended regular expression fragment that will
-      # recognize the includes which have changed. Should be rare,
-      # will almost always be empty.
-      includes=`sed -n -e "s/^[0-9a-z]*  //g" \
-                       -e "\,usr/include/,p" dpkg/DEBIAN/md5sums |
-                  xargs -n 1 -I FILE /bin/sh -c \
-                    "cmp -s dpkg/FILE /FILE || echo FILE" |
-                  tr "\n" "|" |
-                  sed -e "s,|$,,"`
-
-      # If empty, set it to not match anything.
-      test -z "$includes" && includes="^//"
+else
+  echo "Skipping installation of Chrome OS fonts."
+fi
 
-      # Turn the conflicts into an extended RE for removal from the
-      # Provides line.
-      conflicts=`sed -n -e "/Conflicts/s/Conflicts: *//;T;s/, */|/g;p" \
-                   dpkg/DEBIAN/control`
-
-      # Rename package, change architecture, remove conflicts and dependencies
-      sed -r -i                              \
-          -e "/Package/s/$/-ia32/"           \
-          -e "/Architecture/s/:.*$/: amd64/" \
-          -e "/Depends/s/:.*/: ia32-libs/"   \
-          -e "/Provides/s/($conflicts)(, *)?//g;T1;s/, *$//;:1"   \
-          -e "/Recommends/d"                 \
-          -e "/Conflicts/d"                  \
-        dpkg/DEBIAN/control
-
-      # Only keep files that live in "lib" directories or the includes
-      # that have changed.
-      sed -r -i                                                               \
-          -e "/\/lib64\//d" -e "/\/.?bin\//d"                                 \
-          -e "\,$includes,s,[ /]include/,&32/,g;s,include/32/,include32/,g"   \
-          -e "s, lib/, lib32/,g"                                              \
-          -e "s,/lib/,/lib32/,g"                                              \
-          -e "t;d"                                                            \
-          -e "\,^/usr/lib32/debug\(.*/lib32\),s,^/usr/lib32/debug,/usr/lib/debug," \
-        dpkg/DEBIAN/md5sums
-
-      # Re-run ldconfig after installation/removal
-      { echo "#!/bin/sh"; echo "[ \"x\$1\" = xconfigure ]&&ldconfig||:"; } \
-        >dpkg/DEBIAN/postinst
-      { echo "#!/bin/sh"; echo "[ \"x\$1\" = xremove ]&&ldconfig||:"; } \
-        >dpkg/DEBIAN/postrm
-      chmod 755 dpkg/DEBIAN/postinst dpkg/DEBIAN/postrm
-
-      # Remove any other control files
-      find dpkg/DEBIAN -mindepth 1 "(" -name control -o -name md5sums -o \
-                       -name postinst -o -name postrm ")" -o -print |
-        xargs -r rm -rf
-
-      # Remove any files/dirs that live outside of "lib" directories,
-      # or are not in our list of changed includes.
-      find dpkg -mindepth 1 -regextype posix-extended \
-          "(" -name DEBIAN -o -name lib -o -regex "dpkg/($includes)" ")" \
-          -prune -o -print | tac |
-        xargs -r -n 1 sh -c "rm \$0 2>/dev/null || rmdir \$0 2>/dev/null || : "
-      find dpkg -name lib64 -o -name bin -o -name "?bin" |
-        tac | xargs -r rm -rf
-
-      # Remove any symbolic links that were broken by the above steps.
-      find -L dpkg -type l -print | tac | xargs -r rm -rf
-
-      # Rename lib to lib32, but keep debug symbols in /usr/lib/debug/usr/lib32
-      # That is where gdb looks for them.
-      find dpkg -type d -o -path "*/lib/*" -print |
-        xargs -r -n 1 sh -c "
-          i=\$(echo \"\${0}\" |
-               sed -e s,/lib/,/lib32/,g \
-               -e s,/usr/lib32/debug\\\\\(.*/lib32\\\\\),/usr/lib/debug\\\\1,);
-          mkdir -p \"\${i%/*}\";
-          mv \"\${0}\" \"\${i}\""
-
-      # Rename include to include32.
-      [ -d "dpkg/usr/include" ] && mv "dpkg/usr/include" "dpkg/usr/include32"
-
-      # Prune any empty directories
-      find dpkg -type d | tac | xargs -r -n 1 rmdir 2>/dev/null || :
-
-      # Create our own Debian package
-      cd ..
-      dpkg --build staging/dpkg .' 2>&1)"
-    compat="$(eval echo $(echo "${compat}" |
-                          sed -e 's,_[^_/]*_amd64.deb,_*_amd64.deb,'))"
-    [ -r "${compat}" ] || {
-      echo "${msg}" >&2
-      echo "Failed to build new Debian archive!" >&2
-      exit 1
-    }
-
-    msg="$(sudo dpkg -i "${compat}" 2>&1)" && {
-        echo "Installed ${compat##*/}"
-      } || {
-        # echo "${msg}" >&2
-        echo "Skipped ${compat##*/}"
-      }
+echo "Installing locales."
+CHROMIUM_LOCALES="da_DK.UTF-8 fr_FR.UTF-8 he_IL.UTF-8 zh_TW.UTF-8"
+LOCALE_GEN=/etc/locale.gen
+if [ -e ${LOCALE_GEN} ]; then
+  OLD_LOCALE_GEN="$(cat /etc/locale.gen)"
+  for CHROMIUM_LOCALE in ${CHROMIUM_LOCALES}; do
+    sudo sed -i "s/^# ${CHROMIUM_LOCALE}/${CHROMIUM_LOCALE}/" ${LOCALE_GEN}
   done
-
-  # Add symbolic links for developing 32bit code
-  echo "Adding missing symbolic links, enabling 32bit code development..."
-  for i in $(find /lib32 /usr/lib32 -maxdepth 1 -name \*.so.\* |
-             sed -e 's/[.]so[.][0-9].*/.so/' |
-             sort -u); do
-    [ "x${i##*/}" = "xld-linux.so" ] && continue
-    [ -r "$i" ] && continue
-    j="$(ls "$i."* | sed -e 's/.*[.]so[.]\([^.]*\)$/\1/;t;d' |
-         sort -n | tail -n 1)"
-    [ -r "$i.$j" ] || continue
-    sudo ln -s "${i##*/}.$j" "$i"
+  # Regenerating locales can take a while, so only do it if we need to.
+  if (echo "${OLD_LOCALE_GEN}" | cmp -s ${LOCALE_GEN}); then
+    echo "Locales already up-to-date."
+  else
+    sudo locale-gen
+  fi
+else
+  for CHROMIUM_LOCALE in ${CHROMIUM_LOCALES}; do
+    sudo locale-gen ${CHROMIUM_LOCALE}
   done
 fi
--- a/media/webrtc/trunk/build/install-chroot.sh
+++ b/media/webrtc/trunk/build/install-chroot.sh
@@ -7,17 +7,17 @@
 # This script installs Debian-derived distributions in a chroot environment.
 # It can for example be used to have an accurate 32bit build and test
 # environment when otherwise working on a 64bit machine.
 # N. B. it is unlikely that this script will ever work on anything other than a
 # Debian-derived system.
 
 # Older Debian based systems had both "admin" and "adm" groups, with "admin"
 # apparently being used in more places. Newer distributions have standardized
-# on just the "adm" group. Check /etc/group for the prefered name of the
+# on just the "adm" group. Check /etc/group for the preferred name of the
 # administrator group.
 admin=$(grep '^admin:' /etc/group >&/dev/null && echo admin || echo adm)
 
 usage() {
   echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]"
   echo "-b dir       additional directories that should be bind mounted,"
   echo '             or "NONE".'
   echo "             Default: if local filesystems present, ask user for help"
@@ -218,17 +218,18 @@ target="${distname}${arch}"
     printf "do (a/o/d)? "
     read choice
     case "${choice}" in
       a|A) exit 1;;
       o|O) sudo rm -rf "/var/lib/chroot/${target}"; break;;
       d|D) sudo rm -rf "/var/lib/chroot/${target}"      \
                        "/usr/local/bin/${target%bit}"   \
                        "/etc/schroot/mount-${target}"   \
-                       "/etc/schroot/script-${target}"
+                       "/etc/schroot/script-${target}"  \
+                       "/etc/schroot/${target}"
            sudo sed -ni '/^[[]'"${target%bit}"']$/,${
                          :1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
                        "/etc/schroot/schroot.conf"
            trap '' INT TERM QUIT HUP
            trap '' EXIT
            echo "Deleted!"
            exit 0;;
     esac
@@ -344,76 +345,109 @@ sudo ${http_proxy:+http_proxy="${http_pr
     "${distname}" "/var/lib/chroot/${target}"  "$mirror"
 
 # Add new entry to /etc/schroot/schroot.conf
 grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
   brand="Ubuntu" || brand="Debian"
 if [ -z "${chroot_groups}" ]; then
   chroot_groups="${admin},$(id -gn)"
 fi
-# Older versions of schroot wanted a "priority=" line, whereas recent
-# versions deprecate "priority=" and warn if they see it. We don't have
-# a good feature test, but scanning for the string "priority=" in the
-# existing "schroot.conf" file is a good indication of what to do.
-priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
+
+if [ -d '/etc/schroot/default' ]; then
+  new_version=1
+  fstab="/etc/schroot/${target}/fstab"
+else
+  new_version=0
+  fstab="/etc/schroot/mount-${target}"
+fi
+
+if [ "$new_version" = "1" ]; then
+  sudo cp -ar /etc/schroot/default /etc/schroot/${target}
+
+  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+profile=${target}
+
+EOF
+  [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+    printf "${bind_mounts}" |
+      sudo sh -c "cat >>${fstab}"
+else
+  # Older versions of schroot wanted a "priority=" line, whereas recent
+  # versions deprecate "priority=" and warn if they see it. We don't have
+  # a good feature test, but scanning for the string "priority=" in the
+  # existing "schroot.conf" file is a good indication of what to do.
+  priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
            echo 'priority=3' || :)
-sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
 [${target%bit}]
 description=${brand} ${distname} ${arch}
 type=directory
 directory=/var/lib/chroot/${target}
 users=root
 groups=${chroot_groups}
 root-groups=${chroot_groups}
 personality=linux$([ "${arch}" != 64bit ] && echo 32)
 script-config=script-${target}
 ${priority}
 
 EOF
 
-# Set up a list of mount points that is specific to this
-# chroot environment.
-sed '/^FSTAB=/s,"[^"]*","/etc/schroot/mount-'"${target}"'",' \
-         /etc/schroot/script-defaults |
-  sudo sh -c 'cat >/etc/schroot/script-'"${target}"
-sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
-  /etc/schroot/mount-defaults |
-  sudo sh -c 'cat > /etc/schroot/mount-'"${target}"
+  # Set up a list of mount points that is specific to this
+  # chroot environment.
+  sed '/^FSTAB=/s,"[^"]*","'"${fstab}"'",' \
+           /etc/schroot/script-defaults |
+    sudo sh -c 'cat >/etc/schroot/script-'"${target}"
+  sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
+    /etc/schroot/mount-defaults |
+    sudo sh -c "cat > ${fstab}"
+fi
 
 # Add the extra mount points that the user told us about
 [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
   printf "${bind_mounts}" |
-    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+    sudo sh -c 'cat >>'"${fstab}"
 
 # If this system has a "/media" mountpoint, import it into the chroot
 # environment. Most modern distributions use this mount point to
 # automatically mount devices such as CDROMs, USB sticks, etc...
 if [ -d /media ] &&
-   ! grep -qs '^/media' /etc/schroot/mount-"${target}"; then
+   ! grep -qs '^/media' "${fstab}"; then
   echo '/media /media none rw,rbind 0 0' |
-    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+    sudo sh -c 'cat >>'"${fstab}"
 fi
 
-# Share /dev/shm and possibly /run/shm
-grep -qs '^/dev/shm' /etc/schroot/mount-"${target}" ||
+# Share /dev/shm, /run and /run/shm.
+grep -qs '^/dev/shm' "${fstab}" ||
   echo '/dev/shm /dev/shm none rw,bind 0 0' |
-    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
-if [ -d "/var/lib/chroot/${target}/run" ] &&
-   ! grep -qs '^/run/shm' /etc/schroot/mount-"${target}"; then
+    sudo sh -c 'cat >>'"${fstab}"
+if [ ! -d "/var/lib/chroot/${target}/run" ] &&
+   ! grep -qs '^/run' "${fstab}"; then
+  echo '/run /run none rw,bind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+if ! grep -qs '^/run/shm' "${fstab}"; then
   { [ -d /run ] && echo '/run/shm /run/shm none rw,bind 0 0' ||
                    echo '/dev/shm /run/shm none rw,bind 0 0'; } |
-    sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+    sudo sh -c 'cat >>'"${fstab}"
 fi
 
 # Set up a special directory that changes contents depending on the target
 # that is executing.
 d="$(readlink -f "${HOME}/chroot" 2>/dev/null || echo "${HOME}/chroot")"
 s="${d}/.${target}"
 echo "${s} ${d} none rw,bind 0 0" |
-  sudo sh -c 'cat >>/etc/schroot/mount-'"${target}"
+  sudo sh -c 'cat >>'"${fstab}"
 mkdir -p "${s}"
 
 # Install a helper script to launch commands in the chroot
 sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<'EOF'
 #!/bin/bash
 
 chroot="${0##*/}"
 
@@ -488,37 +522,79 @@ while [ "$#" -ne 0 ]; do
     -h|--help)      shift; help;;
     -l|--list)      shift; list;;
     -c|--clean)     shift; clean "${chroot}";;
     -C|--clean-all) shift; clean;;
     *)              break;;
   esac
 done
 
+# Start a new chroot session and keep track of the session id. We inject this
+# id into all processes that run inside the chroot. Unless they go out of their
+# way to clear their environment, we can then later identify our child and
+# grand-child processes by scanning their environment.
 session="$(schroot -c "${chroot}" -b)"
+export CHROOT_SESSION_ID="${session}"
+
+# Set GOMA_TMP_DIR for better handling of goma inside chroot.
+export GOMA_TMP_DIR="/tmp/goma_tmp_$CHROOT_SESSION_ID"
+mkdir -p "$GOMA_TMP_DIR"
 
 if [ $# -eq 0 ]; then
+  # Run an interactive shell session
   schroot -c "${session}" -r -p
 else
+  # Run a command inside of the chroot environment
   p="$1"; shift
   schroot -c "${session}" -r -p "$p" -- "$@"
 fi
 rc=$?
 
+# Compute the inode of the root directory inside of the chroot environment.
 i=$(schroot -c "${session}" -r -p ls -- -id /proc/self/root/. |
      awk '{ print $1 }') 2>/dev/null
+other_pids=
 while [ -n "$i" ]; do
-  pids=$(ls -id1 /proc/*/root/. 2>/dev/null |
+  # Identify processes by the inode number of their root directory. Then
+  # remove all processes that we know belong to other sessions. We use
+  # "sort | uniq -u" to do what amounts to a "set substraction operation".
+  pids=$({ ls -id1 /proc/*/root/. 2>/dev/null |
          sed -e 's,^[^0-9]*'$i'.*/\([1-9][0-9]*\)/.*$,\1,
                  t
-                 d') >/dev/null 2>&1
-  [ -z "$pids" ] && break
-  kill -9 $pids
+                 d';
+         echo "${other_pids}";
+         echo "${other_pids}"; } | sort | uniq -u) >/dev/null 2>&1
+  # Kill all processes that are still left running in the session. This is
+  # typically an assortment of daemon processes that were started
+  # automatically. They result in us being unable to tear down the session
+  # cleanly.
+  [ -z "${pids}" ] && break
+  for j in $pids; do
+    # Unfortunately, the way that schroot sets up sessions has the
+    # side-effect of being unable to tell one session apart from another.
+    # This can result in us attempting to kill processes in other sessions.
+    # We make a best-effort to avoid doing so.
+    k="$( ( xargs -0 -n1 </proc/$j/environ ) 2>/dev/null |
+         sed 's/^CHROOT_SESSION_ID=/x/;t1;d;:1;q')"
+    if [ -n "${k}" -a "${k#x}" != "${session}" ]; then
+      other_pids="${other_pids}
+${j}"
+      continue
+    fi
+    kill -9 $j
+  done
 done
+# End the chroot session. This should clean up all temporary files. But if we
+# earlier failed to terminate all (daemon) processes inside of the session,
+# deleting the session could fail. When that happens, the user has to manually
+# clean up the stale files by invoking us with "--clean" after having killed
+# all running processes.
 schroot -c "${session}" -e
+# Since no goma processes are running, we can remove goma directory.
+rm -rf "$GOMA_TMP_DIR"
 exit $rc
 EOF
 sudo chown root:root /usr/local/bin/"${target%bit}"
 sudo chmod 755 /usr/local/bin/"${target%bit}"
 
 # Add the standard Ubuntu update repositories if requested.
 [ "${alt_repos}" = "y" -a \
   -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
@@ -578,44 +654,47 @@ sudo "/usr/local/bin/${target%bit}" /bin
   locale-gen $LANG en_US en_US.UTF-8' || :
 
 # Enable multi-arch support, if available
 sudo "/usr/local/bin/${target%bit}" dpkg --assert-multi-arch >&/dev/null &&
   [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && {
   sudo sed -i 's/ / [arch=amd64,i386] /' \
               "/var/lib/chroot/${target}/etc/apt/sources.list"
   [ -d /var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/ ] &&
-  echo foreign-architecture \
-       $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) |
-    sudo sh -c "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'"
+  sudo "/usr/local/bin/${target%bit}" dpkg --add-architecture \
+      $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) >&/dev/null ||
+    echo foreign-architecture \
+        $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) |
+      sudo sh -c \
+        "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'"
 }
 
 # Configure "sudo" package
 sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
   egrep -qs '"'^$(id -nu) '"' /etc/sudoers ||
   echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers'
 
 # Install a few more commonly used packages
 sudo "/usr/local/bin/${target%bit}" apt-get -y install                         \
   autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool     \
-  strace
+  lsof strace
 
 # If running a 32bit environment on a 64bit machine, install a few binaries
 # as 64bit. This is only done automatically if the chroot distro is the same as
 # the host, otherwise there might be incompatibilities in build settings or
 # runtime dependencies. The user can force it with the '-c' flag.
 host_distro=$(grep -s DISTRIB_CODENAME /etc/lsb-release | \
   cut -d "=" -f 2)
 if [ "${copy_64}" = "y" -o \
     "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \
     file /bin/bash 2>/dev/null | grep -q x86-64; then
   readlinepkg=$(sudo "/usr/local/bin/${target%bit}" sh -c \
     'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1')
   sudo "/usr/local/bin/${target%bit}" apt-get -y install                       \
-    lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1
+    lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1 lib64stdc++6
   dep=
   for i in binutils gdb; do
     [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
   done
   [ -n "$dep" ] && sudo apt-get -y install $dep
   sudo mkdir -p "/var/lib/chroot/${target}/usr/local/lib/amd64"
   for i in libbfd libpython; do
     lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } |
@@ -663,17 +742,17 @@ if [ -x "${script}" ]; then
           tmp_script="/tmp/${script##*/}"
           cp "${script}" "${tmp_script}"
         fi
         # Some distributions automatically start an instance of the system-
         # wide dbus daemon, cron daemon or of the logging daemon, when
         # installing the Chrome build depencies. This prevents the chroot
         # session from being closed.  So, we always try to shut down any running
         # instance of dbus and rsyslog.
-        sudo /usr/local/bin/"${target%bit}" sh -c "${script} --no-lib32;
+        sudo /usr/local/bin/"${target%bit}" sh -c "${script};
               rc=$?;
               /etc/init.d/cron stop >/dev/null 2>&1 || :;
               /etc/init.d/rsyslog stop >/dev/null 2>&1 || :;
               /etc/init.d/dbus stop >/dev/null 2>&1 || :;
               exit $rc"
         rc=$?
         [ -n "${tmp_script}" ] && rm -f "${tmp_script}"
         [ $rc -ne 0 ] && exit $rc
@@ -764,17 +843,17 @@ if [ ! -h "${HOME}/chroot" ] &&
       done
       sudo sed -i "s,${HOME}/chroot,${dir}/chroot,g" /etc/schroot/mount-*
       break
     fi
   done
 fi
 
 # Clean up package files
-sudo schroot -c /usr/local/bin/"${target%bit}" -p -- apt-get clean
+sudo schroot -c "${target%bit}" -p -- apt-get clean
 sudo apt-get clean
 
 trap '' INT TERM QUIT HUP
 trap '' EXIT
 
 # Let the user know what we did
 cat <<EOF
 
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/inverse_depth.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+
+def DoMain(argv):
+  depth = argv[0]
+  return os.path.relpath(os.getcwd(), os.path.abspath(depth))
+
+
+def main(argv):
+  if len(argv) < 2:
+    print "USAGE: inverse_depth.py depth"
+    return 1
+  print DoMain(argv[1:])
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/ios/OWNERS
@@ -0,0 +1,1 @@
+rohitrao@chromium.org
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/ios/chrome_ios.croc
@@ -0,0 +1,71 @@
+# -*- python -*-
+# Crocodile config file for Chromium iOS.
+#
+# Note that Chromium iOS also uses the config file at src/build/common.croc.
+#
+# See src/tools/code_coverage/example.croc for more info on config files.
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Exclude everything to negate whatever is in src/build/common.croc
+    {
+      'regexp' : '.*',
+      'include' : 0,
+    },
+
+    # Include all directories (but not the files in the directories).
+    # This is a workaround for how croc.py walks the directory tree. See the
+    # TODO in the AddFiles method of src/tools/code_coverage/croc.py
+    {
+      'regexp' : '.*/$',
+      'include' : 1,
+    },
+
+    # Include any file with an 'ios' directory in the path.
+    {
+      'regexp' : '.*/ios/.*',
+      'include' : 1,
+      'add_if_missing' : 1,
+    },
+
+    # Include any file that ends with _ios.
+    {
+      'regexp' : '.*_ios\\.(c|cc|m|mm)$',
+      'include' : 1,
+      'add_if_missing' : 1,
+    },
+
+    # Include any file that ends with _ios_unittest (and label it a test).
+    {
+      'regexp' : '.*_ios_unittest\\.(c|cc|m|mm)$',
+      'include' : 1,
+      'add_if_missing' : 1,
+      'group' : 'test',
+    },
+
+    # Don't scan for executable lines in uninstrumented header files
+    {
+      'regexp' : '.*\\.(h|hpp)$',
+      'add_if_missing' : 0,
+    },
+
+    # Don't measure coverage of perftests.
+    {
+      'regexp' : '.*perftest\\.(c|cc|m|mm)$',
+      'include' : 0,
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.m$',
+      'language' : 'ObjC',
+    },
+    {
+      'regexp' : '.*\\.mm$',
+      'language' : 'ObjC++',
+    },
+  ],
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/landmine_utils.py
@@ -0,0 +1,89 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import functools
+import logging
+import os
+import shlex
+import sys
+
+
+def memoize(default=None):
+  """This decorator caches the return value of a parameterless pure function"""
+  def memoizer(func):
+    val = []
+    @functools.wraps(func)
+    def inner():
+      if not val:
+        ret = func()
+        val.append(ret if ret is not None else default)
+        if logging.getLogger().isEnabledFor(logging.INFO):
+          print '%s -> %r' % (func.__name__, val[0])
+      return val[0]
+    return inner
+  return memoizer
+
+
+@memoize()
+def IsWindows():
+  return sys.platform in ['win32', 'cygwin']
+
+
+@memoize()
+def IsLinux():
+  return sys.platform.startswith(('linux', 'freebsd', 'netbsd', 'openbsd'))
+
+
+@memoize()
+def IsMac():
+  return sys.platform == 'darwin'
+
+
+@memoize()
+def gyp_defines():
+  """Parses and returns GYP_DEFINES env var as a dictionary."""
+  return dict(arg.split('=', 1)
+      for arg in shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+
+@memoize()
+def gyp_generator_flags():
+  """Parses and returns GYP_GENERATOR_FLAGS env var as a dictionary."""
+  return dict(arg.split('=', 1)
+      for arg in shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', '')))
+
+
+@memoize()
+def gyp_msvs_version():
+  return os.environ.get('GYP_MSVS_VERSION', '')
+
+
+@memoize()
+def distributor():
+  """
+  Returns a string which is the distributed build engine in use (if any).
+  Possible values: 'goma', None
+  """
+  if 'goma' in gyp_defines():
+    return 'goma'
+
+
+@memoize()
+def platform():
+  """
+  Returns a string representing the platform this build is targetted for.
+  Possible values: 'win', 'mac', 'linux', 'ios', 'android'
+  """
+  if 'OS' in gyp_defines():
+    if 'android' in gyp_defines()['OS']:
+      return 'android'
+    else:
+      return gyp_defines()['OS']
+  elif IsWindows():
+    return 'win'
+  elif IsLinux():
+    return 'linux'
+  else:
+    return 'mac'
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/landmines.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script runs every build as the first hook (See DEPS). If it detects that
+the build should be clobbered, it will delete the contents of the build
+directory.
+
+A landmine is tripped when a builder checks out a different revision, and the
+diff between the new landmines and the old ones is non-null. At this point, the
+build is clobbered.
+"""
+
+import difflib
+import errno
+import gyp_environment
+import logging
+import optparse
+import os
+import sys
+import subprocess
+import time
+
+import clobber
+import landmine_utils
+
+
+def get_build_dir(src_dir):
+  """
+  Returns output directory absolute path dependent on build and targets.
+  Examples:
+    r'c:\b\build\slave\win\build\src\out'
+    '/mnt/data/b/build/slave/linux/build/src/out'
+    '/b/build/slave/ios_rel_device/build/src/out'
+
+  Keep this function in sync with tools/build/scripts/slave/compile.py
+  """
+  if 'CHROMIUM_OUT_DIR' in os.environ:
+    output_dir = os.environ.get('CHROMIUM_OUT_DIR').strip()
+    if not output_dir:
+      raise RuntimeError('CHROMIUM_OUT_DIR environment variable is set but blank!')
+  else:
+    output_dir = landmine_utils.gyp_generator_flags().get('output_dir', 'out')
+  return os.path.abspath(os.path.join(src_dir, output_dir))
+
+
+def clobber_if_necessary(new_landmines, src_dir):
+  """Does the work of setting, planting, and triggering landmines."""
+  out_dir = get_build_dir(src_dir)
+  landmines_path = os.path.normpath(os.path.join(src_dir, '.landmines'))
+  try:
+    os.makedirs(out_dir)
+  except OSError as e:
+    if e.errno == errno.EEXIST:
+      pass
+
+  if os.path.exists(landmines_path):
+    with open(landmines_path, 'r') as f:
+      old_landmines = f.readlines()
+    if old_landmines != new_landmines:
+      old_date = time.ctime(os.stat(landmines_path).st_ctime)
+      diff = difflib.unified_diff(old_landmines, new_landmines,
+          fromfile='old_landmines', tofile='new_landmines',
+          fromfiledate=old_date, tofiledate=time.ctime(), n=0)
+      sys.stdout.write('Clobbering due to:\n')
+      sys.stdout.writelines(diff)
+      sys.stdout.flush()
+
+      clobber.clobber(out_dir)
+
+  # Save current set of landmines for next time.
+  with open(landmines_path, 'w') as f:
+    f.writelines(new_landmines)
+
+
+def process_options():
+  """Returns an options object containing the configuration for this script."""
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '-s', '--landmine-scripts', action='append',
+      help='Path to the script which emits landmines to stdout. The target '
+           'is passed to this script via option -t. Note that an extra '
+           'script can be specified via an env var EXTRA_LANDMINES_SCRIPT.')
+  parser.add_option('-d', '--src-dir',
+      help='Path of the source root dir. Overrides the default location of the '
+           'source root dir when calculating the build directory.')
+  parser.add_option('-v', '--verbose', action='store_true',
+      default=('LANDMINES_VERBOSE' in os.environ),
+      help=('Emit some extra debugging information (default off). This option '
+          'is also enabled by the presence of a LANDMINES_VERBOSE environment '
+          'variable.'))
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('Unknown arguments %s' % args)
+
+  logging.basicConfig(
+      level=logging.DEBUG if options.verbose else logging.ERROR)
+
+  if options.src_dir:
+    if not os.path.isdir(options.src_dir):
+      parser.error('Cannot find source root dir at %s' % options.src_dir)
+    logging.debug('Overriding source root dir. Using: %s', options.src_dir)
+  else:
+    options.src_dir = \
+        os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+  if not options.landmine_scripts:
+    options.landmine_scripts = [os.path.join(options.src_dir, 'build',
+                                             'get_landmines.py')]
+
+  extra_script = os.environ.get('EXTRA_LANDMINES_SCRIPT')
+  if extra_script:
+    options.landmine_scripts += [extra_script]
+
+  return options
+
+
+def main():
+  options = process_options()
+
+  gyp_environment.SetEnvironment()
+
+  landmines = []
+  for s in options.landmine_scripts:
+    proc = subprocess.Popen([sys.executable, s], stdout=subprocess.PIPE)
+    output, _ = proc.communicate()
+    landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
+  clobber_if_necessary(landmines, options.src_dir)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
deleted file mode 100644
--- a/media/webrtc/trunk/build/linux/chrome_linux.croc
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- python -*-
-# Crocodile config file for Chromium linux
-
-# TODO(jhawkins): We'll need to add a chromeos.croc once we get a coverage bot
-# for that platform.
-
-{
-  # List of rules, applied in order
-  'rules' : [
-    # Specify inclusions before exclusions, since rules are in order.
-
-    # Don't include non-Linux platform dirs
-    {
-      'regexp' : '.*/(chromeos|views)/',
-      'include' : 0,
-    },
-    # Don't include chromeos, windows, or mac specific files
-    {
-      'regexp' : '.*(_|/)(chromeos|mac|win|views)(\\.|_)',
-      'include' : 0,
-    },
-
-    # Groups
-    {
-      'regexp' : '.*_test_linux\\.',
-      'group' : 'test',
-    },
-  ],
-}
deleted file mode 100755
--- a/media/webrtc/trunk/build/linux/dump_app_syms
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/sh
-
-# Copyright (c) 2010 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# Helper script to run dump_syms on Chrome Linux executables and strip
-# them if needed.
-
-set -e
-
-usage() {
-  echo -n "$0 <dump_syms_exe> <strip_binary> " >&2
-  echo "<binary_with_symbols> <symbols_output>" >&2
-}
-
-
-if [ $# -ne 4 ]; then
-  usage
-  exit 1
-fi
-
-SCRIPTDIR="$(readlink -f "$(dirname "$0")")"
-DUMPSYMS="$1"
-STRIP_BINARY="$2"
-INFILE="$3"
-OUTFILE="$4"
-
-# Dump the symbols from the given binary.
-if [ ! -e "$OUTFILE" -o "$INFILE" -nt "$OUTFILE" ]; then
-  "$DUMPSYMS" "$INFILE" > "$OUTFILE"
-fi
-
-if [ "$STRIP_BINARY" != "0" ]; then
-  strip "$INFILE"
-fi
deleted file mode 100755
--- a/media/webrtc/trunk/build/linux/pkg-config-wrapper
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This program wraps around pkg-config to generate the correct include and
-# library paths when cross-compiling using a sysroot.
-# The assumption is that the sysroot contains the .pc files in usr/lib/pkgconfig
-# and usr/share/pkgconfig (relative to the sysroot) and that they output paths
-# relative to some parent path of the sysroot.
-# This assumption is valid for a range of sysroots, in particular: a
-# LSB-compliant root filesystem mounted at the sysroot, and a board build
-# directory of a Chromium OS chroot.
-
-root="$1"
-shift
-target_arch="$1"
-shift
-
-if [ -z "$root" -o -z "$target_arch" ]
-then
-  echo "usage: $0 /path/to/sysroot target_arch [pkg-config-arguments] package" >&2
-  exit 1
-fi
-
-if [ "$target_arch" = "x64" ]
-then
-  libpath="lib64"
-else
-  libpath="lib"
-fi
-
-rewrite=`dirname $0`/rewrite_dirs.py
-package=${!#}
-
-config_path=$root/usr/$libpath/pkgconfig:$root/usr/share/pkgconfig
-set -e
-# Some sysroots, like the Chromium OS ones, may generate paths that are not
-# relative to the sysroot. For example,
-# /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all paths
-# relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) instead of
-# relative to /path/to/chroot/build/x86-generic (i.e prefix=/usr).
-# To support this correctly, it's necessary to extract the prefix to strip from
-# pkg-config's |prefix| variable.
-prefix=`PKG_CONFIG_PATH=$config_path pkg-config --variable=prefix "$package" | sed -e 's|/usr$||'`
-result=`PKG_CONFIG_PATH=$config_path pkg-config "$@"`
-echo "$result"| $rewrite --sysroot "$root" --strip-prefix "$prefix"
deleted file mode 100755
--- a/media/webrtc/trunk/build/linux/python_arch.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# This figures out the architecture of the version of Python we are building
-# pyautolib against.
-#
-#  python_arch.sh /usr/lib/libpython2.5.so.1.0
-#  python_arch.sh /path/to/sysroot/usr/lib/libpython2.4.so.1.0
-#
-
-python=$(readlink -f "$1")
-if [ ! -r "$python" ]; then
-  echo unknown
-  exit 0
-fi
-file_out=$(file "$python")
-if [ $? -ne 0 ]; then
-  echo unknown
-  exit 0
-fi
-
-echo $file_out | grep -qs "ARM"
-if [ $? -eq 0 ]; then
-  echo arm
-  exit 0
-fi
-
-echo $file_out | grep -qs "x86-64"
-if [ $? -eq 0 ]; then
-  echo x64
-  exit 0
-fi
-
-echo $file_out | grep -qs "Intel 80386"
-if [ $? -eq 0 ]; then
-  echo ia32
-  exit 0
-fi
-
-exit 1
deleted file mode 100755
--- a/media/webrtc/trunk/build/linux/rewrite_dirs.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Rewrites paths in -I, -L and other option to be relative to a sysroot."""
-
-import sys
-import os
-import optparse
-
-REWRITE_PREFIX = ['-I',
-                  '-idirafter',
-                  '-imacros',
-                  '-imultilib',
-                  '-include',
-                  '-iprefix',
-                  '-iquote',
-                  '-isystem',
-                  '-L']
-
-def RewritePath(path, opts):
-  """Rewrites a path by stripping the prefix and prepending the sysroot."""
-  sysroot = opts.sysroot
-  prefix = opts.strip_prefix
-  if os.path.isabs(path) and not path.startswith(sysroot):
-    if path.startswith(prefix):
-      path = path[len(prefix):]
-    path = path.lstrip('/')
-    return os.path.join(sysroot, path)
-  else:
-    return path
-
-
-def RewriteLine(line, opts):
-  """Rewrites all the paths in recognized options."""
-  args = line.split()
-  count = len(args)
-  i = 0
-  while i < count:
-    for prefix in REWRITE_PREFIX:
-      # The option can be either in the form "-I /path/to/dir" or
-      # "-I/path/to/dir" so handle both.
-      if args[i] == prefix:
-        i += 1
-        try:
-          args[i] = RewritePath(args[i], opts)
-        except IndexError:
-          sys.stderr.write('Missing argument following %s\n' % prefix)
-          break
-      elif args[i].startswith(prefix):
-        args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
-    i += 1
-
-  return ' '.join(args)
-
-
-def main(argv):
-  parser = optparse.OptionParser()
-  parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
-  parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
-  opts, args = parser.parse_args(argv[1:])
-
-  for line in sys.stdin.readlines():
-    line = RewriteLine(line.strip(), opts)
-    print line
-  return 0
-
-
-if __name__ == '__main__':
-  sys.exit(main(sys.argv))
deleted file mode 100644
--- a/media/webrtc/trunk/build/linux/system.gyp
+++ /dev/null
@@ -1,637 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'variables': {
-    'conditions': [
-      ['sysroot!=""', {
-        'pkg-config': './pkg-config-wrapper "<(sysroot)" "<(target_arch)"',
-      }, {
-        'pkg-config': 'pkg-config'
-      }]
-    ],
-  },
-  'conditions': [
-    [ 'os_posix==1 and OS!="mac"', {
-      'variables': {
-        # We use our own copy of libssl3, although we still need to link against
-        # the rest of NSS.
-        'use_system_ssl%': 0,
-      },
-    }, {
-      'variables': {
-        'use_system_ssl%': 1,
-      },
-    }],
-    [ 'chromeos==0', {
-      # Hide GTK and related dependencies for Chrome OS, so they won't get
-      # added back to Chrome OS. Don't try to use GTK on Chrome OS.
-      'targets': [
-        {
-          'target_name': 'gtk',
-          'type': 'none',
-          'toolsets': ['host', 'target'],
-          'variables': {
-            # gtk requires gmodule, but it does not list it as a dependency
-            # in some misconfigured systems.
-            'gtk_packages': 'gmodule-2.0 gtk+-2.0 gthread-2.0',
-          },
-          'conditions': [
-            ['_toolset=="target"', {
-              'direct_dependent_settings': {
-                'cflags': [
-                  '<!@(<(pkg-config) --cflags <(gtk_packages))',
-                ],
-              },
-              'link_settings': {
-                'ldflags': [
-                  '<!@(<(pkg-config) --libs-only-L --libs-only-other <(gtk_packages))',
-                ],
-                'libraries': [
-                  '<!@(<(pkg-config) --libs-only-l <(gtk_packages))',
-                ],
-              },
-            }, {
-              'direct_dependent_settings': {
-                'cflags': [
-                  '<!@(pkg-config --cflags <(gtk_packages))',
-                ],
-              },
-              'link_settings': {
-                'ldflags': [
-                  '<!@(pkg-config --libs-only-L --libs-only-other <(gtk_packages))',
-                ],
-                'libraries': [
-                  '<!@(pkg-config --libs-only-l <(gtk_packages))',
-                ],
-              },
-            }],
-          ],
-        },
-        {
-          'target_name': 'gtkprint',
-          'type': 'none',
-          'conditions': [
-            ['_toolset=="target"', {
-              'direct_dependent_settings': {
-                'cflags': [
-                  '<!@(<(pkg-config) --cflags gtk+-unix-print-2.0)',
-                ],
-              },
-              'link_settings': {
-                'ldflags': [
-                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gtk+-unix-print-2.0)',
-                ],
-                'libraries': [
-                  '<!@(<(pkg-config) --libs-only-l gtk+-unix-print-2.0)',
-                ],
-              },
-            }],
-          ],
-        },
-        {
-          'target_name': 'gdk',
-          'type': 'none',
-          'conditions': [
-            ['_toolset=="target"', {
-              'direct_dependent_settings': {
-                'cflags': [
-                  '<!@(<(pkg-config) --cflags gdk-2.0)',
-                ],
-              },
-              'link_settings': {
-                'ldflags': [
-                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gdk-2.0)',
-                ],
-                'libraries': [
-                  '<!@(<(pkg-config) --libs-only-l gdk-2.0)',
-                ],
-              },
-            }],
-          ],
-        },
-      ],  # targets
-    }, {  # chromeos==1
-      'targets': [
-        {
-          # TODO(satorux): Remove this once dbus-glib clients are gone.
-          'target_name': 'dbus-glib',
-          'type': 'none',
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags dbus-glib-1)',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other dbus-glib-1)',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l dbus-glib-1)',
-            ],
-          },
-        },
-      ],
-    }]
-  ],  # conditions
-  'targets': [
-    {
-      'target_name': 'ssl',
-      'type': 'none',
-      'conditions': [
-        ['_toolset=="target"', {
-          'conditions': [
-            ['use_openssl==1', {
-              'dependencies': [
-                '../../third_party/openssl/openssl.gyp:openssl',
-              ],
-            }],
-            ['use_openssl==0 and use_system_ssl==0', {
-              'dependencies': [
-                '../../net/third_party/nss/ssl.gyp:libssl',
-                '../../third_party/zlib/zlib.gyp:zlib',
-              ],
-              'direct_dependent_settings': {
-                'include_dirs+': [
-                  # We need for our local copies of the libssl3 headers to come
-                  # before other includes, as we are shadowing system headers.
-                  '<(DEPTH)/net/third_party/nss/ssl',
-                ],
-                'cflags': [
-                  '<!@(<(pkg-config) --cflags nss)',
-                ],
-              },
-              'link_settings': {
-                'ldflags': [
-                  '<!@(<(pkg-config) --libs-only-L --libs-only-other nss)',
-                ],
-                'libraries': [
-                  '<!@(<(pkg-config) --libs-only-l nss | sed -e "s/-lssl3//")',
-                ],
-              },
-            }],
-            ['use_openssl==0 and use_system_ssl==1', {
-              'direct_dependent_settings': {
-                'cflags': [
-                  '<!@(<(pkg-config) --cflags nss)',
-                ],
-                'defines': [
-                  'USE_SYSTEM_SSL',
-                ],
-              },
-              'link_settings': {
-                'ldflags': [
-                  '<!@(<(pkg-config) --libs-only-L --libs-only-other nss)',
-                ],
-                'libraries': [
-                  '<!@(<(pkg-config) --libs-only-l nss)',
-                ],
-              },
-            }],
-          ]
-        }],
-      ],
-    },
-    {
-      'target_name': 'freetype2',
-      'type': 'none',
-      'conditions': [
-        ['_toolset=="target"', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags freetype2)',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other freetype2)',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l freetype2)',
-            ],
-          },
-        }],
-      ],
-    },
-    {
-      'target_name': 'fontconfig',
-      'type': 'none',
-      'conditions': [
-        ['_toolset=="target"', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags fontconfig)',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other fontconfig)',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l fontconfig)',
-            ],
-          },
-        }],
-      ],
-    },
-    {
-      'target_name': 'gconf',
-      'type': 'none',
-      'conditions': [
-        ['use_gconf==1 and _toolset=="target"', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags gconf-2.0)',
-            ],
-            'defines': [
-              'USE_GCONF',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other gconf-2.0)',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l gconf-2.0)',
-            ],
-          },
-        }],
-      ],
-    },
-    {
-      'target_name': 'gio',
-      'type': 'none',
-      'conditions': [
-        ['use_gio==1 and _toolset=="target"', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags gio-2.0)',
-            ],
-            'defines': [
-              'USE_GIO',
-            ],
-            'conditions': [
-              ['linux_link_gsettings==0', {
-                'defines': ['DLOPEN_GSETTINGS'],
-              }],
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other gio-2.0)',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l gio-2.0)',
-            ],
-            'conditions': [
-              ['linux_link_gsettings==0 and OS=="linux"', {
-                'libraries': [
-                  '-ldl',
-                ],
-              }],
-            ],
-          },
-        }],
-      ],
-    },
-    {
-      'target_name': 'x11',
-      'type': 'none',
-      'toolsets': ['host', 'target'],
-      'conditions': [
-        ['_toolset=="target"', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags x11)',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other x11 xi)',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l x11 xi)',
-            ],
-          },
-        }, {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(pkg-config --cflags x11)',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(pkg-config --libs-only-L --libs-only-other x11 xi)',
-            ],
-            'libraries': [
-              '<!@(pkg-config --libs-only-l x11 xi)',
-            ],
-          },
-        }],
-      ],
-    },
-    {
-      'target_name': 'xext',
-      'type': 'none',
-      'conditions': [
-        ['_toolset=="target"', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags xext)',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other xext)',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l xext)',
-            ],
-          },
-        }],
-      ],
-    },
-    {
-      'target_name': 'xfixes',
-      'type': 'none',
-      'conditions': [
-        ['_toolset=="target"', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags xfixes)',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other xfixes)',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l xfixes)',
-            ],
-          },
-        }],
-      ],
-    },
-    {
-      'target_name': 'libgcrypt',
-      'type': 'none',
-      'conditions': [
-        ['_toolset=="target" and use_cups==1', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(libgcrypt-config --cflags)',
-            ],
-          },
-          'link_settings': {
-            'libraries': [
-              '<!@(libgcrypt-config --libs)',
-            ],
-          },
-        }],
-      ],
-    },
-    {
-      'target_name': 'selinux',
-      'type': 'none',
-      'conditions': [
-        ['_toolset=="target"', {
-          'link_settings': {
-            'libraries': [
-              '-lselinux',
-            ],
-          },
-        }],
-      ],
-    },
-    {
-      'target_name': 'gnome_keyring',
-      'type': 'none',
-      'conditions': [
-        ['use_gnome_keyring==1', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags gnome-keyring-1)',
-            ],
-            'defines': [
-              'USE_GNOME_KEYRING',
-            ],
-            'conditions': [
-              ['linux_link_gnome_keyring==0', {
-                'defines': ['DLOPEN_GNOME_KEYRING'],
-              }],
-            ],
-          },
-          'conditions': [
-            ['linux_link_gnome_keyring!=0', {
-              'link_settings': {
-                'ldflags': [
-                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
-                ],
-                'libraries': [
-                  '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
-                ],
-              },
-            }, {
-              'conditions': [
-                ['OS=="linux"', {
-                 'link_settings': {
-                   'libraries': [
-                     '-ldl',
-                   ],
-                 },
-                }],
-              ],
-            }],
-          ],
-        }],
-      ],
-    },
-    {
-      # The unit tests use a few convenience functions from the GNOME
-      # Keyring library directly. We ignore linux_link_gnome_keyring and
-      # link directly in this version of the target to allow this.
-      # *** Do not use this target in the main binary! ***
-      'target_name': 'gnome_keyring_direct',
-      'type': 'none',
-      'conditions': [
-        ['use_gnome_keyring==1', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags gnome-keyring-1)',
-            ],
-            'defines': [
-              'USE_GNOME_KEYRING',
-            ],
-            'conditions': [
-              ['linux_link_gnome_keyring==0', {
-                'defines': ['DLOPEN_GNOME_KEYRING'],
-              }],
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
-            ],
-          },
-        }],
-      ],
-    },
-    {
-      'target_name': 'dbus',
-      'type': 'none',
-      'direct_dependent_settings': {
-        'cflags': [
-          '<!@(<(pkg-config) --cflags dbus-1)',
-        ],
-      },
-      'link_settings': {
-        'ldflags': [
-          '<!@(<(pkg-config) --libs-only-L --libs-only-other dbus-1)',
-        ],
-        'libraries': [
-          '<!@(<(pkg-config) --libs-only-l dbus-1)',
-        ],
-      },
-    },
-    {
-      'target_name': 'glib',
-      'type': 'none',
-      'toolsets': ['host', 'target'],
-      'variables': {
-        'glib_packages': 'glib-2.0 gmodule-2.0 gobject-2.0 gthread-2.0',
-      },
-      'conditions': [
-        ['_toolset=="target"', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags <(glib_packages))',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other <(glib_packages))',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l <(glib_packages))',
-            ],
-          },
-        }, {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(pkg-config --cflags <(glib_packages))',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(pkg-config --libs-only-L --libs-only-other <(glib_packages))',
-            ],
-            'libraries': [
-              '<!@(pkg-config --libs-only-l <(glib_packages))',
-            ],
-          },
-        }],
-        ['chromeos==1', {
-          'link_settings': {
-            'libraries': [ '-lXtst' ]
-          }
-        }],
-      ],
-    },
-    {
-      'target_name': 'pangocairo',
-      'type': 'none',
-      'toolsets': ['host', 'target'],
-      'conditions': [
-        ['_toolset=="target"', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags pangocairo)',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other pangocairo)',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l pangocairo)',
-            ],
-          },
-        }, {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(pkg-config --cflags pangocairo)',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(pkg-config --libs-only-L --libs-only-other pangocairo)',
-            ],
-            'libraries': [
-              '<!@(pkg-config --libs-only-l pangocairo)',
-            ],
-          },
-        }],
-      ],
-    },
-    {
-      'target_name': 'libresolv',
-      'type': 'none',
-      'link_settings': {
-        'libraries': [
-          '-lresolv',
-        ],
-      },
-    },
-    {
-      'target_name': 'ibus',
-      'type': 'none',
-      'conditions': [
-        ['use_ibus==1', {
-          'variables': {
-            'ibus_min_version': '1.3.99.20110425',
-          },
-          'direct_dependent_settings': {
-            'defines': ['HAVE_IBUS=1'],
-            'cflags': [
-              '<!@(<(pkg-config) --cflags "ibus-1.0 >= <(ibus_min_version)")',
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other "ibus-1.0 >= <(ibus_min_version)")',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l "ibus-1.0 >= <(ibus_min_version)")',
-            ],
-          },
-        }],
-      ],
-    },
-    {
-      'target_name': 'udev',
-      'type': 'none',
-      'conditions': [
-        # libudev is not available on *BSD
-        ['_toolset=="target" and os_bsd!=1', {
-          'direct_dependent_settings': {
-            'cflags': [
-              '<!@(<(pkg-config) --cflags libudev)'
-            ],
-          },
-          'link_settings': {
-            'ldflags': [
-              '<!@(<(pkg-config) --libs-only-L --libs-only-other libudev)',
-            ],
-            'libraries': [
-              '<!@(<(pkg-config) --libs-only-l libudev)',
-            ],
-          },
-        }],
-      ],
-    },
-  ],
-}
--- a/media/webrtc/trunk/build/mac/OWNERS
+++ b/media/webrtc/trunk/build/mac/OWNERS
@@ -1,2 +1,4 @@
 mark@chromium.org
-thomasvl@chromium.org
+rsesek@chromium.org
+
+# COMPONENT: Build
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/edit_xibs.sh
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is a convenience to run GYP for /src/chrome/chrome_nibs.gyp
+# with the Xcode generator (as you likely use ninja). Documentation:
+#   http://dev.chromium.org/developers/design-documents/mac-xib-files
+
+set -e
+
+RELSRC=$(dirname "$0")/../..
+SRC=$(cd "$RELSRC" && pwd)
+export PYTHONPATH="$PYTHONPATH:$SRC/build"
+export GYP_GENERATORS=xcode
+"$SRC/tools/gyp/gyp" -I"$SRC/build/common.gypi" "$SRC/chrome/chrome_nibs.gyp"
+echo "You can now edit XIB files in Xcode using:"
+echo "  $SRC/chrome/chrome_nibs.xcodeproj"
--- a/media/webrtc/trunk/build/mac/find_sdk.py
+++ b/media/webrtc/trunk/build/mac/find_sdk.py
@@ -1,83 +1,90 @@
 #!/usr/bin/env python
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+"""Prints the lowest locally available SDK version greater than or equal to a
+given minimum sdk version to standard output. If --developer_dir is passed, then
+the script will use the Xcode toolchain located at DEVELOPER_DIR.
+
+Usage:
+  python find_sdk.py [--developer_dir DEVELOPER_DIR] 10.6  # Ignores SDKs < 10.6
+"""
+
 import os
 import re
 import subprocess
 import sys
 
-"""Prints the lowest locally available SDK version greater than or equal to a
-given minimum sdk version to standard output.
-
-Usage:
-  python find_sdk.py 10.6  # Ignores SDKs < 10.6
-"""
-
 from optparse import OptionParser
 
 
 def parse_version(version_str):
   """'10.6' => [10, 6]"""
   return map(int, re.findall(r'(\d+)', version_str))
 
 
 def main():
   parser = OptionParser()
   parser.add_option("--verify",
                     action="store_true", dest="verify", default=False,
                     help="return the sdk argument and warn if it doesn't exist")
   parser.add_option("--sdk_path",
                     action="store", type="string", dest="sdk_path", default="",
                     help="user-specified SDK path; bypasses verification")
-  (options, args) = parser.parse_args()
+  parser.add_option("--print_sdk_path",
+                    action="store_true", dest="print_sdk_path", default=False,
+                    help="Additionally print the path of the SDK (appears first).")
+  parser.add_option("--developer_dir", help='Path to Xcode.')
+  options, args = parser.parse_args()
+  if len(args) != 1:
+    parser.error('Please specify a minimum SDK version')
   min_sdk_version = args[0]
 
-  if sys.platform == 'darwin':
-    job = subprocess.Popen(['xcode-select', '-print-path'],
-                           stdout=subprocess.PIPE,
-                           stderr=subprocess.STDOUT)
-    out, err = job.communicate()
-    if job.returncode != 0:
-      print >>sys.stderr, out
-      print >>sys.stderr, err
-      raise Exception(('Error %d running xcode-select, you might have to run '
-                       '|sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer| '
-                       'if you are using Xcode 4.') % job.returncode)
-    # The Developer folder moved in Xcode 4.3.
-    xcode43_sdk_path = os.path.join(
+  if options.developer_dir:
+    os.environ['DEVELOPER_DIR'] = options.developer_dir
+
+  job = subprocess.Popen(['xcode-select', '-print-path'],
+                         stdout=subprocess.PIPE,
+                         stderr=subprocess.STDOUT)
+  out, err = job.communicate()
+  if job.returncode != 0:
+    print >> sys.stderr, out
+    print >> sys.stderr, err
+    raise Exception('Error %d running xcode-select' % job.returncode)
+  sdk_dir = os.path.join(
       out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs')
-    if os.path.isdir(xcode43_sdk_path):
-      sdk_dir = xcode43_sdk_path
-    else:
-      sdk_dir = os.path.join(out.rstrip(), 'SDKs')
-    sdks = [re.findall('^MacOSX(10\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)]
-    sdks = [s[0] for s in sdks if s]  # [['10.5'], ['10.6']] => ['10.5', '10.6']
-    sdks = [s for s in sdks  # ['10.5', '10.6'] => ['10.6']
-            if parse_version(s) >= parse_version(min_sdk_version)]
-    if not sdks:
-      raise Exception('No %s+ SDK found' % min_sdk_version)
-    best_sdk = sorted(sdks, key=parse_version)[0]
-  else:
-    best_sdk = ""
+  sdks = [re.findall('^MacOSX(10\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)]
+  sdks = [s[0] for s in sdks if s]  # [['10.5'], ['10.6']] => ['10.5', '10.6']
+  sdks = [s for s in sdks  # ['10.5', '10.6'] => ['10.6']
+          if parse_version(s) >= parse_version(min_sdk_version)]
+  if not sdks:
+    raise Exception('No %s+ SDK found' % min_sdk_version)
+  best_sdk = sorted(sdks, key=parse_version)[0]
 
   if options.verify and best_sdk != min_sdk_version and not options.sdk_path:
-    print >>sys.stderr, ''
-    print >>sys.stderr, '                                           vvvvvvv'
-    print >>sys.stderr, ''
-    print >>sys.stderr, \
+    print >> sys.stderr, ''
+    print >> sys.stderr, '                                           vvvvvvv'
+    print >> sys.stderr, ''
+    print >> sys.stderr, \
         'This build requires the %s SDK, but it was not found on your system.' \
         % min_sdk_version
-    print >>sys.stderr, \
+    print >> sys.stderr, \
         'Either install it, or explicitly set mac_sdk in your GYP_DEFINES.'
-    print >>sys.stderr, ''
-    print >>sys.stderr, '                                           ^^^^^^^'
-    print >>sys.stderr, ''
-    return min_sdk_version
+    print >> sys.stderr, ''
+    print >> sys.stderr, '                                           ^^^^^^^'
+    print >> sys.stderr, ''
+    sys.exit(1)
+
+  if options.print_sdk_path:
+    print subprocess.check_output(
+        ['xcrun', '-sdk', 'macosx' + best_sdk, '--show-sdk-path']).strip()
 
   return best_sdk
 
 
 if __name__ == '__main__':
+  if sys.platform != 'darwin':
+    raise Exception("This script only runs on Mac")
   print main()
+  sys.exit(0)
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/should_use_hermetic_xcode.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints "1" if Chrome targets should be built with hermetic xcode. Otherwise
+prints "0".
+
+Usage:
+  python should_use_hermetic_xcode.py <target_os>
+"""
+
+import os
+import sys
+
+
+def _IsCorpMachine():
+  return os.path.isdir('/Library/GoogleCorpSupport/')
+
+
+def main():
+  allow_corp = sys.argv[1] == 'mac' and _IsCorpMachine()
+  if os.environ.get('FORCE_MAC_TOOLCHAIN') or allow_corp:
+    return "1"
+  else:
+    return "0"
+
+
+if __name__ == '__main__':
+  print main()
+  sys.exit(0)
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/mac/tweak_info_plist.gni
@@ -0,0 +1,83 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Template to run the tweak_info_plist.py script on a plist.
+#
+# Arguments:
+#
+#     info_plist:
+#         (optional), string, the plist to tweak.
+#
+#     info_plists:
+#         (optional), list of string, the plist files to merge and tweak.
+#
+#     args:
+#         (optional), list of string, the arguments to pass to the
+#         tweak_info_plist.py script.
+#
+# Callers should use get_target_outputs() to get the output name. One of
+# info_plist or info_plists must be specified.
+template("tweak_info_plist") {
+  _output_name = "$target_gen_dir/${target_name}_tweaked.plist"
+
+  if (defined(invoker.info_plists)) {
+    assert(!defined(invoker.info_plist),
+           "Cannot have both info_plist and info_plists for $target_name")
+
+    _source_name = "$target_gen_dir/${target_name}_merged.plist"
+    _deps = [ ":" + target_name + "_merge_plist" ]
+
+    action(target_name + "_merge_plist") {
+      forward_variables_from(invoker, [ "testonly" ])
+      script = "//build/config/mac/plist_util.py"
+      sources = invoker.info_plists
+      outputs = [
+        _source_name,
+      ]
+      args = [
+               "merge",
+               "-f=xml1",
+               "-o=" + rebase_path(_source_name, root_build_dir),
+             ] + rebase_path(invoker.info_plists, root_build_dir)
+    }
+  } else {
+    assert(defined(invoker.info_plist),
+           "The info_plist must be specified in $target_name")
+
+    _source_name = invoker.info_plist
+    _deps = []
+  }
+
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "args",
+                             "testonly",
+                           ])
+    script = "//build/mac/tweak_info_plist.py"
+    inputs = [
+      script,
+      "//build/util/version.py",
+      "//build/util/LASTCHANGE",
+      "//chrome/VERSION",
+    ]
+    sources = [
+      _source_name,
+    ]
+    outputs = [
+      _output_name,
+    ]
+    if (!defined(args)) {
+      args = []
+    }
+    args += [
+      "--plist",
+      rebase_path(_source_name, root_build_dir),
+      "--output",
+      rebase_path(_output_name, root_build_dir),
+      "--platform=$current_os",
+    ]
+    deps = _deps
+  }
+}
--- a/media/webrtc/trunk/build/mac/tweak_info_plist.py
+++ b/media/webrtc/trunk/build/mac/tweak_info_plist.py
@@ -17,27 +17,29 @@
 #
 # So, we work around all of this by making a script build phase that will run
 # during the app build, and simply update the info.plist in place.  This way
 # by the time the app target is done, the info.plist is correct.
 #
 
 import optparse
 import os
-from os import environ as env
 import plistlib
 import re
 import subprocess
 import sys
 import tempfile
 
-TOP = os.path.join(env['SRCROOT'], '..')
+TOP = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
+
 
-sys.path.insert(0, os.path.join(TOP, "build/util"))
-import lastchange
+def _ConvertPlist(source_plist, output_plist, fmt):
+  """Convert |source_plist| to |fmt| and save as |output_plist|."""
+  return subprocess.call(
+      ['plutil', '-convert', fmt, '-o', output_plist, source_plist])
 
 
 def _GetOutput(args):
   """Runs a subprocess and waits for termination. Returns (stdout, returncode)
   of the process. stderr is attached to the parent."""
   proc = subprocess.Popen(args, stdout=subprocess.PIPE)
   (stdout, stderr) = proc.communicate()
   return (stdout, proc.returncode)
@@ -60,129 +62,115 @@ def _RemoveKeys(plist, *keys):
   """Removes a varargs of keys from the plist."""
   for key in keys:
     try:
       del plist[key]
     except KeyError:
       pass
 
 
-def _AddVersionKeys(plist):
+def _ApplyVersionOverrides(version, keys, overrides, separator='.'):
+  """Applies version overrides.
+
+  Given a |version| string as "a.b.c.d" (assuming a default separator) with
+  version components named by |keys| then overrides any value that is present
+  in |overrides|.
+
+  >>> _ApplyVersionOverrides('a.b', ['major', 'minor'], {'minor': 'd'})
+  'a.d'
+  """
+  if not overrides:
+    return version
+  version_values = version.split(separator)
+  for i, (key, value) in enumerate(zip(keys, version_values)):
+    if key in overrides:
+      version_values[i] = overrides[key]
+  return separator.join(version_values)
+
+
+def _GetVersion(version_format, values, overrides=None):
+  """Generates a version number according to |version_format| using the values
+  from |values| or |overrides| if given."""
+  result = version_format
+  for key in values:
+    if overrides and key in overrides:
+      value = overrides[key]
+    else:
+      value = values[key]
+    result = result.replace('@%s@' % key, value)
+  return result
+
+
+def _AddVersionKeys(
+    plist, version_format_for_key, version=None, overrides=None):
   """Adds the product version number into the plist. Returns True on success and
   False on error. The error will be printed to stderr."""
-  # Pull in the Chrome version number.
-  VERSION_TOOL = os.path.join(TOP, 'chrome/tools/build/version.py')
-  VERSION_FILE = os.path.join(TOP, 'chrome/VERSION')
+  if not version:
+    # Pull in the Chrome version number.
+    VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+    VERSION_FILE = os.path.join(TOP, 'chrome/VERSION')
+    (stdout, retval) = _GetOutput([
+        VERSION_TOOL, '-f', VERSION_FILE,
+        '-t', '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'])
 
-  (stdout, retval1) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t',
-                                  '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'])
-  full_version = stdout.rstrip()
-
-  (stdout, retval2) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t',
-                                  '@BUILD@.@PATCH@'])
-  bundle_version = stdout.rstrip()
+    # If the command finished with a non-zero return code, then report the
+    # error up.
+    if retval != 0:
+      return False
 
-  # If either of the two version commands finished with non-zero returncode,
-  # report the error up.
-  if retval1 or retval2:
-    return False
-
-  # Add public version info so "Get Info" works.
-  plist['CFBundleShortVersionString'] = full_version
+    version = stdout.strip()
 
-  # Honor the 429496.72.95 limit.  The maximum comes from splitting 2^32 - 1
-  # into  6, 2, 2 digits.  The limitation was present in Tiger, but it could
-  # have been fixed in later OS release, but hasn't been tested (it's easy
-  # enough to find out with "lsregister -dump).
-  # http://lists.apple.com/archives/carbon-dev/2006/Jun/msg00139.html
-  # BUILD will always be an increasing value, so BUILD_PATH gives us something
-  # unique that meetings what LS wants.
-  plist['CFBundleVersion'] = bundle_version
+  # Parse the given version number, which should be in MAJOR.MINOR.BUILD.PATCH
+  # format (where each value is a number). Note that str.isdigit() returns
+  # True if the string is composed only of digits (and thus matches \d+).
+  groups = version.split('.')
+  if len(groups) != 4 or not all(element.isdigit() for element in groups):
+    print >>sys.stderr, 'Invalid version string specified: "%s"' % version
+    return False
+  values = dict(zip(('MAJOR', 'MINOR', 'BUILD', 'PATCH'), groups))
+
+  for key in version_format_for_key:
+    plist[key] = _GetVersion(version_format_for_key[key], values, overrides)
 
   # Return with no error.
   return True
 
 
 def _DoSCMKeys(plist, add_keys):
   """Adds the SCM information, visible in about:version, to property list. If
   |add_keys| is True, it will insert the keys, otherwise it will remove them."""
-  scm_path, scm_revision = None, None
+  scm_revision = None
   if add_keys:
-    version_info = lastchange.FetchVersionInfo(
-        default_lastchange=None, directory=TOP)
-    scm_path, scm_revision = version_info.url, version_info.revision
+    # Pull in the Chrome revision number.
+    VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+    LASTCHANGE_FILE = os.path.join(TOP, 'build/util/LASTCHANGE')
+    (stdout, retval) = _GetOutput([VERSION_TOOL, '-f', LASTCHANGE_FILE, '-t',
+                                  '@LASTCHANGE@'])
+    if retval:
+      return False
+    scm_revision = stdout.rstrip()
 
   # See if the operation failed.
   _RemoveKeys(plist, 'SCMRevision')
   if scm_revision != None:
     plist['SCMRevision'] = scm_revision
   elif add_keys:
     print >>sys.stderr, 'Could not determine SCM revision.  This may be OK.'
 
-  if scm_path != None:
-    plist['SCMPath'] = scm_path
-  else:
-    _RemoveKeys(plist, 'SCMPath')
+  return True
 
 
-def _DoPDFKeys(plist, add_keys):
-  """Adds PDF support to the document types list. If add_keys is True, it will
-  add the type information dictionary. If it is False, it will remove it if
-  present."""
-
-  PDF_FILE_EXTENSION = 'pdf'
-
-  def __AddPDFKeys(sub_plist):
-    """Writes the keys into a sub-dictionary of the plist."""
-    sub_plist['CFBundleTypeExtensions'] = [PDF_FILE_EXTENSION]
-    sub_plist['CFBundleTypeIconFile'] = 'document.icns'
-    sub_plist['CFBundleTypeMIMETypes'] = 'application/pdf'
-    sub_plist['CFBundleTypeName'] = 'PDF Document'
-    sub_plist['CFBundleTypeRole'] = 'Viewer'
-
-  DOCUMENT_TYPES_KEY = 'CFBundleDocumentTypes'
-
-  # First get the list of document types, creating it if necessary.
-  try:
-    extensions = plist[DOCUMENT_TYPES_KEY]
-  except KeyError:
-    # If this plist doesn't have a type dictionary, create one if set to add the
-    # keys. If not, bail.
-    if not add_keys:
-      return
-    extensions = plist[DOCUMENT_TYPES_KEY] = []
-
-  # Loop over each entry in the list, looking for one that handles PDF types.
-  for i, ext in enumerate(extensions):
-    # If an entry for .pdf files is found...
-    if 'CFBundleTypeExtensions' not in ext:
-      continue
-    if PDF_FILE_EXTENSION in ext['CFBundleTypeExtensions']:
-      if add_keys:
-        # Overwrite the existing keys with new ones.
-        __AddPDFKeys(ext)
-      else:
-        # Otherwise, delete the entry entirely.
-        del extensions[i]
-      return
-
-  # No PDF entry exists. If one needs to be added, do so now.
-  if add_keys:
-    pdf_entry = {}
-    __AddPDFKeys(pdf_entry)
-    extensions.append(pdf_entry)
-
-
-def _AddBreakpadKeys(plist, branding):
+def _AddBreakpadKeys(plist, branding, platform):
   """Adds the Breakpad keys. This must be called AFTER _AddVersionKeys() and
   also requires the |branding| argument."""
   plist['BreakpadReportInterval'] = '3600'  # Deliberately a string.
-  plist['BreakpadProduct'] = '%s_Mac' % branding
+  plist['BreakpadProduct'] = '%s_%s' % (branding, platform)
   plist['BreakpadProductDisplay'] = branding
-  plist['BreakpadVersion'] = plist['CFBundleShortVersionString']
+  plist['BreakpadURL'] = 'https://clients2.google.com/cr/report'
+
   # These are both deliberately strings and not boolean.
   plist['BreakpadSendAndExit'] = 'YES'
   plist['BreakpadSkipConfirm'] = 'YES'
 
 
 def _RemoveBreakpadKeys(plist):
   """Removes any set Breakpad keys."""
   _RemoveKeys(plist,
@@ -190,104 +178,182 @@ def _RemoveBreakpadKeys(plist):
       'BreakpadReportInterval',
       'BreakpadProduct',
       'BreakpadProductDisplay',
       'BreakpadVersion',
       'BreakpadSendAndExit',
       'BreakpadSkipConfirm')
 
 
+def _TagSuffixes():
+  # Keep this list sorted in the order that tag suffix components are to
+  # appear in a tag value. That is to say, it should be sorted per ASCII.
+  components = ('full',)
+  assert tuple(sorted(components)) == components
+
+  components_len = len(components)
+  combinations = 1 << components_len
+  tag_suffixes = []
+  for combination in xrange(0, combinations):
+    tag_suffix = ''
+    for component_index in xrange(0, components_len):
+      if combination & (1 << component_index):
+        tag_suffix += '-' + components[component_index]
+    tag_suffixes.append(tag_suffix)
+  return tag_suffixes
+
+
 def _AddKeystoneKeys(plist, bundle_identifier):
   """Adds the Keystone keys. This must be called AFTER _AddVersionKeys() and
   also requires the |bundle_identifier| argument (com.example.product)."""
   plist['KSVersion'] = plist['CFBundleShortVersionString']
   plist['KSProductID'] = bundle_identifier
   plist['KSUpdateURL'] = 'https://tools.google.com/service/update2'
 
+  _RemoveKeys(plist, 'KSChannelID')
+  for tag_suffix in _TagSuffixes():
+    if tag_suffix:
+      plist['KSChannelID' + tag_suffix] = tag_suffix
+
 
 def _RemoveKeystoneKeys(plist):
   """Removes any set Keystone keys."""
   _RemoveKeys(plist,
       'KSVersion',
       'KSProductID',
       'KSUpdateURL')
 
+  tag_keys = []
+  for tag_suffix in _TagSuffixes():
+    tag_keys.append('KSChannelID' + tag_suffix)
+  _RemoveKeys(plist, *tag_keys)
+
 
 def Main(argv):
   parser = optparse.OptionParser('%prog [options]')
+  parser.add_option('--plist', dest='plist_path', action='store',
+      type='string', default=None, help='The path of the plist to tweak.')
+  parser.add_option('--output', dest='plist_output', action='store',
+      type='string', default=None, help='If specified, the path to output ' + \
+      'the tweaked plist, rather than overwriting the input.')
   parser.add_option('--breakpad', dest='use_breakpad', action='store',
       type='int', default=False, help='Enable Breakpad [1 or 0]')
-  parser.add_option('--breakpad_uploads', dest='breakpad_uploads',
-      action='store', type='int', default=False,
-      help='Enable Breakpad\'s uploading of crash dumps [1 or 0]')
   parser.add_option('--keystone', dest='use_keystone', action='store',
       type='int', default=False, help='Enable Keystone [1 or 0]')
   parser.add_option('--scm', dest='add_scm_info', action='store', type='int',
       default=True, help='Add SCM metadata [1 or 0]')
-  parser.add_option('--pdf', dest='add_pdf_support', action='store', type='int',
-      default=False, help='Add PDF file handler support [1 or 0]')
   parser.add_option('--branding', dest='branding', action='store',
       type='string', default=None, help='The branding of the binary')
   parser.add_option('--bundle_id', dest='bundle_identifier',
       action='store', type='string', default=None,
       help='The bundle id of the binary')
+  parser.add_option('--platform', choices=('ios', 'mac'), default='mac',
+      help='The target platform of the bundle')
+  parser.add_option('--version-overrides', action='append',
+      help='Key-value pair to override specific component of version '
+           'like key=value (can be passed multiple time to configure '
+           'more than one override)')
+  parser.add_option('--format', choices=('binary1', 'xml1', 'json'),
+      default='xml1', help='Format to use when writing property list '
+          '(default: %(default)s)')
+  parser.add_option('--version', dest='version', action='store', type='string',
+      default=None, help='The version string [major.minor.build.patch]')
   (options, args) = parser.parse_args(argv)
 
   if len(args) > 0:
     print >>sys.stderr, parser.get_usage()
     return 1
 
-  # Read the plist into its parsed format.
-  DEST_INFO_PLIST = os.path.join(env['TARGET_BUILD_DIR'], env['INFOPLIST_PATH'])
-  plist = plistlib.readPlist(DEST_INFO_PLIST)
+  if not options.plist_path:
+    print >>sys.stderr, 'No --plist specified.'
+    return 1
+
+  # Read the plist into its parsed format. Convert the file to 'xml1' as
+  # plistlib only supports that format in Python 2.7.
+  with tempfile.NamedTemporaryFile() as temp_info_plist:
+    retcode = _ConvertPlist(options.plist_path, temp_info_plist.name, 'xml1')
+    if retcode != 0:
+      return retcode
+    plist = plistlib.readPlist(temp_info_plist.name)
+
+  # Convert overrides.
+  overrides = {}
+  if options.version_overrides:
+    for pair in options.version_overrides:
+      if not '=' in pair:
+        print >>sys.stderr, 'Invalid value for --version-overrides:', pair
+        return 1
+      key, value = pair.split('=', 1)
+      overrides[key] = value
+      if key not in ('MAJOR', 'MINOR', 'BUILD', 'PATCH'):
+        print >>sys.stderr, 'Unsupported key for --version-overrides:', key
+        return 1
+
+  if options.platform == 'mac':
+    version_format_for_key = {
+      # Add public version info so "Get Info" works.
+      'CFBundleShortVersionString': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@',
+
+      # Honor the 429496.72.95 limit.  The maximum comes from splitting 2^32 - 1
+      # into  6, 2, 2 digits.  The limitation was present in Tiger, but it could
+      # have been fixed in a later OS release, but hasn't been tested (it's
+      # easy enough to find out with "lsregister -dump").
+      # http://lists.apple.com/archives/carbon-dev/2006/Jun/msg00139.html
+      # BUILD will always be an increasing value, so BUILD.PATCH gives us
+      # something unique that meets what LS wants.
+      'CFBundleVersion': '@BUILD@.@PATCH@',
+    }
+  else:
+    version_format_for_key = {
+      'CFBundleShortVersionString': '@MAJOR@.@BUILD@.@PATCH@',
+      'CFBundleVersion': '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'
+    }
+
+  if options.use_breakpad:
+    version_format_for_key['BreakpadVersion'] = \
+        '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'
 
   # Insert the product version.
-  if not _AddVersionKeys(plist):
+  if not _AddVersionKeys(
+      plist, version_format_for_key, version=options.version,
+      overrides=overrides):
     return 2
 
   # Add Breakpad if configured to do so.
   if options.use_breakpad:
     if options.branding is None:
       print >>sys.stderr, 'Use of Breakpad requires branding.'
       return 1
-    _AddBreakpadKeys(plist, options.branding)
-    if options.breakpad_uploads:
-      plist['BreakpadURL'] = 'https://clients2.google.com/cr/report'
-    else:
-      # This allows crash dumping to a file without uploading the
-      # dump, for testing purposes.  Breakpad does not recognise
-      # "none" as a special value, but this does stop crash dump
-      # uploading from happening.  We need to specify something
-      # because if "BreakpadURL" is not present, Breakpad will not
-      # register its crash handler and no crash dumping will occur.
-      plist['BreakpadURL'] = 'none'
+    # Map gyp "OS" / gn "target_os" passed via the --platform parameter to
+    # the platform as known by breakpad.
+    platform = {'mac': 'Mac', 'ios': 'iOS'}[options.platform]
+    _AddBreakpadKeys(plist, options.branding, platform)
   else:
     _RemoveBreakpadKeys(plist)
 
-  # Only add Keystone in Release builds.
-  if options.use_keystone and env['CONFIGURATION'] == 'Release':
+  # Add Keystone if configured to do so.
+  if options.use_keystone:
     if options.bundle_identifier is None:
       print >>sys.stderr, 'Use of Keystone requires the bundle id.'
       return 1
     _AddKeystoneKeys(plist, options.bundle_identifier)
   else:
     _RemoveKeystoneKeys(plist)
 
   # Adds or removes any SCM keys.
-  _DoSCMKeys(plist, options.add_scm_info)
+  if not _DoSCMKeys(plist, options.add_scm_info):
+    return 3
 
-  # Adds or removes the PDF file handler entry.
-  _DoPDFKeys(plist, options.add_pdf_support)
+  output_path = options.plist_path
+  if options.plist_output is not None:
+    output_path = options.plist_output
 
   # Now that all keys have been mutated, rewrite the file.
-  temp_info_plist = tempfile.NamedTemporaryFile()
-  plistlib.writePlist(plist, temp_info_plist.name)
+  with tempfile.NamedTemporaryFile() as temp_info_plist:
+    plistlib.writePlist(plist, temp_info_plist.name)
 
-  # Info.plist will work perfectly well in any plist format, but traditionally
-  # applications use xml1 for this, so convert it to ensure that it's valid.
-  proc = subprocess.Popen(['plutil', '-convert', 'xml1', '-o', DEST_INFO_PLIST,
-                           temp_info_plist.name])
-  proc.wait()
-  return proc.returncode
+    # Convert Info.plist to the format requested by the --format flag. Any
+    # format would work on Mac, but iOS requires a specific format.
+    return _ConvertPlist(temp_info_plist.name, output_path, options.format)
 
 
 if __name__ == '__main__':
   sys.exit(Main(sys.argv[1:]))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/mac_toolchain.py
@@ -0,0 +1,251 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+If should_use_hermetic_xcode.py emits "1", and the current toolchain is out of
+date:
+  * Downloads the hermetic mac toolchain
+    * Requires gsutil to be configured.
+  * Accepts the license.
+    * If xcode-select and xcodebuild are not passwordless in sudoers, requires
+      user interaction.
+"""
+
+import os
+import plistlib
+import shutil
+import subprocess
+import sys
+import tarfile
+import time
+import tempfile
+import urllib2
+
+# This can be changed after running /build/package_mac_toolchain.py.
+MAC_TOOLCHAIN_VERSION = '5B1008'
+MAC_TOOLCHAIN_SUB_REVISION = 3
+MAC_TOOLCHAIN_VERSION = '%s-%s' % (MAC_TOOLCHAIN_VERSION,
+                                   MAC_TOOLCHAIN_SUB_REVISION)
+IOS_TOOLCHAIN_VERSION = '8C1002'
+IOS_TOOLCHAIN_SUB_REVISION = 1
+IOS_TOOLCHAIN_VERSION = '%s-%s' % (IOS_TOOLCHAIN_VERSION,
+                                   IOS_TOOLCHAIN_SUB_REVISION)
+
+# Absolute path to src/ directory.
+REPO_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+# Absolute path to a file with gclient solutions.
+GCLIENT_CONFIG = os.path.join(os.path.dirname(REPO_ROOT), '.gclient')
+
+BASE_DIR = os.path.abspath(os.path.dirname(__file__))
+TOOLCHAIN_BUILD_DIR = os.path.join(BASE_DIR, '%s_files', 'Xcode.app')
+STAMP_FILE = os.path.join(BASE_DIR, '%s_files', 'toolchain_build_revision')
+TOOLCHAIN_URL = 'gs://chrome-mac-sdk/'
+
+def GetPlatforms():
+  default_target_os = ["mac"]  # Used when .gclient is absent or has no target_os.
+  try:
+    env = {}
+    execfile(GCLIENT_CONFIG, env, env)
+    return env.get('target_os', default_target_os)
+  except Exception:  # Missing/unreadable .gclient: fall back to the default.
+    pass
+  return default_target_os
+
+
+def ReadStampFile(target_os):
+  """Return the contents of the stamp file, or '' if it doesn't exist."""
+  try:
+    with open(STAMP_FILE % target_os, 'r') as f:
+      return f.read().rstrip()
+  except IOError:
+    return ''
+
+
+def WriteStampFile(target_os, s):
+  """Write s to the stamp file."""
+  EnsureDirExists(os.path.dirname(STAMP_FILE % target_os))
+  with open(STAMP_FILE % target_os, 'w') as f:
+    f.write(s)
+    f.write('\n')
+
+
+def EnsureDirExists(path):
+  if not os.path.exists(path):
+    os.makedirs(path)
+
+
+def DownloadAndUnpack(url, output_dir):
+  """Decompresses |url| into a cleared |output_dir|."""
+  temp_name = tempfile.mktemp(prefix='mac_toolchain')  # NOTE(review): mktemp is race-prone; mkstemp would be safer.
+  try:
+    print 'Downloading new toolchain.'
+    subprocess.check_call(['gsutil.py', 'cp', url, temp_name])
+    if os.path.exists(output_dir):
+      print 'Deleting old toolchain.'
+      shutil.rmtree(output_dir)
+    EnsureDirExists(output_dir)
+    print 'Unpacking new toolchain.'
+    tarfile.open(mode='r:gz', name=temp_name).extractall(path=output_dir)
+  finally:
+    if os.path.exists(temp_name):
+      os.unlink(temp_name)
+
+
+def CanAccessToolchainBucket():
+  """Checks whether the user has access to |TOOLCHAIN_URL|."""
+  proc = subprocess.Popen(['gsutil.py', 'ls', TOOLCHAIN_URL],
+                           stdout=subprocess.PIPE)
+  proc.communicate()
+  return proc.returncode == 0
+
+
+def LoadPlist(path):
+  """Loads Plist at |path| and returns it as a dictionary."""
+  fd, name = tempfile.mkstemp()
+  try:
+    subprocess.check_call(['plutil', '-convert', 'xml1', '-o', name, path])
+    with os.fdopen(fd, 'r') as f:
+      return plistlib.readPlist(f)
+  finally:
+    os.unlink(name)
+
+
+def AcceptLicense(target_os):
+  """Use xcodebuild to accept new toolchain license if necessary.  Don't accept
+  the license if a newer license has already been accepted. This only works if
+  xcodebuild and xcode-select are passwordless in sudoers."""
+
+  # Check old license
+  try:
+    target_license_plist_path = \
+        os.path.join(TOOLCHAIN_BUILD_DIR % target_os,
+                     *['Contents','Resources','LicenseInfo.plist'])
+    target_license_plist = LoadPlist(target_license_plist_path)
+    build_type = target_license_plist['licenseType']
+    build_version = target_license_plist['licenseID']
+
+    accepted_license_plist = LoadPlist(
+        '/Library/Preferences/com.apple.dt.Xcode.plist')
+    agreed_to_key = 'IDELast%sLicenseAgreedTo' % build_type
+    last_license_agreed_to = accepted_license_plist[agreed_to_key]
+
+    # Historically all Xcode build numbers have been in the format of AANNNN, so
+    # a simple string compare works.  If Xcode's build numbers change this may
+    # need a more complex compare.
+    if build_version <= last_license_agreed_to:
+      # Don't accept the license of older toolchain builds, this will break the
+      # license of newer builds.
+      return
+  except (subprocess.CalledProcessError, KeyError):
+    # If there's never been a license of type |build_type| accepted,
+    # |target_license_plist_path| or |agreed_to_key| may not exist.
+    pass
+
+  print "Accepting license."
+  old_path = subprocess.Popen(['/usr/bin/xcode-select', '-p'],
+                               stdout=subprocess.PIPE).communicate()[0].strip()
+  try:
+    build_dir = os.path.join(
+        TOOLCHAIN_BUILD_DIR % target_os, 'Contents/Developer')
+    subprocess.check_call(['sudo', '/usr/bin/xcode-select', '-s', build_dir])
+    subprocess.check_call(['sudo', '/usr/bin/xcodebuild', '-license', 'accept'])
+  finally:
+    subprocess.check_call(['sudo', '/usr/bin/xcode-select', '-s', old_path])
+
+
+def _UseHermeticToolchain(target_os):
+  current_dir = os.path.dirname(os.path.realpath(__file__))
+  script_path = os.path.join(current_dir, 'mac/should_use_hermetic_xcode.py')
+  proc = subprocess.Popen([script_path, target_os], stdout=subprocess.PIPE)
+  return '1' in proc.stdout.readline()
+
+
+def RequestGsAuthentication():
+  """Requests that the user authenticate to be able to access gs://.
+  """
+  print 'Access to ' + TOOLCHAIN_URL + ' not configured.'
+  print '-----------------------------------------------------------------'
+  print
+  print 'You appear to be a Googler.'
+  print
+  print 'I\'m sorry for the hassle, but you need to do a one-time manual'
+  print 'authentication. Please run:'
+  print
+  print '    download_from_google_storage --config'
+  print
+  print 'and follow the instructions.'
+  print
+  print 'NOTE 1: Use your google.com credentials, not chromium.org.'
+  print 'NOTE 2: Enter 0 when asked for a "project-id".'
+  print
+  print '-----------------------------------------------------------------'
+  print
+  sys.stdout.flush()
+  sys.exit(1)
+
+
+def DownloadHermeticBuild(target_os, default_version, toolchain_filename):
+  if not _UseHermeticToolchain(target_os):
+    print 'Using local toolchain for %s.' % target_os
+    return 0
+
+  toolchain_version = os.environ.get('MAC_TOOLCHAIN_REVISION',
+                                      default_version)
+
+  if ReadStampFile(target_os) == toolchain_version:
+    print 'Toolchain (%s) is already up to date.' % toolchain_version
+    AcceptLicense(target_os)
+    return 0
+
+  if not CanAccessToolchainBucket():
+    RequestGsAuthentication()
+    return 1
+
+  # Reset the stamp file in case the build is unsuccessful.
+  WriteStampFile(target_os, '')
+
+  toolchain_file = '%s.tgz' % toolchain_version
+  toolchain_full_url = TOOLCHAIN_URL + toolchain_file
+
+  print 'Updating toolchain to %s...' % toolchain_version
+  try:
+    toolchain_file = toolchain_filename % toolchain_version
+    toolchain_full_url = TOOLCHAIN_URL + toolchain_file
+    DownloadAndUnpack(toolchain_full_url, TOOLCHAIN_BUILD_DIR % target_os)
+    AcceptLicense(target_os)
+
+    print 'Toolchain %s unpacked.' % toolchain_version
+    WriteStampFile(target_os, toolchain_version)
+    return 0
+  except Exception as e:
+    print 'Failed to download toolchain %s.' % toolchain_file
+    print 'Exception %s' % e
+    print 'Exiting.'
+    return 1
+
+
+def main():
+  if sys.platform != 'darwin':
+    return 0
+
+  for target_os in GetPlatforms():
+    if target_os == 'ios':
+      default_version = IOS_TOOLCHAIN_VERSION
+      toolchain_filename = 'ios-toolchain-%s.tgz'
+    else:
+      default_version = MAC_TOOLCHAIN_VERSION
+      toolchain_filename = 'toolchain-%s.tgz'
+
+    return_value = DownloadHermeticBuild(
+        target_os, default_version, toolchain_filename)
+    if return_value:
+      return return_value
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/nocompile.gni
@@ -0,0 +1,98 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to create a unittest that
+# invokes a set of no-compile tests.  A no-compile test is a test that asserts
+# a particular construct will not compile.
+#
+# Also see:
+#   http://dev.chromium.org/developers/testing/no-compile-tests
+#
+# To use this, create a GN target with the following form:
+#
+# import("//build/nocompile.gni")
+# nocompile_test("my_module_nc_unittests") {
+#   sources = [
+#     'nc_testset_1.nc',
+#     'nc_testset_2.nc',
+#   ]
+# }
+#
+# The .nc files are C++ files that contain code we wish to assert will not
+# compile.  Each individual test case in the file should be put in its own
+# #ifdef section.  The expected output should be appended with a C++-style
+# comment that has a python list of regular expressions.  This will likely
+# be greater than 80-characters. Giving a solid expected output test is
+# important so that random compile failures do not cause the test to pass.
+#
+# Example .nc file:
+#
+#   #if defined(TEST_NEEDS_SEMICOLON)  // [r"expected ',' or ';' at end of input"]
+#
+#   int a = 1
+#
+#   #elif defined(TEST_NEEDS_CAST)  // [r"invalid conversion from 'void*' to 'char*'"]
+#
+#   void* a = NULL;
+#   char* b = a;
+#
+#   #endif
+#
+# If we needed to disable TEST_NEEDS_SEMICOLON, then change the define to:
+#
+#   DISABLE_TEST_NEEDS_SEMICOLON
+#   TEST_NEEDS_CAST
+#
+# The lines above are parsed by a regexp so avoid getting creative with the
+# formatting or ifdef logic; it will likely just not work.
+#
+# Implementation notes:
+# The .nc files are actually processed by a python script which executes the
+# compiler and generates a .cc file that is empty on success, or will have a
+# series of #error lines on failure, and a set of trivially passing gunit
+# TEST() functions on success. This allows us to fail at the compile step when
+# something goes wrong, and know during the unittest run that the test was at
+# least processed when things go right.
+
+import("//testing/test.gni")
+
+declare_args() {
+  # TODO(crbug.com/105388): make sure no-compile test is not flaky.
+  enable_nocompile_tests =
+      (is_linux || is_mac || is_ios) && is_clang && host_cpu == target_cpu
+}
+
+if (enable_nocompile_tests) {
+  import("//build/config/sysroot.gni")
+  template("nocompile_test") {
+    nocompile_target = target_name + "_run_nocompile"
+
+    action_foreach(nocompile_target) {
+      script = "//tools/nocompile_driver.py"
+      sources = invoker.sources
+
+      result_path = "$target_gen_dir/{{source_name_part}}_nc.cc"
+      depfile = "${result_path}.d"
+      outputs = [
+        result_path,
+      ]
+      sysroot_args = ""
+      if (sysroot != "") {
+        sysroot_args = " --sysroot " + rebase_path(sysroot, root_build_dir)
+      }
+      args = [
+        "4",  # number of compilers to invoke in parallel.
+        "{{source}}",
+        "-Wall -Werror -Wfatal-errors " + "-I" +
+            rebase_path("//", root_build_dir) + sysroot_args,
+        rebase_path(result_path, root_build_dir),
+      ]
+    }
+
+    test(target_name) {
+      deps = invoker.deps + [ ":$nocompile_target" ]
+      sources = get_target_outputs(":$nocompile_target")
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/package_mac_toolchain.py
@@ -0,0 +1,146 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compress and upload Mac toolchain files.
+
+Stored in https://pantheon.corp.google.com/storage/browser/chrome-mac-sdk/.
+"""
+
+import argparse
+import glob
+import os
+import plistlib
+import re
+import subprocess
+import sys
+import tarfile
+import tempfile
+
+
+TOOLCHAIN_URL = "gs://chrome-mac-sdk"
+
+# It's important to at least remove unused Platform folders to cut down on the
+# size of the toolchain folder.  There are other various unused folders that
+# have been removed through trial and error.  If future versions of Xcode become
+# problematic it's possible this list is incorrect, and can be reduced to just
+# the unused platforms.  On the flip side, it's likely more directories can be
+# excluded.
+DEFAULT_EXCLUDE_FOLDERS = [
+'Contents/Applications',
+'Contents/Developer/Documentation',
+'Contents/Developer/Library/Xcode/Templates',
+'Contents/Developer/Platforms/AppleTVOS.platform',
+'Contents/Developer/Platforms/AppleTVSimulator.platform',
+'Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/'
+    'usr/share/man/',
+'Contents/Developer/Platforms/WatchOS.platform',
+'Contents/Developer/Platforms/WatchSimulator.platform',
+'Contents/Developer/Toolchains/Swift*',
+'Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift',
+'Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-migrator',
+'Contents/Resources/Packages/MobileDevice.pkg',
+'Contents/SharedFrameworks/DNTDocumentationSupport.framework'
+]
+
+MAC_EXCLUDE_FOLDERS = [
+# The only thing we need in iPhoneOS.platform on mac is:
+#  \Developer\Library\Xcode\PrivatePlugins
+#  \Info.Plist.
+#  This is the cleanest way to get these.
+'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/Frameworks',
+'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/GPUTools',
+'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/'
+    'GPUToolsPlatform',
+'Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/'
+    'PrivateFrameworks',
+'Contents/Developer/Platforms/iPhoneOS.platform/Developer/usr',
+'Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs',
+'Contents/Developer/Platforms/iPhoneOS.platform/DeviceSupport',
+'Contents/Developer/Platforms/iPhoneOS.platform/Library',
+'Contents/Developer/Platforms/iPhoneOS.platform/usr',
+
+# iPhoneSimulator has a similar requirement, but the bulk of the binary size is
+# in \Developer\SDKs, so only excluding that here.
+'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs',
+]
+
+IOS_EXCLUDE_FOLDERS = [
+'Contents/Developer/Platforms/iPhoneOS.platform/DeviceSupport/',  # Comma was missing: entry silently concatenated with the next string.
+'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/'
+    'iPhoneSimulator.sdk/Applications/',
+'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/'
+    'iPhoneSimulator.sdk/System/Library/AccessibilityBundles/',
+'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/'
+    'iPhoneSimulator.sdk/System/Library/CoreServices/',
+'Contents/Developer/Platforms/iPhoneSimulator.platform/Developer/SDKs/'
+    'iPhoneSimulator.sdk/System/Library/LinguisticData/',
+]
+
+def main():
+  """Compress |target_dir| and upload to |TOOLCHAIN_URL|"""
+  parser = argparse.ArgumentParser()
+  parser.add_argument('target_dir',
+                      help="Xcode installation directory.")
+  parser.add_argument('platform', choices=['ios', 'mac'],
+                      help="Target platform for bundle.")
+  parser_args = parser.parse_args()
+
+  # Verify this looks like an Xcode directory.
+  contents_dir = os.path.join(parser_args.target_dir, 'Contents')
+  plist_file = os.path.join(contents_dir, 'version.plist')
+  try:
+    info = plistlib.readPlist(plist_file)
+  except:
+    print "Invalid Xcode dir."
+    return 0
+  build_version = info['ProductBuildVersion']
+
+  # Look for previous toolchain tgz files with the same |build_version|.
+  fname = 'toolchain'
+  if parser_args.platform == 'ios':
+    fname = 'ios-' + fname
+  wildcard_filename = '%s/%s-%s-*.tgz' % (TOOLCHAIN_URL, fname, build_version)
+  p = subprocess.Popen(['gsutil.py', 'ls', wildcard_filename],
+                       stdout=subprocess.PIPE,
+                       stderr=subprocess.PIPE)
+  output = p.communicate()[0]
+  next_count = 1
+  if p.returncode == 0:
+    next_count = len(output.split('\n'))
+    sys.stdout.write("%s already exists (%s). "
+                     "Do you want to create another? [y/n] "
+                     % (build_version, next_count - 1))
+
+    if raw_input().lower() not in set(['yes','y', 'ye']):
+      print "Skipping duplicate upload."
+      return 0
+
+  os.chdir(parser_args.target_dir)
+  toolchain_file_name = "%s-%s-%s" % (fname, build_version, next_count)
+  toolchain_name = tempfile.mktemp(suffix='toolchain.tgz')
+
+  print "Creating %s (%s)." % (toolchain_file_name, toolchain_name)
+  os.environ["COPYFILE_DISABLE"] = "1"
+  os.environ["GZ_OPT"] = "-8"
+  args = ['tar', '-cvzf', toolchain_name]
+  exclude_folders = DEFAULT_EXCLUDE_FOLDERS
+  if parser_args.platform == 'mac':
+    exclude_folders += MAC_EXCLUDE_FOLDERS
+  else:
+    exclude_folders += IOS_EXCLUDE_FOLDERS
+  args.extend(map('--exclude={0}'.format, exclude_folders))
+  args.extend(['.'])
+  subprocess.check_call(args)
+
+  print "Uploading %s toolchain." % toolchain_file_name
+  destination_path = '%s/%s.tgz' % (TOOLCHAIN_URL, toolchain_file_name)
+  subprocess.check_call(['gsutil.py', 'cp', '-n', toolchain_name,
+                         destination_path])
+
+  print "Done with %s upload." % toolchain_file_name
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
--- a/media/webrtc/trunk/build/precompile.h
+++ b/media/webrtc/trunk/build/precompile.h
@@ -1,108 +1,57 @@
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-// Precompiled header for Chromium project on Windows, not used by
-// other build configurations. Using precompiled headers speeds the
-// build up significantly, around 1/4th on VS 2010 on an HP Z600 with 12
-// GB of memory.
-//
-// Numeric comments beside includes are the number of times they were
-// included under src/chrome/browser on 2011/8/20, which was used as a
-// baseline for deciding what to include in the PCH. Includes without
-// a numeric comment are generally included at least 5 times. It may
-// be possible to tweak the speed of the build by commenting out or
-// removing some of the less frequently used headers.
+// This file is used as a precompiled header for both C and C++ files. So
+// any C++ headers must go in the __cplusplus block below.
 
 #if defined(BUILD_PRECOMPILE_H_)
 #error You shouldn't include the precompiled header file more than once.
 #endif
 
 #define BUILD_PRECOMPILE_H_
 
-// The Windows header needs to come before almost all the other
-// Windows-specific headers.
-#include <Windows.h>
-#include <dwmapi.h>
-#include <shellapi.h>
-#include <wincrypt.h>  // 4
-#include <wtypes.h>  // 2
-
-// Defines in atlbase.h cause conflicts; if we could figure out how
-// this family of headers can be included in the PCH, it might speed
-// up the build as several of them are used frequently.
-/*
-#include <atlbase.h>
-#include <atlapp.h>
-#include <atlcom.h>
-#include <atlcrack.h>  // 2
-#include <atlctrls.h>  // 2
-#include <atlmisc.h>  // 2
-#include <atlsafe.h>  // 1
-#include <atltheme.h>  // 1
-#include <atlwin.h>  // 2
-*/
-
-// Objbase.h and other files that rely on it bring in [ #define
-// interface struct ] which can cause problems in a multi-platform
-// build like Chrome's. #undef-ing it does not work as there are
-// currently 118 targets that break if we do this, so leaving out of
-// the precompiled header for now.
-//#include <commctrl.h>  // 2
-//#include <commdlg.h>  // 3
-//#include <cryptuiapi.h>  // 2
-//#include <Objbase.h>  // 2
-//#include <objidl.h>  // 1
-//#include <ole2.h>  // 1
-//#include <oleacc.h>  // 2
-//#include <oleauto.h>  // 1
-//#include <oleidl.h>  // 1
-//#include <propkey.h>  // 2
-//#include <propvarutil.h>  // 2
-//#include <pstore.h>  // 2
-//#include <shlguid.h>  // 1
-//#include <shlwapi.h>  // 1
-//#include <shobjidl.h>  // 4
-//#include <urlhist.h>  // 2
-
-// Caused other conflicts in addition to the 'interface' issue above.
-// #include <shlobj.h>
+#define _USE_MATH_DEFINES
 
 #include <errno.h>
 #include <fcntl.h>
-#include <limits.h>  // 4
+#include <limits.h>
 #include <math.h>
-#include <memory.h>  // 1
+#include <memory.h>
 #include <signal.h>
-#include <stdarg.h>  // 1
+#include <stdarg.h>
 #include <stddef.h>
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
-#include <time.h>  // 4
+#include <time.h>
+
+#if defined(__cplusplus)
 
 #include <algorithm>
-#include <bitset>  // 3
+#include <bitset>
 #include <cmath>
 #include <cstddef>
-#include <cstdio>  // 3
-#include <cstdlib>  // 2
+#include <cstdio>
+#include <cstdlib>
 #include <cstring>
 #include <deque>
-#include <fstream>  // 3
+#include <fstream>
 #include <functional>
-#include <iomanip>  // 2
-#include <iosfwd>  // 2
+#include <iomanip>
+#include <iosfwd>
 #include <iterator>
 #include <limits>
 #include <list>
 #include <map>
-#include <numeric>  // 2
+#include <numeric>
 #include <ostream>
 #include <queue>
 #include <set>
 #include <sstream>
 #include <stack>
 #include <string>
 #include <utility>
 #include <vector>
+
+#endif  // __cplusplus
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/print_python_deps.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints all non-system dependencies for the given module.
+
+The primary use-case for this script is to generate the list of python modules
+required for .isolate files.
+"""
+
+import argparse
+import imp
+import os
+import pipes
+import sys
+
+# Don't use any helper modules, or else they will end up in the results.
+
+
+_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
+
+
+def _ComputePythonDependencies():
+  """Gets the paths of imported non-system python modules.
+
+  A path is assumed to be a "system" import if it is outside of chromium's
+  src/. The paths will be relative to the current directory.
+  """
+  module_paths = (m.__file__ for m in sys.modules.values()
+                  if m and hasattr(m, '__file__'))
+
+  src_paths = set()
+  for path in module_paths:
+    if path == __file__:
+      continue
+    path = os.path.abspath(path)
+    if not path.startswith(_SRC_ROOT):
+      continue
+
+    if (path.endswith('.pyc')
+        or (path.endswith('c') and not os.path.splitext(path)[1])):
+      path = path[:-1]
+    src_paths.add(path)
+
+  return src_paths
+
+
+def _NormalizeCommandLine(options):
+  """Returns a string that when run from SRC_ROOT replicates the command."""
+  args = ['build/print_python_deps.py']
+  root = os.path.relpath(options.root, _SRC_ROOT)
+  if root != '.':
+    args.extend(('--root', root))
+  if options.output:
+    args.extend(('--output', os.path.relpath(options.output, _SRC_ROOT)))
+  for whitelist in sorted(options.whitelists):
+    args.extend(('--whitelist', os.path.relpath(whitelist, _SRC_ROOT)))
+  args.append(os.path.relpath(options.module, _SRC_ROOT))
+  return ' '.join(pipes.quote(x) for x in args)
+
+
+def _FindPythonInDirectory(directory):
+  """Yields all non-test python files (recursively) under |directory|."""
+  # This is a generator; the previous `files = []` accumulator was dead code.
+  for root, _dirnames, filenames in os.walk(directory):
+    for filename in filenames:
+      if filename.endswith('.py') and not filename.endswith('_test.py'):
+        yield os.path.join(root, filename)
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description='Prints all non-system dependencies for the given module.')
+  parser.add_argument('module',
+                      help='The python module to analyze.')
+  parser.add_argument('--root', default='.',
+                      help='Directory to make paths relative to.')
+  parser.add_argument('--output',
+                      help='Write output to a file rather than stdout.')
+  parser.add_argument('--whitelist', default=[], action='append',
+                      dest='whitelists',
+                      help='Recursively include all non-test python files '
+                      'within this directory. May be specified multiple times.')
+  options = parser.parse_args()
+  # Replace the path entry for print_python_deps.py with the one for the given
+  # module.
+  sys.path[0] = os.path.dirname(options.module)
+  imp.load_source('NAME', options.module)
+
+  paths_set = _ComputePythonDependencies()
+  for path in options.whitelists:
+    paths_set.update(os.path.abspath(p) for p in _FindPythonInDirectory(path))
+
+  paths = [os.path.relpath(p, options.root) for p in paths_set]
+
+  normalized_cmdline = _NormalizeCommandLine(options)
+  out = open(options.output, 'w') if options.output else sys.stdout
+  with out:
+    out.write('# Generated by running:\n')
+    out.write('#   %s\n' % normalized_cmdline)
+    for path in sorted(paths):
+      out.write(path + '\n')
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/protoc_java.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate java source files from protobuf files.
+
+This is a helper file for the genproto_java action in protoc_java.gypi.
+
+It performs the following steps:
+1. Deletes all old sources (ensures deleted classes are not part of new jars).
+2. Creates source directory.
+3. Generates Java files using protoc (output into either --java-out-dir or
+   --srcjar).
+4. Creates a new stamp file.
+"""
+
+import os
+import optparse
+import shutil
+import subprocess
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "android", "gyp"))
+from util import build_utils
+
+def main(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option("--protoc", help="Path to protoc binary.")
+  parser.add_option("--proto-path", help="Path to proto directory.")
+  parser.add_option("--java-out-dir",
+      help="Path to output directory for java files.")
+  parser.add_option("--srcjar", help="Path to output srcjar.")
+  parser.add_option("--stamp", help="File to touch on success.")
+  options, args = parser.parse_args(argv)
+
+  build_utils.CheckOptions(options, parser, ['protoc', 'proto_path'])
+  if not options.java_out_dir and not options.srcjar:
+    print 'One of --java-out-dir or --srcjar must be specified.'
+    return 1
+
+  with build_utils.TempDir() as temp_dir:
+    # Specify arguments to the generator.
+    generator_args = ['optional_field_style=reftypes',
+                      'store_unknown_fields=true']
+    out_arg = '--javanano_out=' + ','.join(generator_args) + ':' + temp_dir
+    # Generate Java files using protoc.
+    build_utils.CheckOutput(
+        [options.protoc, '--proto_path', options.proto_path, out_arg]
+        + args)
+
+    if options.java_out_dir:
+      build_utils.DeleteDirectory(options.java_out_dir)
+      shutil.copytree(temp_dir, options.java_out_dir)
+    else:
+      build_utils.ZipDir(options.srcjar, temp_dir)
+
+  if options.depfile:
+    assert options.srcjar
+    deps = args + [options.protoc]
+    build_utils.WriteDepfile(options.depfile, options.srcjar, deps)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/redirect_stdout.py
@@ -0,0 +1,19 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import subprocess
+import sys
+
+# This script executes a command and redirects the stdout to a file. This is
+# equivalent to |command... > output_file|.
+#
+# Usage: python redirect_stdout.py output_file command...
+
+if __name__ == '__main__':
+  if len(sys.argv) < 2:
+    print >> sys.stderr, "Usage: %s output_file command..." % (sys.argv[0])
+    sys.exit(1)
+
+  with open(sys.argv[1], 'w') as fp:
+    sys.exit(subprocess.check_call(sys.argv[2:], stdout=fp))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/rm.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Delete a file.
+
+This module works much like the rm posix command.
+"""
+
+import argparse
+import os
+import sys
+
+
+def Main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('files', nargs='+')
+  parser.add_argument('-f', '--force', action='store_true',
+                      help="don't err on missing")
+  parser.add_argument('--stamp', required=True, help='touch this file')
+  args = parser.parse_args()
+  for f in args.files:
+    try:
+      os.remove(f)
+    except OSError:
+      if not args.force:
+        print >>sys.stderr, "'%s' does not exist" % f
+        return 1
+
+  with open(args.stamp, 'w'):
+    os.utime(args.stamp, None)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/sample_arg_file.gn
@@ -0,0 +1,6 @@
+# Build arguments go here. Here are some of the most commonly set ones.
+# Run `gn args <out_dir> --list` for the full list.
+#   is_component_build = true
+#   is_debug = true
+#   symbol_level = 2
+#   use_goma = false
--- a/media/webrtc/trunk/build/sanitize-mac-build-log.sed
+++ b/media/webrtc/trunk/build/sanitize-mac-build-log.sed
@@ -1,10 +1,8 @@
-#!/bin/echo Use sanitize-mac-build-log.sh or sed -f
-
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 # Use this sed script to reduce a Mac build log into something readable.
 
 # Drop uninformative lines.
 /^distcc/d
@@ -12,24 +10,24 @@
 /^    setenv /d
 /^    cd /d
 /^make: Nothing to be done/d
 /^$/d
 
 # Xcode prints a short "compiling foobar.o" line followed by the lengthy
 # full command line.  These deletions drop the command line.
 \|^    /Developer/usr/bin/|d
-\|^    /Developer/Library/PrivateFrameworks/DevToolsCore.framework/|d
-\|^    /Developer/Library/Xcode/Plug-ins/CoreBuildTasks.xcplugin/|d
+\|^    /Developer/Library/PrivateFrameworks/DevToolsCore\.framework/|d
+\|^    /Developer/Library/Xcode/Plug-ins/CoreBuildTasks\.xcplugin/|d
 
 # Drop any goma command lines as well.
 \|^    .*/gomacc |d
 
 # And, if you've overridden something from your own bin directory, remove those
 # full command lines, too.
 \|^    /Users/[^/]*/bin/|d
 
 # There's already a nice note for bindings, don't need the command line.
-\|^python scripts/rule_binding.py|d
+\|^python scripts/rule_binding\.py|d
 
 # Shorten the "compiling foobar.o" line.
-s|^Distributed-CompileC \(.*\) normal i386 c++ com.apple.compilers.gcc.4_2|    CC \1|
-s|^CompileC \(.*\) normal i386 c++ com.apple.compilers.gcc.4_2|    CC \1|
+s|^Distributed-CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2|    CC \1|
+s|^CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2|    CC \1|
--- a/media/webrtc/trunk/build/sanitize-mac-build-log.sh
+++ b/media/webrtc/trunk/build/sanitize-mac-build-log.sh
@@ -1,6 +1,5 @@
 #!/bin/sh
 # Copyright (c) 2010 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-sed -f `dirname "${0}"`/`basename "${0}" sh`sed
-
+sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
--- a/media/webrtc/trunk/build/sanitize-win-build-log.sed
+++ b/media/webrtc/trunk/build/sanitize-win-build-log.sed
@@ -1,17 +1,15 @@
-#!/bin/echo Use sanitize-win-build-log.sh or sed -f
-
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 # Use this sed script to reduce a Windows build log into something
 # machine-parsable.
 
 # Drop uninformative lines.
-/The operation completed successfully./d
+/The operation completed successfully\./d
 
 # Drop parallelization indicators on lines.
-s/^[0-9]\+>//
+s/^[0-9]+>//
 
 # Shorten bindings generation lines
-s/^.*"perl".*generate-bindings.pl".*\("[^"]\+\.idl"\).*$/  generate-bindings \1/
+s/^.*"python".*idl_compiler\.py".*("[^"]+\.idl").*$/  idl_compiler \1/
--- a/media/webrtc/trunk/build/sanitize-win-build-log.sh
+++ b/media/webrtc/trunk/build/sanitize-win-build-log.sh
@@ -1,6 +1,5 @@
 #!/bin/sh
 # Copyright (c) 2010 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-sed -f `dirname "${0}"`/`basename "${0}" sh`sed
-
+sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/sanitizers/OWNERS
@@ -0,0 +1,4 @@
+glider@chromium.org
+eugenis@chromium.org
+per-file tsan_suppressions.cc=*
+per-file lsan_suppressions.cc=*
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/sanitizers/asan_suppressions.cc
@@ -0,0 +1,23 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for AddressSanitizer.
+// It should only be used under very limited circumstances such as suppressing
+// a report caused by an interceptor call in a system-installed library.
+
+#if defined(ADDRESS_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kASanDefaultSuppressions which contains ASan suppressions delimited by
+// newlines.
+char kASanDefaultSuppressions[] =
+// http://crbug.com/178677
+"interceptor_via_lib:libsqlite3.so\n"
+
+// PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+// End of suppressions.
+;  // Please keep this semicolon.
+
+#endif  // ADDRESS_SANITIZER
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/sanitizers/lsan_suppressions.cc
@@ -0,0 +1,98 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for LeakSanitizer.
+// You can also pass additional suppressions via LSAN_OPTIONS:
+// LSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/leaksanitizer for more info.
+
+#if defined(LEAK_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kLSanDefaultSuppressions which contains LSan suppressions delimited by
+// newlines. See http://dev.chromium.org/developers/testing/leaksanitizer
+// for the instructions on writing suppressions.
+char kLSanDefaultSuppressions[] =
+    // Intentional leak used as sanity test for Valgrind/memcheck.
+    "leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody\n"
+
+    // ================ Leaks in third-party code ================
+
+    // False positives in libfontconfig. http://crbug.com/39050
+    "leak:libfontconfig\n"
+    // eglibc-2.19/string/strdup.c creates false positive leak errors because of
+    // the same reason as crbug.com/39050. The leak error stack trace, when
+    // unwind on malloc, includes a call to libfontconfig. But the default stack
+    // trace is too short in leak sanitizer bot to make the libfontconfig
+    // suppression work. http://crbug.com/605286
+    "leak:__strdup\n"
+
+    // Leaks in Nvidia's libGL.
+    "leak:libGL.so\n"
+
+    // TODO(eugenis): revisit NSS suppressions after the switch to BoringSSL
+    // NSS leaks in CertDatabaseNSSTest tests. http://crbug.com/51988
+    "leak:net::NSSCertDatabase::ImportFromPKCS12\n"
+    "leak:net::NSSCertDatabase::ListCerts\n"
+    "leak:net::NSSCertDatabase::DeleteCertAndKey\n"
+    "leak:crypto::ScopedTestNSSDB::ScopedTestNSSDB\n"
+    // Another leak due to not shutting down NSS properly.
+    // http://crbug.com/124445
+    "leak:error_get_my_stack\n"
+    // The NSS suppressions above will not fire when the fast stack unwinder is
+    // used, because it can't unwind through NSS libraries. Apply blanket
+    // suppressions for now.
+    "leak:libnssutil3\n"
+    "leak:libnspr4\n"
+    "leak:libnss3\n"
+    "leak:libplds4\n"
+    "leak:libnssckbi\n"
+
+    // XRandR has several one time leaks.
+    "leak:libxrandr\n"
+
+    // xrandr leak. http://crbug.com/119677
+    "leak:XRRFindDisplay\n"
+
+    // http://crbug.com/431213, http://crbug.com/416665
+    "leak:gin/object_template_builder.h\n"
+
+    // Leaks in swrast_dri.so. http://crbug.com/540042
+    "leak:swrast_dri.so\n"
+
+    // Leak in glibc's gconv caused by fopen(..., "r,ccs=UNICODE")
+    "leak:__gconv_lookup_cache\n"
+
+    // ================ Leaks in Chromium code ================
+    // PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
+    // Instead, commits that introduce memory leaks should be reverted.
+    // Suppressing the leak is acceptable in some cases when reverting is
+    // impossible, i.e. when enabling leak detection for the first time for a
+    // test target with pre-existing leaks.
+
+    // Small test-only leak in ppapi_unittests. http://crbug.com/258113
+    "leak:ppapi::proxy::PPP_Instance_Private_ProxyTest_PPPInstancePrivate_"
+    "Test\n"
+
+    // http://crbug.com/322671
+    "leak:content::SpeechRecognitionBrowserTest::SetUpOnMainThread\n"
+
+    // http://crbug.com/355641
+    "leak:TrayAccessibilityTest\n"
+
+    // http://crbug.com/354644
+    "leak:CertificateViewerUITest::ShowModalCertificateViewer\n"
+
+    // http://crbug.com/356306
+    "leak:service_manager::SetProcessTitleFromCommandLine\n"
+
+    // http://crbug.com/601435
+    "leak:mojo/edk/js/handle.h\n"
+
+    // PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+    // End of suppressions.
+    ;  // Please keep this semicolon.
+
+#endif  // LEAK_SANITIZER
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/sanitizers/sanitizer_options.cc
@@ -0,0 +1,181 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file contains the default options for various compiler-based dynamic
+// tools.
+
+#include "build/build_config.h"
+
+#if defined(ADDRESS_SANITIZER) && defined(OS_MACOSX)
+#include <crt_externs.h>  // for _NSGetArgc, _NSGetArgv
+#include <string.h>
+#endif  // ADDRESS_SANITIZER && OS_MACOSX
+
+#if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) ||  \
+    defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER) || \
+    defined(UNDEFINED_SANITIZER)
+// Functions returning default options are declared weak in the tools' runtime
+// libraries. To make the linker pick the strong replacements for those
+// functions from this module, we explicitly force its inclusion by passing
+// -Wl,-u_sanitizer_options_link_helper
+extern "C"
+void _sanitizer_options_link_helper() { }
+
+// The callbacks we define here will be called from the sanitizer runtime, but
+// aren't referenced from the Chrome executable. We must ensure that those
+// callbacks are not sanitizer-instrumented, and that they aren't stripped by
+// the linker.
+#define SANITIZER_HOOK_ATTRIBUTE                                           \
+  extern "C"                                                               \
+  __attribute__((no_sanitize("address", "memory", "thread", "undefined"))) \
+  __attribute__((visibility("default")))                                   \
+  __attribute__((used))
+#endif
+
+#if defined(ADDRESS_SANITIZER)
+// Default options for AddressSanitizer in various configurations:
+//   malloc_context_size=5 - limit the size of stack traces collected by ASan
+//     for each malloc/free by 5 frames. These stack traces tend to accumulate
+//     very fast in applications using JIT (v8 in Chrome's case), see
+//     https://code.google.com/p/address-sanitizer/issues/detail?id=177
+//   symbolize=1 - enable in-process symbolization.
+//   legacy_pthread_cond=1 - run in the libpthread 2.2.5 compatibility mode to
+//     work around libGL.so using the obsolete API, see
+//     http://crbug.com/341805. This may break if pthread_cond_t objects are
+//     accessed by both instrumented and non-instrumented binaries (e.g. if
+//     they reside in shared memory). This option is going to be deprecated in
+//     upstream AddressSanitizer and must not be used anywhere except the
+//     official builds.
+//   check_printf=1 - check the memory accesses to printf (and other formatted
+//     output routines) arguments.
+//   use_sigaltstack=1 - handle signals on an alternate signal stack. Useful
+//     for stack overflow detection.
+//   strip_path_prefix=/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports
+//   fast_unwind_on_fatal=1 - use the fast (frame-pointer-based) stack unwinder
+//     to print error reports. V8 doesn't generate debug info for the JIT code,
+//     so the slow unwinder may not work properly.
+//   detect_stack_use_after_return=1 - use fake stack to delay the reuse of
+//     stack allocations and detect stack-use-after-return errors.
+#if defined(OS_LINUX)
+#if defined(GOOGLE_CHROME_BUILD)
+// Default AddressSanitizer options for the official build. These do not affect
+// tests on buildbots (which don't set GOOGLE_CHROME_BUILD) or non-official
+// Chromium builds.
+const char kAsanDefaultOptions[] =
+    "legacy_pthread_cond=1 malloc_context_size=5 "
+    "symbolize=1 check_printf=1 use_sigaltstack=1 detect_leaks=0 "
+    "strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
+    "allow_user_segv_handler=1 ";
+#else
+// Default AddressSanitizer options for buildbots and non-official builds.
+const char* kAsanDefaultOptions =
+    "symbolize=1 check_printf=1 use_sigaltstack=1 "
+    "detect_leaks=0 strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
+    "detect_stack_use_after_return=1 "
+    "allow_user_segv_handler=1 ";
+#endif  // GOOGLE_CHROME_BUILD
+
+#elif defined(OS_MACOSX)
+const char *kAsanDefaultOptions =
+    "check_printf=1 use_sigaltstack=1 "
+    "strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
+    "detect_stack_use_after_return=1 detect_odr_violation=0 ";
+#endif  // OS_LINUX
+
+#if defined(OS_LINUX) || defined(OS_MACOSX)
+// Allow NaCl to override the default asan options.
+extern const char* kAsanDefaultOptionsNaCl;
+__attribute__((weak)) const char* kAsanDefaultOptionsNaCl = nullptr;
+
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_options() {
+  if (kAsanDefaultOptionsNaCl)
+    return kAsanDefaultOptionsNaCl;
+  return kAsanDefaultOptions;
+}
+
+extern char kASanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_suppressions() {
+  return kASanDefaultSuppressions;
+}
+#endif  // OS_LINUX || OS_MACOSX
+#endif  // ADDRESS_SANITIZER
+
+#if defined(THREAD_SANITIZER) && defined(OS_LINUX)
+// Default options for ThreadSanitizer in various configurations:
+//   detect_deadlocks=1 - enable deadlock (lock inversion) detection.
+//   second_deadlock_stack=1 - more verbose deadlock reports.
+//   report_signal_unsafe=0 - do not report async-signal-unsafe functions
+//     called from signal handlers.
+//   report_thread_leaks=0 - do not report unjoined threads at the end of
+//     the program execution.
+//   print_suppressions=1 - print the list of matched suppressions.
+//   history_size=7 - make the history buffer proportional to 2^7 (the maximum
+//     value) to keep more stack traces.
+//   strip_path_prefix=/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+const char kTsanDefaultOptions[] =
+    "detect_deadlocks=1 second_deadlock_stack=1 report_signal_unsafe=0 "
+    "report_thread_leaks=0 print_suppressions=1 history_size=7 "
+    "strict_memcmp=0 strip_path_prefix=/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_options() {
+  return kTsanDefaultOptions;
+}
+
+extern char kTSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_suppressions() {
+  return kTSanDefaultSuppressions;
+}
+
+#endif  // THREAD_SANITIZER && OS_LINUX
+
+#if defined(MEMORY_SANITIZER)
+// Default options for MemorySanitizer:
+//   intercept_memcmp=0 - do not detect uninitialized memory in memcmp() calls.
+//     Pending cleanup, see http://crbug.com/523428
+//   strip_path_prefix=/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+const char kMsanDefaultOptions[] =
+    "intercept_memcmp=0 strip_path_prefix=/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__msan_default_options() {
+  return kMsanDefaultOptions;
+}
+
+#endif  // MEMORY_SANITIZER
+
+#if defined(LEAK_SANITIZER)
+// Default options for LeakSanitizer:
+//   print_suppressions=1 - print the list of matched suppressions.
+//   strip_path_prefix=/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+const char kLsanDefaultOptions[] =
+    "print_suppressions=1 strip_path_prefix=/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_options() {
+  return kLsanDefaultOptions;
+}
+
+extern char kLSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_suppressions() {
+  return kLSanDefaultSuppressions;
+}
+
+#endif  // LEAK_SANITIZER
+
+#if defined(UNDEFINED_SANITIZER)
+// Default options for UndefinedBehaviorSanitizer:
+//   print_stacktrace=1 - print the stacktrace when UBSan reports an error.
+const char kUbsanDefaultOptions[] =
+    "print_stacktrace=1 strip_path_prefix=/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char* __ubsan_default_options() {
+  return kUbsanDefaultOptions;
+}
+
+#endif  // UNDEFINED_SANITIZER
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/sanitizers/tsan_suppressions.cc
@@ -0,0 +1,266 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for ThreadSanitizer.
+// You can also pass additional suppressions via TSAN_OPTIONS:
+// TSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for more info.
+
+#if defined(THREAD_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kTSanDefaultSuppressions which contains TSan suppressions delimited by
+// See http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for the instructions on writing suppressions.
+char kTSanDefaultSuppressions[] =
+    // False positives in libflashplayer.so and libglib.so. Since we don't
+    // instrument them, we cannot reason about the synchronization in them.
+    "race:libflashplayer.so\n"
+    "race:libglib*.so\n"
+
+    // Intentional race in ToolsSanityTest.DataRace in base_unittests.
+    "race:base/tools_sanity_unittest.cc\n"
+
+    // Data race on WatchdogCounter [test-only].
+    "race:base/threading/watchdog_unittest.cc\n"
+
+    // Races in libevent, http://crbug.com/23244.
+    "race:libevent/event.c\n"
+
+    // http://crbug.com/84094.
+    "race:sqlite3StatusSet\n"
+    "race:pcache1EnforceMaxPage\n"
+    "race:pcache1AllocPage\n"
+
+    // http://crbug.com/102327.
+    // Test-only race, won't fix.
+    "race:tracked_objects::ThreadData::ShutdownSingleThreadedCleanup\n"
+
+    // http://crbug.com/120808
+    "race:base/threading/watchdog.cc\n"
+
+    // http://crbug.com/157586
+    "race:third_party/libvpx/source/libvpx/vp8/decoder/threading.c\n"
+
+    // http://crbug.com/158718
+    "race:third_party/ffmpeg/libavcodec/pthread.c\n"
+    "race:third_party/ffmpeg/libavcodec/pthread_frame.c\n"
+    "race:third_party/ffmpeg/libavcodec/vp8.c\n"
+    "race:third_party/ffmpeg/libavutil/mem.c\n"
+    "race:*HashFrameForTesting\n"
+    "race:third_party/ffmpeg/libavcodec/h264pred.c\n"
+    "race:media::ReleaseData\n"
+
+    // http://crbug.com/158922
+    "race:third_party/libvpx/source/libvpx/vp8/encoder/*\n"
+    "race:third_party/libvpx/source/libvpx/vp9/encoder/*\n"
+
+    // http://crbug.com/189177
+    "race:thread_manager\n"
+    "race:v8::Locker::Initialize\n"
+
+    // http://crbug.com/239359
+    "race:media::TestInputCallback::OnData\n"
+
+    // http://crbug.com/244368
+    "race:skia::BeginPlatformPaint\n"
+
+    // http://crbug.com/244385
+    "race:unixTempFileDir\n"
+
+    // http://crbug.com/244755
+    "race:v8::internal::Zone::NewExpand\n"
+    "race:TooLateToEnableNow\n"
+    "race:adjust_segment_bytes_allocated\n"
+
+    // http://crbug.com/244774
+    "race:webrtc::RTPReceiver::ProcessBitrate\n"
+    "race:webrtc::RTPSender::ProcessBitrate\n"
+    "race:webrtc::VideoCodingModuleImpl::Decode\n"
+    "race:webrtc::RTPSender::SendOutgoingData\n"
+    "race:webrtc::VP8EncoderImpl::GetEncodedPartitions\n"
+    "race:webrtc::VP8EncoderImpl::Encode\n"
+    "race:webrtc::ViEEncoder::DeliverFrame\n"
+    "race:webrtc::vcm::VideoReceiver::Decode\n"
+    "race:webrtc::VCMReceiver::FrameForDecoding\n"
+    "race:*trace_event_unique_catstatic*\n"
+
+    // http://crbug.com/244856
+    "race:AutoPulseLock\n"
+
+    // http://crbug.com/246968
+    "race:webrtc::VideoCodingModuleImpl::RegisterPacketRequestCallback\n"
+
+    // http://crbug.com/246974
+    "race:content::GpuWatchdogThread::CheckArmed\n"
+
+    // http://crbug.com/257396
+    "race:base::trace_event::"
+    "TraceEventTestFixture_TraceSamplingScope_Test::TestBody\n"
+
+    // http://crbug.com/258479
+    "race:SamplingStateScope\n"
+    "race:g_trace_state\n"
+
+    // http://crbug.com/258499
+    "race:third_party/skia/include/core/SkRefCnt.h\n"
+
+    // http://crbug.com/268924
+    "race:base::g_power_monitor\n"
+    "race:base::PowerMonitor::PowerMonitor\n"
+    "race:base::PowerMonitor::AddObserver\n"
+    "race:base::PowerMonitor::RemoveObserver\n"
+    "race:base::PowerMonitor::IsOnBatteryPower\n"
+
+    // http://crbug.com/258935
+    "race:base::Thread::StopSoon\n"
+
+    // http://crbug.com/272095
+    "race:base::g_top_manager\n"
+
+    // http://crbug.com/308590
+    "race:CustomThreadWatcher::~CustomThreadWatcher\n"
+
+    // http://crbug.com/310851
+    "race:net::ProxyResolverV8Tracing::Job::~Job\n"
+
+    // http://crbug.com/327330
+    "race:PrepareTextureMailbox\n"
+    "race:cc::LayerTreeHost::PaintLayerContents\n"
+
+    // http://crbug.com/476529
+    "deadlock:cc::VideoLayerImpl::WillDraw\n"
+
+    // http://crbug.com/328826
+    "race:gLCDOrder\n"
+    "race:gLCDOrientation\n"
+
+    // http://crbug.com/328868
+    "race:PR_Lock\n"
+
+    // http://crbug.com/333244
+    "race:content::"
+    "VideoCaptureImplTest::MockVideoCaptureImpl::~MockVideoCaptureImpl\n"
+
+    // http://crbug.com/333871
+    "race:v8::internal::Interface::NewValue()::value_interface\n"
+    "race:v8::internal::IsMinusZero(double)::minus_zero\n"
+    "race:v8::internal::FastCloneShallowObjectStub::"
+    "InitializeInterfaceDescriptor\n"
+    "race:v8::internal::KeyedLoadStubCompiler::registers\n"
+    "race:v8::internal::KeyedStoreStubCompiler::registers()::registers\n"
+    "race:v8::internal::KeyedLoadFastElementStub::"
+    "InitializeInterfaceDescriptor\n"
+    "race:v8::internal::KeyedStoreFastElementStub::"
+    "InitializeInterfaceDescriptor\n"
+    "race:v8::internal::LoadStubCompiler::registers\n"
+    "race:v8::internal::StoreStubCompiler::registers\n"
+    "race:v8::internal::HValue::LoopWeight\n"
+
+    // http://crbug.com/334140
+    "race:CommandLine::HasSwitch\n"
+    "race:CommandLine::current_process_commandline_\n"
+    "race:CommandLine::GetSwitchValueASCII\n"
+
+    // http://crbug.com/338675
+    "race:blink::s_platform\n"
+    "race:content::"
+    "RendererWebKitPlatformSupportImpl::~RendererWebKitPlatformSupportImpl\n"
+
+    // http://crbug.com/347534
+    "race:v8::internal::V8::TearDown\n"
+
+    // http://crbug.com/347538
+    "race:sctp_timer_start\n"
+
+    // http://crbug.com/347553
+    "race:blink::WebString::reset\n"
+
+    // http://crbug.com/348511
+    "race:webrtc::acm1::AudioCodingModuleImpl::PlayoutData10Ms\n"
+
+    // http://crbug.com/348982
+    "race:cricket::P2PTransportChannel::OnConnectionDestroyed\n"
+    "race:cricket::P2PTransportChannel::AddConnection\n"
+
+    // http://crbug.com/348984
+    "race:sctp_express_handle_sack\n"
+    "race:system_base_info\n"
+
+    // https://code.google.com/p/v8/issues/detail?id=3143
+    "race:v8::internal::FLAG_track_double_fields\n"
+
+    // http://crbug.com/374135
+    "race:media::AlsaWrapper::PcmWritei\n"
+
+    // False positive in libc's tzset_internal, http://crbug.com/379738.
+    "race:tzset_internal\n"
+
+    // http://crbug.com/380554
+    "deadlock:g_type_add_interface_static\n"
+
+    // http://crbug.com/386385
+    "race:content::AppCacheStorageImpl::DatabaseTask::CallRunCompleted\n"
+
+    // http://crbug.com/388730
+    "race:g_next_user_script_id\n"
+
+    // http://crbug.com/397022
+    "deadlock:"
+    "base::trace_event::TraceEventTestFixture_ThreadOnceBlocking_Test::"
+    "TestBody\n"
+
+    // http://crbug.com/415472
+    "deadlock:base::trace_event::TraceLog::GetCategoryGroupEnabled\n"
+
+    // http://crbug.com/490856
+    "deadlock:content::TracingControllerImpl::SetEnabledOnFileThread\n"
+
+    // https://code.google.com/p/skia/issues/detail?id=3294
+    "race:SkBaseMutex::acquire\n"
+
+    // https://crbug.com/430533
+    "race:TileTaskGraphRunner::Run\n"
+
+    // Lock inversion in third party code, won't fix.
+    // https://crbug.com/455638
+    "deadlock:dbus::Bus::ShutdownAndBlock\n"
+
+    // https://crbug.com/459429
+    "race:randomnessPid\n"
+
+    // https://crbug.com/454655
+    "race:content::BrowserTestBase::PostTaskToInProcessRendererAndWait\n"
+
+    // https://crbug.com/569682
+    "race:blink::ThreadState::visitStackRoots\n"
+
+    // http://crbug.com/582274
+    "race:usrsctp_close\n"
+
+    // http://crbug.com/633145
+    "race:third_party/libjpeg_turbo/simd/jsimd_x86_64.c\n"
+
+    // http://crbug.com/587199
+    "race:base::TimerTest_OneShotTimer_CustomTaskRunner_Test::TestBody\n"
+
+    // http://crbug.com/v8/6065
+    "race:net::(anonymous namespace)::ProxyResolverV8TracingImpl::RequestImpl"
+    "::~RequestImpl()\n"
+
+    // http://crbug.com/691029
+    "deadlock:libGLX.so*\n"
+
+    // http://crbug.com/719633
+    "race:crypto::EnsureNSSInit()\n"
+
+    // http://crbug.com/695929
+    "race:base::i18n::IsRTL\n"
+    "race:base::i18n::SetICUDefaultLocale\n"
+
+    // End of suppressions.
+    ;  // Please keep this semicolon.
+
+#endif  // THREAD_SANITIZER
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/testing/gmock/BUILD.gn
@@ -0,0 +1,65 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Chromium's GN configuration for gmock now lives at testing/gmock/BUILD.gn.
+#
+# This configuration is left in the tree because it is pulled by V8 and PDFium,
+# and will be removed as soon as the projects switch off of it.
+#
+# Details at http://crbug.com/630705 and http://crrev.com/2779193002
+
+config("gmock_config") {
+  # Gmock headers need to be able to find themselves.
+  include_dirs = [
+    "//testing/gmock_custom",
+    "include",
+  ]
+}
+
+static_library("gmock") {
+  testonly = true
+  sources = [
+    # Sources based on files in r173 of gmock.
+    "include/gmock/gmock-actions.h",
+    "include/gmock/gmock-cardinalities.h",
+    "include/gmock/gmock-generated-actions.h",
+    "include/gmock/gmock-generated-function-mockers.h",
+    "include/gmock/gmock-generated-matchers.h",
+    "include/gmock/gmock-generated-nice-strict.h",
+    "include/gmock/gmock-matchers.h",
+    "include/gmock/gmock-spec-builders.h",
+    "include/gmock/gmock.h",
+    "include/gmock/internal/gmock-generated-internal-utils.h",
+    "include/gmock/internal/gmock-internal-utils.h",
+    "include/gmock/internal/gmock-port.h",
+
+    # gmock helpers.
+    "../gmock_custom/gmock/internal/custom/gmock-port.h",
+
+    #"src/gmock-all.cc",  # Not needed by our build.
+    "src/gmock-cardinalities.cc",
+    "src/gmock-internal-utils.cc",
+    "src/gmock-matchers.cc",
+    "src/gmock-spec-builders.cc",
+    "src/gmock.cc",
+  ]
+
+  # This project includes some stuff from gtest's guts.
+  include_dirs = [ "../gtest/include" ]
+
+  public_configs = [
+    ":gmock_config",
+    "//testing/gtest:gtest_config",
+  ]
+}
+
+static_library("gmock_main") {
+  testonly = true
+  sources = [
+    "src/gmock_main.cc",
+  ]
+  deps = [
+    ":gmock",
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/testing/gtest/BUILD.gn
@@ -0,0 +1,150 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Chromium's GN configuration for gtest now lives at testing/gtest/BUILD.gn.
+#
+# This configuration is left in the tree because it is pulled by V8 and PDFium,
+# and will be removed as soon as the projects switch off of it.
+#
+# Details at http://crbug.com/630705 and http://crrev.com/2779193002
+
+import("//build_overrides/gtest.gni")
+if (is_ios) {
+  import("//build/config/ios/ios_sdk.gni")
+  import("//build/buildflag_header.gni")
+}
+
+config("gtest_config") {
+  visibility = [
+    ":*",
+    "//testing/gmock:*",  # gmock also shares this config.
+  ]
+
+  defines = [
+    # In order to allow regex matches in gtest to be shared between Windows
+    # and other systems, we tell gtest to always use its internal engine.
+    "GTEST_HAS_POSIX_RE=0",
+    "GTEST_LANG_CXX11=1",
+  ]
+
+  # Gtest headers need to be able to find themselves.
+  include_dirs = [ "include" ]
+
+  if (is_win) {
+    cflags = [ "/wd4800" ]  # C4800: forcing value to bool (performance warning).
+  }
+}
+
+config("gtest_direct_config") {
+  visibility = [ ":*" ]
+  defines = [ "UNIT_TEST" ]
+}
+
+config("gtest_warnings") {
+  if (is_win && is_clang) {
+    # The Mutex constructor initializer list in gtest-port.cc is incorrectly
+    # ordered. See
+    # https://groups.google.com/d/msg/googletestframework/S5uSV8L2TX8/U1FaTDa6J6sJ.
+    cflags = [ "-Wno-reorder" ]
+  }
+}
+
+static_library("gtest") {
+  testonly = true
+  sources = [
+    "include/gtest/gtest-death-test.h",
+    "include/gtest/gtest-message.h",
+    "include/gtest/gtest-param-test.h",
+    "include/gtest/gtest-printers.h",
+    "include/gtest/gtest-spi.h",
+    "include/gtest/gtest-test-part.h",
+    "include/gtest/gtest-typed-test.h",
+    "include/gtest/gtest.h",
+    "include/gtest/gtest_pred_impl.h",
+    "include/gtest/internal/gtest-death-test-internal.h",
+    "include/gtest/internal/gtest-filepath.h",
+    "include/gtest/internal/gtest-internal.h",
+    "include/gtest/internal/gtest-linked_ptr.h",
+    "include/gtest/internal/gtest-param-util-generated.h",
+    "include/gtest/internal/gtest-param-util.h",
+    "include/gtest/internal/gtest-port.h",
+    "include/gtest/internal/gtest-string.h",
+    "include/gtest/internal/gtest-tuple.h",
+    "include/gtest/internal/gtest-type-util.h",
+
+    #"gtest/src/gtest-all.cc",  # Not needed by our build.
+    "src/gtest-death-test.cc",
+    "src/gtest-filepath.cc",
+    "src/gtest-internal-inl.h",
+    "src/gtest-port.cc",
+    "src/gtest-printers.cc",
+    "src/gtest-test-part.cc",
+    "src/gtest-typed-test.cc",
+    "src/gtest.cc",
+  ]
+  deps = []
+
+  if (gtest_include_multiprocess) {
+    sources += [
+      "../multiprocess_func_list.cc",
+      "../multiprocess_func_list.h",
+    ]
+  }
+
+  if (gtest_include_platform_test) {
+    sources += [ "../platform_test.h" ]
+  }
+
+  if ((is_mac || is_ios) && gtest_include_objc_support) {
+    if (is_ios) {
+      set_sources_assignment_filter([])
+    }
+    sources += [
+      "../gtest_mac.h",
+      "../gtest_mac.mm",
+    ]
+    if (gtest_include_platform_test) {
+      sources += [ "../platform_test_mac.mm" ]
+    }
+    set_sources_assignment_filter(sources_assignment_filter)
+  }
+
+  if (is_ios && gtest_include_ios_coverage) {
+    sources += [
+      "../coverage_util_ios.h",
+      "../coverage_util_ios.mm",
+    ]
+    deps += [ ":ios_enable_coverage" ]
+  }
+
+  include_dirs = [ "." ]
+
+  all_dependent_configs = [ ":gtest_config" ]
+  public_configs = [ ":gtest_direct_config" ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [
+    "//build/config/compiler:no_chromium_code",
+
+    # Must be after no_chromium_code for warning flags to be ordered correctly.
+    ":gtest_warnings",
+  ]
+}
+
+source_set("gtest_main") {
+  testonly = true
+  sources = [
+    "src/gtest_main.cc",
+  ]
+  deps = [
+    ":gtest",
+  ]
+}
+
+if (is_ios) {
+  buildflag_header("ios_enable_coverage") {
+    header = "ios_enable_coverage.h"
+    flags = [ "IOS_ENABLE_COVERAGE=$ios_enable_coverage" ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/android_platform/development/scripts/BUILD.gn
@@ -0,0 +1,19 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+
+group("stack_py") {
+  _py_files = read_file(
+          "//build/secondary/third_party/android_platform/development/scripts/stack.pydeps",
+          "list lines")
+
+  set_sources_assignment_filter([ "#*" ])
+  sources = _py_files
+  data = sources
+
+  data += ["${android_tool_prefix}addr2line",
+           "${android_tool_prefix}objdump",
+           "${android_tool_prefix}c++filt"]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/android_platform/development/scripts/stack.pydeps
@@ -0,0 +1,19 @@
+# Generated by running:
+#   build/print_python_deps.py --root third_party/android_platform/development/scripts --output build/secondary/third_party/android_platform/development/scripts/stack.pydeps third_party/android_platform/development/scripts/stack
+../../../../build/android/pylib/__init__.py
+../../../../build/android/pylib/constants/__init__.py
+../../../../build/android/pylib/symbols/__init__.py
+../../../../build/android/pylib/symbols/elf_symbolizer.py
+../../../catapult/devil/devil/__init__.py
+../../../catapult/devil/devil/android/__init__.py
+../../../catapult/devil/devil/android/constants/__init__.py
+../../../catapult/devil/devil/android/constants/chrome.py
+../../../catapult/devil/devil/android/sdk/__init__.py
+../../../catapult/devil/devil/android/sdk/keyevent.py
+../../../catapult/devil/devil/android/sdk/version_codes.py
+../../../catapult/devil/devil/constants/__init__.py
+../../../catapult/devil/devil/constants/exit_codes.py
+stack
+stack_core.py
+stack_libs.py
+symbol.py
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/android_tools/BUILD.gn
@@ -0,0 +1,369 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+if (!defined(default_android_support_library_version)) {
+  default_android_support_library_version = "25.0.1"
+}
+
+declare_args() {
+  android_support_library_version = default_android_support_library_version
+}
+
+config("cpu_features_include") {
+  include_dirs = [ "$android_ndk_root/sources/android/cpufeatures" ]
+}
+
+config("cpu_features_warnings") {
+  if (is_clang) {
+    # cpu-features.c has a few unused functions on x86; see b/26403333
+    cflags = [ "-Wno-unused-function" ]
+  }
+}
+
+source_set("cpu_features") {
+  sources = [
+    "$android_ndk_root/sources/android/cpufeatures/cpu-features.c",
+  ]
+  public_configs = [ ":cpu_features_include" ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [
+    "//build/config/compiler:no_chromium_code",
+
+    # Must be after no_chromium_code for warning flags to be ordered correctly.
+    ":cpu_features_warnings",
+  ]
+}
+
+lib_version = android_support_library_version
+lib_path = "$android_sdk_root/extras/android/m2repository/com/android/support"
+
+android_java_prebuilt("android_gcm_java") {
+  jar_path = "$android_sdk_root/extras/google/gcm/gcm-client/dist/gcm.jar"
+}
+
+android_java_prebuilt("emma_device_java") {
+  jar_path = "$android_sdk_root/tools/lib/emma_device.jar"
+  include_java_resources = true
+}
+
+android_aar_prebuilt("android_support_design_java") {
+  deps = [
+    ":android_support_v7_appcompat_java",
+  ]
+  _lib_name = "design"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_transition_java") {
+  deps = [
+    ":android_support_v7_appcompat_java",
+  ]
+  _lib_name = "transition"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_multidex_java") {
+  aar_path = "$lib_path/multidex/1.0.1/multidex-1.0.1.aar"
+}
+
+android_java_prebuilt("android_support_annotations_java") {
+  _lib_name = "support-annotations"
+  jar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.jar"
+}
+
+java_group("android_support_v4_java") {
+  deps = [
+    ":android_support_compat_java",
+    ":android_support_core_ui_java",
+    ":android_support_core_utils_java",
+    ":android_support_fragment_java",
+    ":android_support_media_compat_java",
+  ]
+}
+
+android_aar_prebuilt("android_support_compat_java") {
+  _lib_name = "support-compat"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_aidl = true  # We don't appear to need these currently.
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_core_ui_java") {
+  _lib_name = "support-core-ui"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_core_utils_java") {
+  _lib_name = "support-core-utils"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_fragment_java") {
+  _lib_name = "support-fragment"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_media_compat_java") {
+  _lib_name = "support-media-compat"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_aidl = true  # We don't appear to need these currently.
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_v13_java") {
+  deps = [
+    ":android_support_annotations_java",
+    ":android_support_v4_java",
+  ]
+  _lib_name = "support-v13"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_vector_drawable_java") {
+  _lib_name = "support-vector-drawable"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_v7_appcompat_java_internal") {
+  _lib_name = "appcompat-v7"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+java_group("android_support_v7_appcompat_java") {
+  deps = [
+    ":android_support_v4_java",
+    ":android_support_v7_appcompat_java_internal",
+    ":android_support_vector_drawable_java",
+  ]
+}
+
+android_aar_prebuilt("android_support_v7_gridlayout_java") {
+  deps = [
+    ":android_support_v7_appcompat_java",
+  ]
+  _lib_name = "gridlayout-v7"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_v7_mediarouter_java") {
+  deps = [
+    ":android_support_v7_appcompat_java",
+  ]
+  _lib_name = "mediarouter-v7"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_v7_recyclerview_java") {
+  deps = [
+    ":android_support_v7_appcompat_java",
+  ]
+  _lib_name = "recyclerview-v7"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_v7_preference_java") {
+  deps = [
+    ":android_support_v7_appcompat_java",
+  ]
+  _lib_name = "preference-v7"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_v14_preference_java") {
+  deps = [
+    ":android_support_v7_preference_java",
+  ]
+  _lib_name = "preference-v14"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_v17_leanback_java") {
+  deps = [
+    ":android_support_v4_java",
+    ":android_support_v7_recyclerview_java",
+  ]
+  _lib_name = "leanback-v17"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("android_support_v17_preference_java") {
+  deps = [
+    ":android_support_v14_preference_java",
+    ":android_support_v17_leanback_java",
+    ":android_support_v4_java",
+    ":android_support_v7_appcompat_java",
+    ":android_support_v7_preference_java",
+    ":android_support_v7_recyclerview_java",
+  ]
+  _lib_name = "preference-leanback-v17"
+  aar_path = "$lib_path/$_lib_name/$lib_version/$_lib_name-$lib_version.aar"
+}
+
+android_library("android_support_chromium_java") {
+  testonly = true
+  java_files = [ "$android_sdk_root/extras/chromium/support/src/org/chromium/android/support/PackageManagerWrapper.java" ]
+}
+
+# TODO(dgn): Remove this once no other target has a dependency on it
+java_group("google_play_services_default_resources") {
+  deps = []
+}
+
+# TODO(dgn): Remove this once other targets register dependencies on only the
+# modules they need.
+java_group("google_play_services_default_java") {
+  deps = [
+    ":google_play_services_auth_base_java",
+    ":google_play_services_auth_java",
+    ":google_play_services_base_java",
+    ":google_play_services_basement_java",
+    ":google_play_services_cast_java",
+    ":google_play_services_gcm_java",
+    ":google_play_services_iid_java",
+    ":google_play_services_location_java",
+    ":google_play_services_nearby_java",
+    ":google_play_services_vision_java",
+  ]
+}
+
+# TODO(dgn): Use the POM files instead of hardcoding the dependencies.
+gms_path = "$default_extras_android_sdk_root/extras/google/m2repository/com/google/android/gms"
+gms_version = "10.2.0"
+
+android_aar_prebuilt("google_play_services_basement_java") {
+  deps = [
+    ":android_support_v4_java",
+  ]
+  _lib_name = "play-services-basement"
+  aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
+  input_jars_paths = [ "$android_sdk/optional/org.apache.http.legacy.jar" ]
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("google_play_services_tasks_java") {
+  deps = [
+    ":google_play_services_basement_java",
+  ]
+  _lib_name = "play-services-tasks"
+  aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("google_play_services_base_java") {
+  deps = [
+    ":google_play_services_basement_java",
+    ":google_play_services_tasks_java",
+  ]
+  _lib_name = "play-services-base"
+  aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("google_play_services_auth_base_java") {
+  deps = [
+    ":google_play_services_base_java",
+    ":google_play_services_basement_java",
+  ]
+  _lib_name = "play-services-auth-base"
+  aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("google_play_services_auth_java") {
+  deps = [
+    ":google_play_services_auth_base_java",
+    ":google_play_services_base_java",
+    ":google_play_services_basement_java",
+  ]
+  _lib_name = "play-services-auth"
+  aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("google_play_services_cast_java") {
+  deps = [
+    ":android_support_v7_mediarouter_java",
+    ":google_play_services_base_java",
+    ":google_play_services_basement_java",
+  ]
+  _lib_name = "play-services-cast"
+  aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("google_play_services_iid_java") {
+  deps = [
+    ":google_play_services_base_java",
+    ":google_play_services_basement_java",
+  ]
+  _lib_name = "play-services-iid"
+  aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("google_play_services_gcm_java") {
+  deps = [
+    ":google_play_services_base_java",
+    ":google_play_services_basement_java",
+    ":google_play_services_iid_java",
+  ]
+  _lib_name = "play-services-gcm"
+  aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("google_play_services_location_java") {
+  deps = [
+    ":google_play_services_base_java",
+    ":google_play_services_basement_java",
+  ]
+  _lib_name = "play-services-location"
+  aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("google_play_services_nearby_java") {
+  deps = [
+    ":google_play_services_base_java",
+    ":google_play_services_basement_java",
+  ]
+  _lib_name = "play-services-nearby"
+  aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
+  ignore_manifest = true
+}
+
+android_aar_prebuilt("google_play_services_vision_java") {
+  deps = [
+    ":google_play_services_base_java",
+    ":google_play_services_basement_java",
+  ]
+  _lib_name = "play-services-vision"
+  aar_path = "$gms_path/$_lib_name/$gms_version/$_lib_name-$gms_version.aar"
+  ignore_manifest = true
+}
+
+# TODO(paulmiller): Replace this with a proper target after rolling to a GMS
+# version which has vision-common.
+java_group("google_play_services_vision_common_java") {
+  deps = [
+    ":google_play_services_vision_java",
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/android_tools/apk_proguard.flags
@@ -0,0 +1,10 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Keep all Parcelables, since Play Services has some that are used only by
+# reflection.
+# TODO(agrieve): Remove this once proguard flags provided by play services via
+#     .aars are used.  https://crbug.com/640836
+-keep class * implements android.os.Parcelable
+
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/crashpad/OWNERS
@@ -0,0 +1,5 @@
+mark@chromium.org
+rsesek@chromium.org
+scottmg@chromium.org
+
+# TEAM: crashpad-dev@chromium.org
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/client/BUILD.gn
@@ -0,0 +1,53 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("client_config") {
+  include_dirs = [ ".." ]
+}
+
+static_library("client") {
+  sources = [
+    "crash_report_database.cc",
+    "crash_report_database.h",
+    "crash_report_database_mac.mm",
+    "crash_report_database_win.cc",
+    "crashpad_client.h",
+    "crashpad_client_mac.cc",
+    "crashpad_client_win.cc",
+    "crashpad_info.cc",
+    "crashpad_info.h",
+    "prune_crash_reports.cc",
+    "prune_crash_reports.h",
+    "settings.cc",
+    "settings.h",
+    "simple_address_range_bag.cc",
+    "simple_address_range_bag.h",
+    "simple_string_dictionary.cc",
+    "simple_string_dictionary.h",
+    "simulate_crash.h",
+    "simulate_crash_mac.cc",
+    "simulate_crash_mac.h",
+    "simulate_crash_win.h",
+  ]
+
+  if (is_mac) {
+    sources += [
+      "capture_context_mac.S",
+      "capture_context_mac.h",
+    ]
+  }
+
+  public_configs = [ ":client_config" ]
+
+  deps = [
+    "//base",
+    "//third_party/crashpad/crashpad/compat",
+    "//third_party/crashpad/crashpad/util",
+  ]
+
+  if (is_win) {
+    libs = [ "rpcrt4.lib" ]
+    cflags = [ "/wd4201" ]  # nonstandard extension used : nameless struct/union.
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/compat/BUILD.gn
@@ -0,0 +1,69 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("compat_config") {
+  include_dirs = []
+
+  if (is_win) {
+    include_dirs += [ "win" ]
+  } else {
+    include_dirs += [ "non_win" ]
+  }
+
+  if (is_mac) {
+    include_dirs += [
+      "mac",
+      "non_cxx11_lib",
+    ]
+  }
+}
+
+static_library("compat") {
+  sources = []
+  if (is_mac) {
+    sources += [
+      "mac/AvailabilityMacros.h",
+      "mac/kern/exc_resource.h",
+      "mac/mach-o/getsect.cc",
+      "mac/mach-o/getsect.h",
+      "mac/mach-o/loader.h",
+      "mac/mach/mach.h",
+      "mac/sys/resource.h",
+      "non_cxx11_lib/type_traits",
+      "non_cxx11_lib/utility",
+    ]
+  } else {
+    sources += [ "non_mac/mach/mach.h" ]
+  }
+
+  if (is_win) {
+    sources += [
+      "win/getopt.h",
+      "win/strings.cc",
+      "win/strings.h",
+      "win/sys/types.h",
+      "win/time.cc",
+      "win/time.h",
+      "win/winnt.h",
+    ]
+  } else {
+    sources += [
+      "non_win/dbghelp.h",
+      "non_win/minwinbase.h",
+      "non_win/timezoneapi.h",
+      "non_win/verrsrc.h",
+      "non_win/windows.h",
+      "non_win/winnt.h",
+    ]
+  }
+
+  public_configs = [ ":compat_config" ]
+
+  deps = []
+  if (is_mac) {
+    deps += [ "//third_party/crashpad/crashpad/third_party/apple_cctools" ]
+  } else if (is_win) {
+    deps += [ "//third_party/crashpad/crashpad/third_party/getopt" ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/handler/BUILD.gn
@@ -0,0 +1,67 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+static_library("handler_lib") {
+  sources = [
+    "crash_report_upload_thread.cc",
+    "crash_report_upload_thread.h",
+    "handler_main.cc",
+    "handler_main.h",
+    "mac/crash_report_exception_handler.cc",
+    "mac/crash_report_exception_handler.h",
+    "mac/exception_handler_server.cc",
+    "mac/exception_handler_server.h",
+    "mac/file_limit_annotation.cc",
+    "mac/file_limit_annotation.h",
+    "prune_crash_reports_thread.cc",
+    "prune_crash_reports_thread.h",
+    "user_stream_data_source.cc",
+    "user_stream_data_source.h",
+    "win/crash_report_exception_handler.cc",
+    "win/crash_report_exception_handler.h",
+  ]
+
+  include_dirs = [ ".." ]
+
+  deps = [
+    "../compat",
+    "../minidump",
+    "../snapshot",
+    "../tools:tool_support",
+    "//base",
+  ]
+
+  if (is_win) {
+    cflags = [ "/wd4201" ]
+  }
+}
+
+executable("crashpad_handler") {
+  sources = [
+    "main.cc",
+  ]
+
+  include_dirs = [ ".." ]
+
+  deps = [
+    ":handler_lib",
+    "../compat",
+    "//base",
+    "//build/win:default_exe_manifest",
+  ]
+
+  if (is_mac && is_component_build) {
+    # The handler is in Chromium.app/Contents/Versions/X/Chromium Framework.framework/Versions/A/Helpers/
+    # so set rpath up to the base.
+    ldflags = [
+      "-rpath",
+      "@loader_path/../../../../../../../..",
+    ]
+  }
+
+  if (is_win) {
+    configs -= [ "//build/config/win:console" ]
+    configs += [ "//build/config/win:windowed" ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/minidump/BUILD.gn
@@ -0,0 +1,71 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+static_library("minidump") {
+  deps = [
+    "../compat",
+    "../snapshot",
+    "../util",
+    "//base",
+  ]
+
+  include_dirs = [ ".." ]
+
+  if (is_win) {
+    cflags = [
+      "/wd4201",
+      "/wd4324",
+    ]
+  }
+
+  sources = [
+    "minidump_context.h",
+    "minidump_context_writer.cc",
+    "minidump_context_writer.h",
+    "minidump_crashpad_info_writer.cc",
+    "minidump_crashpad_info_writer.h",
+    "minidump_exception_writer.cc",
+    "minidump_exception_writer.h",
+    "minidump_extensions.cc",
+    "minidump_extensions.h",
+    "minidump_file_writer.cc",
+    "minidump_file_writer.h",
+    "minidump_handle_writer.cc",
+    "minidump_handle_writer.h",
+    "minidump_memory_info_writer.cc",
+    "minidump_memory_info_writer.h",
+    "minidump_memory_writer.cc",
+    "minidump_memory_writer.h",
+    "minidump_misc_info_writer.cc",
+    "minidump_misc_info_writer.h",
+    "minidump_module_crashpad_info_writer.cc",
+    "minidump_module_crashpad_info_writer.h",
+    "minidump_module_writer.cc",
+    "minidump_module_writer.h",
+    "minidump_rva_list_writer.cc",
+    "minidump_rva_list_writer.h",
+    "minidump_simple_string_dictionary_writer.cc",
+    "minidump_simple_string_dictionary_writer.h",
+    "minidump_stream_writer.cc",
+    "minidump_stream_writer.h",
+    "minidump_string_writer.cc",
+    "minidump_string_writer.h",
+    "minidump_system_info_writer.cc",
+    "minidump_system_info_writer.h",
+    "minidump_thread_id_map.cc",
+    "minidump_thread_id_map.h",
+    "minidump_thread_writer.cc",
+    "minidump_thread_writer.h",
+    "minidump_unloaded_module_writer.cc",
+    "minidump_unloaded_module_writer.h",
+    "minidump_user_extension_stream_data_source.cc",
+    "minidump_user_extension_stream_data_source.h",
+    "minidump_user_stream_writer.cc",
+    "minidump_user_stream_writer.h",
+    "minidump_writable.cc",
+    "minidump_writable.h",
+    "minidump_writer_util.cc",
+    "minidump_writer_util.h",
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/snapshot/BUILD.gn
@@ -0,0 +1,136 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+static_library("snapshot") {
+  deps = [
+    "../client",
+    "../compat",
+    "../util",
+    "//base",
+  ]
+
+  include_dirs = [ ".." ]
+
+  if (is_win) {
+    cflags = [ "/wd4201" ]
+    libs = [ "powrprof.lib" ]
+  }
+
+  sources = [
+    "capture_memory.cc",
+    "capture_memory.h",
+    "cpu_architecture.h",
+    "cpu_context.cc",
+    "cpu_context.h",
+    "crashpad_info_client_options.cc",
+    "crashpad_info_client_options.h",
+    "exception_snapshot.h",
+    "handle_snapshot.cc",
+    "handle_snapshot.h",
+    "mac/cpu_context_mac.cc",
+    "mac/cpu_context_mac.h",
+    "mac/exception_snapshot_mac.cc",
+    "mac/exception_snapshot_mac.h",
+    "mac/mach_o_image_annotations_reader.cc",
+    "mac/mach_o_image_annotations_reader.h",
+    "mac/mach_o_image_reader.cc",
+    "mac/mach_o_image_reader.h",
+    "mac/mach_o_image_segment_reader.cc",
+    "mac/mach_o_image_segment_reader.h",
+    "mac/mach_o_image_symbol_table_reader.cc",
+    "mac/mach_o_image_symbol_table_reader.h",
+    "mac/memory_snapshot_mac.cc",
+    "mac/memory_snapshot_mac.h",
+    "mac/module_snapshot_mac.cc",
+    "mac/module_snapshot_mac.h",
+    "mac/process_reader.cc",
+    "mac/process_reader.h",
+    "mac/process_snapshot_mac.cc",
+    "mac/process_snapshot_mac.h",
+    "mac/process_types.cc",
+    "mac/process_types.h",
+    "mac/process_types/all.proctype",
+    "mac/process_types/crashpad_info.proctype",
+    "mac/process_types/crashreporterclient.proctype",
+    "mac/process_types/custom.cc",
+    "mac/process_types/dyld_images.proctype",
+    "mac/process_types/flavors.h",
+    "mac/process_types/internal.h",
+    "mac/process_types/loader.proctype",
+    "mac/process_types/nlist.proctype",
+    "mac/process_types/traits.h",
+    "mac/system_snapshot_mac.cc",
+    "mac/system_snapshot_mac.h",
+    "mac/thread_snapshot_mac.cc",
+    "mac/thread_snapshot_mac.h",
+    "memory_snapshot.h",
+    "minidump/minidump_simple_string_dictionary_reader.cc",
+    "minidump/minidump_simple_string_dictionary_reader.h",
+    "minidump/minidump_string_list_reader.cc",
+    "minidump/minidump_string_list_reader.h",
+    "minidump/minidump_string_reader.cc",
+    "minidump/minidump_string_reader.h",
+    "minidump/module_snapshot_minidump.cc",
+    "minidump/module_snapshot_minidump.h",
+    "minidump/process_snapshot_minidump.cc",
+    "minidump/process_snapshot_minidump.h",
+    "module_snapshot.h",
+    "process_snapshot.h",
+    "system_snapshot.h",
+    "thread_snapshot.h",
+    "unloaded_module_snapshot.cc",
+    "unloaded_module_snapshot.h",
+    "win/capture_memory_delegate_win.cc",
+    "win/capture_memory_delegate_win.h",
+    "win/cpu_context_win.cc",
+    "win/cpu_context_win.h",
+    "win/exception_snapshot_win.cc",
+    "win/exception_snapshot_win.h",
+    "win/memory_map_region_snapshot_win.cc",
+    "win/memory_map_region_snapshot_win.h",
+    "win/memory_snapshot_win.cc",
+    "win/memory_snapshot_win.h",
+    "win/module_snapshot_win.cc",
+    "win/module_snapshot_win.h",
+    "win/pe_image_annotations_reader.cc",
+    "win/pe_image_annotations_reader.h",
+    "win/pe_image_reader.cc",
+    "win/pe_image_reader.h",
+    "win/pe_image_resource_reader.cc",
+    "win/pe_image_resource_reader.h",
+    "win/process_reader_win.cc",
+    "win/process_reader_win.h",
+    "win/process_snapshot_win.cc",
+    "win/process_snapshot_win.h",
+    "win/process_subrange_reader.cc",
+    "win/process_subrange_reader.h",
+    "win/system_snapshot_win.cc",
+    "win/system_snapshot_win.h",
+    "win/thread_snapshot_win.cc",
+    "win/thread_snapshot_win.h",
+  ]
+}
+
+if (is_win) {
+  source_set("snapshot_api") {
+    deps = [
+      ":snapshot",
+      "../compat",
+      "../util",
+      "//base",
+    ]
+
+    include_dirs = [ ".." ]
+
+    cflags = [ "/wd4201" ]
+
+    sources = [
+      "api/module_annotations_win.cc",
+      "api/module_annotations_win.h",
+    ]
+  }
+} else {
+  group("snapshot_api") {
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/third_party/apple_cctools/BUILD.gn
@@ -0,0 +1,15 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("apple_cctools_config") {
+  include_dirs = [ "../.." ]
+}
+
+source_set("apple_cctools") {
+  sources = [
+    "cctools/include/mach-o/getsect.h",
+    "cctools/libmacho/getsecbyname.c",
+  ]
+  public_configs = [ ":apple_cctools_config" ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/third_party/getopt/BUILD.gn
@@ -0,0 +1,10 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+source_set("getopt") {
+  sources = [
+    "getopt.cc",
+    "getopt.h",
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/third_party/zlib/BUILD.gn
@@ -0,0 +1,14 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("zlib_config") {
+  defines = [ "CRASHPAD_ZLIB_SOURCE_CHROMIUM" ]
+}
+
+group("zlib") {
+  public_deps = [
+    "//third_party/zlib:zlib",
+  ]
+  public_configs = [ ":zlib_config" ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/tools/BUILD.gn
@@ -0,0 +1,39 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+source_set("tool_support") {
+  deps = [
+    "//base",
+  ]
+
+  include_dirs = [ ".." ]
+
+  if (is_win) {
+    cflags = [ "/wd4201" ]
+  }
+
+  sources = [
+    "tool_support.cc",
+    "tool_support.h",
+  ]
+}
+
+executable("crashpad_database_util") {
+  sources = [
+    "crashpad_database_util.cc",
+  ]
+
+  include_dirs = [ ".." ]
+
+  deps = [
+    ":tool_support",
+    "//base",
+
+    # Default manifest on Windows (a no-op elsewhere).
+    "//build/win:default_exe_manifest",
+    "//third_party/crashpad/crashpad/client",
+    "//third_party/crashpad/crashpad/compat",
+    "//third_party/crashpad/crashpad/util",
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/crashpad/crashpad/util/BUILD.gn
@@ -0,0 +1,284 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+if (is_mac) {
+  import("//build/config/sysroot.gni")
+}
+
+# Allows the source set to inject ldflags for targets that link to it.
+config("util_link_config") {
+  if (is_mac) {
+    libs = [ "bsm" ]
+  }
+}
+
+if (is_mac) {
+  action_foreach("mig") {
+    script = "mach/mig.py"
+    sources = [
+      "$sysroot/usr/include/mach/exc.defs",
+      "$sysroot/usr/include/mach/mach_exc.defs",
+      "$sysroot/usr/include/mach/notify.defs",
+      "mach/child_port.defs",
+    ]
+
+    outputs = [
+      "$target_gen_dir/mach/{{source_name_part}}User.c",
+      "$target_gen_dir/mach/{{source_name_part}}Server.c",
+      "$target_gen_dir/mach/{{source_name_part}}.h",
+      "$target_gen_dir/mach/{{source_name_part}}Server.h",
+    ]
+
+    args = [ "{{source}}" ]
+    args += rebase_path(outputs, root_build_dir)
+    if (!use_system_xcode) {
+      args += [
+        "--developer-dir",
+        hermetic_xcode_path,
+      ]
+    }
+    args += [
+      "--sdk",
+      mac_sdk_path,
+    ]
+  }
+}
+
+static_library("util") {
+  sources = [
+    "file/delimited_file_reader.cc",
+    "file/delimited_file_reader.h",
+    "file/file_io.cc",
+    "file/file_io.h",
+    "file/file_io_posix.cc",
+    "file/file_io_win.cc",
+    "file/file_reader.cc",
+    "file/file_reader.h",
+    "file/file_seeker.cc",
+    "file/file_seeker.h",
+    "file/file_writer.cc",
+    "file/file_writer.h",
+    "file/string_file.cc",
+    "file/string_file.h",
+    "mac/checked_mach_address_range.h",
+    "mac/launchd.h",
+    "mac/launchd.mm",
+    "mac/mac_util.cc",
+    "mac/mac_util.h",
+    "mac/service_management.cc",
+    "mac/service_management.h",
+    "mac/xattr.cc",
+    "mac/xattr.h",
+    "misc/address_sanitizer.h",
+    "misc/arraysize_unsafe.h",
+    "misc/clock.h",
+    "misc/clock_mac.cc",
+    "misc/clock_posix.cc",
+    "misc/clock_win.cc",
+    "misc/from_pointer_cast.h",
+    "misc/implicit_cast.h",
+    "misc/initialization_state.h",
+    "misc/initialization_state_dcheck.cc",
+    "misc/initialization_state_dcheck.h",
+    "misc/metrics.cc",
+    "misc/metrics.h",
+    "misc/paths.h",
+    "misc/paths_mac.cc",
+    "misc/paths_win.cc",
+    "misc/pdb_structures.cc",
+    "misc/pdb_structures.h",
+    "misc/random_string.cc",
+    "misc/random_string.h",
+    "misc/scoped_forbid_return.cc",
+    "misc/scoped_forbid_return.h",
+    "misc/symbolic_constants_common.h",
+    "misc/tri_state.h",
+    "misc/uuid.cc",
+    "misc/uuid.h",
+    "misc/zlib.cc",
+    "misc/zlib.h",
+    "net/http_body.cc",
+    "net/http_body.h",
+    "net/http_body_gzip.cc",
+    "net/http_body_gzip.h",
+    "net/http_headers.cc",
+    "net/http_headers.h",
+    "net/http_multipart_builder.cc",
+    "net/http_multipart_builder.h",
+    "net/http_transport.cc",
+    "net/http_transport.h",
+    "net/http_transport_mac.mm",
+    "net/http_transport_win.cc",
+    "numeric/checked_address_range.cc",
+    "numeric/checked_address_range.h",
+    "numeric/checked_range.h",
+    "numeric/in_range_cast.h",
+    "numeric/int128.h",
+    "numeric/safe_assignment.h",
+    "posix/close_multiple.cc",
+    "posix/close_multiple.h",
+    "posix/close_stdio.cc",
+    "posix/close_stdio.h",
+    "posix/drop_privileges.cc",
+    "posix/drop_privileges.h",
+    "posix/process_info.h",
+    "posix/process_info_mac.cc",
+    "posix/scoped_dir.cc",
+    "posix/scoped_dir.h",
+    "posix/scoped_mmap.cc",
+    "posix/scoped_mmap.h",
+    "posix/signals.cc",
+    "posix/signals.h",
+    "posix/symbolic_constants_posix.cc",
+    "posix/symbolic_constants_posix.h",
+    "stdlib/aligned_allocator.cc",
+    "stdlib/aligned_allocator.h",
+    "stdlib/cxx.h",
+    "stdlib/map_insert.h",
+    "stdlib/objc.h",
+    "stdlib/pointer_container.h",
+    "stdlib/string_number_conversion.cc",
+    "stdlib/string_number_conversion.h",
+    "stdlib/strlcpy.cc",
+    "stdlib/strlcpy.h",
+    "stdlib/strnlen.cc",
+    "stdlib/strnlen.h",
+    "stdlib/thread_safe_vector.h",
+    "string/split_string.cc",
+    "string/split_string.h",
+    "synchronization/semaphore.h",
+    "synchronization/semaphore_mac.cc",
+    "synchronization/semaphore_posix.cc",
+    "synchronization/semaphore_win.cc",
+    "thread/thread.cc",
+    "thread/thread.h",
+    "thread/thread_log_messages.cc",
+    "thread/thread_log_messages.h",
+    "thread/thread_posix.cc",
+    "thread/thread_win.cc",
+    "thread/worker_thread.cc",
+    "thread/worker_thread.h",
+    "win/address_types.h",
+    "win/capture_context.asm",
+    "win/capture_context.h",
+    "win/checked_win_address_range.h",
+    "win/command_line.cc",
+    "win/command_line.h",
+    "win/critical_section_with_debug_info.cc",
+    "win/critical_section_with_debug_info.h",
+    "win/exception_handler_server.cc",
+    "win/exception_handler_server.h",
+    "win/get_function.cc",
+    "win/get_function.h",
+    "win/get_module_information.cc",
+    "win/get_module_information.h",
+    "win/handle.cc",
+    "win/handle.h",
+    "win/initial_client_data.cc",
+    "win/initial_client_data.h",
+    "win/module_version.cc",
+    "win/module_version.h",
+    "win/nt_internals.cc",
+    "win/nt_internals.h",
+    "win/ntstatus_logging.cc",
+    "win/ntstatus_logging.h",
+    "win/process_info.cc",
+    "win/process_info.h",
+    "win/process_structs.h",
+    "win/registration_protocol_win.cc",
+    "win/registration_protocol_win.h",
+    "win/safe_terminate_process.asm",
+    "win/safe_terminate_process.h",
+    "win/scoped_handle.cc",
+    "win/scoped_handle.h",
+    "win/scoped_local_alloc.cc",
+    "win/scoped_local_alloc.h",
+    "win/scoped_process_suspend.cc",
+    "win/scoped_process_suspend.h",
+    "win/session_end_watcher.cc",
+    "win/session_end_watcher.h",
+    "win/termination_codes.h",
+    "win/time.cc",
+    "win/time.h",
+    "win/xp_compat.h",
+  ]
+
+  if (is_mac) {
+    # mach/ sources are not globally filtered.
+    sources += [
+      "mach/child_port_handshake.cc",
+      "mach/child_port_handshake.h",
+      "mach/child_port_server.cc",
+      "mach/child_port_server.h",
+      "mach/child_port_types.h",
+      "mach/composite_mach_message_server.cc",
+      "mach/composite_mach_message_server.h",
+      "mach/exc_client_variants.cc",
+      "mach/exc_client_variants.h",
+      "mach/exc_server_variants.cc",
+      "mach/exc_server_variants.h",
+      "mach/exception_behaviors.cc",
+      "mach/exception_behaviors.h",
+      "mach/exception_ports.cc",
+      "mach/exception_ports.h",
+      "mach/exception_types.cc",
+      "mach/exception_types.h",
+      "mach/mach_extensions.cc",
+      "mach/mach_extensions.h",
+      "mach/mach_message.cc",
+      "mach/mach_message.h",
+      "mach/mach_message_server.cc",
+      "mach/mach_message_server.h",
+      "mach/notify_server.cc",
+      "mach/notify_server.h",
+      "mach/scoped_task_suspend.cc",
+      "mach/scoped_task_suspend.h",
+      "mach/symbolic_constants_mach.cc",
+      "mach/symbolic_constants_mach.h",
+      "mach/task_for_pid.cc",
+      "mach/task_for_pid.h",
+      "mach/task_memory.cc",
+      "mach/task_memory.h",
+    ]
+  }
+
+  # Include files from here and generated files starting with "util".
+  include_dirs = [
+    "..",
+    "$root_gen_dir/third_party/crashpad/crashpad",
+  ]
+
+  all_dependent_configs = [ ":util_link_config" ]
+
+  deps = [
+    "//base",
+    "//third_party/crashpad/crashpad/third_party/zlib",
+  ]
+
+  public_deps = [
+    "//third_party/crashpad/crashpad/compat",
+  ]
+
+  if (is_win) {
+    libs = [ "winhttp.lib" ]
+    cflags = [
+      "/wd4201",  # nonstandard extension used : nameless struct/union.
+      "/wd4577",  # 'noexcept' used with no exception handling mode specified.
+    ]
+
+    if (current_cpu == "x86") {
+      asmflags = [ "/safeseh" ]
+    }
+  } else if (is_mac) {
+    sources += get_target_outputs(":mig")
+    deps += [ ":mig" ]
+    libs = [
+      "CoreFoundation.framework",
+      "Foundation.framework",
+      "IOKit.framework",
+    ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/libjpeg_turbo/BUILD.gn
@@ -0,0 +1,224 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Do not use the targets in this file unless you need a certain libjpeg
+# implementation. Use the meta target //third_party:jpeg instead.
+
+import("//build/config/sanitizers/sanitizers.gni")
+if (current_cpu == "arm") {
+  import("//build/config/arm.gni")
+}
+
+assert(!is_ios, "This is not used on iOS, don't drag it in unintentionally")
+
+if (current_cpu == "x86" || current_cpu == "x64") {
+  import("//third_party/yasm/yasm_assemble.gni")
+
+  yasm_assemble("simd_asm") {
+    defines = []
+
+    if (current_cpu == "x86") {
+      sources = [
+        "simd/jccolor-mmx.asm",
+        "simd/jccolor-sse2.asm",
+        "simd/jcgray-mmx.asm",
+        "simd/jcgray-sse2.asm",
+        "simd/jchuff-sse2.asm",
+        "simd/jcsample-mmx.asm",
+        "simd/jcsample-sse2.asm",
+        "simd/jdcolor-mmx.asm",
+        "simd/jdcolor-sse2.asm",
+        "simd/jdmerge-mmx.asm",
+        "simd/jdmerge-sse2.asm",
+        "simd/jdsample-mmx.asm",
+        "simd/jdsample-sse2.asm",
+        "simd/jfdctflt-3dn.asm",
+        "simd/jfdctflt-sse.asm",
+        "simd/jfdctfst-mmx.asm",
+        "simd/jfdctfst-sse2.asm",
+        "simd/jfdctint-mmx.asm",
+        "simd/jfdctint-sse2.asm",
+        "simd/jidctflt-3dn.asm",
+        "simd/jidctflt-sse.asm",
+        "simd/jidctflt-sse2.asm",
+        "simd/jidctfst-mmx.asm",
+        "simd/jidctfst-sse2.asm",
+        "simd/jidctint-mmx.asm",
+        "simd/jidctint-sse2.asm",
+        "simd/jidctred-mmx.asm",
+        "simd/jidctred-sse2.asm",
+        "simd/jquant-3dn.asm",
+        "simd/jquant-mmx.asm",
+        "simd/jquant-sse.asm",
+        "simd/jquantf-sse2.asm",
+        "simd/jquanti-sse2.asm",
+        "simd/jsimdcpu.asm",
+      ]
+      defines += [
+        "__x86__",
+        "PIC",
+      ]
+    } else if (current_cpu == "x64") {
+      sources = [
+        "simd/jccolor-sse2-64.asm",
+        "simd/jcgray-sse2-64.asm",
+        "simd/jchuff-sse2-64.asm",
+        "simd/jcsample-sse2-64.asm",
+        "simd/jdcolor-sse2-64.asm",
+        "simd/jdmerge-sse2-64.asm",
+        "simd/jdsample-sse2-64.asm",
+        "simd/jfdctflt-sse-64.asm",
+        "simd/jfdctfst-sse2-64.asm",
+        "simd/jfdctint-sse2-64.asm",
+        "simd/jidctflt-sse2-64.asm",
+        "simd/jidctfst-sse2-64.asm",
+        "simd/jidctint-sse2-64.asm",
+        "simd/jidctred-sse2-64.asm",
+        "simd/jquantf-sse2-64.asm",
+        "simd/jquanti-sse2-64.asm",
+      ]
+      defines += [
+        "__x86_64__",
+        "PIC",
+      ]
+    }
+
+    if (is_win) {
+      defines += [ "MSVC" ]
+      include_dirs = [ "win" ]
+      if (current_cpu == "x86") {
+        defines += [ "WIN32" ]
+      } else {
+        defines += [ "WIN64" ]
+      }
+    } else if (is_mac || is_ios) {
+      defines += [ "MACHO" ]
+      include_dirs = [ "mac" ]
+    } else if (is_linux || is_android) {
+      defines += [ "ELF" ]
+      include_dirs = [ "linux" ]
+    }
+  }
+}
+
+static_library("simd") {
+  if (current_cpu == "x86") {
+    deps = [
+      ":simd_asm",
+    ]
+    sources = [
+      "simd/jsimd_i386.c",
+    ]
+  } else if (current_cpu == "x64") {
+    deps = [
+      ":simd_asm",
+    ]
+    sources = [
+      "simd/jsimd_x86_64.c",
+    ]
+  } else if (current_cpu == "arm" && arm_version >= 7 &&
+             (arm_use_neon || arm_optionally_use_neon)) {
+    sources = [
+      "simd/jsimd_arm.c",
+      "simd/jsimd_arm_neon.S",
+    ]
+  } else if (current_cpu == "arm64") {
+    sources = [
+      "simd/jsimd_arm64.c",
+      "simd/jsimd_arm64_neon.S",
+    ]
+  } else {
+    sources = [
+      "jsimd_none.c",
+    ]
+  }
+
+  if (is_win) {
+    cflags = [ "/wd4245" ]
+  }
+}
+
+config("libjpeg_config") {
+  include_dirs = [ "." ]
+}
+
+static_library("libjpeg") {
+  sources = [
+    "jcapimin.c",
+    "jcapistd.c",
+    "jccoefct.c",
+    "jccolor.c",
+    "jcdctmgr.c",
+    "jchuff.c",
+    "jchuff.h",
+    "jcinit.c",
+    "jcmainct.c",
+    "jcmarker.c",
+    "jcmaster.c",
+    "jcomapi.c",
+    "jconfig.h",
+    "jcparam.c",
+    "jcphuff.c",
+    "jcprepct.c",
+    "jcsample.c",
+    "jdapimin.c",
+    "jdapistd.c",
+    "jdatadst.c",
+    "jdatasrc.c",
+    "jdcoefct.c",
+    "jdcolor.c",
+    "jdct.h",
+    "jddctmgr.c",
+    "jdhuff.c",
+    "jdhuff.h",
+    "jdinput.c",
+    "jdmainct.c",
+    "jdmarker.c",
+    "jdmaster.c",
+    "jdmerge.c",
+    "jdphuff.c",
+    "jdpostct.c",
+    "jdsample.c",
+    "jerror.c",
+    "jerror.h",
+    "jfdctflt.c",
+    "jfdctfst.c",
+    "jfdctint.c",
+    "jidctflt.c",
+    "jidctfst.c",
+    "jidctint.c",
+    "jidctred.c",
+    "jinclude.h",
+    "jmemmgr.c",
+    "jmemnobs.c",
+    "jmemsys.h",
+    "jmorecfg.h",
+    "jpegint.h",
+    "jpeglib.h",
+    "jpeglibmangler.h",
+    "jquant1.c",
+    "jquant2.c",
+    "jutils.c",
+    "jversion.h",
+  ]
+
+  defines = [
+    "WITH_SIMD",
+    "NO_GETENV",
+  ]
+
+  configs += [ ":libjpeg_config" ]
+
+  public_configs = [ ":libjpeg_config" ]
+
+  # MemorySanitizer doesn't support assembly code, so keep it disabled in
+  # MSan builds for now.
+  if (is_msan) {
+    sources += [ "jsimd_none.c" ]
+  } else {
+    deps = [
+      ":simd",
+    ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/secondary/third_party/nss/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+if (is_linux) {
+  # This is a dependency on NSS with no libssl. On Linux we use a built-in SSL
+  # library but the system NSS libraries. Non-Linux platforms using NSS use the
+  # hermetic one in //third_party/nss.
+  #
+  # Generally you should depend on //crypto:platform instead of using this
+  # config since that will properly pick up NSS or OpenSSL depending on
+  # platform and build config.
+  pkg_config("system_nss_no_ssl_config") {
+    packages = [ "nss" ]
+    extra_args = [
+      "-v",
+      "-lssl3",
+    ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/shim_headers.gni
@@ -0,0 +1,41 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+template("shim_headers") {
+  action_name = "gen_${target_name}"
+  config_name = "${target_name}_config"
+  shim_headers_path = "${root_gen_dir}/shim_headers/${target_name}"
+
+  config(config_name) {
+    include_dirs = [ shim_headers_path ]
+  }
+
+  action(action_name) {
+    script = "//tools/generate_shim_headers/generate_shim_headers.py"
+    args = [
+      "--generate",
+      "--headers-root",
+      rebase_path(invoker.root_path),
+      "--output-directory",
+      rebase_path(shim_headers_path),
+    ]
+    if (defined(invoker.prefix)) {
+      args += [
+        "--prefix",
+        invoker.prefix,
+      ]
+    }
+    args += invoker.headers
+
+    outputs = process_file_template(invoker.headers,
+                                    "${shim_headers_path}/{{source_file_part}}")
+  }
+
+  group(target_name) {
+    deps = [
+      ":${action_name}",
+    ]
+    all_dependent_configs = [ ":${config_name}" ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/slave/OWNERS
@@ -0,0 +1,20 @@
+set noparent
+agable@chromium.org
+agable@google.com
+cmp@chromium.org
+cmp@google.com
+dpranke@chromium.org
+iannucci@chromium.org
+iannucci@google.com
+johnw@chromium.org
+johnw@google.com
+maruel@chromium.org
+maruel@google.com
+mmoss@chromium.org
+mmoss@google.com
+pschmidt@chromium.org
+pschmidt@google.com
+stip@chromium.org
+stip@google.com
+szager@chromium.org
+szager@google.com
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/slave/README
@@ -0,0 +1,8 @@
+This is a directory which contains configuration information for the
+buildsystem.
+
+* Under recipes, the buildsystem should use only this directory as an
+  entry point into src/.
+
+* Scripts in this directory must not import from outside this directory or shell
+  to scripts outside this directory.
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/split_static_library.gni
@@ -0,0 +1,73 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+template("split_static_library") {
+  assert(defined(invoker.split_count),
+         "Must define split_count for split_static_library")
+
+  # In many conditions the number of inputs will be 1 (because the count will
+  # be conditional on platform or configuration) so optimize that.
+  if (invoker.split_count == 1) {
+    static_library(target_name) {
+      forward_variables_from(invoker, "*")
+    }
+  } else {
+    group_name = target_name
+
+    generated_static_libraries = []
+    current_library_index = 0
+    foreach(current_sources, split_list(invoker.sources, invoker.split_count)) {
+      current_name = "${target_name}_$current_library_index"
+      assert(
+          current_sources != [],
+          "Your values for splitting a static library generate one that has no sources.")
+      generated_static_libraries += [ ":$current_name" ]
+
+      static_library(current_name) {
+        # Generated static library shard gets everything but sources (which
+        # we're redefining) and visibility (which is set to be the group
+        # below).
+        forward_variables_from(invoker,
+                               "*",
+                               [
+                                 "check_includes",
+                                 "sources",
+                                 "visibility",
+                               ])
+        sources = current_sources
+        visibility = [ ":$group_name" ]
+
+        # When splitting a target's sources up into a series of static
+        # libraries, those targets will naturally include headers from each
+        # other arbitrarily. We could theoretically generate a web of
+        # dependencies and allow_circular_includes_from between all pairs of
+        # targets, but that's very cumbersome. Typical usage in Chrome is that
+        # only official Windows builds use split static libraries due to the
+        # Visual Studio size limits, and this means we'll still get header
+        # checking coverage for the other configurations.
+        check_includes = false
+
+        # Uniquify the output name if one is specified.
+        if (defined(invoker.output_name)) {
+          output_name = "${invoker.output_name}_$current_library_index"
+        }
+      }
+
+      current_library_index = current_library_index + 1
+    }
+
+    group(group_name) {
+      public_deps = generated_static_libraries
+      forward_variables_from(invoker,
+                             [
+                               "testonly",
+                               "visibility",
+                             ])
+    }
+  }
+}
+
+set_defaults("split_static_library") {
+  configs = default_compiler_configs
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/symlink.gni
@@ -0,0 +1,85 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a symlink.
+# Args:
+#   source: Path to link to.
+#   output: Where to create the symlink.
+template("symlink") {
+  action(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "deps",
+                             "testonly",
+                             "visibility",
+                           ])
+    outputs = [
+      invoker.output,
+    ]
+    script = "//build/symlink.py"
+    args = [
+      "-f",
+      rebase_path(invoker.source, get_path_info(invoker.output, "dir")),
+      rebase_path(invoker.output, root_build_dir),
+    ]
+  }
+}
+
+# Creates a symlink from root_build_dir/target_name to |binary_label|. This rule
+# is meant to be used within if (current_toolchain == default_toolchain) blocks
+# and point to targets in the non-default toolchain.
+# Note that for executables, using a copy (as opposed to a symlink) does not
+# work when is_component_build=true, since dependent libraries are found via
+# relative location.
+#
+# Args:
+#   binary_label: Target that builds the file to symlink to. e.g.:
+#       ":$target_name($host_toolchain)".
+#   binary_output_name: The output_name set by the binary_label target
+#       (if applicable).
+#   output_name: Where to create the symlink
+#       (default="$root_out_dir/$binary_output_name").
+#
+# Example:
+#   if (current_toolchain == host_toolchain) {
+#     executable("foo") { ... }
+#   } else if (current_toolchain == default_toolchain) {
+#     binary_symlink("foo") {
+#       binary_label = ":foo($host_toolchain)"
+#     }
+#   }
+template("binary_symlink") {
+  symlink(target_name) {
+    forward_variables_from(invoker,
+                           [
+                             "output",
+                             "testonly",
+                             "visibility",
+                           ])
+    deps = [
+      invoker.binary_label,
+    ]
+    data_deps = [
+      invoker.binary_label,
+    ]
+    if (defined(invoker.data_deps)) {
+      data_deps += invoker.data_deps
+    }
+
+    _out_dir = get_label_info(invoker.binary_label, "root_out_dir")
+    if (defined(invoker.binary_output_name)) {
+      _name = invoker.binary_output_name
+    } else {
+      _name = get_label_info(invoker.binary_label, "name")
+    }
+    source = "$_out_dir/$_name"
+
+    _output_name = _name
+    if (defined(invoker.output_name)) {
+      _output_name = invoker.output_name
+    }
+    output = "$root_out_dir/$_output_name"
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/symlink.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Make a symlink and optionally touch a file (to handle dependencies).
+
+Usage:
+  symlink.py [options] sources... target
+
+A symlink to source is created at target. If multiple sources are specified,
+then target is assumed to be a directory, and will contain all the links to
+the sources (basenames identical to their source).
+"""
+
+import errno
+import optparse
+import os.path
+import shutil
+import sys
+
+
+def Main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-f', '--force', action='store_true')
+  parser.add_option('--touch')
+
+  options, args = parser.parse_args(argv[1:])
+  if len(args) < 2:
+    parser.error('at least two arguments required.')
+
+  target = args[-1]
+  sources = args[:-1]
+  for s in sources:
+    t = os.path.join(target, os.path.basename(s))
+    if len(sources) == 1 and not os.path.isdir(target):
+      t = target
+    t = os.path.expanduser(t)
+    if os.path.realpath(t) == s:
+      continue
+    try:
+      os.symlink(s, t)
+    except OSError as e:
+      if e.errno == errno.EEXIST and options.force:
+        if os.path.isdir(t):
+          shutil.rmtree(t, ignore_errors=True)
+        else:
+          os.remove(t)
+        os.symlink(s, t)
+      else:
+        raise
+
+
+  if options.touch:
+    with open(options.touch, 'w') as f:
+      pass
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/BUILD.gn
@@ -0,0 +1,11 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/concurrent_links.gni")
+
+if (current_toolchain == default_toolchain) {
+  pool("link_pool") {
+    depth = concurrent_links
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/OWNERS
@@ -0,0 +1,3 @@
+brettw@chromium.org
+dpranke@chromium.org
+scottmg@chromium.org
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/aix/BUILD.gn
@@ -0,0 +1,21 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/gcc_toolchain.gni")
+
+gcc_toolchain("ppc64") {
+  cc = "gcc"
+  cxx = "g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  toolchain_args = {
+    current_cpu = "ppc64"
+    current_os = "aix"
+    is_clang = false
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/android/BUILD.gn
@@ -0,0 +1,145 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/sysroot.gni")  # Imports android/config.gni.
+import("//build/toolchain/gcc_toolchain.gni")
+
+# The Android GCC toolchains share most of the same parameters, so we have this
+# wrapper around gcc_toolchain to avoid duplication of logic.
+#
+# Parameters:
+#  - toolchain_root
+#      Path to cpu-specific toolchain within the ndk.
+#  - sysroot
+#      Sysroot for this architecture.
+#  - lib_dir
+#      Subdirectory inside of sysroot where libs go.
+#  - binary_prefix
+#      Prefix of compiler executables.
+template("android_gcc_toolchain") {
+  gcc_toolchain(target_name) {
+    assert(defined(invoker.toolchain_args),
+           "toolchain_args must be defined for android_gcc_toolchain()")
+    toolchain_args = invoker.toolchain_args
+    toolchain_args.current_os = "android"
+
+    # Output linker map files for binary size analysis.
+    enable_linker_map = true
+
+    # Make our manually injected libs relative to the build dir.
+    _ndk_lib =
+        rebase_path(invoker.sysroot + "/" + invoker.lib_dir, root_build_dir)
+
+    libs_section_prefix = "$_ndk_lib/crtbegin_dynamic.o"
+    libs_section_postfix = "$_ndk_lib/crtend_android.o"
+
+    solink_libs_section_prefix = "$_ndk_lib/crtbegin_so.o"
+    solink_libs_section_postfix = "$_ndk_lib/crtend_so.o"
+
+    _android_tool_prefix =
+        "${invoker.toolchain_root}/bin/${invoker.binary_prefix}-"
+
+    # The tools should be run relative to the build dir.
+    _tool_prefix = rebase_path("$_android_tool_prefix", root_build_dir)
+
+    # Use the clang specified by the toolchain if there is one. Otherwise fall
+    # back to the global flag.
+    if (defined(toolchain_args.is_clang)) {
+      toolchain_uses_clang = toolchain_args.is_clang
+    } else {
+      toolchain_uses_clang = is_clang
+    }
+
+    if (toolchain_uses_clang) {
+      _prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+      cc = "$_prefix/clang"
+      cxx = "$_prefix/clang++"
+    } else {
+      cc = "${_tool_prefix}gcc"
+      cxx = "${_tool_prefix}g++"
+    }
+    ar = _tool_prefix + "ar"
+    ld = cxx
+    readelf = _tool_prefix + "readelf"
+    nm = _tool_prefix + "nm"
+    strip = "${_tool_prefix}strip"
+
+    # Don't use .cr.so for loadable_modules since they are always loaded via
+    # absolute path.
+    loadable_module_extension = ".so"
+  }
+}
+
+template("android_gcc_toolchains_helper") {
+  android_gcc_toolchain("android_$target_name") {
+    forward_variables_from(invoker, "*")
+    toolchain_args.is_clang = false
+  }
+
+  android_gcc_toolchain("android_clang_$target_name") {
+    forward_variables_from(invoker, "*")
+    toolchain_args.is_clang = true
+  }
+}
+
+android_gcc_toolchains_helper("x86") {
+  toolchain_root = x86_android_toolchain_root
+  sysroot = "$android_ndk_root/$x86_android_sysroot_subdir"
+  lib_dir = "usr/lib"
+  binary_prefix = "i686-linux-android"
+  toolchain_args = {
+    current_cpu = "x86"
+  }
+}
+
+android_gcc_toolchains_helper("arm") {
+  toolchain_root = arm_android_toolchain_root
+  sysroot = "$android_ndk_root/$arm_android_sysroot_subdir"
+  lib_dir = "usr/lib"
+  binary_prefix = "arm-linux-androideabi"
+  toolchain_args = {
+    current_cpu = "arm"
+  }
+}
+
+android_gcc_toolchains_helper("mipsel") {
+  toolchain_root = mips_android_toolchain_root
+  sysroot = "$android_ndk_root/$mips_android_sysroot_subdir"
+  lib_dir = "usr/lib"
+  binary_prefix = "mipsel-linux-android"
+  toolchain_args = {
+    current_cpu = "mipsel"
+  }
+}
+
+android_gcc_toolchains_helper("x64") {
+  toolchain_root = x86_64_android_toolchain_root
+  sysroot = "$android_ndk_root/$x86_64_android_sysroot_subdir"
+  lib_dir = "usr/lib64"
+  binary_prefix = "x86_64-linux-android"
+  toolchain_args = {
+    current_cpu = "x64"
+  }
+}
+
+android_gcc_toolchains_helper("arm64") {
+  toolchain_root = arm64_android_toolchain_root
+  sysroot = "$android_ndk_root/$arm64_android_sysroot_subdir"
+  lib_dir = "usr/lib"
+  binary_prefix = "aarch64-linux-android"
+  toolchain_args = {
+    current_cpu = "arm64"
+  }
+}
+
+android_gcc_toolchains_helper("mips64el") {
+  toolchain_root = mips64_android_toolchain_root
+  sysroot = "$android_ndk_root/$mips64_android_sysroot_subdir"
+  lib_dir = "usr/lib64"
+  binary_prefix = "mips64el-linux-android"
+  toolchain_args = {
+    current_cpu = "mips64el"
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/cc_wrapper.gni
@@ -0,0 +1,40 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/goma.gni")
+
+# Defines the configuration of cc wrapper
+# ccache: a C/C++ compiler cache which can greatly reduce recompilation times.
+# icecc, distcc: it takes compile jobs from a build and distributes them among
+#                remote machines allowing a parallel build.
+#
+# TIPS
+#
+# 1) ccache
+# Set clang_use_chrome_plugins=false if using ccache 3.1.9 or earlier, since
+# these versions don't support -Xclang.  (3.1.10 and later will silently
+# ignore -Xclang, so it doesn't matter if you disable clang_use_chrome_plugins
+# or not).
+#
+# Use ccache 3.2 or later to avoid clang unused argument warnings:
+# https://bugzilla.samba.org/show_bug.cgi?id=8118
+#
+# To avoid -Wparentheses-equality clang warnings, at some cost in terms of
+# speed, you can do:
+# export CCACHE_CPP2=yes
+#
+# 2) icecc
+# Set clang_use_chrome_plugins=false because icecc cannot distribute custom
+# clang libraries.
+#
+# To use icecc and ccache together, set cc_wrapper = "ccache" with
+# export CCACHE_PREFIX=icecc
+
+declare_args() {
+  # Set to "ccache", "icecc" or "distcc".  Probably doesn't work on windows.
+  cc_wrapper = ""
+}
+
+assert(!use_goma || cc_wrapper == "",
+       "use_goma and cc_wrapper can not be used together.")
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/clang_static_analyzer.gni
@@ -0,0 +1,11 @@
+# Copyright (c) 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines the configuration of Clang static analysis tools.
+# See docs/clang_static_analyzer.md for more information.
+
+declare_args() {
+  # Uses the Clang static analysis tools during compilation.
+  use_clang_static_analyzer = false
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/clang_static_analyzer_wrapper.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Adds an analysis build step to invocations of the Clang C/C++ compiler.
+
+Usage: clang_static_analyzer_wrapper.py <compiler> [args...]
+"""
+
+import argparse
+import fnmatch
+import itertools
+import os
+import sys
+import wrapper_utils
+
+# Flags used to enable analysis for Clang invocations.
+analyzer_enable_flags = [
+    '--analyze',
+]
+
+# Flags used to configure the analyzer's behavior.
+analyzer_option_flags = [
+    '-fdiagnostics-show-option',
+    '-analyzer-checker=cplusplus',
+    '-analyzer-opt-analyze-nested-blocks',
+    '-analyzer-eagerly-assume',
+    '-analyzer-output=text',
+    '-analyzer-config',
+    'suppress-c++-stdlib=true',
+
+# List of checkers to execute.
+# The full list of checkers can be found at
+# https://clang-analyzer.llvm.org/available_checks.html.
+    '-analyzer-checker=core',
+    '-analyzer-checker=unix',
+    '-analyzer-checker=deadcode',
+]
+
+
+# Prepends every element of a list |args| with |token|.
+# e.g. ['-analyzer-foo', '-analyzer-bar'] => ['-Xanalyzer', '-analyzer-foo',
+#                                             '-Xanalyzer', '-analyzer-bar']
+def interleave_args(args, token):
+  return list(sum(zip([token] * len(args), args), ()))
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--mode',
+                      choices=['clang', 'cl'],
+                      required=True,
+                      help='Specifies the compiler argument convention to use.')
+  parser.add_argument('args', nargs=argparse.REMAINDER)
+  parsed_args = parser.parse_args()
+
+  prefix = '-Xclang' if parsed_args.mode == 'cl' else '-Xanalyzer'
+  cmd = parsed_args.args + analyzer_enable_flags + \
+        interleave_args(analyzer_option_flags, prefix)
+  returncode, stderr = wrapper_utils.CaptureCommandStderr(
+      wrapper_utils.CommandToRun(cmd))
+  sys.stderr.write(stderr)
+  if returncode != 0:
+    sys.stderr.write(
+        """WARNING! The Clang static analyzer exited with error code %d.
+         Please share the error details in crbug.com/695243 if this looks like
+         a new regression.\n""" % (returncode))
+
+  returncode, stderr = wrapper_utils.CaptureCommandStderr(
+    wrapper_utils.CommandToRun(parsed_args.args))
+  sys.stderr.write(stderr)
+
+  return returncode
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/concurrent_links.gni
@@ -0,0 +1,53 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file should only be imported from files that define toolchains.
+# There's no way to enforce this exactly, but all toolchains are processed
+# in the context of the default_toolchain, so we can at least check for that.
+assert(current_toolchain == default_toolchain)
+
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+  # Limit the number of concurrent links; we often want to run fewer
+  # links at once than we do compiles, because linking is memory-intensive.
+  # The default to use varies by platform and by the amount of memory
+  # available, so we call out to a script to get the right value.
+  concurrent_links = -1
+}
+
+if (concurrent_links == -1) {
+  if (allow_posix_link_time_opt || is_cfi) {
+    if (use_thin_lto) {
+      _args = [
+        "--mem_per_link_gb=10",
+        "--reserve_mem_gb=10",
+      ]
+    } else {
+      # Full LTO, needs lots of RAM
+      _args = [
+        "--mem_per_link_gb=26",
+        "--reserve_mem_gb=20",
+      ]
+    }
+  } else if (use_sanitizer_coverage) {
+    # Sanitizer coverage instrumentation increases linker memory consumption
+    # significantly.
+    _args = [ "--mem_per_link_gb=12" ]
+  } else if (is_win) {
+    _args = [ "--mem_per_link_gb=5" ]
+  } else if (is_mac) {
+    _args = [ "--mem_per_link_gb=4" ]
+  } else if (is_android && !is_component_build && symbol_level == 2) {
+    _args = [ "--mem_per_link_gb=25" ]
+  } else {
+    _args = []
+  }
+
+  # TODO(crbug.com/617429) Pass more build configuration info to the script
+  # so that we can compute better values.
+  concurrent_links = exec_script("get_concurrent_links.py", _args, "value")
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/cros/BUILD.gn
@@ -0,0 +1,136 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+import("//build/toolchain/cros_toolchain.gni")
+
+# This is the normal toolchain for most targets.
+gcc_toolchain("target") {
+  ar = cros_target_ar
+  cc = cros_target_cc
+  cxx = cros_target_cxx
+  ld = cxx
+  if (cros_target_ld != "") {
+    ld = cros_target_ld
+  }
+  if (cros_target_nm != "") {
+    nm = cros_target_nm
+  }
+  if (cros_target_readelf != "") {
+    readelf = cros_target_readelf
+  }
+  extra_cflags = cros_target_extra_cflags
+  extra_cppflags = cros_target_extra_cppflags
+  extra_cxxflags = cros_target_extra_cxxflags
+  extra_ldflags = cros_target_extra_ldflags
+
+  toolchain_args = {
+    cc_wrapper = ""
+    current_cpu = target_cpu
+    current_os = "chromeos"
+    is_clang = is_clang
+    use_debug_fission = use_debug_fission
+    use_gold = use_gold
+    use_sysroot = use_sysroot
+  }
+}
+
+# This is a special toolchain needed just for the nacl_bootstrap target in
+# //native_client/src/trusted/service_runtime/linux. It is identical
+# to ":target" except that it forces use_debug_fission, use_gold, and
+# use_sysroot off, and allows the user to set different sets of extra flags.
+gcc_toolchain("nacl_bootstrap") {
+  ar = cros_target_ar
+  cc = cros_target_cc
+  cxx = cros_target_cxx
+  ld = cxx
+  if (cros_target_ld != "") {
+    ld = cros_target_ld
+  }
+  if (cros_target_nm != "") {
+    nm = cros_target_nm
+  }
+  if (cros_target_readelf != "") {
+    readelf = cros_target_readelf
+  }
+  extra_cflags = cros_nacl_bootstrap_extra_cflags
+  extra_cppflags = cros_nacl_bootstrap_extra_cppflags
+  extra_cxxflags = cros_nacl_bootstrap_extra_cxxflags
+  extra_ldflags = cros_nacl_bootstrap_extra_ldflags
+
+  toolchain_args = {
+    cc_wrapper = ""
+    current_cpu = target_cpu
+    current_os = "chromeos"
+    is_clang = is_clang
+    use_debug_fission = false
+    use_gold = false
+    use_sysroot = false
+  }
+}
+
+gcc_toolchain("host") {
+  # These are args for the template.
+  ar = cros_host_ar
+  cc = cros_host_cc
+  cxx = cros_host_cxx
+  ld = cxx
+  if (cros_host_ld != "") {
+    ld = cros_host_ld
+  }
+  if (cros_host_nm != "") {
+    nm = cros_host_nm
+  }
+  if (cros_host_readelf != "") {
+    readelf = cros_host_readelf
+  }
+  extra_cflags = cros_host_extra_cflags
+  extra_cppflags = cros_host_extra_cppflags
+  extra_cxxflags = cros_host_extra_cxxflags
+  extra_ldflags = cros_host_extra_ldflags
+
+  toolchain_args = {
+    cc_wrapper = ""
+    is_clang = cros_host_is_clang
+    current_cpu = host_cpu
+    current_os = "linux"
+    use_sysroot = false
+  }
+}
+
+gcc_toolchain("v8_snapshot") {
+  # These are args for the template.
+  ar = cros_v8_snapshot_ar
+  cc = cros_v8_snapshot_cc
+  cxx = cros_v8_snapshot_cxx
+  ld = cxx
+  if (cros_v8_snapshot_ld != "") {
+    ld = cros_v8_snapshot_ld
+  }
+  if (cros_v8_snapshot_nm != "") {
+    nm = cros_v8_snapshot_nm
+  }
+  if (cros_v8_snapshot_readelf != "") {
+    readelf = cros_v8_snapshot_readelf
+  }
+  extra_cflags = cros_v8_snapshot_extra_cflags
+  extra_cppflags = cros_v8_snapshot_extra_cppflags
+  extra_cxxflags = cros_v8_snapshot_extra_cxxflags
+  extra_ldflags = cros_v8_snapshot_extra_ldflags
+
+  toolchain_args = {
+    cc_wrapper = ""
+    is_clang = cros_v8_snapshot_is_clang
+    if (target_cpu == "x86" || target_cpu == "arm" || target_cpu == "mipsel") {
+      current_cpu = "x86"
+    } else {
+      current_cpu = "x64"
+    }
+    v8_current_cpu = v8_target_cpu
+    current_os = "linux"
+    use_sysroot = false
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/cros_toolchain.gni
@@ -0,0 +1,81 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# CrOS builds must cross-compile on a Linux host for the actual CrOS
+# device target. There are many different CrOS devices so the build
+# system provides configuration variables that permit a CrOS build to
+# control the cross-compilation tool chain. However, requiring such
+# fine-grain specification is tedious for build-bots and developers.
+# Consequently, the CrOS build system defaults to a convenience
+# compilation mode where the compilation host is also the build target.
+#
+# Chrome can be compiled in this way with the gn variable:
+#
+# target_os = "chromeos"
+#
+# To perform a board-specific build, first obtain the correct system
+# root (http://goo.gl/aFB4XH) for the board. Then configure GN to use it
+# by setting appropriate cross-compilation variables.
+#
+# For example, to compile a Chrome source tree in /g/src for an
+# auron_paine CrOS device with the system root cached in /g/.cros_cache,
+# the following GN arguments must be provided to configure
+# cross-compilation with Goma acceleration. (NB: additional variables
+# will be necessary to successfully compile a working CrOS Chrome. See
+# the definition of GYP_DEFINES inside a sysroot shell.)
+#
+# goma_dir = "/g/.cros_cache/common/goma+2"
+# target_sysroot = "/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7644.0.0+sysroot_chromeos-base_chromeos-chrome.tar.xz"
+# cros_target_cc = "x86_64-cros-linux-gnu-gcc -B/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7657.0.0+target_toolchain/usr/x86_64-pc-linux-gnu/x86_64-cros-linux-gnu/binutils-bin/2.25.51-gold"
+# cros_target_cxx = "x86_64-cros-linux-gnu-g++ -B/g/.cros_cache/chrome-sdk/tarballs/auron_paine+7657.0.0+target_toolchain/usr/x86_64-pc-linux-gnu/x86_64-cros-linux-gnu/binutils-bin/2.25.51-gold"
+# cros_target_ar = "x86_64-cros-linux-gnu-gcc-ar"
+# target_cpu = "x64"
+
+declare_args() {
+  # These must be specified for a board-specific build.
+  cros_target_ar = "ar"
+  cros_target_cc = "gcc"
+  cros_target_cxx = "g++"
+  cros_target_ld = ""
+  cros_target_nm = ""
+  cros_target_readelf = ""
+
+  # These can be optionally set. The "_cppflags" will be applied to *both*
+  # C and C++ files; use "_cxxflags" for C++-only flags.
+  cros_target_extra_cflags = ""
+  cros_target_extra_cppflags = ""
+  cros_target_extra_cxxflags = ""
+  cros_target_extra_ldflags = ""
+
+  # is_clang is used instead of cros_target_is_clang
+
+  cros_host_ar = "ar"
+  cros_host_cc = "gcc"
+  cros_host_cxx = "g++"
+  cros_host_ld = ""
+  cros_host_nm = ""
+  cros_host_readelf = ""
+  cros_host_extra_cflags = ""
+  cros_host_extra_cppflags = ""
+  cros_host_extra_cxxflags = ""
+  cros_host_extra_ldflags = ""
+  cros_host_is_clang = false
+
+  cros_v8_snapshot_ar = "ar"
+  cros_v8_snapshot_cc = "gcc"
+  cros_v8_snapshot_cxx = "g++"
+  cros_v8_snapshot_ld = ""
+  cros_v8_snapshot_nm = ""
+  cros_v8_snapshot_readelf = ""
+  cros_v8_snapshot_extra_cflags = ""
+  cros_v8_snapshot_extra_cppflags = ""
+  cros_v8_snapshot_extra_cxxflags = ""
+  cros_v8_snapshot_extra_ldflags = ""
+  cros_v8_snapshot_is_clang = false
+
+  cros_nacl_bootstrap_extra_cflags = ""
+  cros_nacl_bootstrap_extra_cppflags = ""
+  cros_nacl_bootstrap_extra_cxxflags = ""
+  cros_nacl_bootstrap_extra_ldflags = ""
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/fuchsia/BUILD.gn
@@ -0,0 +1,16 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/fuchsia/config.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+clang_toolchain("x64") {
+  assert(current_cpu == "x64")
+  assert(host_os == "linux")
+
+  toolchain_args = {
+    current_cpu = "x64"
+    current_os = "fuchsia"
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/fuchsia/OWNERS
@@ -0,0 +1,1 @@
+scottmg@chromium.org
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/gcc_ar_wrapper.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs the 'ar' command after removing its output file first.
+
+This script is invoked like:
+  python gcc_ar_wrapper.py --ar=$AR --output=$OUT $OP $INPUTS
+to do the equivalent of:
+  rm -f $OUT && $AR $OP $OUT $INPUTS
+"""
+
+import argparse
+import errno
+import os
+import subprocess
+import sys
+
+import wrapper_utils
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument('--ar',
+                      required=True,
+                      help='The ar binary to run',
+                      metavar='PATH')
+  parser.add_argument('--output',
+                      required=True,
+                      help='Output archive file',
+                      metavar='ARCHIVE')
+  parser.add_argument('--plugin',
+                      help='Load plugin')
+  parser.add_argument('--resource-whitelist',
+                      help='Merge all resource whitelists into a single file.',
+                      metavar='PATH')
+  parser.add_argument('operation',
+                      help='Operation on the archive')
+  parser.add_argument('inputs', nargs='+',
+                      help='Input files')
+  args = parser.parse_args()
+
+  # Specifies the type of object file ar should examine.
+  # The ar on linux ignores this option.
+  object_mode = []
+  if sys.platform.startswith('aix'):
+    # The @file feature is not available on ar for AIX.
+    # For linux (and other posix like systems), the @file_name
+    # option reads the contents of file_name as command line arguments.
+    # For AIX we must parse these (rsp files) manually.
+    # Read rspfile.
+    args.inputs = wrapper_utils.ResolveRspLinks(args.inputs)
+    object_mode = ['-X64']
+  else:
+    if args.resource_whitelist:
+      whitelist_candidates = wrapper_utils.ResolveRspLinks(args.inputs)
+      wrapper_utils.CombineResourceWhitelists(
+          whitelist_candidates, args.resource_whitelist)
+
+  command = [args.ar] + object_mode + [args.operation]
+  if args.plugin is not None:
+    command += ['--plugin', args.plugin]
+  command.append(args.output)
+  command += args.inputs
+
+  # Remove the output file first; ignore only a missing file (ENOENT).
+  try:
+    os.remove(args.output)
+  except OSError as e:
+    if e.errno != errno.ENOENT:
+      raise
+
+  # Now just run the ar command.
+  return subprocess.call(wrapper_utils.CommandToRun(command))
+
+
+if __name__ == "__main__":
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/gcc_compile_wrapper.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs a compilation command.
+
+This script exists to avoid using complex shell commands in
+gcc_toolchain.gni's tool("cxx") and tool("cc") in case the host running the
+compiler does not have a POSIX-like shell (e.g. Windows).
+"""
+
+import argparse
+import sys
+
+import wrapper_utils
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument('--resource-whitelist',
+                      help='Generate a resource whitelist for this target.',
+                      metavar='PATH')
+  parser.add_argument('command', nargs=argparse.REMAINDER,
+                      help='Compilation command')
+  args = parser.parse_args()
+
+  returncode, stderr = wrapper_utils.CaptureCommandStderr(
+      wrapper_utils.CommandToRun(args.command))
+
+  used_resources = wrapper_utils.ExtractResourceIdsFromPragmaWarnings(stderr)
+  sys.stderr.write(stderr)
+
+  if args.resource_whitelist:
+    with open(args.resource_whitelist, 'w') as f:
+      if used_resources:
+        f.write('\n'.join(str(resource) for resource in used_resources))
+        f.write('\n')
+
+  return returncode
+
+
+if __name__ == "__main__":
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/gcc_link_wrapper.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs a linking command and optionally a strip command.
+
+This script exists to avoid using complex shell commands in
+gcc_toolchain.gni's tool("link"), in case the host running the compiler
+does not have a POSIX-like shell (e.g. Windows).
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+import wrapper_utils
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument('--strip',
+                      help='The strip binary to run',
+                      metavar='PATH')
+  parser.add_argument('--unstripped-file',
+                      help='Executable file produced by linking command',
+                      metavar='FILE')
+  parser.add_argument('--map-file',
+                      help=('Use -Wl,-Map to generate a map file. Will be '
+                            'gzipped if extension ends with .gz'),
+                      metavar='FILE')
+  parser.add_argument('--output',
+                      required=True,
+                      help='Final output executable file',
+                      metavar='FILE')
+  parser.add_argument('command', nargs='+',
+                      help='Linking command')
+  args = parser.parse_args()
+
+  # Work-around for gold being slow-by-default. http://crbug.com/632230
+  fast_env = dict(os.environ)
+  fast_env['LC_ALL'] = 'C'
+  result = wrapper_utils.RunLinkWithOptionalMapFile(args.command, env=fast_env,
+                                                    map_file=args.map_file)
+  if result != 0:
+    return result
+
+  # Finally, strip the linked executable (if desired).
+  if args.strip:
+    result = subprocess.call(wrapper_utils.CommandToRun([
+        args.strip, '--strip-unneeded', '-o', args.output, args.unstripped_file
+        ]))
+
+  return result
+
+
+if __name__ == "__main__":
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/gcc_solink_wrapper.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs 'ld -shared' and generates a .TOC file that's untouched when unchanged.
+
+This script exists to avoid using complex shell commands in
+gcc_toolchain.gni's tool("solink"), in case the host running the compiler
+does not have a POSIX-like shell (e.g. Windows).
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+import wrapper_utils
+
+
+def CollectSONAME(args):
+  """Replaces: readelf -d $sofile | grep SONAME"""
+  toc = ''
+  readelf = subprocess.Popen(wrapper_utils.CommandToRun(
+      [args.readelf, '-d', args.sofile]), stdout=subprocess.PIPE, bufsize=-1)
+  for line in readelf.stdout:
+    if 'SONAME' in line:
+      toc += line
+  return readelf.wait(), toc
+
+
+def CollectDynSym(args):
+  """Replaces: nm --format=posix -g -D $sofile | cut -f1-2 -d' '"""
+  toc = ''
+  nm = subprocess.Popen(wrapper_utils.CommandToRun([
+      args.nm, '--format=posix', '-g', '-D', args.sofile]),
+                        stdout=subprocess.PIPE, bufsize=-1)
+  for line in nm.stdout:
+    toc += ' '.join(line.split(' ', 2)[:2]) + '\n'
+  return nm.wait(), toc
+
+
+def CollectTOC(args):
+  result, toc = CollectSONAME(args)
+  if result == 0:
+    result, dynsym = CollectDynSym(args)
+    toc += dynsym
+  return result, toc
+
+
+def UpdateTOC(tocfile, toc):
+  if os.path.exists(tocfile):
+    old_toc = open(tocfile, 'r').read()
+  else:
+    old_toc = None
+  if toc != old_toc:
+    open(tocfile, 'w').write(toc)
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument('--readelf',
+                      required=True,
+                      help='The readelf binary to run',
+                      metavar='PATH')
+  parser.add_argument('--nm',
+                      required=True,
+                      help='The nm binary to run',
+                      metavar='PATH')
+  parser.add_argument('--strip',
+                      help='The strip binary to run',
+                      metavar='PATH')
+  parser.add_argument('--sofile',
+                      required=True,
+                      help='Shared object file produced by linking command',
+                      metavar='FILE')
+  parser.add_argument('--tocfile',
+                      required=True,
+                      help='Output table-of-contents file',
+                      metavar='FILE')
+  parser.add_argument('--map-file',
+                      help=('Use -Wl,-Map to generate a map file. Will be '
+                            'gzipped if extension ends with .gz'),
+                      metavar='FILE')
+  parser.add_argument('--output',
+                      required=True,
+                      help='Final output shared object file',
+                      metavar='FILE')
+  parser.add_argument('--resource-whitelist',
+                      help='Merge all resource whitelists into a single file.',
+                      metavar='PATH')
+  parser.add_argument('command', nargs='+',
+                      help='Linking command')
+  args = parser.parse_args()
+
+  # Work-around for gold being slow-by-default. http://crbug.com/632230
+  fast_env = dict(os.environ)
+  fast_env['LC_ALL'] = 'C'
+
+  if args.resource_whitelist:
+    whitelist_candidates = wrapper_utils.ResolveRspLinks(args.command)
+    wrapper_utils.CombineResourceWhitelists(
+        whitelist_candidates, args.resource_whitelist)
+
+  # First, run the actual link.
+  command = wrapper_utils.CommandToRun(args.command)
+  result = wrapper_utils.RunLinkWithOptionalMapFile(command, env=fast_env,
+                                                    map_file=args.map_file)
+
+  if result != 0:
+    return result
+
+  # Next, generate the contents of the TOC file.
+  result, toc = CollectTOC(args)
+  if result != 0:
+    return result
+
+  # If there is an existing TOC file with identical contents, leave it alone.
+  # Otherwise, write out the TOC file.
+  UpdateTOC(args.tocfile, toc)
+
+  # Finally, strip the linked shared object file (if desired).
+  if args.strip:
+    result = subprocess.call(wrapper_utils.CommandToRun(
+        [args.strip, '--strip-unneeded', '-o', args.output, args.sofile]))
+
+  return result
+
+
+if __name__ == "__main__":
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/gcc_toolchain.gni
@@ -0,0 +1,593 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/v8_target_cpu.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/clang_static_analyzer.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+
+if (is_nacl) {
+  # To keep NaCl variables out of builds that don't include NaCl, all
+  # variables defined in nacl/config.gni referenced here should be protected by
+  # is_nacl conditions.
+  import("//build/config/nacl/config.gni")
+}
+
+# Path to the Clang static analysis wrapper script.
+# REVIEWERS: can you suggest a better location for this?
+# GN is really picky about dead stores of variables except at the global scope.
+analyzer_wrapper =
+    rebase_path("//build/toolchain/clang_static_analyzer_wrapper.py",
+                root_build_dir) + " --mode=clang"
+
+# This template defines a toolchain for something that works like gcc
+# (including clang).
+#
+# It requires the following variables specifying the executables to run:
+#  - ar
+#  - cc
+#  - cxx
+#  - ld
+#
+# Optional parameters that control the tools:
+#
+#  - extra_cflags
+#      Extra flags to be appended when compiling C files (but not C++ files).
+#  - extra_cppflags
+#      Extra flags to be appended when compiling both C and C++ files. "CPP"
+#      stands for "C PreProcessor" in this context, although it can be
+#      used for non-preprocessor flags as well. Not to be confused with
+#      "CXX" (which follows).
+#  - extra_cxxflags
+#      Extra flags to be appended when compiling C++ files (but not C files).
+#  - extra_ldflags
+#      Extra flags to be appended when linking
+#
+#  - libs_section_prefix
+#  - libs_section_postfix
+#      The contents of these strings, if specified, will be placed around
+#      the libs section of the linker line. It allows one to inject libraries
+#      at the beginning and end for all targets in a toolchain.
+#  - solink_libs_section_prefix
+#  - solink_libs_section_postfix
+#      Same as libs_section_{pre,post}fix except used for solink instead of link.
+#  - link_outputs
+#      The content of this array, if specified, will be added to the list of
+#      outputs from the link command. This can be useful in conjunction with
+#      the post_link parameter.
+#  - post_link
+#      The content of this string, if specified, will be run as a separate
+#      command following the the link command.
+#  - deps
+#      Just forwarded to the toolchain definition.
+#  - executable_extension
+#      If this string is specified it will be used for the file extension
+#      for an executable, rather than using no extension; targets will
+#      still be able to override the extension using the output_extension
+#      variable.
+#  - rebuild_define
+#      The contents of this string, if specified, will be passed as a #define
+#      to the toolchain. It can be used to force recompiles whenever a
+#      toolchain is updated.
+#  - shlib_extension
+#      If this string is specified it will be used for the file extension
+#      for a shared library, rather than default value specified in
+#      toolchain.gni
+#  - strip
+#      Location of the strip executable. When specified, strip will be run on
+#      all shared libraries and executables as they are built. The pre-stripped
+#      artifacts will be put in lib.unstripped/ and exe.unstripped/.
+template("gcc_toolchain") {
+  toolchain(target_name) {
+    assert(defined(invoker.ar), "gcc_toolchain() must specify a \"ar\" value")
+    assert(defined(invoker.cc), "gcc_toolchain() must specify a \"cc\" value")
+    assert(defined(invoker.cxx), "gcc_toolchain() must specify a \"cxx\" value")
+    assert(defined(invoker.ld), "gcc_toolchain() must specify a \"ld\" value")
+
+    # This define changes when the toolchain changes, forcing a rebuild.
+    # Nothing should ever use this define.
+    if (defined(invoker.rebuild_define)) {
+      rebuild_string = "-D" + invoker.rebuild_define + " "
+    } else {
+      rebuild_string = ""
+    }
+
+    # GN's syntax can't handle more than one scope dereference at once, like
+    # "invoker.toolchain_args.foo", so make a temporary to hold the toolchain
+    # args so we can do "invoker_toolchain_args.foo".
+    assert(defined(invoker.toolchain_args),
+           "Toolchains must specify toolchain_args")
+    invoker_toolchain_args = invoker.toolchain_args
+    assert(defined(invoker_toolchain_args.current_cpu),
+           "toolchain_args must specify a current_cpu")
+    assert(defined(invoker_toolchain_args.current_os),
+           "toolchain_args must specify a current_os")
+
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    toolchain_args = {
+      # Populate toolchain args from the invoker.
+      forward_variables_from(invoker_toolchain_args, "*")
+
+      # The host toolchain value computed by the default toolchain's setup
+      # needs to be passed through unchanged to all secondary toolchains to
+      # ensure that it's always the same, regardless of the values that may be
+      # set on those toolchains.
+      host_toolchain = host_toolchain
+
+      if (!defined(invoker_toolchain_args.v8_current_cpu)) {
+        v8_current_cpu = invoker_toolchain_args.current_cpu
+      }
+    }
+
+    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
+    # toolchain args, use those values, otherwise default to the global one.
+    # This works because the only reasonable override that toolchains might
+    # supply for these values are to force-disable them.
+    if (defined(toolchain_args.use_goma)) {
+      toolchain_uses_goma = toolchain_args.use_goma
+    } else {
+      toolchain_uses_goma = use_goma
+    }
+    if (defined(toolchain_args.cc_wrapper)) {
+      toolchain_cc_wrapper = toolchain_args.cc_wrapper
+    } else {
+      toolchain_cc_wrapper = cc_wrapper
+    }
+    assert(!(toolchain_cc_wrapper != "" && toolchain_uses_goma),
+           "Goma and cc_wrapper can't be used together.")
+
+    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
+    # toolchain args, use those values, otherwise default to the global one.
+    # This works because the only reasonable override that toolchains might
+    # supply for these values are to force-disable them.
+    if (toolchain_uses_goma) {
+      goma_path = "$goma_dir/gomacc"
+
+      # Use the static analysis script if static analysis is turned on
+      # AND the tool has not opted out by setting
+      # 'is_clang_static_analysis_supported' to false.
+      if (is_clang && use_clang_static_analyzer &&
+          (!defined(invoker.is_clang_analysis_supported) ||
+           invoker.is_clang_analysis_supported)) {
+        compiler_prefix = "${analyzer_wrapper} ${goma_path} "
+
+        # Create a distinct variable for "asm", since analysis runs pass
+        # a bunch of flags to clang/clang++ that are nonsensical on assembler
+        # runs.
+        asm = "${goma_path} ${invoker.cc}"
+      } else {
+        compiler_prefix = "${goma_path} "
+      }
+    } else {
+      if (is_clang && use_clang_static_analyzer &&
+          (!defined(invoker.is_clang_analysis_supported) ||
+           invoker.is_clang_analysis_supported)) {
+        compiler_prefix = "${analyzer_wrapper} "
+        asm = invoker.cc
+      } else {
+        compiler_prefix = "${toolchain_cc_wrapper} "
+      }
+    }
+
+    cc = compiler_prefix + invoker.cc
+    cxx = compiler_prefix + invoker.cxx
+    ar = invoker.ar
+    ld = invoker.ld
+    if (!defined(asm)) {
+      asm = cc
+    }
+    if (defined(invoker.readelf)) {
+      readelf = invoker.readelf
+    } else {
+      readelf = "readelf"
+    }
+    if (defined(invoker.nm)) {
+      nm = invoker.nm
+    } else {
+      nm = "nm"
+    }
+
+    if (defined(invoker.shlib_extension)) {
+      default_shlib_extension = invoker.shlib_extension
+    } else {
+      default_shlib_extension = shlib_extension
+    }
+
+    if (defined(invoker.executable_extension)) {
+      default_executable_extension = invoker.executable_extension
+    } else {
+      default_executable_extension = ""
+    }
+
+    # Bring these into our scope for string interpolation with default values.
+    if (defined(invoker.libs_section_prefix)) {
+      libs_section_prefix = invoker.libs_section_prefix
+    } else {
+      libs_section_prefix = ""
+    }
+
+    if (defined(invoker.libs_section_postfix)) {
+      libs_section_postfix = invoker.libs_section_postfix
+    } else {
+      libs_section_postfix = ""
+    }
+
+    if (defined(invoker.solink_libs_section_prefix)) {
+      solink_libs_section_prefix = invoker.solink_libs_section_prefix
+    } else {
+      solink_libs_section_prefix = ""
+    }
+
+    if (defined(invoker.solink_libs_section_postfix)) {
+      solink_libs_section_postfix = invoker.solink_libs_section_postfix
+    } else {
+      solink_libs_section_postfix = ""
+    }
+
+    if (defined(invoker.extra_cflags) && invoker.extra_cflags != "") {
+      extra_cflags = " " + invoker.extra_cflags
+    } else {
+      extra_cflags = ""
+    }
+
+    if (defined(invoker.extra_cppflags) && invoker.extra_cppflags != "") {
+      extra_cppflags = " " + invoker.extra_cppflags
+    } else {
+      extra_cppflags = ""
+    }
+
+    if (defined(invoker.extra_cxxflags) && invoker.extra_cxxflags != "") {
+      extra_cxxflags = " " + invoker.extra_cxxflags
+    } else {
+      extra_cxxflags = ""
+    }
+
+    if (defined(invoker.extra_ldflags) && invoker.extra_ldflags != "") {
+      extra_ldflags = " " + invoker.extra_ldflags
+    } else {
+      extra_ldflags = ""
+    }
+
+    enable_linker_map = defined(invoker.enable_linker_map) &&
+                        invoker.enable_linker_map && generate_linker_map
+
+    # These library switches can apply to all tools below.
+    lib_switch = "-l"
+    lib_dir_switch = "-L"
+
+    # Object files go in this directory.
+    object_subdir = "{{target_out_dir}}/{{label_name}}"
+
+    tool("cc") {
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}${extra_cppflags}${extra_cflags} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CC {{output}}"
+      outputs = [
+        # The whitelist file is also an output, but ninja does not
+        # currently support multiple outputs for tool("cc").
+        "$object_subdir/{{source_name_part}}.o",
+      ]
+      if (enable_resource_whitelist_generation) {
+        compile_wrapper =
+            rebase_path("//build/toolchain/gcc_compile_wrapper.py",
+                        root_build_dir)
+        command = "$python_path \"$compile_wrapper\" --resource-whitelist=\"{{output}}.whitelist\" $command"
+      }
+    }
+
+    tool("cxx") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}${extra_cppflags}${extra_cxxflags} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CXX {{output}}"
+      outputs = [
+        # The whitelist file is also an output, but ninja does not
+        # currently support multiple outputs for tool("cxx").
+        "$object_subdir/{{source_name_part}}.o",
+      ]
+      if (enable_resource_whitelist_generation) {
+        compile_wrapper =
+            rebase_path("//build/toolchain/gcc_compile_wrapper.py",
+                        root_build_dir)
+        command = "$python_path \"$compile_wrapper\" --resource-whitelist=\"{{output}}.whitelist\" $command"
+      }
+    }
+
+    tool("asm") {
+      # For GCC we can just use the C compiler to compile assembly.
+      depfile = "{{output}}.d"
+      command = "$asm -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "ASM {{output}}"
+      outputs = [
+        "$object_subdir/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("alink") {
+      rspfile = "{{output}}.rsp"
+      whitelist_flag = " "
+      if (enable_resource_whitelist_generation) {
+        whitelist_flag = " --resource-whitelist=\"{{output}}.whitelist\""
+      }
+
+      # This needs a Python script to avoid using simple sh features in this
+      # command, in case the host does not use a POSIX shell (e.g. compiling
+      # POSIX-like toolchains such as NaCl on Windows).
+      ar_wrapper =
+          rebase_path("//build/toolchain/gcc_ar_wrapper.py", root_build_dir)
+      if (current_os == "aix") {
+        # We use slightly different arflags for AIX.
+        extra_arflags = "rcsT"
+      } else {
+        extra_arflags = "rcsD"
+      }
+      command = "$python_path \"$ar_wrapper\"$whitelist_flag --output={{output}} --ar=\"$ar\" {{arflags}} $extra_arflags @\"$rspfile\""
+      description = "AR {{output}}"
+      rspfile_content = "{{inputs}}"
+      outputs = [
+        "{{output_dir}}/{{target_output_name}}{{output_extension}}",
+      ]
+
+      # Shared libraries go in the target out directory by default so we can
+      # generate different targets with the same name and not have them collide.
+      default_output_dir = "{{target_out_dir}}"
+      default_output_extension = ".a"
+      output_prefix = "lib"
+    }
+
+    tool("solink") {
+      soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
+      sofile = "{{output_dir}}/$soname"  # Possibly including toolchain dir.
+      rspfile = sofile + ".rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+      whitelist_flag = " "
+      if (enable_resource_whitelist_generation) {
+        whitelist_file = "$sofile.whitelist"
+        whitelist_flag = " --resource-whitelist=\"$whitelist_file\""
+      }
+
+      if (defined(invoker.strip)) {
+        unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
+      } else {
+        unstripped_sofile = sofile
+      }
+
+      # These variables are not built into GN but are helpers that
+      # implement (1) linking to produce a .so, (2) extracting the symbols
+      # from that file (3) if the extracted list differs from the existing
+      # .TOC file, overwrite it, otherwise, don't change it.
+      tocfile = sofile + ".TOC"
+
+      link_command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""
+
+      # Generate a map file to be used for binary size analysis.
+      # Map file adds ~10% to the link time on a z620.
+      # With target_os="android", libchrome.so.map.gz is ~20MB.
+      map_switch = ""
+      if (enable_linker_map && is_official_build) {
+        map_file = "$unstripped_sofile.map.gz"
+        map_switch = " --map-file \"$map_file\""
+      }
+
+      assert(defined(readelf), "to solink you must have a readelf")
+      assert(defined(nm), "to solink you must have an nm")
+      strip_switch = ""
+      if (defined(invoker.strip)) {
+        strip_switch = "--strip=${invoker.strip} "
+      }
+
+      # This needs a Python script to avoid using a complex shell command
+      # requiring sh control structures, pipelines, and POSIX utilities.
+      # The host might not have a POSIX shell and utilities (e.g. Windows).
+      solink_wrapper = rebase_path("//build/toolchain/gcc_solink_wrapper.py")
+      command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch--sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\"$map_switch --output=\"$sofile\"$whitelist_flag -- $link_command"
+
+      rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
+
+      description = "SOLINK $sofile"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_extension = default_shlib_extension
+
+      default_output_dir = "{{root_out_dir}}"
+      if (shlib_subdir != ".") {
+        default_output_dir += "/$shlib_subdir"
+      }
+
+      output_prefix = "lib"
+
+      # Since the above commands only update the .TOC file when it changes, ask
+      # Ninja to check if the timestamp actually changed to know if downstream
+      # dependencies should be recompiled.
+      restat = true
+
+      # Tell GN about the output files. It will link to the sofile but use the
+      # tocfile for dependency management.
+      outputs = [
+        sofile,
+        tocfile,
+      ]
+      if (enable_resource_whitelist_generation) {
+        outputs += [ whitelist_file ]
+      }
+      if (sofile != unstripped_sofile) {
+        outputs += [ unstripped_sofile ]
+      }
+      if (defined(map_file)) {
+        outputs += [ map_file ]
+      }
+      link_output = sofile
+      depend_output = tocfile
+    }
+
+    tool("solink_module") {
+      soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
+      sofile = "{{output_dir}}/$soname"
+      rspfile = sofile + ".rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      if (defined(invoker.strip)) {
+        unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
+      } else {
+        unstripped_sofile = sofile
+      }
+
+      command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""
+
+      if (defined(invoker.strip)) {
+        strip_command = "${invoker.strip} --strip-unneeded -o \"$sofile\" \"$unstripped_sofile\""
+        command += " && " + strip_command
+      }
+      rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
+
+      description = "SOLINK_MODULE $sofile"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      if (defined(invoker.loadable_module_extension)) {
+        default_output_extension = invoker.loadable_module_extension
+      } else {
+        default_output_extension = default_shlib_extension
+      }
+
+      default_output_dir = "{{root_out_dir}}"
+      if (shlib_subdir != ".") {
+        default_output_dir += "/$shlib_subdir"
+      }
+
+      output_prefix = "lib"
+
+      outputs = [
+        sofile,
+      ]
+      if (sofile != unstripped_sofile) {
+        outputs += [ unstripped_sofile ]
+      }
+    }
+
+    tool("link") {
+      exename = "{{target_output_name}}{{output_extension}}"
+      outfile = "{{output_dir}}/$exename"
+      rspfile = "$outfile.rsp"
+      unstripped_outfile = outfile
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_extension = default_executable_extension
+
+      default_output_dir = "{{root_out_dir}}"
+
+      if (defined(invoker.strip)) {
+        unstripped_outfile = "{{root_out_dir}}/exe.unstripped/$exename"
+      }
+
+      # Generate a map file to be used for binary size analysis.
+      # Map file adds ~10% to the link time on a z620.
+      # With target_os="android", libchrome.so.map.gz is ~20MB.
+      map_switch = ""
+      if (enable_linker_map && is_official_build) {
+        map_file = "$unstripped_outfile.map.gz"
+        map_switch = " --map-file \"$map_file\""
+      }
+
+      start_group_flag = ""
+      end_group_flag = ""
+      if (current_os != "aix") {
+        # the "--start-group .. --end-group" feature isn't available on the aix ld.
+        start_group_flag = "-Wl,--start-group"
+        end_group_flag = "-Wl,--end-group "
+      }
+      link_command = "$ld {{ldflags}}${extra_ldflags} -o \"$unstripped_outfile\" $start_group_flag @\"$rspfile\" {{solibs}} $end_group_flag $libs_section_prefix {{libs}} $libs_section_postfix"
+
+      strip_switch = ""
+
+      if (defined(invoker.strip)) {
+        strip_switch = " --strip=\"${invoker.strip}\" --unstripped-file=\"$unstripped_outfile\""
+      }
+
+      link_wrapper =
+          rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir)
+      command = "$python_path \"$link_wrapper\" --output=\"$outfile\"$strip_switch$map_switch -- $link_command"
+      description = "LINK $outfile"
+      rspfile_content = "{{inputs}}"
+      outputs = [
+        outfile,
+      ]
+      if (outfile != unstripped_outfile) {
+        outputs += [ unstripped_outfile ]
+      }
+      if (defined(invoker.link_outputs)) {
+        outputs += invoker.link_outputs
+      }
+      if (defined(map_file)) {
+        outputs += [ map_file ]
+      }
+    }
+
+    # These two are really entirely generic, but have to be repeated in
+    # each toolchain because GN doesn't allow a template to be used here.
+    # See //build/toolchain/toolchain.gni for details.
+    tool("stamp") {
+      command = stamp_command
+      description = stamp_description
+    }
+    tool("copy") {
+      command = copy_command
+      description = copy_description
+    }
+
+    forward_variables_from(invoker, [ "deps" ])
+  }
+}
+
+# This is a shorthand for gcc_toolchain instances based on the Chromium-built
+# version of Clang. Only the toolchain_cpu and toolchain_os variables need to
+# be specified by the invoker, and optionally toolprefix if it's a
+# cross-compile case. Note that for a cross-compile case this toolchain
+# requires a config to pass the appropriate -target option, or else it will
+# actually just be doing a native compile. The invoker can optionally override
+# use_gold too.
+template("clang_toolchain") {
+  # An empty toolprefix means "native compile"; cross-compiles pass e.g.
+  # "arm-linux-gnueabihf-" so the binutils-style tools below resolve.
+  if (defined(invoker.toolprefix)) {
+    toolprefix = invoker.toolprefix
+  } else {
+    toolprefix = ""
+  }
+
+  gcc_toolchain(target_name) {
+    prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+    cc = "$prefix/clang"
+    cxx = "$prefix/clang++"
+    ld = cxx
+    # readelf/nm come from the (optional) cross toolprefix, while ar uses the
+    # llvm-ar shipped next to clang itself.
+    readelf = "${toolprefix}readelf"
+    ar = "${prefix}/llvm-ar"
+    nm = "${toolprefix}nm"
+
+    forward_variables_from(invoker,
+                           [
+                             "strip",
+                             "is_clang_analysis_supported",
+                             "enable_linker_map",
+                           ])
+
+    toolchain_args = {
+      if (defined(invoker.toolchain_args)) {
+        forward_variables_from(invoker.toolchain_args, "*")
+      }
+      # Set after the forward above so it wins over any invoker-supplied value.
+      is_clang = true
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/get_concurrent_links.py
@@ -0,0 +1,77 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script computes the number of concurrent links we want to run in the build
+# as a function of machine spec. It's based on GetDefaultConcurrentLinks in GYP.
+
+import optparse
+import os
+import re
+import subprocess
+import sys
+
+def _GetTotalMemoryInBytes():
+  """Returns the machine's total physical memory in bytes, or 0 if unknown.
+
+  Supported platforms are Windows/cygwin, Linux and macOS; everything else
+  falls through to the final return 0.
+  """
+  if sys.platform in ('win32', 'cygwin'):
+    import ctypes
+
+    # Layout mirrors the Win32 MEMORYSTATUSEX struct filled in by
+    # GlobalMemoryStatusEx; dwLength must be pre-set to the struct size.
+    class MEMORYSTATUSEX(ctypes.Structure):
+      _fields_ = [
+        ("dwLength", ctypes.c_ulong),
+        ("dwMemoryLoad", ctypes.c_ulong),
+        ("ullTotalPhys", ctypes.c_ulonglong),
+        ("ullAvailPhys", ctypes.c_ulonglong),
+        ("ullTotalPageFile", ctypes.c_ulonglong),
+        ("ullAvailPageFile", ctypes.c_ulonglong),
+        ("ullTotalVirtual", ctypes.c_ulonglong),
+        ("ullAvailVirtual", ctypes.c_ulonglong),
+        ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
+      ]
+
+    stat = MEMORYSTATUSEX(dwLength=ctypes.sizeof(MEMORYSTATUSEX))
+    ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+    return stat.ullTotalPhys
+  elif sys.platform.startswith('linux'):
+    if os.path.exists("/proc/meminfo"):
+      with open("/proc/meminfo") as meminfo:
+        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+        for line in meminfo:
+          match = memtotal_re.match(line)
+          if not match:
+            continue
+          # /proc/meminfo reports kB; scale up to bytes (returns a float).
+          return float(match.group(1)) * 2**10
+  elif sys.platform == 'darwin':
+    try:
+      return int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+    except Exception:
+      # Best effort: a failing sysctl just means "unknown".
+      return 0
+  # TODO(scottmg): Implement this for other platforms.
+  return 0
+
+
+def _GetDefaultConcurrentLinks(mem_per_link_gb, reserve_mem_gb):
+  """Returns the number of concurrent links to run, sized from total RAM.
+
+  Args:
+    mem_per_link_gb: assumed memory cost of a single link step, in GiB.
+    reserve_mem_gb: memory set aside for the rest of the system, in GiB.
+  """
+  # Inherit the legacy environment variable for people that have set it in GYP.
+  pool_size = int(os.getenv('GYP_LINK_CONCURRENCY', 0))
+  if pool_size:
+    return pool_size
+
+  mem_total_bytes = _GetTotalMemoryInBytes()
+  mem_total_bytes = max(0, mem_total_bytes - reserve_mem_gb * 2**30)
+  # Always allow at least one link, even when memory is unknown (reported 0).
+  num_concurrent_links = int(max(1, mem_total_bytes / mem_per_link_gb / 2**30))
+  # GYP_LINK_CONCURRENCY_MAX provides an optional upper bound, clamped to >= 1.
+  hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
+  return min(num_concurrent_links, hard_cap)
+
+
+def main():
+  """Parses --mem_per_link_gb / --reserve_mem_gb and prints the pool size."""
+  parser = optparse.OptionParser()
+  parser.add_option('--mem_per_link_gb', action="store", type="int", default=8)
+  parser.add_option('--reserve_mem_gb', action="store", type="int", default=0)
+  # Stop at the first positional argument so trailing args are left untouched.
+  parser.disable_interspersed_args()
+  options, _ = parser.parse_args()
+
+  # NOTE(review): Python 2 print statement — this script is not Python 3
+  # compatible as written.
+  print _GetDefaultConcurrentLinks(options.mem_per_link_gb,
+                                   options.reserve_mem_gb)
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/goma.gni
@@ -0,0 +1,19 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines the configuration of Goma.
+
+declare_args() {
+  # Both arguments below can be overridden per-build via `gn args`.
+
+  # Set to true to enable distributed compilation using Goma.
+  use_goma = false
+
+  # Set the default value based on the platform.
+  if (is_win) {
+    # Absolute directory containing the gomacc.exe binary.
+    goma_dir = "C:\goma\goma-win64"
+  } else {
+    # Absolute directory containing the gomacc binary.
+    goma_dir = getenv("HOME") + "/goma"
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/linux/BUILD.gn
@@ -0,0 +1,204 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+clang_toolchain("clang_arm") {
+  toolprefix = "arm-linux-gnueabihf-"
+  toolchain_args = {
+    current_cpu = "arm"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_arm64") {
+  toolprefix = "aarch64-linux-gnu-"
+  toolchain_args = {
+    current_cpu = "arm64"
+    current_os = "linux"
+  }
+}
+
+gcc_toolchain("arm64") {
+  toolprefix = "aarch64-linux-gnu-"
+
+  cc = "${toolprefix}gcc"
+  cxx = "${toolprefix}g++"
+
+  ar = "${toolprefix}ar"
+  ld = cxx
+  readelf = "${toolprefix}readelf"
+  nm = "${toolprefix}nm"
+
+  toolchain_args = {
+    current_cpu = "arm64"
+    current_os = "linux"
+    is_clang = false
+  }
+}
+
+gcc_toolchain("arm") {
+  toolprefix = "arm-linux-gnueabihf-"
+
+  cc = "${toolprefix}gcc"
+  cxx = "${toolprefix}g++"
+
+  ar = "${toolprefix}ar"
+  ld = cxx
+  readelf = "${toolprefix}readelf"
+  nm = "${toolprefix}nm"
+
+  toolchain_args = {
+    current_cpu = "arm"
+    current_os = "linux"
+    is_clang = false
+  }
+}
+
+clang_toolchain("clang_x86") {
+  # Output linker map files for binary size analysis.
+  enable_linker_map = true
+
+  toolchain_args = {
+    current_cpu = "x86"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_x86_v8_arm") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "arm"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_x86_v8_mipsel") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "mipsel"
+    current_os = "linux"
+  }
+}
+
+gcc_toolchain("x86") {
+  cc = "gcc"
+  cxx = "g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  # Output linker map files for binary size analysis.
+  enable_linker_map = true
+
+  toolchain_args = {
+    current_cpu = "x86"
+    current_os = "linux"
+    is_clang = false
+  }
+}
+
+clang_toolchain("clang_x64") {
+  # Output linker map files for binary size analysis.
+  enable_linker_map = true
+
+  toolchain_args = {
+    current_cpu = "x64"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_x64_v8_arm64") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "arm64"
+    current_os = "linux"
+  }
+}
+
+clang_toolchain("clang_x64_v8_mips64el") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "mips64el"
+    current_os = "linux"
+  }
+}
+
+gcc_toolchain("x64") {
+  cc = "gcc"
+  cxx = "g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  # Output linker map files for binary size analysis.
+  enable_linker_map = true
+
+  toolchain_args = {
+    current_cpu = "x64"
+    current_os = "linux"
+    is_clang = false
+  }
+}
+
+clang_toolchain("clang_mipsel") {
+  toolchain_args = {
+    current_cpu = "mipsel"
+    current_os = "linux"
+  }
+}
+
+# GCC cross toolchain for 32-bit little-endian MIPS.
+gcc_toolchain("mipsel") {
+  cc = "mipsel-linux-gnu-gcc"
+  cxx = "mipsel-linux-gnu-g++"
+  ar = "mipsel-linux-gnu-ar"
+  ld = cxx
+  readelf = "mipsel-linux-gnu-readelf"
+  nm = "mipsel-linux-gnu-nm"
+
+  toolchain_args = {
+    # NOTE(review): goma and cc_wrapper are force-disabled here, unlike the
+    # sibling toolchains — presumably they don't support this cross compiler;
+    # confirm before relying on distributed builds for mipsel.
+    cc_wrapper = ""
+    current_cpu = "mipsel"
+    current_os = "linux"
+    is_clang = false
+    use_goma = false
+  }
+}
+
+gcc_toolchain("s390x") {
+  cc = "gcc"
+  cxx = "g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  toolchain_args = {
+    current_cpu = "s390x"
+    current_os = "linux"
+    is_clang = false
+  }
+}
+
+gcc_toolchain("ppc64") {
+  cc = "gcc"
+  cxx = "g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  toolchain_args = {
+    current_cpu = "ppc64"
+    current_os = "linux"
+    is_clang = false
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/mac/BUILD.gn
@@ -0,0 +1,530 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(brettw) Use "gcc_toolchain.gni" like the Linux toolchains. This requires
+# some enhancements since the commands on Mac are slightly different than on
+# Linux.
+
+import("../goma.gni")
+import("//build/config/clang/clang.gni")
+if (is_ios) {
+  import("//build/config/ios/ios_sdk.gni")
+}
+import("//build/config/mac/mac_sdk.gni")
+import("//build/config/mac/symbols.gni")
+
+assert(host_os == "mac")
+
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/clang_static_analyzer.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+import("//build/toolchain/concurrent_links.gni")
+
+declare_args() {
+  # Reduce the number of tasks using the copy_bundle_data and compile_xcassets
+  # tools as they can cause lots of I/O contention when invoking ninja with a
+  # large number of parallel jobs (e.g. when using distributed build like goma).
+  bundle_pool_depth = -1
+}
+
+if (current_toolchain == default_toolchain) {
+  pool("bundle_pool") {
+    if (bundle_pool_depth == -1) {
+      depth = concurrent_links
+    } else {
+      depth = bundle_pool_depth
+    }
+  }
+}
+
+# When implementing tools using Python scripts, a TOOL_VERSION=N env
+# variable is placed in front of the command. The N should be incremented
+# whenever the script is changed, so that the build system rebuilds all
+# edges that utilize the script. Ideally this should be changed to use
+# proper input-dirty checking, but that could be expensive. Instead, use a
+# script to get the tool scripts' modification time to use as the version.
+# This won't cause a re-generation of GN files when the tool script changes
+# but it will cause edges to be marked as dirty if the ninja files are
+# regenerated. See https://crbug.com/619083 for details. A proper fix
+# would be to have inputs to tools (https://crbug.com/621119).
+tool_versions =
+    exec_script("get_tool_mtime.py",
+                rebase_path([
+                              "//build/toolchain/mac/compile_xcassets.py",
+                              "//build/toolchain/mac/filter_libtool.py",
+                              "//build/toolchain/mac/linker_driver.py",
+                            ],
+                            root_build_dir),
+                "trim scope")
+
+# Shared toolchain definition. Invocations should set current_os to set the
+# build args in this definition.
+template("mac_toolchain") {
+  toolchain(target_name) {
+    if (use_system_xcode) {
+      env_wrapper = ""
+    } else {
+      env_wrapper = "export DEVELOPER_DIR=$hermetic_xcode_path; "
+    }
+
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    assert(defined(invoker.toolchain_args),
+           "Toolchains must declare toolchain_args")
+    toolchain_args = {
+      # Populate toolchain args from the invoker.
+      forward_variables_from(invoker.toolchain_args, "*")
+
+      # The host toolchain value computed by the default toolchain's setup
+      # needs to be passed through unchanged to all secondary toolchains to
+      # ensure that it's always the same, regardless of the values that may be
+      # set on those toolchains.
+      host_toolchain = host_toolchain
+    }
+
+    # Supports building with the version of clang shipped with Xcode when
+    # targeting iOS by not respecting clang_base_path.
+    if (toolchain_args.current_os == "ios" && use_xcode_clang) {
+      prefix = ""
+    } else {
+      prefix = rebase_path("$clang_base_path/bin/", root_build_dir)
+    }
+
+    _cc = "${prefix}clang"
+    _cxx = "${prefix}clang++"
+
+    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
+    # toolchain args, use those values, otherwise default to the global one.
+    # This works because the only reasonable override that toolchains might
+    # supply for these values are to force-disable them.
+    if (defined(toolchain_args.use_goma)) {
+      toolchain_uses_goma = toolchain_args.use_goma
+    } else {
+      toolchain_uses_goma = use_goma
+    }
+    if (defined(toolchain_args.cc_wrapper)) {
+      toolchain_cc_wrapper = toolchain_args.cc_wrapper
+    } else {
+      toolchain_cc_wrapper = cc_wrapper
+    }
+
+    # Compute the compiler prefix.
+    if (toolchain_uses_goma) {
+      assert(toolchain_cc_wrapper == "",
+             "Goma and cc_wrapper can't be used together.")
+      compiler_prefix = "$goma_dir/gomacc "
+    } else if (toolchain_cc_wrapper != "") {
+      compiler_prefix = toolchain_cc_wrapper + " "
+    } else {
+      compiler_prefix = ""
+    }
+
+    cc = compiler_prefix + _cc
+    cxx = compiler_prefix + _cxx
+    ld = _cxx
+
+    if (use_clang_static_analyzer) {
+      # Static analysis isn't supported under GOMA. See crbug.com/687245
+      # for progress on this issue.
+      assert(!use_goma, "'use_clang_static_analyzer' cannot be used with GOMA.")
+
+      # Call "ccc-analyzer" or "c++-analyzer" instead of directly calling Clang.
+      # |wrapper_tool| sets the environment variables which are read by the
+      # analyzer tools.
+      analyzer_wrapper =
+          rebase_path("//build/toolchain/clang_static_analyzer_wrapper.py",
+                      root_build_dir)
+      cc = analyzer_wrapper + " --clang-cc-path=${cc} --analyzer=" +
+           rebase_path("//third_party/scan-build/src/libexec/ccc-analyzer",
+                       root_build_dir)
+      cxx = analyzer_wrapper + " --clang-cxx-path=${cxx} --analyzer=" +
+            rebase_path("//third_party/scan-build/src/libexec/c++-analyzer",
+                        root_build_dir)
+
+      ld = cxx
+    }
+
+    linker_driver =
+        "TOOL_VERSION=${tool_versions.linker_driver} " +
+        rebase_path("//build/toolchain/mac/linker_driver.py", root_build_dir)
+
+    # On iOS, the final applications are assembled using lipo (to support fat
+    # builds). The correct flags are passed to the linker_driver.py script
+    # directly during the lipo call.
+    if (toolchain_args.current_os != "ios") {
+      _enable_dsyms = enable_dsyms
+      _save_unstripped_output = save_unstripped_output
+    } else {
+      _enable_dsyms = false
+      _save_unstripped_output = false
+    }
+
+    # Make these apply to all tools below.
+    lib_switch = "-l"
+    lib_dir_switch = "-L"
+
+    # Object files go in this directory. Use label_name instead of
+    # target_output_name since labels will generally have no spaces and will be
+    # unique in the directory.
+    object_subdir = "{{target_out_dir}}/{{label_name}}"
+
+    # If dSYMs are enabled, this flag will be added to the link tools.
+    if (_enable_dsyms) {
+      dsym_switch = " -Wcrl,dsym,{{root_out_dir}} "
+      dsym_output_dir =
+          "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.dSYM"
+      dsym_output = [
+        "$dsym_output_dir/",
+        "$dsym_output_dir/Contents/Info.plist",
+        "$dsym_output_dir/Contents/Resources/DWARF/" +
+            "{{target_output_name}}{{output_extension}}",
+      ]
+    } else {
+      dsym_switch = ""
+    }
+
+    if (_save_unstripped_output) {
+      _unstripped_output = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.unstripped"
+    }
+
+    tool("cc") {
+      depfile = "{{output}}.d"
+      precompiled_header_type = "gcc"
+      command = "$env_wrapper $cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CC {{output}}"
+      outputs = [
+        "$object_subdir/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("cxx") {
+      depfile = "{{output}}.d"
+      precompiled_header_type = "gcc"
+      command = "$env_wrapper $cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CXX {{output}}"
+      outputs = [
+        "$object_subdir/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("asm") {
+      # For GCC we can just use the C compiler to compile assembly.
+      depfile = "{{output}}.d"
+      command = "$env_wrapper $cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{asmflags}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "ASM {{output}}"
+      outputs = [
+        "$object_subdir/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("objc") {
+      depfile = "{{output}}.d"
+      precompiled_header_type = "gcc"
+      command = "$env_wrapper $cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_objc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "OBJC {{output}}"
+      outputs = [
+        "$object_subdir/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("objcxx") {
+      depfile = "{{output}}.d"
+      precompiled_header_type = "gcc"
+      command = "$env_wrapper $cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_objcc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "OBJCXX {{output}}"
+      outputs = [
+        "$object_subdir/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("alink") {
+      script =
+          rebase_path("//build/toolchain/mac/filter_libtool.py", root_build_dir)
+      command = "$env_wrapper rm -f {{output}} && TOOL_VERSION=${tool_versions.filter_libtool} python $script libtool -static {{arflags}} -o {{output}} {{inputs}}"
+      description = "LIBTOOL-STATIC {{output}}"
+      outputs = [
+        "{{output_dir}}/{{target_output_name}}{{output_extension}}",
+      ]
+      default_output_dir = "{{target_out_dir}}"
+      default_output_extension = ".a"
+      output_prefix = "lib"
+    }
+
+    tool("solink") {
+      dylib = "{{output_dir}}/{{target_output_name}}{{output_extension}}"  # eg "./libfoo.dylib"
+      rspfile = dylib + ".rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      # These variables are not built into GN but are helpers that implement
+      # (1) linking to produce a .dylib, (2) extracting the symbols from that
+      # file to a temporary file, (3) if the temporary file has differences from
+      # the existing .TOC file, overwrite it, otherwise, don't change it.
+      #
+      # As a special case, if the library reexports symbols from other dynamic
+      # libraries, we always update the .TOC and skip the temporary file and
+      # diffing steps, since that library always needs to be re-linked.
+      tocname = dylib + ".TOC"
+      temporary_tocname = dylib + ".tmp"
+
+      does_reexport_command = "[ ! -e \"$dylib\" -o ! -e \"$tocname\" ] || otool -l \"$dylib\" | grep -q LC_REEXPORT_DYLIB"
+
+      link_command = "$linker_driver $ld -shared "
+      if (is_component_build) {
+        link_command += " -Wl,-install_name,@rpath/\"{{target_output_name}}{{output_extension}}\" "
+      }
+      link_command += dsym_switch
+      link_command += "{{ldflags}} -o \"$dylib\" -Wl,-filelist,\"$rspfile\" {{libs}} {{solibs}}"
+
+      replace_command = "if ! cmp -s \"$temporary_tocname\" \"$tocname\"; then mv \"$temporary_tocname\" \"$tocname\""
+      extract_toc_command = "{ otool -l \"$dylib\" | grep LC_ID_DYLIB -A 5; nm -gP \"$dylib\" | cut -f1-2 -d' ' | grep -v U\$\$; true; }"
+
+      command = "$env_wrapper if $does_reexport_command ; then $link_command && $extract_toc_command > \"$tocname\"; else $link_command && $extract_toc_command > \"$temporary_tocname\" && $replace_command ; fi; fi"
+
+      rspfile_content = "{{inputs_newline}}"
+
+      description = "SOLINK {{output}}"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_dir = "{{root_out_dir}}"
+      default_output_extension = ".dylib"
+
+      output_prefix = "lib"
+
+      # Since the above commands only update the .TOC file when it changes, ask
+      # Ninja to check if the timestamp actually changed to know if downstream
+      # dependencies should be recompiled.
+      restat = true
+
+      # Tell GN about the output files. It will link to the dylib but use the
+      # tocname for dependency management.
+      outputs = [
+        dylib,
+        tocname,
+      ]
+      link_output = dylib
+      depend_output = tocname
+
+      if (_enable_dsyms) {
+        outputs += dsym_output
+      }
+      if (_save_unstripped_output) {
+        outputs += [ _unstripped_output ]
+      }
+    }
+
+    tool("solink_module") {
+      sofile = "{{output_dir}}/{{target_output_name}}{{output_extension}}"  # eg "./libfoo.so"
+      rspfile = sofile + ".rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      link_command = "$env_wrapper $linker_driver $ld -bundle {{ldflags}} -o \"$sofile\" -Wl,-filelist,\"$rspfile\""
+      if (is_component_build) {
+        link_command += " -Wl,-install_name,@rpath/{{target_output_name}}{{output_extension}}"
+      }
+      link_command += dsym_switch
+      link_command += " {{solibs}} {{libs}}"
+      command = link_command
+
+      rspfile_content = "{{inputs_newline}}"
+
+      description = "SOLINK_MODULE {{output}}"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_dir = "{{root_out_dir}}"
+      default_output_extension = ".so"
+
+      outputs = [
+        sofile,
+      ]
+
+      if (_enable_dsyms) {
+        outputs += dsym_output
+      }
+      if (_save_unstripped_output) {
+        outputs += [ _unstripped_output ]
+      }
+    }
+
+    tool("link") {
+      outfile = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+      rspfile = "$outfile.rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      # Note about --filelist: Apple's linker reads the file list file and
+      # interprets each newline-separated chunk of text as a file name. It
+      # doesn't do the things one would expect from the shell like unescaping
+      # or handling quotes. In contrast, when Ninja finds a file name with
+      # spaces, it single-quotes them in $inputs_newline as it would normally
+      # do for command-line arguments. Thus any source names with spaces, or
+      # label names with spaces (which GN bases the output paths on) will be
+      # corrupted by this process. Don't use spaces for source files or labels.
+      command = "$env_wrapper $linker_driver $ld $dsym_switch {{ldflags}} -o \"$outfile\" -Wl,-filelist,\"$rspfile\" {{solibs}} {{libs}}"
+      description = "LINK $outfile"
+      rspfile_content = "{{inputs_newline}}"
+      outputs = [
+        outfile,
+      ]
+
+      if (_enable_dsyms) {
+        outputs += dsym_output
+      }
+      if (_save_unstripped_output) {
+        outputs += [ _unstripped_output ]
+      }
+
+      default_output_dir = "{{root_out_dir}}"
+    }
+
+    # These two are really entirely generic, but have to be repeated in
+    # each toolchain because GN doesn't allow a template to be used here.
+    # See //build/toolchain/toolchain.gni for details.
+    tool("stamp") {
+      command = stamp_command
+      description = stamp_description
+    }
+    tool("copy") {
+      command = copy_command
+      description = copy_description
+    }
+
+    tool("copy_bundle_data") {
+      # copy_command uses hardlinks if possible, but this does not work with
+      # directories. If source is a directory, instead use "pax" to create
+      # the same tree structure using hardlinks to individual files (this
+      # preserve symbolic links too) as recommended in the replies to the
+      # question at http://serverfault.com/q/209888/43689 ("cp -al" isn't
+      # available on macOS).
+      #
+      # According to the man page for pax, the commands to use to clone
+      # olddir to newdir using pax are the following:
+      #
+      #   $ mkdir newdir
+      #   $ cd olddir
+      #   $ pax -rwl . ../newdir
+      #
+      # The _copydir command does exactly that but uses an absolute path
+      # constructed using the shell variable $OLDPWD (automatically set when
+      # cd is used), as computing the relative path is a bit complex and
+      # using pwd would require a sub-shell to be created.
+      _copydir = "mkdir -p {{output}} && cd {{source}} && " +
+                 "pax -rwl . \"\$OLDPWD\"/{{output}}"
+      command = "rm -rf {{output}} && if [[ -d {{source}} ]]; then " +
+                _copydir + "; else " + copy_command + "; fi"
+
+      description = "COPY_BUNDLE_DATA {{source}} {{output}}"
+      pool = ":bundle_pool($default_toolchain)"
+    }
+    tool("compile_xcassets") {
+      _tool = rebase_path("//build/toolchain/mac/compile_xcassets.py",
+                          root_build_dir)
+      if (is_ios) {
+        _sdk_name = ios_sdk_name
+        _min_deployment_target = ios_deployment_target
+      } else {
+        _sdk_name = mac_sdk_name
+        _min_deployment_target = mac_deployment_target
+      }
+      command = "$env_wrapper rm -f {{output}} && " +
+                "TOOL_VERSION=${tool_versions.compile_xcassets} " +
+                "python $_tool -p $_sdk_name -t $_min_deployment_target " +
+                "-T {{bundle_product_type}} -o {{output}} {{inputs}}"
+
+      description = "COMPILE_XCASSETS {{output}}"
+      pool = ":bundle_pool($default_toolchain)"
+    }
+  }
+}
+
+mac_toolchain("clang_arm") {
+  toolchain_args = {
+    current_cpu = "arm"
+    current_os = "mac"
+  }
+}
+
+mac_toolchain("clang_x64") {
+  toolchain_args = {
+    current_cpu = "x64"
+    current_os = "mac"
+  }
+}
+
+mac_toolchain("clang_x86") {
+  toolchain_args = {
+    current_cpu = "x86"
+    current_os = "mac"
+  }
+}
+
+mac_toolchain("clang_x86_v8_arm") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "arm"
+    current_os = "mac"
+  }
+}
+
+mac_toolchain("clang_x86_v8_mipsel") {
+  toolchain_args = {
+    current_cpu = "x86"
+    v8_current_cpu = "mipsel"
+    current_os = "mac"
+  }
+}
+
+mac_toolchain("clang_x64_v8_arm64") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "arm64"
+    current_os = "mac"
+  }
+}
+
+mac_toolchain("clang_x64_v8_mips64el") {
+  toolchain_args = {
+    current_cpu = "x64"
+    v8_current_cpu = "mips64el"
+    current_os = "mac"
+  }
+}
+
+if (is_ios) {
+  mac_toolchain("ios_clang_arm") {
+    toolchain_args = {
+      current_cpu = "arm"
+      current_os = "ios"
+    }
+  }
+
+  mac_toolchain("ios_clang_arm64") {
+    toolchain_args = {
+      current_cpu = "arm64"
+      current_os = "ios"
+    }
+  }
+
+  mac_toolchain("ios_clang_x86") {
+    toolchain_args = {
+      current_cpu = "x86"
+      current_os = "ios"
+    }
+  }
+
+  mac_toolchain("ios_clang_x64") {
+    toolchain_args = {
+      current_cpu = "x64"
+      current_os = "ios"
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/mac/compile_xcassets.py
@@ -0,0 +1,105 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+
+
+def CompileXCAssets(
+    output, platform, product_type, min_deployment_target, inputs):
+  """Compile the .xcassets bundles to an asset catalog using actool.
+
+  Args:
+    output: absolute path to the containing bundle
+    platform: the targeted platform
+    product_type: the bundle type
+    min_deployment_target: minimum deployment target
+    inputs: list of absolute paths to .xcassets bundles
+  """
+  command = [
+      'xcrun', 'actool', '--output-format=human-readable-text',
+      '--compress-pngs', '--notices', '--warnings', '--errors',
+      '--platform', platform, '--minimum-deployment-target',
+      min_deployment_target,
+  ]
+
+  if product_type != '':  # only pass --product-type when a value was given
+    command.extend(['--product-type', product_type])
+
+  if platform == 'macosx':
+    command.extend(['--target-device', 'mac'])
+  else:  # iphoneos / iphonesimulator (see Main's --platform choices)
+    command.extend(['--target-device', 'iphone', '--target-device', 'ipad'])
+
+  # actool crashes if paths are relative, so convert input and output paths
+  # to absolute paths.
+  command.extend(['--compile', os.path.dirname(os.path.abspath(output))])
+  command.extend(map(os.path.abspath, inputs))
+
+  # Run actool and redirect stdout and stderr to the same pipe (as actool
+  # is confused about what should go to stderr/stdout).
+  process = subprocess.Popen(
+      command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  stdout, _ = process.communicate()  # stderr was merged into stdout above
+
+  if process.returncode:  # actool itself reported a failure
+    sys.stderr.write(stdout)
+    sys.exit(process.returncode)
+
+  # In case of success, the output looks like the following:
+  #   /* com.apple.actool.compilation-results */
+  #   /Full/Path/To/Bundle.app/Assets.car
+  #
+  # Ignore any lines in the output matching those (last line is an empty line)
+  # and consider that the build failed if the output contains any other lines.
+  for line in stdout.splitlines():
+    if not line:
+      continue
+    if line == '/* com.apple.actool.compilation-results */':
+      continue
+    if line == os.path.abspath(output):
+      continue
+    sys.stderr.write(stdout)  # unexpected output: treat as an unreported failure
+    sys.exit(1)
+
+
+def Main():
+  # Parse the command line and delegate to CompileXCAssets.
+  parser = argparse.ArgumentParser(
+      description='compile assets catalog for a bundle')
+  parser.add_argument(
+      '--platform', '-p', required=True,
+      choices=('macosx', 'iphoneos', 'iphonesimulator'),
+      help='target platform for the compiled assets catalog')
+  parser.add_argument(
+      '--minimum-deployment-target', '-t', required=True,
+      help='minimum deployment target for the compiled assets catalog')
+  parser.add_argument(
+      '--output', '-o', required=True,
+      help='path to the compiled assets catalog')
+  parser.add_argument(
+      '--product-type', '-T',  # optional; None if omitted (the GN rule always passes -T)
+      help='type of the containing bundle')
+  parser.add_argument(
+      'inputs', nargs='+',
+      help='path to input assets catalog sources')
+  args = parser.parse_args()
+
+  if os.path.basename(args.output) != 'Assets.car':  # CompileXCAssets compiles into dirname(output)
+    sys.stderr.write(
+        'output should be path to compiled asset catalog, not '
+        'to the containing bundle: %s\n' % (args.output,))
+    sys.exit(1)
+
+  CompileXCAssets(
+      args.output,
+      args.platform,
+      args.product_type,
+      args.minimum_deployment_target,
+      args.inputs)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/mac/filter_libtool.py
@@ -0,0 +1,42 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import subprocess
+import sys
+
+# This script executes libtool and filters out logspam lines like:
+#    '/path/to/libtool: file: foo.o has no symbols'
+
+def Main(cmd_list):
+  # Runs libtool (cmd_list) and suppresses known harmless warnings on stderr.
+  libtool_re = re.compile(r'^.*libtool: (?:for architecture: \S* )?'
+                          r'file: .* has no symbols$')
+  libtool_re5 = re.compile(  # second logspam variant: empty table-of-contents warning
+      r'^.*libtool: warning for library: ' +
+      r'.* the table of contents is empty ' +
+      r'\(no object file members in the library define global symbols\)$')
+  env = os.environ.copy()
+  # Ref:
+  # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
+  # The problem with this flag is that it resets the file mtime on the file to
+  # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
+  env['ZERO_AR_DATE'] = '1'
+  libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
+  _, err = libtoolout.communicate()  # only stderr is captured; stdout passes through
+  for line in err.splitlines():
+    if not libtool_re.match(line) and not libtool_re5.match(line):
+      print >>sys.stderr, line  # forward anything that is not known logspam
+  # Unconditionally touch the output .a file on the command line if present
+  # and the command succeeded. A bit hacky.
+  if not libtoolout.returncode:
+    for i in range(len(cmd_list) - 1):
+      if cmd_list[i] == '-o' and cmd_list[i+1].endswith('.a'):
+        os.utime(cmd_list[i+1], None)  # compensates for ZERO_AR_DATE resetting the mtime
+        break
+  return libtoolout.returncode
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/mac/get_tool_mtime.py
@@ -0,0 +1,17 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+# Usage: python get_tool_mtime.py path/to/file1.py path/to/file2.py
+#
+# Prints a GN scope with the variable name being the basename sans-extension
+# and the value being the file modification time. A variable is emitted for
+# each file argument on the command line.
+
+if __name__ == '__main__':
+  for f in sys.argv[1:]:
+    variable = os.path.splitext(os.path.basename(f))[0]  # e.g. "a/b/tool.py" -> "tool"
+    print '%s = %d' % (variable, os.path.getmtime(f))  # GN scope assignment, e.g. "tool = 1480000000"
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/mac/linker_driver.py
@@ -0,0 +1,230 @@
+#!/usr/bin/env python
+
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+
+# The linker_driver.py is responsible for forwarding a linker invocation to
+# the compiler driver, while processing special arguments itself.
+#
+# Usage: linker_driver.py clang++ main.o -L. -llib -o prog -Wcrl,dsym,out
+#
+# On Mac, the logical step of linking is handled by three discrete tools to
+# perform the image link, debug info link, and strip. The linker_driver.py
+# combines these three steps into a single tool.
+#
+# The command passed to the linker_driver.py should be the compiler driver
+# invocation for the linker. It is first invoked unaltered (except for the
+# removal of the special driver arguments, described below). Then the driver
+# performs additional actions, based on these arguments:
+#
+#   -Wcrl,dsym,<dsym_path_prefix>
+#       After invoking the linker, this will run `dsymutil` on the linker's
+#       output, producing a dSYM bundle, stored at dsym_path_prefix. As an
+#       example, if the linker driver were invoked with:
+#         "... -o out/gn/obj/foo/libbar.dylib ... -Wcrl,dsym,out/gn ..."
+#       The resulting dSYM would be out/gn/libbar.dylib.dSYM/.
+#
+#   -Wcrl,unstripped,<unstripped_path_prefix>
+#       After invoking the linker, and before strip, this will save a copy of
+#       the unstripped linker output in the directory unstripped_path_prefix.
+#
+#   -Wcrl,strip,<strip_arguments>
+#       After invoking the linker, and optionally dsymutil, this will run
+#       the strip command on the linker's output. strip_arguments are
+#       comma-separated arguments to be passed to the strip command.
+
+def Main(args):
+  """Main function for the linker driver. Separates out the arguments for
+  the main compiler driver and the linker driver, then invokes all the
+  required tools.
+
+  Args:
+    args: list of string, Arguments to the script.
+  """
+
+  if len(args) < 2:
+    raise RuntimeError("Usage: linker_driver.py [linker-invocation]")
+
+  for i in xrange(len(args)):
+    if args[i] != '--developer_dir':
+      continue
+    os.environ['DEVELOPER_DIR'] = args[i + 1]  # honored by xcrun when locating tools
+    del args[i:i+2]  # drop the flag and its value from the forwarded command
+    break
+
+  # Collect arguments to the linker driver (this script) and remove them from
+  # the arguments being passed to the compiler driver.
+  linker_driver_actions = {}
+  compiler_driver_args = []
+  for arg in args[1:]:
+    if arg.startswith(_LINKER_DRIVER_ARG_PREFIX):
+      # Convert driver actions into a map of name => lambda to invoke.
+      driver_action = ProcessLinkerDriverArg(arg)
+      assert driver_action[0] not in linker_driver_actions  # duplicate -Wcrl actions not allowed
+      linker_driver_actions[driver_action[0]] = driver_action[1]
+    else:
+      compiler_driver_args.append(arg)
+
+  linker_driver_outputs = [_FindLinkerOutput(compiler_driver_args)]  # the linked image itself
+
+  try:
+    # Run the linker by invoking the compiler driver.
+    subprocess.check_call(compiler_driver_args)
+
+    # Run the linker driver actions, in the order specified by the actions list.
+    for action in _LINKER_DRIVER_ACTIONS:
+      name = action[0]
+      if name in linker_driver_actions:
+        linker_driver_outputs += linker_driver_actions[name](args)  # actions may add e.g. dSYM paths
+  except:
+    # If a linker driver action failed, remove all the outputs to make the
+    # build step atomic.
+    map(_RemovePath, linker_driver_outputs)
+
+    # Re-report the original failure.
+    raise
+
+
+def ProcessLinkerDriverArg(arg):
+  """Processes a linker driver argument and returns a tuple containing the
+  name and unary lambda to invoke for that linker driver action.
+
+  Args:
+    arg: string, The linker driver argument.
+
+  Returns:
+    A 2-tuple:
+      0: The driver action name, as in _LINKER_DRIVER_ACTIONS.
+      1: An 1-ary lambda that takes the full list of arguments passed to
+         Main(). The lambda should call the linker driver action that
+         corresponds to the argument and return a list of outputs from the
+         action.
+  """
+  if not arg.startswith(_LINKER_DRIVER_ARG_PREFIX):
+    raise ValueError('%s is not a linker driver argument' % (arg,))
+
+  sub_arg = arg[len(_LINKER_DRIVER_ARG_PREFIX):]  # e.g. "dsym,out/gn"
+
+  for driver_action in _LINKER_DRIVER_ACTIONS:
+    (name, action) = driver_action
+    if sub_arg.startswith(name):
+      return (name,
+          lambda full_args: action(sub_arg[len(name):], full_args))  # remainder after the name is the action's argument
+
+  raise ValueError('Unknown linker driver argument: %s' % (arg,))
+
+
+def RunDsymUtil(dsym_path_prefix, full_args):
+  """Linker driver action for -Wcrl,dsym,<dsym-path-prefix>. Invokes dsymutil
+  on the linker's output and produces a dsym file at |dsym_file| path.
+
+  Args:
+    dsym_path_prefix: string, The path at which the dsymutil output should be
+        located.
+    full_args: list of string, Full argument list for the linker driver.
+
+  Returns:
+      list of string, Build step outputs.
+  """
+  if not len(dsym_path_prefix):
+    raise ValueError('Unspecified dSYM output file')
+
+  linker_out = _FindLinkerOutput(full_args)
+  base = os.path.basename(linker_out)
+  dsym_out = os.path.join(dsym_path_prefix, base + '.dSYM')  # e.g. out/gn/libbar.dylib.dSYM
+
+  # Remove old dSYMs before invoking dsymutil.
+  _RemovePath(dsym_out)
+  subprocess.check_call(['xcrun', 'dsymutil', '-o', dsym_out, linker_out])
+  return [dsym_out]
+
+
+def RunSaveUnstripped(unstripped_path_prefix, full_args):
+  """Linker driver action for -Wcrl,unstripped,<unstripped_path_prefix>. Copies
+  the linker output to |unstripped_path_prefix| before stripping.
+
+  Args:
+    unstripped_path_prefix: string, The path at which the unstripped output
+        should be located.
+    full_args: list of string, Full argument list for the linker driver.
+
+  Returns:
+    list of string, Build step outputs.
+  """
+  if not len(unstripped_path_prefix):
+    raise ValueError('Unspecified unstripped output file')
+
+  linker_out = _FindLinkerOutput(full_args)
+  base = os.path.basename(linker_out)
+  unstripped_out = os.path.join(unstripped_path_prefix, base + '.unstripped')  # e.g. <prefix>/libbar.dylib.unstripped
+
+  shutil.copyfile(linker_out, unstripped_out)
+  return [unstripped_out]
+
+
+def RunStrip(strip_args_string, full_args):
+  """Linker driver action for -Wcrl,strip,<strip_arguments>.
+
+  Args:
+      strip_args_string: string, Comma-separated arguments for `strip`.
+      full_args: list of string, Full arguments for the linker driver.
+
+  Returns:
+      list of string, Build step outputs.
+  """
+  strip_command = ['xcrun', 'strip']
+  if len(strip_args_string) > 0:
+    strip_command += strip_args_string.split(',')  # args arrive comma-separated after -Wcrl,strip,
+  strip_command.append(_FindLinkerOutput(full_args))
+  subprocess.check_call(strip_command)
+  return []  # strips in place; no additional outputs
+
+
+def _FindLinkerOutput(full_args):
+  """Finds the output of the linker by looking for the output flag in its
+  argument list. As this is a required linker argument, raises an error if it
+  cannot be found.
+  """
+  # The linker_driver.py script may be used to wrap either the compiler linker
+  # (uses -o to configure the output) or lipo (uses -output to configure the
+  # output). Since wrapping the compiler linker is the most likely possibility
+  # use try/except and fallback to checking for -output if -o is not found.
+  try:
+    output_flag_index = full_args.index('-o')
+  except ValueError:
+    output_flag_index = full_args.index('-output')  # raises ValueError if neither flag is present
+  return full_args[output_flag_index + 1]  # the path follows the flag
+
+
+def _RemovePath(path):
+  """Removes the file or directory at |path| if it exists."""
+  if os.path.exists(path):
+    if os.path.isdir(path):
+      shutil.rmtree(path)  # e.g. a .dSYM bundle directory
+    else:
+      os.unlink(path)
+
+
+_LINKER_DRIVER_ARG_PREFIX = '-Wcrl,'
+
+"""List of linker driver actions. The sort order of this list affects the
+order in which the actions are invoked. The first item in the tuple is the
+argument's -Wcrl,<sub_argument> and the second is the function to invoke.
+"""
+_LINKER_DRIVER_ACTIONS = [
+    ('dsym,', RunDsymUtil),
+    ('unstripped,', RunSaveUnstripped),
+    ('strip,', RunStrip),
+]
+
+
+if __name__ == '__main__':
+  Main(sys.argv)
+  sys.exit(0)
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/nacl/BUILD.gn
@@ -0,0 +1,266 @@
+# Copyright (c) 2014 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+import("//build/config/nacl/config.gni")
+import("//build/toolchain/nacl_toolchain.gni")
+
+# Add the toolchain revision as a preprocessor define so that sources are
+# rebuilt when a toolchain is updated.
+# Ideally we would use the toolchain deps feature, but currently that feature
+# is bugged and does not trigger a rebuild.
+# https://code.google.com/p/chromium/issues/detail?id=431880
+# Calls to get the toolchain revision are relatively slow, so do them all in a
+# single batch to amortize python startup, etc.
+revisions = exec_script("//native_client/build/get_toolchain_revision.py",
+                        [
+                          "nacl_x86_glibc",
+                          "nacl_arm_glibc",
+                          "pnacl_newlib",
+                        ],
+                        "trim list lines")
+nacl_x86_glibc_rev = revisions[0]
+nacl_arm_glibc_rev = revisions[1]
+
+pnacl_newlib_rev = revisions[2]
+
+if (host_os == "win") {
+  toolsuffix = ".exe"
+} else {
+  toolsuffix = ""
+}
+
+# The PNaCl toolchain tools are all wrapper scripts rather than binary
+# executables.  On POSIX systems, nobody cares what kind of executable
+# file you are.  But on Windows, scripts (.bat files) cannot be run
+# directly and need the Windows shell (cmd.exe) specified explicitly.
+if (host_os == "win") {
+  # NOTE!  The //build/toolchain/gcc_*_wrapper.py scripts recognize
+  # this exact prefix string, so they must be updated if this string
+  # is changed in any way.
+  scriptprefix = "cmd /c call "
+  scriptsuffix = ".bat"
+} else {
+  scriptprefix = ""
+  scriptsuffix = ""
+}
+
+# When the compilers are run via goma or ccache rather than directly by
+# GN/Ninja, the goma/ccache wrapper handles .bat files but gets confused
+# by being given the scriptprefix.
+if (host_os == "win" && !use_goma && cc_wrapper == "") {
+  compiler_scriptprefix = scriptprefix
+} else {
+  compiler_scriptprefix = ""
+}
+
+template("pnacl_toolchain") {
+  assert(defined(invoker.executable_extension),
+         "Must define executable_extension")
+
+  nacl_toolchain(target_name) {
+    toolchain_package = "pnacl_newlib"  # all pnacl variants share this package
+    toolchain_revision = pnacl_newlib_rev
+    toolprefix =
+        rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/pnacl-",
+                    root_build_dir)
+
+    cc = compiler_scriptprefix + toolprefix + "clang" + scriptsuffix  # compilers may run under goma/ccache, hence compiler_scriptprefix
+    cxx = compiler_scriptprefix + toolprefix + "clang++" + scriptsuffix
+    ar = scriptprefix + toolprefix + "ar" + scriptsuffix
+    readelf = scriptprefix + toolprefix + "readelf" + scriptsuffix
+    nm = scriptprefix + toolprefix + "nm" + scriptsuffix
+    if (defined(invoker.strip)) {
+      strip = scriptprefix + toolprefix + invoker.strip + scriptsuffix
+    }
+    forward_variables_from(invoker,
+                           [
+                             "executable_extension",
+                             "is_clang_analysis_supported",
+                           ])
+
+    # Note this is not the usual "ld = cxx" because "ld" uses are
+    # never run via goma, so this needs scriptprefix.
+    ld = scriptprefix + toolprefix + "clang++" + scriptsuffix
+
+    toolchain_args = {
+      is_clang = true
+      current_cpu = "pnacl"
+    }
+  }
+}
+
+pnacl_toolchain("newlib_pnacl") {
+  executable_extension = ".pexe"
+
+  # The pnacl-finalize tool turns a .pexe.debug file into a .pexe file.
+  # It's very similar in purpose to the traditional "strip" utility: it
+  # turns what comes out of the linker into what you actually want to
+  # distribute and run.  PNaCl doesn't have a "strip"-like utility that
+  # you ever actually want to use other than pnacl-finalize, so just
+  # make pnacl-finalize the strip tool rather than adding an additional
+  # step like "postlink" to run pnacl-finalize.
+  strip = "finalize"  # i.e. the strip tool is pnacl-finalize
+}
+
+pnacl_toolchain("newlib_pnacl_nonsfi") {
+  executable_extension = ""
+  strip = "strip"
+
+  if (use_clang_static_analyzer) {
+    is_clang_analysis_supported = false  # NOTE(review): presumably the analyzer cannot wrap these wrapper scripts -- verify
+  }
+}
+
+template("nacl_glibc_toolchain") {
+  toolchain_cpu = target_name  # the template name doubles as the target CPU
+  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+  assert(defined(invoker.toolchain_package), "Must define toolchain_package")
+  assert(defined(invoker.toolchain_revision), "Must define toolchain_revision")
+  forward_variables_from(invoker,
+                         [
+                           "toolchain_package",
+                           "toolchain_revision",
+                         ])
+
+  toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
+                               invoker.toolchain_tuple + "-",
+                           root_build_dir)
+
+  nacl_toolchain("glibc_" + toolchain_cpu) {
+    cc = toolprefix + "gcc" + toolsuffix
+    cxx = toolprefix + "g++" + toolsuffix
+    ar = toolprefix + "ar" + toolsuffix
+    ld = cxx  # link with the C++ driver
+    readelf = toolprefix + "readelf" + toolsuffix
+    nm = toolprefix + "nm" + toolsuffix
+    strip = toolprefix + "strip" + toolsuffix
+
+    toolchain_args = {
+      current_cpu = toolchain_cpu
+      is_clang = false
+      is_nacl_glibc = true
+    }
+  }
+}
+
+nacl_glibc_toolchain("x86") {
+  toolchain_package = "nacl_x86_glibc"
+  toolchain_revision = nacl_x86_glibc_rev
+
+  # Rely on the :compiler_cpu_abi config adding the -m32 flag here rather
+  # than using the i686-nacl binary directly.  This is a because i686-nacl-gcc
+  # is a shell script wrapper around x86_64-nacl-gcc and goma has trouble with
+  # compiler executables that are shell scripts (so the i686 'compiler' is not
+  # currently in goma).
+  toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_glibc_toolchain("x64") {
+  toolchain_package = "nacl_x86_glibc"
+  toolchain_revision = nacl_x86_glibc_rev
+  toolchain_tuple = "x86_64-nacl"  # same tuple as x86; the toolchain is natively x86_64
+}
+
+nacl_glibc_toolchain("arm") {
+  toolchain_package = "nacl_arm_glibc"
+  toolchain_revision = nacl_arm_glibc_rev
+  toolchain_tuple = "arm-nacl"
+}
+
+template("nacl_clang_toolchain") {
+  toolchain_cpu = target_name  # the template name doubles as the target CPU
+  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+
+  toolchain_package = "pnacl_newlib"  # clang newlib tools ship in the pnacl package
+  toolchain_revision = pnacl_newlib_rev
+  toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
+                               invoker.toolchain_tuple + "-",
+                           root_build_dir)
+
+  nacl_toolchain("clang_newlib_" + toolchain_cpu) {
+    cc = toolprefix + "clang" + toolsuffix
+    cxx = toolprefix + "clang++" + toolsuffix
+    ar = toolprefix + "ar" + toolsuffix
+    ld = cxx  # link with the C++ driver
+    readelf = toolprefix + "readelf" + toolsuffix
+    nm = toolprefix + "nm" + toolsuffix
+    strip = toolprefix + "strip" + toolsuffix
+
+    toolchain_args = {
+      current_cpu = toolchain_cpu
+      is_clang = true
+    }
+  }
+}
+
+template("nacl_irt_toolchain") {
+  toolchain_cpu = target_name  # the template name doubles as the target CPU
+  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+
+  toolchain_package = "pnacl_newlib"
+  toolchain_revision = pnacl_newlib_rev
+  toolprefix = rebase_path("${nacl_toolchain_dir}/${toolchain_package}/bin/" +
+                               invoker.toolchain_tuple + "-",
+                           root_build_dir)
+
+  link_irt = rebase_path("//native_client/build/link_irt.py", root_build_dir)
+
+  tls_edit_label =
+      "//native_client/src/tools/tls_edit:tls_edit($host_toolchain)"  # tls_edit is built with the host toolchain
+  host_toolchain_out_dir =
+      rebase_path(get_label_info(tls_edit_label, "root_out_dir"),
+                  root_build_dir)
+  tls_edit = "${host_toolchain_out_dir}/tls_edit"
+
+  nacl_toolchain("irt_" + toolchain_cpu) {
+    cc = toolprefix + "clang" + toolsuffix
+    cxx = toolprefix + "clang++" + toolsuffix
+    ar = toolprefix + "ar" + toolsuffix
+    readelf = toolprefix + "readelf" + toolsuffix
+    nm = toolprefix + "nm" + toolsuffix
+    strip = toolprefix + "strip" + toolsuffix
+
+    # Some IRT implementations (notably, Chromium's) contain C++ code,
+    # so we need to link w/ the C++ linker.
+    ld = "${python_path} ${link_irt} --tls-edit=${tls_edit} --link-cmd=${cxx} --readelf-cmd=${readelf}"
+
+    toolchain_args = {
+      current_cpu = toolchain_cpu
+      is_clang = true
+    }
+
+    # TODO(ncbray): depend on link script
+    deps = [
+      tls_edit_label,
+    ]
+  }
+}
+
+template("nacl_clang_toolchains") {  # defines both clang_newlib_<cpu> and irt_<cpu> toolchains
+  assert(defined(invoker.toolchain_tuple), "Must define toolchain_tuple")
+  nacl_clang_toolchain(target_name) {
+    toolchain_tuple = invoker.toolchain_tuple
+  }
+  nacl_irt_toolchain(target_name) {
+    toolchain_tuple = invoker.toolchain_tuple
+  }
+}
+
+nacl_clang_toolchains("x86") {
+  # Rely on :compiler_cpu_abi adding -m32.  See nacl_x86_glibc above.
+  toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_clang_toolchains("x64") {
+  toolchain_tuple = "x86_64-nacl"
+}
+
+nacl_clang_toolchains("arm") {
+  toolchain_tuple = "arm-nacl"
+}
+
+nacl_clang_toolchains("mipsel") {
+  toolchain_tuple = "mipsel-nacl"
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/nacl_toolchain.gni
@@ -0,0 +1,56 @@
+# Copyright (c) 2014 The Native Client Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/nacl/config.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+# This template defines a NaCl toolchain.
+#
+# It requires the following variables specifying the executables to run:
+#  - cc
+#  - cxx
+#  - ar
+#  - ld
+
+template("nacl_toolchain") {
+  assert(defined(invoker.cc), "nacl_toolchain() must specify a \"cc\" value")
+  assert(defined(invoker.cxx), "nacl_toolchain() must specify a \"cxx\" value")
+  assert(defined(invoker.ar), "nacl_toolchain() must specify a \"ar\" value")
+  assert(defined(invoker.ld), "nacl_toolchain() must specify a \"ld\" value")
+  gcc_toolchain(target_name) {
+    if (defined(invoker.executable_extension)) {
+      executable_extension = invoker.executable_extension
+    } else {
+      executable_extension = ".nexe"  # default NaCl executable extension
+    }
+    rebuild_define = "NACL_TC_REV=" + invoker.toolchain_revision  # forces rebuilds when the toolchain revision changes
+
+    forward_variables_from(invoker,
+                           [
+                             "ar",
+                             "cc",
+                             "cxx",
+                             "deps",
+                             "is_clang_analysis_supported",
+                             "ld",
+                             "link_outputs",
+                             "nm",
+                             "readelf",
+                             "strip",
+                           ])
+
+    toolchain_args = {
+      # Use all values set on the invoker's toolchain_args.
+      forward_variables_from(invoker.toolchain_args, "*")
+
+      current_os = "nacl"
+
+      # We do not support component builds with the NaCl toolchains.
+      is_component_build = false
+
+      # We do not support tcmalloc in the NaCl toolchains.
+      use_allocator = "none"
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/toolchain.gni
@@ -0,0 +1,129 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Toolchain-related configuration that may be needed outside the context of the
+# toolchain() rules themselves.
+
+import("//build_overrides/build.gni")
+import("//build/config/chrome_build.gni")
+
+declare_args() {
+  # Enable Link Time Optimization in optimized builds (output programs run
+  # faster, but linking is up to 5-20x slower).
+  # Note: use target_os == "linux" rather than is_linux so that it does not
+  # apply to host_toolchain when target_os="android".
+  allow_posix_link_time_opt =
+      is_clang && target_os == "linux" && !is_chromeos && target_cpu == "x64" &&
+      is_official_build
+
+  # If used with allow_posix_link_time_opt, it enables the experimental support
+  # of ThinLTO that links 3x-10x faster but (as of now) does not have all the
+  # important optimizations such as devirtualization implemented. See also
+  # http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html
+  use_thin_lto = false
+
+  # If this is set to true, or if LLVM_FORCE_HEAD_REVISION is set to 1
+  # in the environment, we use the revision in the llvm repo to determine
+  # the CLANG_REVISION to use, instead of the version hard-coded into
+  # //tools/clang/scripts/update.py. This should only be used in
+  # conjunction with setting LLVM_FORCE_HEAD_REVISION in the
+  # environment when `gclient runhooks` is run as well.
+  llvm_force_head_revision = false
+
+  # Compile with Xcode version of clang instead of hermetic version shipped
+  # with the build. Used on iOS to ship official builds (as they are built
+  # with the version of clang shipped with Xcode).
+  use_xcode_clang = is_ios && is_official_build
+
+  # Used for binary size analysis.
+  # Currently disabled on LLD because of a bug (fixed upstream).
+  # See https://crbug.com/716209.
+  generate_linker_map = is_android && is_official_build
+}
+
+if (generate_linker_map) {
+  assert(
+      is_official_build,
+      "Linker map files should only be generated when is_official_build = true")
+  assert(target_os == "android" || target_os == "linux",
+         "Linker map files should only be generated for Android and Linux")
+}
+
+# The path to the hermetic install of Xcode. Only relevant when
+# use_system_xcode = false.
+hermetic_xcode_path =
+    rebase_path("//build/${target_os}_files/Xcode.app", "", root_build_dir)
+
+declare_args() {
+  if (is_clang) {
+    # Clang compiler version. Clang files are placed at version-dependent paths.
+    clang_version = "5.0.0"
+  }
+
+  # Set to true to use lld, the LLVM linker. This flag may be used on Windows
+  # or Linux.
+  use_lld = (is_win && host_os != "win") ||
+            (allow_posix_link_time_opt && target_os == "linux" &&
+             !is_chromeos && target_cpu == "x64")
+}
+
+# Check target_os here instead of is_ios as this file is loaded for secondary
+# toolchain (host toolchain in particular) but the argument is the same for
+# all toolchains.
+assert(!use_xcode_clang || target_os == "ios",
+       "Using Xcode's clang is only supported in iOS builds")
+
+# Subdirectory within root_out_dir for shared library files.
+# TODO(agrieve): GYP sets this to "lib" for Linux & Android, but this won't work
+#     in GN until support for loadable_module() is added.
+#     See: https://codereview.chromium.org/1236503002/
+shlib_subdir = "."
+
+# Root out dir for shared library files.
+root_shlib_dir = root_out_dir
+if (shlib_subdir != ".") {
+  root_shlib_dir += "/$shlib_subdir"
+}
+
+# Extension for shared library files (including leading dot).
+if (is_mac || is_ios) {
+  shlib_extension = ".dylib"
+} else if (is_android && is_component_build) {
+  # By appending .cr, we prevent name collisions with libraries already
+  # loaded by the Android zygote.
+  shlib_extension = ".cr.so"
+} else if (is_posix) {
+  shlib_extension = ".so"
+} else if (is_win) {
+  shlib_extension = ".dll"
+} else {
+  assert(false, "Platform not supported")
+}
+
+# Prefix for shared library files.
+if (is_posix) {
+  shlib_prefix = "lib"
+} else {
+  shlib_prefix = ""
+}
+
+# While other "tool"s in a toolchain are specific to the target of that
+# toolchain, the "stamp" and "copy" tools are really generic to the host;
+# but each toolchain must define them separately.  GN doesn't allow a
+# template instantiation inside a toolchain definition, so some boilerplate
+# has to be repeated in each toolchain to define these two tools.  These
+# four variables reduce the duplication in that boilerplate.
+stamp_description = "STAMP {{output}}"
+copy_description = "COPY {{source}} {{output}}"
+if (host_os == "win") {
+  _tool_wrapper_path =
+      rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir)
+
+  stamp_command = "$python_path $_tool_wrapper_path stamp {{output}}"
+  copy_command =
+      "$python_path $_tool_wrapper_path recursive-mirror {{source}} {{output}}"
+} else {
+  stamp_command = "touch {{output}}"
+  copy_command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/win/BUILD.gn
@@ -0,0 +1,448 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/clang_static_analyzer.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/toolchain.gni")
+
+# Should only be running on Windows.
+assert(is_win)
+
+# Setup the Visual Studio state.
+#
+# Its arguments are the VS path and the compiler wrapper tool. It will write
+# "environment.x86" and "environment.x64" to the build directory and return a
+# list to us.
+
+# This tool is used as a wrapper for various commands below.
+tool_wrapper_path = rebase_path("tool_wrapper.py", root_build_dir)
+
+if (use_goma) {
+  goma_prefix = "$goma_dir/gomacc.exe "
+} else {
+  goma_prefix = ""
+}
+
+# Copy the VS runtime DLL for the default toolchain to the root build directory
+# so things will run.
+if (current_toolchain == default_toolchain) {
+  if (is_debug) {
+    configuration_name = "Debug"
+  } else {
+    configuration_name = "Release"
+  }
+  exec_script("../../vs_toolchain.py",
+              [
+                "copy_dlls",
+                rebase_path(root_build_dir),
+                configuration_name,
+                target_cpu,
+              ])
+}
+
+# Parameters:
+#   environment: File name of environment file.
+#
+# You would also define a toolchain_args variable with at least these set:
+#   current_cpu: current_cpu to pass as a build arg
+#   current_os: current_os to pass as a build arg
+template("msvc_toolchain") {
+  toolchain(target_name) {
+    # When invoking this toolchain not as the default one, these args will be
+    # passed to the build. They are ignored when this is the default toolchain.
+    assert(defined(invoker.toolchain_args))
+    toolchain_args = {
+      if (defined(invoker.toolchain_args)) {
+        forward_variables_from(invoker.toolchain_args, "*")
+      }
+
+      # This value needs to be passed through unchanged.
+      host_toolchain = host_toolchain
+
+      current_os = "win"
+    }
+
+    # Make these apply to all tools below.
+    lib_switch = ""
+    lib_dir_switch = "/LIBPATH:"
+
+    # Object files go in this directory.
+    object_subdir = "{{target_out_dir}}/{{label_name}}"
+
+    env = invoker.environment
+
+    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
+    # toolchain args, use those values, otherwise default to the global one.
+    # This works because the only reasonable override that toolchains might
+    # supply for these values are to force-disable them.
+    if (defined(toolchain_args.is_clang)) {
+      toolchain_uses_clang = toolchain_args.is_clang
+    } else {
+      toolchain_uses_clang = is_clang
+    }
+
+    if (toolchain_uses_clang && host_os != "win") {
+      # This toolchain definition uses response files for compilations.  GN uses
+      # the quoting rules of the host OS, while clang-cl always defaults to
+      # cmd.exe quoting rules for parsing response files.  Tell clang-cl to use
+      # POSIX quoting rules, so it can understand what GN generates.
+      cl = "${invoker.cl} --rsp-quoting=posix"
+    } else {
+      cl = invoker.cl
+    }
+
+    if (toolchain_uses_clang && use_clang_static_analyzer) {
+      analyzer_prefix =
+          "$python_path " +
+          rebase_path("//build/toolchain/clang_static_analyzer_wrapper.py",
+                      root_build_dir) + " --mode=cl"
+      cl = "${analyzer_prefix} ${cl}"
+    }
+
+    if (use_lld) {
+      if (host_os == "win") {
+        lld_link = "lld-link.exe"
+      } else {
+        lld_link = "lld-link"
+      }
+      prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+
+      # lld-link includes a replacement for lib.exe that can produce thin
+      # archives and understands bitcode (for lto builds).
+      lib = "$prefix/$lld_link /lib /llvmlibthin"
+      link = "$prefix/$lld_link"
+    } else {
+      lib = "lib.exe"
+      link = "link.exe"
+    }
+
+    # If possible, pass system includes as flags to the compiler.  When that's
+    # not possible, load a full environment file (containing %INCLUDE% and
+    # %PATH%) -- e.g. 32-bit MSVS builds require %PATH% to be set and just
+    # passing in a list of include directories isn't enough.
+    if (defined(invoker.sys_include_flags)) {
+      env_wrapper = ""
+      sys_include_flags = "${invoker.sys_include_flags} "  # Note trailing space.
+    } else {
+      # clang-cl doesn't need this env hoop, so omit it there.
+      assert(!toolchain_uses_clang)
+      env_wrapper = "ninja -t msvc -e $env -- "  # Note trailing space.
+      sys_include_flags = ""
+    }
+
+    tool("cc") {
+      rspfile = "{{output}}.rsp"
+      precompiled_header_type = "msvc"
+      pdbname = "{{target_out_dir}}/{{label_name}}_c.pdb"
+
+      # Label names may have spaces in them so the pdbname must be quoted. The
+      # source and output don't need to be quoted because GN knows they're a
+      # full file name and will quote automatically when necessary.
+      command = "$env_wrapper$cl /nologo /showIncludes /FC @$rspfile /c {{source}} /Fo{{output}} /Fd\"$pdbname\""
+      depsformat = "msvc"
+      description = "CC {{output}}"
+      outputs = [
+        "$object_subdir/{{source_name_part}}.obj",
+      ]
+      rspfile_content = "$sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}"
+    }
+
+    tool("cxx") {
+      rspfile = "{{output}}.rsp"
+      precompiled_header_type = "msvc"
+
+      # The PDB name needs to be different between C and C++ compiled files.
+      pdbname = "{{target_out_dir}}/{{label_name}}_cc.pdb"
+
+      # See comment in CC tool about quoting.
+      command = "$env_wrapper$cl /nologo /showIncludes /FC @$rspfile /c {{source}} /Fo{{output}} /Fd\"$pdbname\""
+      depsformat = "msvc"
+      description = "CXX {{output}}"
+      outputs = [
+        "$object_subdir/{{source_name_part}}.obj",
+      ]
+      rspfile_content = "$sys_include_flags{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}"
+    }
+
+    tool("rc") {
+      command = "$python_path $tool_wrapper_path rc-wrapper $env rc.exe {{defines}} {{include_dirs}} /fo{{output}} {{source}}"
+      outputs = [
+        "$object_subdir/{{source_name_part}}.res",
+      ]
+      description = "RC {{output}}"
+    }
+
+    tool("asm") {
+      if (toolchain_args.current_cpu == "x64") {
+        ml = "ml64.exe"
+      } else {
+        ml = "ml.exe"
+      }
+      command = "$python_path $tool_wrapper_path asm-wrapper $env $ml {{defines}} {{include_dirs}} {{asmflags}} /c /Fo{{output}} {{source}}"
+      description = "ASM {{output}}"
+      outputs = [
+        "$object_subdir/{{source_name_part}}.obj",
+      ]
+    }
+
+    tool("alink") {
+      rspfile = "{{output}}.rsp"
+      command = "$python_path $tool_wrapper_path link-wrapper $env False $lib /nologo {{arflags}} /OUT:{{output}} @$rspfile"
+      description = "LIB {{output}}"
+      outputs = [
+        # Ignore {{output_extension}} and always use .lib, there's no reason to
+        # allow targets to override this extension on Windows.
+        "{{output_dir}}/{{target_output_name}}.lib",
+      ]
+      default_output_extension = ".lib"
+      default_output_dir = "{{target_out_dir}}"
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{inputs_newline}}"
+    }
+
+    tool("solink") {
+      dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"  # e.g. foo.dll
+      libname = "${dllname}.lib"  # e.g. foo.dll.lib
+      pdbname = "${dllname}.pdb"
+      rspfile = "${dllname}.rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      command = "$python_path $tool_wrapper_path link-wrapper $env False $link /nologo /IMPLIB:$libname /DLL /OUT:$dllname /PDB:$pdbname @$rspfile"
+
+      default_output_extension = ".dll"
+      default_output_dir = "{{root_out_dir}}"
+      description = "LINK(DLL) {{output}}"
+      outputs = [
+        dllname,
+        libname,
+      ]
+      link_output = libname
+      depend_output = libname
+      runtime_outputs = [ dllname ]
+      if (symbol_level != 0) {
+        outputs += [ pdbname ]
+        runtime_outputs += [ pdbname ]
+      }
+
+      # Since the above command only updates the .lib file when it changes, ask
+      # Ninja to check if the timestamp actually changed to know if downstream
+      # dependencies should be recompiled.
+      restat = true
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
+    }
+
+    tool("solink_module") {
+      dllname = "{{output_dir}}/{{target_output_name}}{{output_extension}}"  # e.g. foo.dll
+      pdbname = "${dllname}.pdb"
+      rspfile = "${dllname}.rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      command = "$python_path $tool_wrapper_path link-wrapper $env False $link /nologo /DLL /OUT:$dllname /PDB:$pdbname @$rspfile"
+
+      default_output_extension = ".dll"
+      default_output_dir = "{{root_out_dir}}"
+      description = "LINK_MODULE(DLL) {{output}}"
+      outputs = [
+        dllname,
+      ]
+      if (symbol_level != 0) {
+        outputs += [ pdbname ]
+      }
+      runtime_outputs = outputs
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
+    }
+
+    tool("link") {
+      exename = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+      pdbname = "$exename.pdb"
+      rspfile = "$exename.rsp"
+      pool = "//build/toolchain:link_pool($default_toolchain)"
+
+      command = "$python_path $tool_wrapper_path link-wrapper $env False $link /nologo /OUT:$exename /PDB:$pdbname @$rspfile"
+
+      if (is_official_build) {
+        # On bots, the binary's PDB grows and eventually exceeds 4G, causing
+        # the link to fail. As there's no benefit to keeping the PDB around
+        # incrementally anyway in this config (because we're doing
+        # non-incremental LTCG builds), delete it before linking.
+        command = "cmd /c $python_path $tool_wrapper_path delete-file $pdbname && $command"
+      }
+
+      if (linkrepro_root_dir != "") {
+        # Create the directory that will receive the link repro for this target
+        # if needed. Note that this will create one directory per link target
+        # even if this target doesn't generate a link repro. This is necessary
+        # because the linker doesn't generate the directory specified to the
+        # /LINKREPRO flag if it doesn't exist.
+        linkrepro_dir = "$linkrepro_root_dir\\{{target_output_name}}"
+        command = "cmd /c mkdir $linkrepro_dir && $command"
+      }
+
+      default_output_extension = ".exe"
+      default_output_dir = "{{root_out_dir}}"
+      description = "LINK {{output}}"
+      outputs = [
+        exename,
+      ]
+      if (symbol_level != 0) {
+        outputs += [ pdbname ]
+      }
+      runtime_outputs = outputs
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}}"
+    }
+
+    # These two are really entirely generic, but have to be repeated in
+    # each toolchain because GN doesn't allow a template to be used here.
+    # See //build/toolchain/toolchain.gni for details.
+    tool("stamp") {
+      command = stamp_command
+      description = stamp_description
+    }
+    tool("copy") {
+      command = copy_command
+      description = copy_description
+    }
+  }
+}
+
+if (host_os == "win") {
+  clang_cl = "clang-cl.exe"
+} else {
+  clang_cl = "clang-cl"
+}
+
+# 32-bit toolchains. Only define these when the target architecture is 32-bit
+# since we don't do any 32-bit cross compiles when targeting 64-bit (the
+# build does generate some 64-bit stuff from 32-bit target builds).
+if (target_cpu == "x86") {
+  x86_toolchain_data = exec_script("setup_toolchain.py",
+                                   [
+                                     visual_studio_path,
+                                     windows_sdk_path,
+                                     visual_studio_runtime_dirs,
+                                     "x86",
+                                   ],
+                                   "scope")
+
+  msvc_toolchain("x86") {
+    environment = "environment.x86"
+    cl = "${goma_prefix}\"${x86_toolchain_data.vc_bin_dir}/cl.exe\""
+    toolchain_args = {
+      current_cpu = "x86"
+      is_clang = false
+    }
+  }
+
+  msvc_toolchain("clang_x86") {
+    environment = "environment.x86"
+    prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+    cl = "${goma_prefix}$prefix/${clang_cl}"
+    sys_include_flags = "${x86_toolchain_data.include_flags_imsvc}"
+
+    toolchain_args = {
+      current_cpu = "x86"
+      is_clang = true
+    }
+  }
+}
+
+# 64-bit toolchains.
+x64_toolchain_data = exec_script("setup_toolchain.py",
+                                 [
+                                   visual_studio_path,
+                                   windows_sdk_path,
+                                   visual_studio_runtime_dirs,
+                                   "x64",
+                                 ],
+                                 "scope")
+
+template("win_x64_toolchains") {
+  msvc_toolchain(target_name) {
+    environment = "environment.x64"
+    cl = "${goma_prefix}\"${x64_toolchain_data.vc_bin_dir}/cl.exe\""
+
+    toolchain_args = {
+      if (defined(invoker.toolchain_args)) {
+        forward_variables_from(invoker.toolchain_args, "*")
+      }
+      is_clang = false
+      current_cpu = "x64"
+    }
+  }
+
+  msvc_toolchain("clang_" + target_name) {
+    environment = "environment.x64"
+    prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+    cl = "${goma_prefix}$prefix/${clang_cl}"
+    sys_include_flags = "${x64_toolchain_data.include_flags_imsvc}"
+
+    toolchain_args = {
+      if (defined(invoker.toolchain_args)) {
+        forward_variables_from(invoker.toolchain_args, "*")
+      }
+      is_clang = true
+      current_cpu = "x64"
+    }
+  }
+}
+
+win_x64_toolchains("x64") {
+  toolchain_args = {
+    # Use the defaults.
+  }
+}
+
+# The nacl_win64 toolchain is nearly identical to the plain x64 toolchain.
+# It's used solely for building nacl64.exe (//components/nacl/broker:nacl64).
+# The only reason it's a separate toolchain is so that it can force
+# is_component_build to false in the toolchain_args() block, because
+# building nacl64.exe in component style does not work.
+win_x64_toolchains("nacl_win64") {
+  toolchain_args = {
+    is_component_build = false
+  }
+}
+
+# WinRT toolchains. Only define these when targeting them.
+#
+# NOTE: This is currently broken because it references vc_bin_dir. brettw@
+# changed this around a bit, and I don't know what this should be set to
+# in terms of what setup_toolchain returns for a certain CPU architecture.
+if (target_os == "winrt_81" || target_os == "winrt_81_phone" ||
+    target_os == "winrt_10") {
+  msvc_toolchain("winrt_x86") {
+    environment = "environment.winrt_x86"
+    cl = "${goma_prefix}\"${vc_bin_dir}/cl.exe\""
+
+    toolchain_args = {
+      is_clang = false
+      current_cpu = "x86"
+    }
+  }
+
+  msvc_toolchain("winrt_x64") {
+    environment = "environment.winrt_x64"
+    cl = "${goma_prefix}\"${vc_bin_dir}/cl.exe\""
+
+    toolchain_args = {
+      is_clang = false
+      current_cpu = "x64"
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/win/midl.gni
@@ -0,0 +1,104 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win)
+
+import("//build/config/win/visual_studio_version.gni")
+
+# This template defines a rule to invoke the MS IDL compiler. The generated
+# source code will be compiled and linked into targets that depend on this.
+#
+# Parameters
+#
+#   sources
+#      List of .idl files to process.
+#
+#   out_dir (optional)
+#       Directory to write the generated files to. Defaults to target_gen_dir.
+#
+#   deps (optional)
+#   visibility (optional)
+
+template("midl") {
+  action_name = "${target_name}_idl_action"
+  source_set_name = target_name
+
+  assert(defined(invoker.sources), "Source must be defined for $target_name")
+
+  if (defined(invoker.out_dir)) {
+    out_dir = invoker.out_dir
+  } else {
+    out_dir = target_gen_dir
+  }
+
+  header_file = "{{source_name_part}}.h"
+  dlldata_file = "{{source_name_part}}.dlldata.c"
+  interface_identifier_file = "{{source_name_part}}_i.c"
+  proxy_file = "{{source_name_part}}_p.c"
+  type_library_file = "{{source_name_part}}.tlb"
+
+  action_foreach(action_name) {
+    visibility = [ ":$source_set_name" ]
+
+    # This functionality is handled by the win-tool because the GYP build has
+    # MIDL support built-in.
+    # TODO(brettw) move this to a separate MIDL wrapper script for better
+    # clarity.
+    script = "//build/toolchain/win/tool_wrapper.py"
+
+    sources = invoker.sources
+
+    # Note that .tlb is not included in the outputs as it is not always
+    # generated depending on the content of the input idl file.
+    outputs = [
+      "$out_dir/$header_file",
+      "$out_dir/$dlldata_file",
+      "$out_dir/$interface_identifier_file",
+      "$out_dir/$proxy_file",
+    ]
+
+    if (current_cpu == "x86") {
+      win_tool_arch = "environment.x86"
+      idl_target_platform = "win32"
+    } else if (current_cpu == "x64") {
+      win_tool_arch = "environment.x64"
+      idl_target_platform = "x64"
+    } else {
+      assert(false, "Need environment for this arch")
+    }
+
+    args = [
+      "midl-wrapper",
+      win_tool_arch,
+      rebase_path(out_dir, root_build_dir),
+      type_library_file,
+      header_file,
+      dlldata_file,
+      interface_identifier_file,
+      proxy_file,
+      "{{source}}",
+      "/char",
+      "signed",
+      "/env",
+      idl_target_platform,
+      "/Oicf",
+    ]
+
+    forward_variables_from(invoker, [ "deps" ])
+  }
+
+  source_set(target_name) {
+    forward_variables_from(invoker, [ "visibility" ])
+
+    # We only compile the IID files from the IDL tool rather than all outputs.
+    sources = process_file_template(invoker.sources,
+                                    [ "$out_dir/$interface_identifier_file" ])
+
+    public_deps = [
+      ":$action_name",
+    ]
+
+    configs += [ "//build/config/win:midl_warnings" ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/win/setup_toolchain.py
@@ -0,0 +1,217 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Copies the given "win tool" (which the toolchain uses to wrap compiler
+# invocations) and the environment blocks for the 32-bit and 64-bit builds on
+# Windows to the build directory.
+#
+# The arguments are the visual studio install location and the location of the
+# win tool. The script assumes that the root build directory is the current dir
+# and the files will be written to the current directory.
+
+import errno
+import json
+import os
+import re
+import subprocess
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+import gn_helpers
+
+SCRIPT_DIR = os.path.dirname(__file__)
+
+def _ExtractImportantEnvironment(output_of_set):
+  """Extracts environment variables required for the toolchain to run from
+  a textual dump output by the cmd.exe 'set' command."""
+  envvars_to_save = (
+      'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
+      'include',
+      'lib',
+      'libpath',
+      'path',
+      'pathext',
+      'systemroot',
+      'temp',
+      'tmp',
+      )
+  env = {}
+  # This occasionally happens and leads to misleading SYSTEMROOT error messages
+  # if not caught here.
+  if output_of_set.count('=') == 0:
+    raise Exception('Invalid output_of_set. Value is:\n%s' % output_of_set)
+  for line in output_of_set.splitlines():
+    for envvar in envvars_to_save:
+      if re.match(envvar + '=', line.lower()):
+        var, setting = line.split('=', 1)
+        if envvar == 'path':
+          # Our own rules and actions in Chromium rely on python being in the
+          # path. Add the path to this python here so that if it's not in the
+          # path when ninja is run later, python will still be found.
+          setting = os.path.dirname(sys.executable) + os.pathsep + setting
+        env[var.upper()] = setting
+        break
+  if sys.platform in ('win32', 'cygwin'):
+    for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
+      if required not in env:
+        raise Exception('Environment variable "%s" '
+                        'required to be set to valid path' % required)
+  return env
+
+
+def _DetectVisualStudioPath():
+  """Return path to the GYP_MSVS_VERSION of Visual Studio.
+  """
+
+  # Use the code in build/vs_toolchain.py to avoid duplicating code.
+  chromium_dir = os.path.abspath(os.path.join(SCRIPT_DIR, '..', '..', '..'))
+  sys.path.append(os.path.join(chromium_dir, 'build'))
+  import vs_toolchain
+  return vs_toolchain.DetectVisualStudioPath()
+
+
+def _LoadEnvFromBat(args):
+  """Given a bat command, runs it and returns env vars set by it."""
+  args = args[:]
+  args.extend(('&&', 'set'))
+  popen = subprocess.Popen(
+      args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  variables, _ = popen.communicate()
+  if popen.returncode != 0:
+    raise Exception('"%s" failed with error %d' % (args, popen.returncode))
+  return variables
+
+
+def _LoadToolchainEnv(cpu, sdk_dir):
+  """Returns a dictionary with environment variables that must be set while
+  running binaries from the toolchain (e.g. INCLUDE and PATH for cl.exe)."""
+  # Check if we are running in the SDK command line environment and use
+  # the setup script from the SDK if so. |cpu| should be either
+  # 'x86' or 'x64'.
+  assert cpu in ('x86', 'x64')
+  if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and sdk_dir:
+    # Load environment from json file.
+    env = os.path.normpath(os.path.join(sdk_dir, 'bin/SetEnv.%s.json' % cpu))
+    env = json.load(open(env))['env']
+    for k in env:
+      entries = [os.path.join(*([os.path.join(sdk_dir, 'bin')] + e))
+                 for e in env[k]]
+      # clang-cl wants INCLUDE to be ;-separated even on non-Windows,
+      # lld-link wants LIB to be ;-separated even on non-Windows.  Path gets :.
+      # The separator for INCLUDE here must match the one used in main() below.
+      sep = os.pathsep if k == 'PATH' else ';'
+      env[k] = sep.join(entries)
+    # PATH is a bit of a special case, it's in addition to the current PATH.
+    env['PATH'] = env['PATH'] + os.pathsep + os.environ['PATH']
+    # Augment with the current env to pick up TEMP and friends.
+    for k in os.environ:
+      if k not in env:
+        env[k] = os.environ[k]
+
+    varlines = []
+    for k in sorted(env.keys()):
+      varlines.append('%s=%s' % (str(k), str(env[k])))
+    variables = '\n'.join(varlines)
+
+    # Check that the json file contained the same environment as the .cmd file.
+    if sys.platform in ('win32', 'cygwin'):
+      script = os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.cmd'))
+      assert _ExtractImportantEnvironment(variables) == \
+             _ExtractImportantEnvironment(_LoadEnvFromBat([script, '/' + cpu]))
+  else:
+    if 'GYP_MSVS_OVERRIDE_PATH' not in os.environ:
+      os.environ['GYP_MSVS_OVERRIDE_PATH'] = _DetectVisualStudioPath()
+    # We only support x64-hosted tools.
+    script_path = os.path.normpath(os.path.join(
+                                       os.environ['GYP_MSVS_OVERRIDE_PATH'],
+                                       'VC/vcvarsall.bat'))
+    if not os.path.exists(script_path):
+      # vcvarsall.bat for VS 2017 fails if run after running vcvarsall.bat from
+      # VS 2013 or VS 2015. Fix this by clearing the vsinstalldir environment
+      # variable.
+      if 'VSINSTALLDIR' in os.environ:
+        del os.environ['VSINSTALLDIR']
+      other_path = os.path.normpath(os.path.join(
+                                        os.environ['GYP_MSVS_OVERRIDE_PATH'],
+                                        'VC/Auxiliary/Build/vcvarsall.bat'))
+      if not os.path.exists(other_path):
+        raise Exception('%s is missing - make sure VC++ tools are installed.' %
+                        script_path)
+      script_path = other_path
+    # Chromium requires the 10.0.14393.0 SDK. Previous versions don't have all
+    # of the required declarations, and 10.0.15063.0 is buggy.
+    args = [script_path, 'amd64_x86' if cpu == 'x86' else 'amd64',
+            '10.0.14393.0']
+    variables = _LoadEnvFromBat(args)
+  return _ExtractImportantEnvironment(variables)
+
+
+def _FormatAsEnvironmentBlock(envvar_dict):
+  """Format as an 'environment block' directly suitable for CreateProcess.
+  Briefly this is a list of key=value\0, terminated by an additional \0. See
+  CreateProcess documentation for more details."""
+  block = ''
+  nul = '\0'
+  for key, value in envvar_dict.iteritems():
+    block += key + '=' + value + nul
+  block += nul
+  return block
+
+
+def main():
+  if len(sys.argv) != 5:
+    print('Usage setup_toolchain.py '
+          '<visual studio path> <win sdk path> '
+          '<runtime dirs> <target_cpu> <include prefix>')
+    sys.exit(2)
+  win_sdk_path = sys.argv[2]
+  runtime_dirs = sys.argv[3]
+  target_cpu = sys.argv[4]
+
+  cpus = ('x86', 'x64')
+  assert target_cpu in cpus
+  vc_bin_dir = ''
+  include = ''
+
+  # TODO(scottmg|goma): Do we need an equivalent of
+  # ninja_use_custom_environment_files?
+
+  for cpu in cpus:
+    # Extract environment variables for subprocesses.
+    env = _LoadToolchainEnv(cpu, win_sdk_path)
+    env['PATH'] = runtime_dirs + os.pathsep + env['PATH']
+
+    if cpu == target_cpu:
+      for path in env['PATH'].split(os.pathsep):
+        if os.path.exists(os.path.join(path, 'cl.exe')):
+          vc_bin_dir = os.path.realpath(path)
+          break
+      # The separator for INCLUDE here must match the one used in
+      # _LoadToolchainEnv() above.
+      include = [p.replace('"', r'\"') for p in env['INCLUDE'].split(';') if p]
+      include_I = ' '.join(['"/I' + i + '"' for i in include])
+      include_imsvc = ' '.join(['"-imsvc' + i + '"' for i in include])
+
+    env_block = _FormatAsEnvironmentBlock(env)
+    with open('environment.' + cpu, 'wb') as f:
+      f.write(env_block)
+
+    # Create a store app version of the environment.
+    if 'LIB' in env:
+      env['LIB']     = env['LIB']    .replace(r'\VC\LIB', r'\VC\LIB\STORE')
+    if 'LIBPATH' in env:
+      env['LIBPATH'] = env['LIBPATH'].replace(r'\VC\LIB', r'\VC\LIB\STORE')
+    env_block = _FormatAsEnvironmentBlock(env)
+    with open('environment.winrt_' + cpu, 'wb') as f:
+      f.write(env_block)
+
+  assert vc_bin_dir
+  print 'vc_bin_dir = ' + gn_helpers.ToGNString(vc_bin_dir)
+  assert include_I
+  print 'include_flags_I = ' + gn_helpers.ToGNString(include_I)
+  assert include_imsvc
+  print 'include_flags_imsvc = ' + gn_helpers.ToGNString(include_imsvc)
+
+if __name__ == '__main__':
+  main()
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/win/tool_wrapper.py
@@ -0,0 +1,320 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for Windows builds.
+
+This file is copied to the build directory as part of toolchain setup and
+is used to set up calls to tools used by the build that need wrappers.
+"""
+
+import os
+import re
+import shutil
+import subprocess
+import stat
+import string
+import sys
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+
+# A regex matching an argument corresponding to the output filename passed to
+# link.exe.
+_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
+
+def main(args):
+  executor = WinTool()
+  exit_code = executor.Dispatch(args)
+  if exit_code is not None:
+    sys.exit(exit_code)
+
+
+class WinTool(object):
+  """This class performs all the Windows tooling steps. The methods can either
+  be executed directly, or dispatched from an argument list."""
+
+  def _UseSeparateMspdbsrv(self, env, args):
+    """Allows to use a unique instance of mspdbsrv.exe per linker instead of a
+    shared one."""
+    if len(args) < 1:
+      raise Exception("Not enough arguments")
+
+    if args[0] != 'link.exe':
+      return
+
+    # Use the output filename passed to the linker to generate an endpoint name
+    # for mspdbsrv.exe.
+    endpoint_name = None
+    for arg in args:
+      m = _LINK_EXE_OUT_ARG.match(arg)
+      if m:
+        endpoint_name = re.sub(r'\W+', '',
+            '%s_%d' % (m.group('out'), os.getpid()))
+        break
+
+    if endpoint_name is None:
+      return
+
+    # Adds the appropriate environment variable. This will be read by link.exe
+    # to know which instance of mspdbsrv.exe it should connect to (if it's
+    # not set then the default endpoint is used).
+    env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
+
+  def Dispatch(self, args):
+    """Dispatches a string command to a method."""
+    if len(args) < 1:
+      raise Exception("Not enough arguments")
+
+    method = "Exec%s" % self._CommandifyName(args[0])
+    return getattr(self, method)(*args[1:])
+
+  def _CommandifyName(self, name_string):
+    """Transforms a tool name like recursive-mirror to RecursiveMirror."""
+    return name_string.title().replace('-', '')
+
+  def _GetEnv(self, arch):
+    """Gets the saved environment from a file for a given architecture."""
+    # The environment is saved as an "environment block" (see CreateProcess
+    # and msvs_emulation for details). We convert to a dict here.
+    # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
+    pairs = open(arch).read()[:-2].split('\0')
+    kvs = [item.split('=', 1) for item in pairs]
+    return dict(kvs)
+
+  def ExecStamp(self, path):
+    """Simple stamp command."""
+    open(path, 'w').close()
+
+  def ExecDeleteFile(self, path):
+    """Simple file delete command."""
+    if os.path.exists(path):
+      os.unlink(path)
+
+  def ExecRecursiveMirror(self, source, dest):
+    """Emulation of rm -rf out && cp -af in out."""
+    if os.path.exists(dest):
+      if os.path.isdir(dest):
+        def _on_error(fn, path, dummy_excinfo):
+          # The operation failed, possibly because the file is set to
+          # read-only. If that's why, make it writable and try the op again.
+          if not os.access(path, os.W_OK):
+            os.chmod(path, stat.S_IWRITE)
+          fn(path)
+        shutil.rmtree(dest, onerror=_on_error)
+      else:
+        if not os.access(dest, os.W_OK):
+          # Attempt to make the file writable before deleting it.
+          os.chmod(dest, stat.S_IWRITE)
+        os.unlink(dest)
+
+    if os.path.isdir(source):
+      shutil.copytree(source, dest)
+    else:
+      shutil.copy2(source, dest)
+
+  def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
+    """Filter diagnostic output from link that looks like:
+    '   Creating library ui.dll.lib and object ui.dll.exp'
+    This happens when there are exports from the dll or exe.
+    """
+    env = self._GetEnv(arch)
+    if use_separate_mspdbsrv == 'True':
+      self._UseSeparateMspdbsrv(env, args)
+    if sys.platform == 'win32':
+      args = list(args)  # *args is a tuple by default, which is read-only.
+      args[0] = args[0].replace('/', '\\')
+    # https://docs.python.org/2/library/subprocess.html:
+    # "On Unix with shell=True [...] if args is a sequence, the first item
+    # specifies the command string, and any additional items will be treated as
+    # additional arguments to the shell itself.  That is to say, Popen does the
+    # equivalent of:
+    #   Popen(['/bin/sh', '-c', args[0], args[1], ...])"
+    # For that reason, since going through the shell doesn't seem necessary on
+    # non-Windows don't do that there.
+    link = subprocess.Popen(args, shell=sys.platform == 'win32', env=env,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    # Read output one line at a time as it shows up to avoid OOM failures when
+    # GBs of output is produced.
+    for line in link.stdout:
+      if (not line.startswith('   Creating library ') and
+          not line.startswith('Generating code') and
+          not line.startswith('Finished generating code')):
+        print line,
+    return link.wait()
+
+  def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
+                            mt, rc, intermediate_manifest, *manifests):
+    """A wrapper for handling creating a manifest resource and then executing
+    a link command."""
+    # The 'normal' way to do manifests is to have link generate a manifest
+    # based on gathering dependencies from the object files, then merge that
+    # manifest with other manifests supplied as sources, convert the merged
+    # manifest to a resource, and then *relink*, including the compiled
+    # version of the manifest resource. This breaks incremental linking, and
+    # is generally overly complicated. Instead, we merge all the manifests
+    # provided (along with one that includes what would normally be in the
+    # linker-generated one, see msvs_emulation.py), and include that into the
+    # first and only link. We still tell link to generate a manifest, but we
+    # only use that to assert that our simpler process did not miss anything.
+    variables = {
+      'python': sys.executable,
+      'arch': arch,
+      'out': out,
+      'ldcmd': ldcmd,
+      'resname': resname,
+      'mt': mt,
+      'rc': rc,
+      'intermediate_manifest': intermediate_manifest,
+      'manifests': ' '.join(manifests),
+    }
+    add_to_ld = ''
+    if manifests:
+      subprocess.check_call(
+          '%(python)s tool_wrapper.py manifest-wrapper %(arch)s %(mt)s -nologo '
+          '-manifest %(manifests)s -out:%(out)s.manifest' % variables)
+      if embed_manifest == 'True':
+        subprocess.check_call(
+            '%(python)s tool_wrapper.py manifest-to-rc %(arch)s '
+            '%(out)s.manifest %(out)s.manifest.rc %(resname)s' % variables)
+        subprocess.check_call(
+            '%(python)s tool_wrapper.py rc-wrapper %(arch)s %(rc)s '
+            '%(out)s.manifest.rc' % variables)
+        add_to_ld = ' %(out)s.manifest.res' % variables
+    subprocess.check_call(ldcmd + add_to_ld)
+
+    # Run mt.exe on the theoretically complete manifest we generated, merging
+    # it with the one the linker generated to confirm that the linker
+    # generated one does not add anything. This is strictly unnecessary for
+    # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
+    # used in a #pragma comment.
+    if manifests:
+      # Merge the intermediate one with ours to .assert.manifest, then check
+      # that .assert.manifest is identical to ours.
+      subprocess.check_call(
+          '%(python)s tool_wrapper.py manifest-wrapper %(arch)s %(mt)s -nologo '
+          '-manifest %(out)s.manifest %(intermediate_manifest)s '
+          '-out:%(out)s.assert.manifest' % variables)
+      assert_manifest = '%(out)s.assert.manifest' % variables
+      our_manifest = '%(out)s.manifest' % variables
+      # Load and normalize the manifests. mt.exe sometimes removes whitespace,
+      # and sometimes doesn't unfortunately.
+      with open(our_manifest, 'rb') as our_f:
+        with open(assert_manifest, 'rb') as assert_f:
+          our_data = our_f.read().translate(None, string.whitespace)
+          assert_data = assert_f.read().translate(None, string.whitespace)
+      if our_data != assert_data:
+        os.unlink(out)
+        def dump(filename):
+          sys.stderr.write('%s\n-----\n' % filename)
+          with open(filename, 'rb') as f:
+            sys.stderr.write(f.read() + '\n-----\n')
+        dump(intermediate_manifest)
+        dump(our_manifest)
+        dump(assert_manifest)
+        sys.stderr.write(
+            'Linker generated manifest "%s" added to final manifest "%s" '
+            '(result in "%s"). '
+            'Were /MANIFEST switches used in #pragma statements? ' % (
+              intermediate_manifest, our_manifest, assert_manifest))
+        return 1
+
+  def ExecManifestWrapper(self, arch, *args):
+    """Run manifest tool with environment set. Strip out undesirable warning
+    (some XML blocks are recognized by the OS loader, but not the manifest
+    tool)."""
+    env = self._GetEnv(arch)
+    popen = subprocess.Popen(args, shell=True, env=env,
+                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    out, _ = popen.communicate()
+    for line in out.splitlines():
+      if line and 'manifest authoring warning 81010002' not in line:
+        print line
+    return popen.returncode
+
+  def ExecManifestToRc(self, dummy_arch, *args):
+    """Creates a resource file pointing a SxS assembly manifest.
+    |args| is tuple containing path to manifest file, path to resource file
+    and resource name which can be "1" (for executables) or "2" (for DLLs)."""
+    manifest_path, resource_path, resource_name = args
+    with open(resource_path, 'wb') as output:
+      output.write('#include <windows.h>\n%s RT_MANIFEST "%s"' % (
+        resource_name,
+        os.path.abspath(manifest_path).replace('\\', '/')))
+
+  def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
+                      *flags):
+    """Filter noisy filenames output from MIDL compile step that isn't
+    quietable via command line flags.
+    """
+    args = ['midl', '/nologo'] + list(flags) + [
+        '/out', outdir,
+        '/tlb', tlb,
+        '/h', h,
+        '/dlldata', dlldata,
+        '/iid', iid,
+        '/proxy', proxy,
+        idl]
+    env = self._GetEnv(arch)
+    popen = subprocess.Popen(args, shell=True, env=env,
+                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    out, _ = popen.communicate()
+    # Filter junk out of stdout, and write filtered versions. Output we want
+    # to filter is pairs of lines that look like this:
+    # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
+    # objidl.idl
+    lines = out.splitlines()
+    prefixes = ('Processing ', '64 bit Processing ')
+    processing = set(os.path.basename(x)
+                     for x in lines if x.startswith(prefixes))
+    for line in lines:
+      if not line.startswith(prefixes) and line not in processing:
+        print line
+    return popen.returncode
+
+  def ExecAsmWrapper(self, arch, *args):
+    """Filter logo banner from invocations of asm.exe."""
+    env = self._GetEnv(arch)
+    popen = subprocess.Popen(args, shell=True, env=env,
+                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    out, _ = popen.communicate()
+    for line in out.splitlines():
+      # Split to avoid triggering license checks:
+      if (not line.startswith('Copy' + 'right (C' +
+                              ') Microsoft Corporation') and
+          not line.startswith('Microsoft (R) Macro Assembler') and
+          not line.startswith(' Assembling: ') and
+          line):
+        print line
+    return popen.returncode
+
+  def ExecRcWrapper(self, arch, *args):
+    """Filter logo banner from invocations of rc.exe. Older versions of RC
+    don't support the /nologo flag."""
+    env = self._GetEnv(arch)
+    popen = subprocess.Popen(args, shell=True, env=env,
+                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    out, _ = popen.communicate()
+    for line in out.splitlines():
+      if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
+          not line.startswith('Copy' + 'right (C' +
+                              ') Microsoft Corporation') and
+          line):
+        print line
+    return popen.returncode
+
+  def ExecActionWrapper(self, arch, rspfile, *dirname):
+    """Runs an action command line from a response file using the environment
+    for |arch|. If |dirname| is supplied, use that as the working directory."""
+    env = self._GetEnv(arch)
+    # TODO(scottmg): This is a temporary hack to get some specific variables
+    # through to actions that are set after GN-time. http://crbug.com/333738.
+    for k, v in os.environ.iteritems():
+      if k not in env:
+        env[k] = v
+    args = open(rspfile).read()
+    dirname = dirname[0] if dirname else None
+    return subprocess.call(args, shell=True, env=env, cwd=dirname)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/toolchain/wrapper_utils.py
@@ -0,0 +1,150 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions for gcc_toolchain.gni wrappers."""
+
+import gzip
+import os
+import re
+import subprocess
+import shlex
+import shutil
+import sys
+import threading
+
+_BAT_PREFIX = 'cmd /c call '
+_WHITELIST_RE = re.compile('whitelisted_resource_(?P<resource_id>[0-9]+)')
+
+
+def _GzipThenDelete(src_path, dest_path):
+  # Results for Android map file with GCC on a z620:
+  # Uncompressed: 207MB
+  # gzip -9: 16.4MB, takes 8.7 seconds.
+  # gzip -1: 21.8MB, takes 2.0 seconds.
+  # Piping directly from the linker via -print-map (or via -Map with a fifo)
+  # adds a whopping 30-45 seconds!
+  with open(src_path, 'rb') as f_in, gzip.GzipFile(dest_path, 'wb', 1) as f_out:
+    shutil.copyfileobj(f_in, f_out)
+  os.unlink(src_path)
+
+
+def CommandToRun(command):
+  """Generates commands compatible with Windows.
+
+  When running on a Windows host and using a toolchain whose tools are
+  actually wrapper scripts (i.e. .bat files on Windows) rather than binary
+  executables, the |command| to run has to be prefixed with this magic.
+  The GN toolchain definitions take care of that for when GN/Ninja is
+  running the tool directly.  When that command is passed in to this
+  script, it appears as a unitary string but needs to be split up so that
+  just 'cmd' is the actual command given to Python's subprocess module.
+
+  Args:
+    command: List containing the UNIX style |command|.
+
+  Returns:
+    A list containing the Windows version of the |command|.
+  """
+  if command[0].startswith(_BAT_PREFIX):
+    command = command[0].split(None, 3) + command[1:]
+  return command
+
+
+def RunLinkWithOptionalMapFile(command, env=None, map_file=None):
+  """Runs the given command, adding in -Wl,-Map when |map_file| is given.
+
+  Also takes care of gzipping when |map_file| ends with .gz.
+
+  Args:
+    command: List of arguments comprising the command.
+    env: Environment variables.
+    map_file: Path to output map_file.
+
+  Returns:
+    The exit code of running |command|.
+  """
+  tmp_map_path = None
+  if map_file and map_file.endswith('.gz'):
+    tmp_map_path = map_file + '.tmp'
+    command.append('-Wl,-Map,' + tmp_map_path)
+  elif map_file:
+    command.append('-Wl,-Map,' + map_file)
+
+  result = subprocess.call(command, env=env)
+
+  if tmp_map_path and result == 0:
+    threading.Thread(
+        target=lambda: _GzipThenDelete(tmp_map_path, map_file)).start()
+  elif tmp_map_path and os.path.exists(tmp_map_path):
+    os.unlink(tmp_map_path)
+
+  return result
+
+
+def ResolveRspLinks(inputs):
+  """Return a list of files contained in a response file.
+
+  Args:
+    inputs: A command containing rsp files.
+
+  Returns:
+    A set containing the rsp file content."""
+  rspfiles = [a[1:] for a in inputs if a.startswith('@')]
+  resolved = set()
+  for rspfile in rspfiles:
+    with open(rspfile, 'r') as f:
+      resolved.update(shlex.split(f.read()))
+
+  return resolved
+
+
+def CombineResourceWhitelists(whitelist_candidates, outfile):
+  """Combines all whitelists for a resource file into a single whitelist.
+
+  Args:
+    whitelist_candidates: List of paths to rsp files containing all targets.
+    outfile: Path to save the combined whitelist.
+  """
+  whitelists = ('%s.whitelist' % candidate for candidate in whitelist_candidates
+                if os.path.exists('%s.whitelist' % candidate))
+
+  resources = set()
+  for whitelist in whitelists:
+    with open(whitelist, 'r') as f:
+      resources.update(f.readlines())
+
+  with open(outfile, 'w') as f:
+    f.writelines(resources)
+
+
+def ExtractResourceIdsFromPragmaWarnings(text):
+  """Returns set of resource IDs that are inside unknown pragma warnings.
+
+  Args:
+    text: The text that will be scanned for unknown pragma warnings.
+
+  Returns:
+    A set containing integers representing resource IDs.
+  """
+  used_resources = set()
+  lines = text.splitlines()
+  for ln in lines:
+    match = _WHITELIST_RE.search(ln)
+    if match:
+      resource_id = int(match.group('resource_id'))
+      used_resources.add(resource_id)
+
+  return used_resources
+
+
+def CaptureCommandStderr(command, env=None):
+  """Returns the stderr of a command.
+
+  Args:
+    command: A list containing the command and arguments.
+    env: Environment variables for the new process.
+  """
+  child = subprocess.Popen(command, stderr=subprocess.PIPE, env=env)
+  _, stderr = child.communicate()
+  return child.returncode, stderr
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/tree_truth.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Script for printing recent commits in a buildbot run.
+
+# Return the sha1 of the given tag.  If not present, return "".
+# $1: path to repo
+# $2: tag name
+tt_sha1_for_tag() {
+  oneline=$(cd $1 && git log -1 $2 --format='%H' 2>/dev/null)
+  if [ $? -eq 0 ] ; then
+    echo $oneline
+  fi
+}
+
+# Return the sha1 of HEAD, or ""
+# $1: path to repo
+tt_sha1_for_head() {
+  ( cd $1 && git log HEAD -n1 --format='%H' | cat )
+}
+
+# For the given repo, set tag to HEAD.
+# $1: path to repo
+# $2: tag name
+tt_tag_head() {
+  ( cd $1 && git tag -f $2 )
+}
+
+# For the given repo, delete the tag.
+# $1: path to repo
+# $2: tag name
+tt_delete_tag() {
+  ( cd $1 && git tag -d $2 )
+}
+
+# For the given repo, set tag to "three commits ago" (for testing).
+# $1: path to repo
+# $2: tag name
+tt_tag_three_ago() {
+  local sh=$(cd $1 && git log --pretty=oneline -n 3 | tail -1 | awk '{print $1}')
+  ( cd $1 && git tag -f $2 $sh )
+}
+
+# List the commits between the given tag and HEAD.
+# If the tag does not exist, only list the last few.
+# If the tag is at HEAD, list nothing.
+# Output format has distinct build steps for repos with changes.
+# $1: path to repo
+# $2: tag name
+# $3: simple/short repo name to use for display
+tt_list_commits() {
+  local tag_sha1=$(tt_sha1_for_tag $1 $2)
+  local head_sha1=$(tt_sha1_for_head $1)
+  local display_name=$(echo $3 | sed 's#/#_#g')
+  if [ "${tag_sha1}" = "${head_sha1}" ] ; then
+    return
+  fi
+  if [ "${tag_sha1}" = "" ] ; then
+    echo "@@@BUILD_STEP Recent commits in repo $display_name@@@"
+    echo "NOTE: git tag was not found so we have no baseline."
+    echo "Here are some recent commits, but they may not be new for this build."
+    ( cd $1 && git log -n 10 --stat | cat)
+  else
+    echo "@@@BUILD_STEP New commits in repo $display_name@@@"
+    ( cd $1 && git log -n 500 $2..HEAD --stat | cat)
+  fi
+}
+
+# Clean out the tree truth tags in all repos.  For testing.
+tt_clean_all() {
+ for project in $@; do
+   tt_delete_tag $CHROME_SRC/../$project tree_truth
+ done
+}
+
+# Print tree truth for all clank repos.
+tt_print_all() {
+ for project in $@; do
+   local full_path=$CHROME_SRC/../$project
+   tt_list_commits $full_path tree_truth $project
+   tt_tag_head $full_path tree_truth
+ done
+}
+
+# Print a summary of the last 10 commits for each repo.
+tt_brief_summary() {
+  echo "@@@BUILD_STEP Brief summary of recent CLs in every branch@@@"
+  for project in $@; do
+    echo $project:
+    local full_path=$CHROME_SRC/../$project
+    (cd $full_path && git log -n 10 --format="   %H %s   %an, %ad" | cat)
+    echo "================================================================="
+  done
+}
+
+CHROME_SRC=$1
+shift
+PROJECT_LIST=$@
+tt_brief_summary $PROJECT_LIST
+tt_print_all $PROJECT_LIST
--- a/media/webrtc/trunk/build/update-linux-sandbox.sh
+++ b/media/webrtc/trunk/build/update-linux-sandbox.sh
@@ -1,17 +1,17 @@
 #!/bin/sh
 
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 BUILDTYPE="${BUILDTYPE:-Debug}"
 CHROME_SRC_DIR="${CHROME_SRC_DIR:-$(dirname -- $(readlink -fn -- "$0"))/..}"
-CHROME_OUT_DIR="${CHROME_SRC_DIR}/out/${BUILDTYPE}"
+CHROME_OUT_DIR="${CHROME_SRC_DIR}/${CHROMIUM_OUT_DIR:-out}/${BUILDTYPE}"
 CHROME_SANDBOX_BUILD_PATH="${CHROME_OUT_DIR}/chrome_sandbox"
 CHROME_SANDBOX_INST_PATH="/usr/local/sbin/chrome-devel-sandbox"
 CHROME_SANDBOX_INST_DIR=$(dirname -- "$CHROME_SANDBOX_INST_PATH")
 
 TARGET_DIR_TYPE=$(stat -f -c %t -- "${CHROME_SANDBOX_INST_DIR}" 2>/dev/null)
 if [ $? -ne 0 ]; then
   echo "Could not get status of ${CHROME_SANDBOX_INST_DIR}"
   exit 1
@@ -34,18 +34,19 @@ installsandbox() {
 
 if [ ! -d "${CHROME_OUT_DIR}" ]; then
   echo -n "${CHROME_OUT_DIR} does not exist. Use \"BUILDTYPE=Release ${0}\" "
   echo "If you are building in Release mode"
   exit 1
 fi
 
 if [ ! -f "${CHROME_SANDBOX_BUILD_PATH}" ]; then
-  echo -n "Could not find ${CHROME_SANDBOX_BUILD_PATH}, "
-  echo "please make sure you build the chrome_sandbox target"
+  echo "Could not find ${CHROME_SANDBOX_BUILD_PATH}"
+  echo -n "BUILDTYPE is $BUILDTYPE, use \"BUILDTYPE=<value> ${0}\" to override "
+  echo "after you build the chrome_sandbox target"
   exit 1
 fi
 
 if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
   echo -n "Could not find ${CHROME_SANDBOX_INST_PATH}, "
   echo "installing it now."
   installsandbox
 fi
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/BUILD.gn
@@ -0,0 +1,51 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+action("webkit_version") {
+  script = "version.py"
+
+  lastchange_file = "LASTCHANGE"
+
+  template_file = "webkit_version.h.in"
+  inputs = [
+    lastchange_file,
+    template_file,
+  ]
+
+  output_file = "$target_gen_dir/webkit_version.h"
+  outputs = [
+    output_file,
+  ]
+
+  args = [
+    # LASTCHANGE contains "<build hash>-<ref>".  The user agent only wants the
+    # "<build hash>" bit, so chop off everything after it.
+    "-e",
+    "LASTCHANGE=LASTCHANGE[:LASTCHANGE.find('-')]",
+    "-f",
+    rebase_path(lastchange_file, root_build_dir),
+    rebase_path(template_file, root_build_dir),
+    rebase_path(output_file, root_build_dir),
+  ]
+}
+
+action("chrome_version_json") {
+  script = "version.py"
+  _chrome_version_path = "//chrome/VERSION"
+  inputs = [
+    _chrome_version_path,
+  ]
+  _output_file = "$root_gen_dir/CHROME_VERSION.json"
+  outputs = [
+    _output_file,
+  ]
+  args = [
+    "--file",
+    rebase_path(_chrome_version_path, root_build_dir),
+    "--template",
+    "{\"full-quoted\": \"\\\"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\\\"\"}",
+    "--output",
+    rebase_path(_output_file, root_build_dir),
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/LASTCHANGE
@@ -0,0 +1,1 @@
+LASTCHANGE=ff6a08293bc69c91b2243e4acec755e481a05d07-
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/branding.gni
@@ -0,0 +1,44 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This exposes the Chrome branding as GN variables for use in build files.
+#
+# PREFER NOT TO USE THESE. The GYP build uses this kind of thing extensively.
+# However, it is far better to write an action to generate a file at
+# build-time with the information you need. This allows better dependency
+# checking and GN will run faster.
+#
+# These values should only be used if you REALLY need to depend on them at
+# build-time, for example, in the computation of output file names.
+
+import("//build/config/chrome_build.gni")
+
+_branding_dictionary_template =
+    "full_name = \"@PRODUCT_FULLNAME@\" " +
+    "short_name = \"@PRODUCT_SHORTNAME@\" " +
+    "bundle_id = \"@MAC_BUNDLE_ID@\" " +
+    "creator_code = \"@MAC_CREATOR_CODE@\" " +
+    "installer_full_name = \"@PRODUCT_INSTALLER_FULLNAME@\" " +
+    "installer_short_name = \"@PRODUCT_INSTALLER_SHORTNAME@\" "
+
+_branding_file = "//chrome/app/theme/$branding_path_component/BRANDING"
+_result = exec_script("version.py",
+                      [
+                        "-f",
+                        rebase_path(_branding_file, root_build_dir),
+                        "-t",
+                        _branding_dictionary_template,
+                      ],
+                      "scope",
+                      [ _branding_file ])
+
+chrome_product_full_name = _result.full_name
+chrome_product_short_name = _result.short_name
+chrome_product_installer_full_name = _result.installer_full_name
+chrome_product_installer_short_name = _result.installer_short_name
+
+if (is_mac) {
+  chrome_mac_bundle_id = _result.bundle_id
+  chrome_mac_creator_code = _result.creator_code
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/java_action.gni
@@ -0,0 +1,101 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+jarrunner = "//build/util/java_action.py"
+
+# Declare a target that runs a java command a single time.
+#
+# This target type allows you to run a java command a single time to produce
+# one or more output files. If you want to run a java command for each of a
+# set of input files, see "java_action_foreach".
+#
+# See "gn help action" for more information on how to use this target. This
+# template is based on the "action" and supports the same variables.
+template("java_action") {
+  assert(defined(invoker.script),
+         "Need script in $target_name listing the .jar file to run.")
+  assert(defined(invoker.outputs),
+         "Need outputs in $target_name listing the generated outputs.")
+
+  jarscript = invoker.script
+  action(target_name) {
+    script = jarrunner
+
+    inputs = [
+      jarscript,
+    ]
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+
+    args = [
+      "-jar",
+      rebase_path(jarscript, root_build_dir),
+    ]
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+
+    forward_variables_from(invoker,
+                           [
+                             "console",
+                             "data",
+                             "data_deps",
+                             "depfile",
+                             "deps",
+                             "outputs",
+                             "sources",
+                             "visibility",
+                           ])
+  }
+}
+
+# Declare a target that runs a java command over a set of files.
+#
+# This target type allows you to run a java command once-per-file over a set of
+# sources. If you want to run a java command once that takes many files as
+# input, see "java_action".
+#
+# See "gn help action_foreach" for more information on how to use this target.
+# This template is based on the "action_foreach" supports the same variables.
+template("java_action_foreach") {
+  assert(defined(invoker.script),
+         "Need script in $target_name listing the .jar file to run.")
+  assert(defined(invoker.outputs),
+         "Need outputs in $target_name listing the generated outputs.")
+  assert(defined(invoker.sources),
+         "Need sources in $target_name listing the target inputs.")
+
+  jarscript = invoker.script
+  action_foreach(target_name) {
+    script = jarrunner
+
+    inputs = [
+      jarscript,
+    ]
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+
+    args = [
+      "-jar",
+      rebase_path(jarscript, root_build_dir),
+    ]
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+
+    forward_variables_from(invoker,
+                           [
+                             "console",
+                             "data",
+                             "data_deps",
+                             "depfile",
+                             "deps",
+                             "outputs",
+                             "sources",
+                             "visibility",
+                           ])
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/java_action.py
@@ -0,0 +1,82 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper script to run java command as action with gn."""
+
+import os
+import subprocess
+import sys
+
+EXIT_SUCCESS = 0
+EXIT_FAILURE = 1
+
+
+def IsExecutable(path):
+  """Returns whether file at |path| exists and is executable.
+
+  Args:
+    path: absolute or relative path to test.
+
+  Returns:
+    True if the file at |path| exists, False otherwise.
+  """
+  return os.path.isfile(path) and os.access(path, os.X_OK)
+
+
+def FindCommand(command):
+  """Looks up for |command| in PATH.
+
+  Args:
+    command: name of the command to lookup, if command is a relative or
+      absolute path (i.e. contains some path separator) then only that
+      path will be tested.
+
+  Returns:
+    Full path to command or None if the command was not found.
+
+    On Windows, this respects the PATHEXT environment variable when the
+    command name does not have an extension.
+  """
+  fpath, _ = os.path.split(command)
+  if fpath:
+    if IsExecutable(command):
+      return command
+
+  if sys.platform == 'win32':
+    # On Windows, if the command does not have an extension, cmd.exe will
+    # try all extensions from PATHEXT when resolving the full path.
+    command, ext = os.path.splitext(command)
+    if not ext:
+      exts = os.environ['PATHEXT'].split(os.path.pathsep)
+    else:
+      exts = [ext]
+  else:
+    exts = ['']
+
+  for path in os.environ['PATH'].split(os.path.pathsep):
+    for ext in exts:
+      path = os.path.join(path, command) + ext
+      if IsExecutable(path):
+        return path
+
+  return None
+
+
+def main():
+  java_path = FindCommand('java')
+  if not java_path:
+    sys.stderr.write('java: command not found\n')
+    sys.exit(EXIT_FAILURE)
+
+  args = sys.argv[1:]
+  if len(args) < 2 or args[0] != '-jar':
+    sys.stderr.write('usage: %s -jar JARPATH [java_args]...\n' % sys.argv[0])
+    sys.exit(EXIT_FAILURE)
+
+  return subprocess.check_call([java_path] + args)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
--- a/media/webrtc/trunk/build/util/lastchange.py
+++ b/media/webrtc/trunk/build/util/lastchange.py
@@ -8,66 +8,21 @@ lastchange.py -- Chromium revision fetch
 """
 
 import re
 import optparse
 import os
 import subprocess
 import sys
 
-_GIT_SVN_ID_REGEX = re.compile(r'.*git-svn-id:\s*([^@]*)@([0-9]+)', re.DOTALL)
-
 class VersionInfo(object):
-  def __init__(self, url, revision):
-    self.url = url
+  def __init__(self, revision):
     self.revision = revision
 
 
-def FetchSVNRevision(directory, svn_url_regex):
-  """
-  Fetch the Subversion branch and revision for a given directory.
-
-  Errors are swallowed.
-
-  Returns:
-    A VersionInfo object or None on error.
-  """
-  try:
-    proc = subprocess.Popen(['svn', 'info'],
-                            stdout=subprocess.PIPE,
-                            stderr=subprocess.PIPE,
-                            cwd=directory,
-                            shell=(sys.platform=='win32'))
-  except OSError:
-    # command is apparently either not installed or not executable.
-    return None
-  if not proc:
-    return None
-
-  attrs = {}
-  for line in proc.stdout:
-    line = line.strip()
-    if not line:
-      continue
-    key, val = line.split(': ', 1)
-    attrs[key] = val
-
-  try:
-    match = svn_url_regex.search(attrs['URL'])
-    if match:
-      url = match.group(2)
-    else:
-      url = ''
-    revision = attrs['Revision']
-  except KeyError:
-    return None
-
-  return VersionInfo(url, revision)
-
-
 def RunGitCommand(directory, command):
   """
   Launches git subcommand.
 
   Errors are swallowed.
 
   Returns:
     A process object or None.
@@ -94,86 +49,86 @@ def FetchGitRevision(directory):
   """
   Fetch the Git hash for a given directory.
 
   Errors are swallowed.
 
   Returns:
     A VersionInfo object or None on error.
   """
-  proc = RunGitCommand(directory, ['rev-parse', 'HEAD'])
-  if proc:
-    output = proc.communicate()[0].strip()
-    if proc.returncode == 0 and output:
-      return VersionInfo('git', output[:7])
-  return None
-
-
-def FetchGitSVNURLAndRevision(directory, svn_url_regex):
-  """
-  Fetch the Subversion URL and revision through Git.
-
-  Errors are swallowed.
-
-  Returns:
-    A tuple containing the Subversion URL and revision.
-  """
-  proc = RunGitCommand(directory, ['log', '-1',
-                                   '--grep=git-svn-id', '--format=%b'])
+  hsh = ''
+  git_args = ['log', '-1', '--format=%H']
+  proc = RunGitCommand(directory, git_args)
   if proc:
     output = proc.communicate()[0].strip()
     if proc.returncode == 0 and output:
-      # Extract the latest SVN revision and the SVN URL.
-      # The target line is the last "git-svn-id: ..." line like this:
-      # git-svn-id: svn://svn.chromium.org/chrome/trunk/src@85528 0039d316....
-      match = _GIT_SVN_ID_REGEX.search(output)
-      if match:
-        revision = match.group(2)
-        url_match = svn_url_regex.search(match.group(1))
-        if url_match:
-          url = url_match.group(2)
-        else:
-          url = ''
-        return url, revision
-  return None, None
+      hsh = output
+  if not hsh:
+    return None
+  pos = ''
+  proc = RunGitCommand(directory, ['cat-file', 'commit', hsh])
+  if proc:
+    output = proc.communicate()[0]
+    if proc.returncode == 0 and output:
+      for line in reversed(output.splitlines()):
+        if line.startswith('Cr-Commit-Position:'):
+          pos = line.rsplit()[-1].strip()
+          break
+  return VersionInfo('%s-%s' % (hsh, pos))
 
 
-def FetchGitSVNRevision(directory, svn_url_regex):
-  """
-  Fetch the Git-SVN identifier for the local tree.
-
-  Errors are swallowed.
-  """
-  url, revision = FetchGitSVNURLAndRevision(directory, svn_url_regex)
-  if url and revision:
-    return VersionInfo(url, revision)
-  return None
-
-
-def FetchVersionInfo(default_lastchange, directory=None,
-                     directory_regex_prior_to_src_url='chrome|svn'):
+def FetchVersionInfo(directory=None):
   """
   Returns the last change (in the form of a branch, revision tuple),
   from some appropriate revision control system.
   """
-  svn_url_regex = re.compile(
-      r'.*/(' + directory_regex_prior_to_src_url + r')(/.*)')
-
-  version_info = (FetchSVNRevision(directory, svn_url_regex) or
-                  FetchGitSVNRevision(directory, svn_url_regex) or
-                  FetchGitRevision(directory))
+  version_info = FetchGitRevision(directory)
   if not version_info:
-    if default_lastchange and os.path.exists(default_lastchange):
-      revision = open(default_lastchange, 'r').read().strip()
-      version_info = VersionInfo(None, revision)
-    else:
-      version_info = VersionInfo(None, None)
+    version_info = VersionInfo(None)
   return version_info
 
 
+def GetHeaderGuard(path):
+  """
+  Returns the header #define guard for the given file path.
+  This treats everything after the last instance of "src/" as being a
+  relevant part of the guard. If there is no "src/", then the entire path
+  is used.
+  """
+  src_index = path.rfind('src/')
+  if src_index != -1:
+    guard = path[src_index + 4:]
+  else:
+    guard = path
+  guard = guard.upper()
+  return guard.replace('/', '_').replace('.', '_').replace('\\', '_') + '_'
+
+
+def GetHeaderContents(path, define, version):
+  """
+  Returns what the contents of the header file should be that indicate the given
+  revision.
+  """
+  header_guard = GetHeaderGuard(path)
+
+  header_contents = """/* Generated by lastchange.py, do not edit.*/
+
+#ifndef %(header_guard)s
+#define %(header_guard)s
+
+#define %(define)s "%(version)s"
+
+#endif  // %(header_guard)s
+"""
+  header_contents = header_contents % { 'header_guard': header_guard,
+                                        'define': define,
+                                        'version': version }
+  return header_contents
+
+
 def WriteIfChanged(file_name, contents):
   """
   Writes the specified contents to the specified file_name
   iff the contents are different than the current contents.
   """
   try:
     old_contents = open(file_name, 'r').read()
   except EnvironmentError:
@@ -185,46 +140,65 @@ def WriteIfChanged(file_name, contents):
   open(file_name, 'w').write(contents)
 
 
 def main(argv=None):
   if argv is None:
     argv = sys.argv
 
   parser = optparse.OptionParser(usage="lastchange.py [options]")
-  parser.add_option("-d", "--default-lastchange", metavar="FILE",
-                    help="default last change input FILE")
+  parser.add_option("-m", "--version-macro",
+                    help="Name of C #define when using --header. Defaults to " +
+                    "LAST_CHANGE.",
+                    default="LAST_CHANGE")
   parser.add_option("-o", "--output", metavar="FILE",
-                    help="write last change to FILE")
+                    help="Write last change to FILE. " +
+                    "Can be combined with --header to write both files.")
+  parser.add_option("", "--header", metavar="FILE",
+                    help="Write last change to FILE as a C/C++ header. " +
+                    "Can be combined with --output to write both files.")
   parser.add_option("--revision-only", action='store_true',
-                    help="just print the SVN revision number")
+                    help="Just print the GIT hash. Overrides any " +
+                    "file-output-related options.")
+  parser.add_option("-s", "--source-dir", metavar="DIR",
+                    help="Use repository in the given directory.")
   opts, args = parser.parse_args(argv[1:])
 
   out_file = opts.output
+  header = opts.header
 
   while len(args) and out_file is None:
     if out_file is None:
       out_file = args.pop(0)
   if args:
     sys.stderr.write('Unexpected arguments: %r\n\n' % args)
     parser.print_help()
     sys.exit(2)
 
-  version_info = FetchVersionInfo(opts.default_lastchange,
-      os.path.dirname(sys.argv[0]))
+  if opts.source_dir:
+    src_dir = opts.source_dir
+  else:
+    src_dir = os.path.dirname(os.path.abspath(__file__))
+
+  version_info = FetchVersionInfo(directory=src_dir)
 
   if version_info.revision == None:
     version_info.revision = '0'
 
   if opts.revision_only:
     print version_info.revision
   else:
     contents = "LASTCHANGE=%s\n" % version_info.revision
-    if out_file:
-      WriteIfChanged(out_file, contents)
+    if not out_file and not opts.header:
+      sys.stdout.write(contents)
     else:
-      sys.stdout.write(contents)
+      if out_file:
+        WriteIfChanged(out_file, contents)
+      if header:
+        WriteIfChanged(header,
+                       GetHeaderContents(header, opts.version_macro,
+                                         version_info.revision))
 
   return 0
 
 
 if __name__ == '__main__':
   sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/lib/common/PRESUBMIT.py
@@ -0,0 +1,16 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+def _RunTests(input_api, output_api):
+  return (input_api.canned_checks.RunUnitTestsInDirectory(
+          input_api, output_api, '.', whitelist=[r'.+_test.py$']))
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return _RunTests(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return _RunTests(input_api, output_api)
new file mode 100644
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/lib/common/perf_result_data_type.py
@@ -0,0 +1,20 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+DEFAULT = 'default'
+UNIMPORTANT = 'unimportant'
+HISTOGRAM = 'histogram'
+UNIMPORTANT_HISTOGRAM = 'unimportant-histogram'
+INFORMATIONAL = 'informational'
+
+ALL_TYPES = [DEFAULT, UNIMPORTANT, HISTOGRAM, UNIMPORTANT_HISTOGRAM,
+             INFORMATIONAL]
+
+
+def IsValidType(datatype):
+  return datatype in ALL_TYPES
+
+
+def IsHistogram(datatype):
+  return (datatype == HISTOGRAM or datatype == UNIMPORTANT_HISTOGRAM)
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/lib/common/perf_tests_results_helper.py
@@ -0,0 +1,166 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+import sys
+
+import json
+import logging
+import math
+
+import perf_result_data_type
+
+
+# Mapping from result type to test output
+RESULT_TYPES = {perf_result_data_type.UNIMPORTANT: 'RESULT ',
+                perf_result_data_type.DEFAULT: '*RESULT ',
+                perf_result_data_type.INFORMATIONAL: '',
+                perf_result_data_type.UNIMPORTANT_HISTOGRAM: 'HISTOGRAM ',
+                perf_result_data_type.HISTOGRAM: '*HISTOGRAM '}
+
+
+def _EscapePerfResult(s):
+  """Escapes |s| for use in a perf result."""
+  return re.sub('[\:|=/#&,]', '_', s)
+
+
+def FlattenList(values):
+  """Returns a simple list without sub-lists."""
+  ret = []
+  for entry in values:
+    if isinstance(entry, list):
+      ret.extend(FlattenList(entry))
+    else:
+      ret.append(entry)
+  return ret
+
+
+def GeomMeanAndStdDevFromHistogram(histogram_json):
+  histogram = json.loads(histogram_json)
+  # Handle empty histograms gracefully.
+  if not 'buckets' in histogram:
+    return 0.0, 0.0
+  count = 0
+  sum_of_logs = 0
+  for bucket in histogram['buckets']:
+    if 'high' in bucket:
+      bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0
+    else:
+      bucket['mean'] = bucket['low']
+    if bucket['mean'] > 0:
+      sum_of_logs += math.log(bucket['mean']) * bucket['count']
+      count += bucket['count']
+
+  if count == 0:
+    return 0.0, 0.0
+
+  sum_of_squares = 0
+  geom_mean = math.exp(sum_of_logs / count)
+  for bucket in histogram['buckets']:
+    if bucket['mean'] > 0:
+      sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count']
+  return geom_mean, math.sqrt(sum_of_squares / count)
+
+
+def _ValueToString(v):
+  # Special case for floats so we don't print using scientific notation.
+  if isinstance(v, float):
+    return '%f' % v
+  else:
+    return str(v)
+
+
+def _MeanAndStdDevFromList(values):
+  avg = None
+  sd = None
+  if len(values) > 1:
+    try:
+      value = '[%s]' % ','.join([_ValueToString(v) for v in values])
+      avg = sum([float(v) for v in values]) / len(values)
+      sqdiffs = [(float(v) - avg) ** 2 for v in values]
+      variance = sum(sqdiffs) / (len(values) - 1)
+      sd = math.sqrt(variance)
+    except ValueError:
+      value = ', '.join(values)
+  else:
+    value = values[0]
+  return value, avg, sd
+
+
+def PrintPages(page_list):
+  """Prints list of pages to stdout in the format required by perf tests."""
+  print 'Pages: [%s]' % ','.join([_EscapePerfResult(p) for p in page_list])
+
+
+def PrintPerfResult(measurement, trace, values, units,
+                    result_type=perf_result_data_type.DEFAULT,
+                    print_to_stdout=True):
+  """Prints numerical data to stdout in the format required by perf tests.
+
+  The string args may be empty but they must not contain any colons (:) or
+  equals signs (=).
+  This is parsed by the buildbot using:
+  http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/process_log_utils.py
+
+  Args:
+    measurement: A description of the quantity being measured, e.g. "vm_peak".
+        On the dashboard, this maps to a particular graph. Mandatory.
+    trace: A description of the particular data point, e.g. "reference".
+        On the dashboard, this maps to a particular "line" in the graph.
+        Mandatory.
+    values: A list of numeric measured values. An N-dimensional list will be
+        flattened and treated as a simple list.
+    units: A description of the units of measure, e.g. "bytes".
+    result_type: Accepts values of perf_result_data_type.ALL_TYPES.
+    print_to_stdout: If True, prints the output in stdout instead of returning
+        the output to caller.
+
+    Returns:
+      String of the formated perf result.
+  """
+  assert perf_result_data_type.IsValidType(result_type), \
+         'result type: %s is invalid' % result_type
+
+  trace_name = _EscapePerfResult(trace)
+
+  if (result_type == perf_result_data_type.UNIMPORTANT or
+      result_type == perf_result_data_type.DEFAULT or
+      result_type == perf_result_data_type.INFORMATIONAL):
+    assert isinstance(values, list)
+    assert '/' not in measurement
+    flattened_values = FlattenList(values)
+    assert len(flattened_values)
+    value, avg, sd = _MeanAndStdDevFromList(flattened_values)
+    output = '%s%s: %s%s%s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        # Do not show equal sign if the trace is empty. Usually it happens when
+        # measurement is clear enough to describe the result.
+        '= ' if trace_name else '',
+        value,
+        units)
+  else:
+    assert perf_result_data_type.IsHistogram(result_type)
+    assert isinstance(values, list)
+    # The histograms can only be printed individually, there's no computation
+    # across different histograms.
+    assert len(values) == 1
+    value = values[0]
+    output = '%s%s: %s= %s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        value,
+        units)
+    avg, sd = GeomMeanAndStdDevFromHistogram(value)
+
+  if avg:
+    output += '\nAvg %s: %f%s' % (measurement, avg, units)
+  if sd:
+    output += '\nSd  %s: %f%s' % (measurement, sd, units)
+  if print_to_stdout:
+    print output
+    sys.stdout.flush()
+  return output
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/lib/common/unittest_util.py
@@ -0,0 +1,149 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for dealing with the python unittest module."""
+
+import fnmatch
+import sys
+import unittest
+
+
+class _TextTestResult(unittest._TextTestResult):
+  """A test result class that can print formatted text results to a stream.
+
+  Results printed in conformance with gtest output format, like:
+  [ RUN        ] autofill.AutofillTest.testAutofillInvalid: "test desc."
+  [         OK ] autofill.AutofillTest.testAutofillInvalid
+  [ RUN        ] autofill.AutofillTest.testFillProfile: "test desc."
+  [         OK ] autofill.AutofillTest.testFillProfile
+  [ RUN        ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test."
+  [         OK ] autofill.AutofillTest.testFillProfileCrazyCharacters
+  """
+  def __init__(self, stream, descriptions, verbosity):
+    unittest._TextTestResult.__init__(self, stream, descriptions, verbosity)
+    self._fails = set()
+
+  def _GetTestURI(self, test):
+    return '%s.%s.%s' % (test.__class__.__module__,
+                         test.__class__.__name__,
+                         test._testMethodName)
+
+  def getDescription(self, test):
+    return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription())
+
+  def startTest(self, test):
+    unittest.TestResult.startTest(self, test)
+    self.stream.writeln('[ RUN        ] %s' % self.getDescription(test))
+
+  def addSuccess(self, test):
+    unittest.TestResult.addSuccess(self, test)
+    self.stream.writeln('[         OK ] %s' % self._GetTestURI(test))
+
+  def addError(self, test, err):
+    unittest.TestResult.addError(self, test, err)
+    self.stream.writeln('[      ERROR ] %s' % self._GetTestURI(test))
+    self._fails.add(self._GetTestURI(test))
+
+  def addFailure(self, test, err):
+    unittest.TestResult.addFailure(self, test, err)
+    self.stream.writeln('[     FAILED ] %s' % self._GetTestURI(test))
+    self._fails.add(self._GetTestURI(test))
+
+  def getRetestFilter(self):
+    return ':'.join(self._fails)
+
+
+class TextTestRunner(unittest.TextTestRunner):
+  """Test Runner for displaying test results in textual format.
+
+  Results are displayed in conformance with google test output.
+  """
+
+  def __init__(self, verbosity=1):
+    unittest.TextTestRunner.__init__(self, stream=sys.stderr,
+                                     verbosity=verbosity)
+
+  def _makeResult(self):
+    return _TextTestResult(self.stream, self.descriptions, self.verbosity)
+
+
+def GetTestsFromSuite(suite):
+  """Returns all the tests from a given test suite."""
+  tests = []
+  for x in suite:
+    if isinstance(x, unittest.TestSuite):
+      tests += GetTestsFromSuite(x)
+    else:
+      tests += [x]
+  return tests
+
+
+def GetTestNamesFromSuite(suite):
+  """Returns a list of every test name in the given suite."""
+  return map(lambda x: GetTestName(x), GetTestsFromSuite(suite))
+
+
+def GetTestName(test):
+  """Gets the test name of the given unittest test."""
+  return '.'.join([test.__class__.__module__,
+                   test.__class__.__name__,
+                   test._testMethodName])
+
+
+def FilterTestSuite(suite, gtest_filter):
+  """Returns a new filtered tests suite based on the given gtest filter.
+
+  See https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md
+  for gtest_filter specification.
+  """
+  return unittest.TestSuite(FilterTests(GetTestsFromSuite(suite), gtest_filter))
+
+
+def FilterTests(all_tests, gtest_filter):
+  """Filter a list of tests based on the given gtest filter.
+
+  Args:
+    all_tests: List of tests (unittest.TestSuite)
+    gtest_filter: Filter to apply.
+
+  Returns:
+    Filtered subset of the given list of tests.
+  """
+  test_names = [GetTestName(test) for test in all_tests]
+  filtered_names = FilterTestNames(test_names, gtest_filter)
+  return [test for test in all_tests if GetTestName(test) in filtered_names]
+
+
+def FilterTestNames(all_tests, gtest_filter):
+  """Filter a list of test names based on the given gtest filter.
+
+  See https://github.com/google/googletest/blob/master/googletest/docs/AdvancedGuide.md
+  for gtest_filter specification.
+
+  Args:
+    all_tests: List of test names.
+    gtest_filter: Filter to apply.
+
+  Returns:
+    Filtered subset of the given list of test names.
+  """
+  pattern_groups = gtest_filter.split('-')
+  positive_patterns = ['*']
+  if pattern_groups[0]:
+    positive_patterns = pattern_groups[0].split(':')
+  negative_patterns = []
+  if len(pattern_groups) > 1:
+    negative_patterns = pattern_groups[1].split(':')
+
+  tests = []
+  test_set = set()
+  for pattern in positive_patterns:
+    pattern_tests = [
+        test for test in all_tests
+        if (fnmatch.fnmatch(test, pattern)
+            and not any(fnmatch.fnmatch(test, p) for p in negative_patterns)
+            and test not in test_set)]
+    tests.extend(pattern_tests)
+    test_set.update(pattern_tests)
+  return tests
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/lib/common/unittest_util_test.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=protected-access
+
+import logging
+import sys
+import unittest
+import unittest_util
+
+
+class FilterTestNamesTest(unittest.TestCase):
+
+  possible_list = ["Foo.One",
+                   "Foo.Two",
+                   "Foo.Three",
+                   "Bar.One",
+                   "Bar.Two",
+                   "Bar.Three",
+                   "Quux.One",
+                   "Quux.Two",
+                   "Quux.Three"]
+
+  def testMatchAll(self):
+    x = unittest_util.FilterTestNames(self.possible_list, "*")
+    self.assertEquals(x, self.possible_list)
+
+  def testMatchPartial(self):
+    x = unittest_util.FilterTestNames(self.possible_list, "Foo.*")
+    self.assertEquals(x, ["Foo.One", "Foo.Two", "Foo.Three"])
+
+  def testMatchFull(self):
+    x = unittest_util.FilterTestNames(self.possible_list, "Foo.Two")
+    self.assertEquals(x, ["Foo.Two"])
+
+  def testMatchTwo(self):
+    x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*")
+    self.assertEquals(x, ["Bar.One",
+                          "Bar.Two",
+                          "Bar.Three",
+                          "Foo.One",
+                          "Foo.Two",
+                          "Foo.Three"])
+
+  def testMatchWithNegative(self):
+    x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*-*.Three")
+    self.assertEquals(x, ["Bar.One",
+                          "Bar.Two",
+                          "Foo.One",
+                          "Foo.Two"])
+
+  def testMatchOverlapping(self):
+    x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:*.Two")
+    self.assertEquals(x, ["Bar.One",
+                          "Bar.Two",
+                          "Bar.Three",
+                          "Foo.Two",
+                          "Quux.Two"])
+
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/lib/common/util.py
@@ -0,0 +1,151 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generic utilities for all python scripts."""
+
+import atexit
+import httplib
+import os
+import signal
+import stat
+import subprocess
+import sys
+import tempfile
+import urlparse
+
+
+def GetPlatformName():
+  """Return a string to be used in paths for the platform."""
+  if IsWindows():
+    return 'win'
+  if IsMac():
+    return 'mac'
+  if IsLinux():
+    return 'linux'
+  raise NotImplementedError('Unknown platform "%s".' % sys.platform)
+
+
+def IsWindows():
+  return sys.platform == 'cygwin' or sys.platform.startswith('win')
+
+
+def IsLinux():
+  return sys.platform.startswith('linux')
+
+
+def IsMac():
+  return sys.platform.startswith('darwin')
+
+
+def _DeleteDir(path):
+  """Deletes a directory recursively, which must exist."""
+  # Don't use shutil.rmtree because it can't delete read-only files on Win.
+  for root, dirs, files in os.walk(path, topdown=False):
+    for name in files:
+      filename = os.path.join(root, name)
+      os.chmod(filename, stat.S_IWRITE)
+      os.remove(filename)
+    for name in dirs:
+      os.rmdir(os.path.join(root, name))
+  os.rmdir(path)
+
+
+def Delete(path):
+  """Deletes the given file or directory (recursively), which must exist."""
+  if os.path.isdir(path):
+    _DeleteDir(path)
+  else:
+    os.remove(path)
+
+
+def MaybeDelete(path):
+  """Deletes the given file or directory (recursively), if it exists."""
+  if os.path.exists(path):
+    Delete(path)
+
+
+def MakeTempDir(parent_dir=None):
+  """Creates a temporary directory and returns an absolute path to it.
+
+  The temporary directory is automatically deleted when the python interpreter
+  exits normally.
+
+  Args:
+    parent_dir: the directory to create the temp dir in. If None, the system
+                temp dir is used.
+
+  Returns:
+    The absolute path to the temporary directory.
+  """
+  path = tempfile.mkdtemp(dir=parent_dir)
+  atexit.register(MaybeDelete, path)
+  return path
+
+
+def Unzip(zip_path, output_dir):
+  """Unzips the given zip file using a system installed unzip tool.
+
+  Args:
+    zip_path: zip file to unzip.
+    output_dir: directory to unzip the contents of the zip file. The directory
+                must exist.
+
+  Raises:
+    RuntimeError if the unzip operation fails.
+  """
+  if IsWindows():
+    unzip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y']
+  else:
+    unzip_cmd = ['unzip', '-o']
+  unzip_cmd += [zip_path]
+  if RunCommand(unzip_cmd, output_dir) != 0:
+    raise RuntimeError('Unable to unzip %s to %s' % (zip_path, output_dir))
+
+
+def Kill(pid):
+  """Terminate the given pid."""
+  if IsWindows():
+    subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)])
+  else:
+    os.kill(pid, signal.SIGTERM)
+
+
+def RunCommand(cmd, cwd=None):
+  """Runs the given command and returns the exit code.
+
+  Args:
+    cmd: list of command arguments.
+    cwd: working directory to execute the command, or None if the current
+         working directory should be used.
+
+  Returns:
+    The exit code of the command.
+  """
+  process = subprocess.Popen(cmd, cwd=cwd)
+  process.wait()
+  return process.returncode
+
+
+def DoesUrlExist(url):
+  """Determines whether a resource exists at the given URL.
+
+  Args:
+    url: URL to be verified.
+
+  Returns:
+    True if url exists, otherwise False.
+  """
+  parsed = urlparse.urlparse(url)
+  conn = httplib.HTTPConnection(parsed.netloc)  # Bound before try for finally.
+  try:
+    conn.request('HEAD', parsed.path)
+    response = conn.getresponse()
+  except (IOError, httplib.HTTPException):  # socket.error is an IOError subclass.
+    return False
+  finally:
+    conn.close()
+  # Follow both permanent (301) and temporary (302) redirects.
+  if response.status in (301, 302):
+    return DoesUrlExist(response.getheader('location'))
+  return response.status == 200
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/process_version.gni
@@ -0,0 +1,126 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Runs the version processing script over the given template file to produce
+# an output file. This is used for generating various forms of files that
+# incorporate the product name and version.
+#
+# Unlike GYP, this will actually compile the resulting file, so you don't need
+# to add it separately to the sources, just depend on the target.
+#
+# In GYP this is a rule that runs once per ".ver" file. In GN this just
+# processes one file per invocation of the template so you may have to have
+# multiple targets.
+#
+# Parameters:
+#   sources (optional):
+#     List of file names to read. When converting a GYP target, this should
+#     list the 'source' (see above) as well as any extra_variable_files.
+#     The files will be passed to version.py in the order specified here.
+#
+#   output:
+#     File name of file to write. In GYP this is unspecified and it will
+#     make up a file name for you based on the input name, and tack on
+#     "_version.rc" to the end. But in GN you need to specify the full name.
+#
+#   template_file (optional):
+#     Template file to use (not a list). Most Windows users that want to use
+#     this to process a .rc template should use process_version_rc_template(),
+#     defined in //chrome/process_version_rc_template.gni, instead.
+#
+#   extra_args (optional):
+#     Extra arguments to pass to version.py. Any "-f <filename>" args should
+#     use sources instead.
+#
+#   process_only (optional, defaults to false)
+#     Set to generate only one action that processes the version file and
+#     doesn't attempt to link the result into a source set. This is for if
+#     you are processing the version as data only.
+#
+#   visibility (optional)
+#
+# Example:
+#   process_version("myversion") {
+#     sources = [
+#       "//chrome/VERSION"
+#       "myfile.h.in"
+#     ]
+#     output = "$target_gen_dir/myfile.h"
+#     extra_args = [ "-e", "FOO=42" ]
+#   }
+template("process_version") {
+  assert(defined(invoker.output), "Output must be defined for $target_name")
+
+  process_only = defined(invoker.process_only) && invoker.process_only
+
+  if (process_only) {
+    action_name = target_name
+  } else {
+    action_name = target_name + "_action"
+    source_set_name = target_name
+  }
+
+  action(action_name) {
+    script = "//build/util/version.py"
+
+    inputs = []
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+    if (defined(invoker.template_file)) {
+      inputs += [ invoker.template_file ]
+    }
+
+    outputs = [
+      invoker.output,
+    ]
+
+    args = []
+
+    if (is_official_build) {
+      args += [ "--official" ]
+    }
+
+    if (defined(invoker.sources)) {
+      inputs += invoker.sources
+      foreach(i, invoker.sources) {
+        args += [
+          "-f",
+          rebase_path(i, root_build_dir),
+        ]
+      }
+    }
+
+    if (defined(invoker.extra_args)) {
+      args += invoker.extra_args
+    }
+    args += [
+      "-o",
+      rebase_path(invoker.output, root_build_dir),
+    ]
+    if (defined(invoker.template_file)) {
+      args += [ rebase_path(invoker.template_file, root_build_dir) ]
+    }
+
+    forward_variables_from(invoker, [ "deps" ])
+
+    if (process_only) {
+      # When processing only, visibility gets applied to this target.
+      forward_variables_from(invoker, [ "visibility" ])
+    } else {
+      # When linking the result, only the source set can depend on the action.
+      visibility = [ ":$source_set_name" ]
+    }
+  }
+
+  if (!process_only) {
+    source_set(source_set_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      sources = get_target_outputs(":$action_name")
+      public_deps = [
+        ":$action_name",
+      ]
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/version.gni
@@ -0,0 +1,59 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This exposes the Chrome version as GN variables for use in build files.
+#
+# PREFER NOT TO USE THESE. The GYP build uses this kind of thing extensively.
+# However, it is far better to write an action (or use the process_version
+# wrapper in build/util/version.gni) to generate a file at build-time with the
+# information you need. This allows better dependency checking and GN will
+# run faster.
+#
+# These values should only be used if you REALLY need to depend on them at
+# build-time, for example, in the computation of output file names.
+
+# Give version.py a pattern that will expand to a GN scope consisting of
+# all values we need at once.
+_version_dictionary_template = "full = \"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\" " +
+                               "major = \"@MAJOR@\" minor = \"@MINOR@\" " +
+                               "build = \"@BUILD@\" patch = \"@PATCH@\" "
+
+# The file containing the Chrome version number.
+chrome_version_file = "//chrome/VERSION"
+
+_result = exec_script("version.py",
+                      [
+                        "-f",
+                        rebase_path(chrome_version_file, root_build_dir),
+                        "-t",
+                        _version_dictionary_template,
+                      ],
+                      "scope",
+                      [ chrome_version_file ])
+
+# Full version. For example "45.0.12321.0"
+chrome_version_full = _result.full
+
+# The constituent parts of the full version.
+chrome_version_major = _result.major
+chrome_version_minor = _result.minor
+chrome_version_build = _result.build
+chrome_version_patch = _result.patch
+
+if (is_mac) {
+  _result = exec_script("version.py",
+                        [
+                          "-f",
+                          rebase_path(chrome_version_file, root_build_dir),
+                          "-t",
+                          "@BUILD@.@PATCH_HI@.@PATCH_LO@",
+                          "-e",
+                          "PATCH_HI=int(PATCH)/256",
+                          "-e",
+                          "PATCH_LO=int(PATCH)%256",
+                        ],
+                        "trim string",
+                        [ chrome_version_file ])
+  chrome_dylib_version = _result
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/version.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+version.py -- Chromium version string substitution utility.
+"""
+
+import argparse
+import os
+import sys
+
+
+def fetch_values_from_file(values_dict, file_name):
+  """
+  Fetches KEYWORD=VALUE settings from the specified file.
+
+  Everything to the left of the first '=' is the keyword,
+  everything to the right is the value.  No stripping of
+  white space, so beware.
+
+  The file must exist, otherwise you get the Python exception from open().
+  """
+  for line in open(file_name, 'r').readlines():
+    key, val = line.rstrip('\r\n').split('=', 1)
+    values_dict[key] = val
+
+
+def fetch_values(file_list, is_official_build=None):
+  """
+  Returns a dictionary of values to be used for substitution, populating
+  the dictionary with KEYWORD=VALUE settings from the files in 'file_list'.
+
+  Explicitly adds the following value from internal calculations:
+
+    OFFICIAL_BUILD
+  """
+  CHROME_BUILD_TYPE = os.environ.get('CHROME_BUILD_TYPE')
+  if CHROME_BUILD_TYPE == '_official' or is_official_build:
+    official_build = '1'
+  else:
+    official_build = '0'
+
+  values = dict(
+    OFFICIAL_BUILD = official_build,
+  )
+
+  for file_name in file_list:
+    fetch_values_from_file(values, file_name)
+
+  return values
+
+
+def subst_template(contents, values):
+  """
+  Returns the template with substituted values from the specified dictionary.
+
+  Keywords to be substituted are surrounded by '@':  @KEYWORD@.
+
+  No attempt is made to avoid recursive substitution.  The order
+  of evaluation is random based on the order of the keywords returned
+  by the Python dictionary.  So do NOT substitute a value that
+  contains any @KEYWORD@ strings expecting them to be recursively
+  substituted, okay?
+  """
+  for key, val in values.iteritems():
+    try:
+      contents = contents.replace('@' + key + '@', val)
+    except TypeError:
+      print repr(key), repr(val)
+  return contents
+
+
+def subst_file(file_name, values):
+  """
+  Returns the contents of the specified file_name with substituted
+  values from the specified dictionary.
+
+  This is like subst_template, except it operates on a file.
+  """
+  template = open(file_name, 'r').read()
+  return subst_template(template, values);
+
+
+def write_if_changed(file_name, contents):
+  """
+  Writes the specified contents to the specified file_name
+  iff the contents are different than the current contents.
+  """
+  try:
+    old_contents = open(file_name, 'r').read()
+  except EnvironmentError:
+    pass
+  else:
+    if contents == old_contents:
+      return
+    os.unlink(file_name)
+  open(file_name, 'w').write(contents)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-f', '--file', action='append', default=[],
+                      help='Read variables from FILE.')
+  parser.add_argument('-i', '--input', default=None,
+                      help='Read strings to substitute from FILE.')
+  parser.add_argument('-o', '--output', default=None,
+                      help='Write substituted strings to FILE.')
+  parser.add_argument('-t', '--template', default=None,
+                      help='Use TEMPLATE as the strings to substitute.')
+  parser.add_argument('-e', '--eval', action='append', default=[],
+                      help='Evaluate VAL after reading variables. Can be used '
+                           'to synthesize variables. e.g. -e \'PATCH_HI=int('
+                           'PATCH)/256.')
+  parser.add_argument('--official', action='store_true',
+                      help='Whether the current build should be an official '
+                           'build, used in addition to the environment '
+                           'variable.')
+  parser.add_argument('args', nargs=argparse.REMAINDER,
+                      help='For compatibility: INPUT and OUTPUT can be '
+                           'passed as positional arguments.')
+  options = parser.parse_args()
+
+  evals = {}
+  for expression in options.eval:
+    try:
+      evals.update(dict([expression.split('=', 1)]))
+    except ValueError:
+      parser.error('-e requires VAR=VAL')
+
+  # Compatibility with old versions that considered the first two positional
+  # arguments shorthands for --input and --output.
+  while len(options.args) and (options.input is None or \
+                               options.output is None):
+    if options.input is None:
+      options.input = options.args.pop(0)
+    elif options.output is None:
+      options.output = options.args.pop(0)
+  if options.args:
+    parser.error('Unexpected arguments: %r' % options.args)
+
+  values = fetch_values(options.file, options.official)
+  for key, val in evals.iteritems():
+    values[key] = str(eval(val, globals(), values))
+
+  if options.template is not None:
+    contents = subst_template(options.template, values)
+  elif options.input:
+    contents = subst_file(options.input, values)
+  else:
+    # Generate a default set of version information.
+    contents = """MAJOR=%(MAJOR)s
+MINOR=%(MINOR)s
+BUILD=%(BUILD)s
+PATCH=%(PATCH)s
+LASTCHANGE=%(LASTCHANGE)s
+OFFICIAL_BUILD=%(OFFICIAL_BUILD)s
+""" % values
+
+  if options.output is not None:
+    write_if_changed(options.output, contents)
+  else:
+    print contents
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/util/webkit_version.h.in
@@ -0,0 +1,9 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// webkit_version.h is generated from webkit_version.h.in.  Edit the source!
+
+#define WEBKIT_VERSION_MAJOR 537
+#define WEBKIT_VERSION_MINOR 36
+#define WEBKIT_SVN_REVISION "@@LASTCHANGE@"
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/vs_toolchain.py
@@ -0,0 +1,449 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import json
+import os
+import pipes
+import platform
+import re
+import shutil
+import stat
+import subprocess
+import sys
+
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+json_data_file = os.path.join(script_dir, 'win_toolchain.json')
+
+
+# Use MSVS2017 as the default toolchain.
+CURRENT_DEFAULT_TOOLCHAIN_VERSION = '2017'
+
+
+def SetEnvironmentAndGetRuntimeDllDirs():
+  """Sets up os.environ to use the depot_tools VS toolchain with gyp, and
+  returns the location of the VS runtime DLLs so they can be copied into
+  the output directory after gyp generation.
+
+  Return value is [x64path, x86path] or None
+  """
+  vs_runtime_dll_dirs = None
+  depot_tools_win_toolchain = \
+      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+  # When running on a non-Windows host, only do this if the SDK has explicitly
+  # been downloaded before (in which case json_data_file will exist).
+  if ((sys.platform in ('win32', 'cygwin') or os.path.exists(json_data_file))
+      and depot_tools_win_toolchain):
+    if ShouldUpdateToolchain():
+      Update()
+    with open(json_data_file, 'r') as tempf:
+      toolchain_data = json.load(tempf)
+
+    toolchain = toolchain_data['path']
+    version = toolchain_data['version']
+    win_sdk = toolchain_data.get('win_sdk')
+    if not win_sdk:
+      win_sdk = toolchain_data['win8sdk']
+    wdk = toolchain_data['wdk']
+    # TODO(scottmg): The order unfortunately matters in these. They should be
+    # split into separate keys for x86 and x64. (See CopyDlls call below).
+    # http://crbug.com/345992
+    vs_runtime_dll_dirs = toolchain_data['runtime_dirs']
+
+    os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
+    os.environ['GYP_MSVS_VERSION'] = version
+
+    # Limit the scope of the gyp import to only where it is used. This
+    # potentially lets build configs that never execute this block to drop
+    # their GYP checkout.
+    import gyp
+
+    # We need to make sure windows_sdk_path is set to the automated
+    # toolchain values in GYP_DEFINES, but don't want to override any
+    # other
+    # values there.
+    gyp_defines_dict = gyp.NameValueListToDict(gyp.ShlexEnv('GYP_DEFINES'))
+    gyp_defines_dict['windows_sdk_path'] = win_sdk
+    os.environ['GYP_DEFINES'] = ' '.join('%s=%s' % (k, pipes.quote(str(v)))
+        for k, v in gyp_defines_dict.iteritems())
+
+    os.environ['WINDOWSSDKDIR'] = win_sdk
+    os.environ['WDK_DIR'] = wdk
+    # Include the VS runtime in the PATH in case it's not machine-installed.
+    runtime_path = os.path.pathsep.join(vs_runtime_dll_dirs)
+    os.environ['PATH'] = runtime_path + os.path.pathsep + os.environ['PATH']
+  elif sys.platform == 'win32' and not depot_tools_win_toolchain:
+    if not 'GYP_MSVS_OVERRIDE_PATH' in os.environ:
+      os.environ['GYP_MSVS_OVERRIDE_PATH'] = DetectVisualStudioPath()
+    if not 'GYP_MSVS_VERSION' in os.environ:
+      os.environ['GYP_MSVS_VERSION'] = GetVisualStudioVersion()
+
+    # When using an installed toolchain these files aren't needed in the output
+    # directory in order to run binaries locally, but they are needed in order
+    # to create isolates or the mini_installer. Copying them to the output
+    # directory ensures that they are available when needed.
+    bitness = platform.architecture()[0]
+    # When running 64-bit python the x64 DLLs will be in System32
+    x64_path = 'System32' if bitness == '64bit' else 'Sysnative'
+    x64_path = os.path.join(r'C:\Windows', x64_path)
+    vs_runtime_dll_dirs = [x64_path, r'C:\Windows\SysWOW64']
+
+  return vs_runtime_dll_dirs
+
+
+def _RegistryGetValueUsingWinReg(key, value):
+  """Use the _winreg module to obtain the value of a registry key.
+
+  Args:
+    key: The registry key.
+    value: The particular registry value to read.
+  Return:
+    contents of the registry key's value, or None on failure.  Throws
+    ImportError if _winreg is unavailable.
+  """
+  import _winreg
+  try:
+    root, subkey = key.split('\\', 1)
+    assert root == 'HKLM'  # Only need HKLM for now.
+    with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
+      return _winreg.QueryValueEx(hkey, value)[0]
+  except WindowsError:
+    return None
+
+
+def _RegistryGetValue(key, value):
+  try:
+    return _RegistryGetValueUsingWinReg(key, value)
+  except ImportError:
+    raise Exception('The python library _winreg not found.')
+
+
+def GetVisualStudioVersion():
+  """Return GYP_MSVS_VERSION of Visual Studio.
+  """
+  return os.environ.get('GYP_MSVS_VERSION', CURRENT_DEFAULT_TOOLCHAIN_VERSION)
+
+
+def DetectVisualStudioPath():
+  """Return path to the GYP_MSVS_VERSION of Visual Studio.
+  """
+
+  # Note that this code is used from
+  # build/toolchain/win/setup_toolchain.py as well.
+  version_as_year = GetVisualStudioVersion()
+  year_to_version = {
+      '2015': '14.0',
+      '2017': '15.0',
+  }
+  if version_as_year not in year_to_version:
+    raise Exception(('Visual Studio version %s (from GYP_MSVS_VERSION)'
+                     ' not supported. Supported versions are: %s') % (
+                       version_as_year, ', '.join(year_to_version.keys())))
+  version = year_to_version[version_as_year]
+  if version_as_year == '2017':
+    # The VC++ 2017 install location needs to be located using COM instead of
+    # the registry. For details see:
+    # https://blogs.msdn.microsoft.com/heaths/2016/09/15/changes-to-visual-studio-15-setup/
+    # For now we use a hardcoded default with an environment variable override.
+    for path in (
+        os.environ.get('vs2017_install'),
+        r'C:\Program Files (x86)\Microsoft Visual Studio\2017\Professional',
+        r'C:\Program Files (x86)\Microsoft Visual Studio\2017\Community'):
+      if path and os.path.exists(path):
+        return path
+  else:
+    keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
+            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version]
+    for key in keys:
+      path = _RegistryGetValue(key, 'InstallDir')
+      if not path:
+        continue
+      path = os.path.normpath(os.path.join(path, '..', '..'))
+      return path
+
+  raise Exception(('Visual Studio Version %s (from GYP_MSVS_VERSION)'
+                   ' not found.') % (version_as_year))
+
+
+def _CopyRuntimeImpl(target, source, verbose=True):
+  """Copy |source| to |target| if it doesn't already exist or if it needs to be
+  updated (comparing last modified time as an approximate float match as for
+  some reason the values tend to differ by ~1e-07 despite being copies of the
+  same file... https://crbug.com/603603).
+  """
+  if (os.path.isdir(os.path.dirname(target)) and
+      (not os.path.isfile(target) or
+       abs(os.stat(target).st_mtime - os.stat(source).st_mtime) >= 0.01)):
+    if verbose:
+      print 'Copying %s to %s...' % (source, target)
+    if os.path.exists(target):
+      # Make the file writable so that we can delete it now.
+      os.chmod(target, stat.S_IWRITE)
+      os.unlink(target)
+    shutil.copy2(source, target)
+    # Make the file writable so that we can overwrite or delete it later.
+    os.chmod(target, stat.S_IWRITE)
+
+
+def _CopyUCRTRuntime(target_dir, source_dir, target_cpu, dll_pattern, suffix):
+  """Copy both the msvcp and vccorlib runtime DLLs, only if the target doesn't
+  exist, but the target directory does exist."""
+  for file_part in ('msvcp', 'vccorlib', 'vcruntime'):
+    dll = dll_pattern % file_part
+    target = os.path.join(target_dir, dll)
+    source = os.path.join(source_dir, dll)
+    _CopyRuntimeImpl(target, source)
+  # Copy the UCRT files needed by VS 2015 from the Windows SDK. This location
+  # includes the api-ms-win-crt-*.dll files that are not found in the Windows
+  # directory. These files are needed for component builds.
+  # If WINDOWSSDKDIR is not set use the default SDK path. This will be the case
+  # when DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run.
+  win_sdk_dir = os.path.normpath(
+      os.environ.get('WINDOWSSDKDIR',
+                     'C:\\Program Files (x86)\\Windows Kits\\10'))
+  ucrt_dll_dirs = os.path.join(win_sdk_dir, r'Redist\ucrt\DLLs', target_cpu)
+  ucrt_files = glob.glob(os.path.join(ucrt_dll_dirs, 'api-ms-win-*.dll'))
+  assert len(ucrt_files) > 0
+  for ucrt_src_file in ucrt_files:
+    file_part = os.path.basename(ucrt_src_file)
+    ucrt_dst_file = os.path.join(target_dir, file_part)
+    _CopyRuntimeImpl(ucrt_dst_file, ucrt_src_file, False)
+  _CopyRuntimeImpl(os.path.join(target_dir, 'ucrtbase' + suffix),
+                    os.path.join(source_dir, 'ucrtbase' + suffix))
+
+
+def FindVCToolsRoot():
+  """In VS2017 the PGO runtime dependencies are located in
+  {toolchain_root}/VC/Tools/MSVC/{x.y.z}/bin/Host{target_cpu}/{target_cpu}/, the
+  {version_number} part is likely to change in case of a minor update of the
+  toolchain so we don't hardcode this value here (except for the major number).
+
+  This returns the '{toolchain_root}/VC/Tools/MSVC/{x.y.z}/bin/' path.
+
+  This function should only be called when using VS2017.
+  """
+  assert GetVisualStudioVersion() == '2017'
+  SetEnvironmentAndGetRuntimeDllDirs()
+  assert ('GYP_MSVS_OVERRIDE_PATH' in os.environ)
+  vc_tools_msvc_root = os.path.join(os.environ['GYP_MSVS_OVERRIDE_PATH'],
+      'VC', 'Tools', 'MSVC')
+  for directory in os.listdir(vc_tools_msvc_root):
+    if not os.path.isdir(os.path.join(vc_tools_msvc_root, directory)):
+      continue
+    if re.match('14\.\d+\.\d+', directory):
+      return os.path.join(vc_tools_msvc_root, directory, 'bin')
+  raise Exception('Unable to find the VC tools directory.')
+
+
+def _CopyPGORuntime(target_dir, target_cpu):
+  """Copy the runtime dependencies required during a PGO build.
+  """
+  env_version = GetVisualStudioVersion()
+  # These dependencies will be in a different location depending on the version
+  # of the toolchain.
+  if env_version == '2015':
+    pgo_x86_runtime_dir = os.path.join(os.environ.get('GYP_MSVS_OVERRIDE_PATH'),
+                                       'VC', 'bin')
+    pgo_x64_runtime_dir = os.path.join(pgo_x86_runtime_dir, 'amd64')
+  elif env_version == '2017':
+    pgo_runtime_root = FindVCToolsRoot()
+    assert pgo_runtime_root
+    # There's no version of pgosweep.exe in HostX64/x86, so we use the copy
+    # from HostX86/x86.
+    pgo_x86_runtime_dir = os.path.join(pgo_runtime_root, 'HostX86', 'x86')
+    pgo_x64_runtime_dir = os.path.join(pgo_runtime_root, 'HostX64', 'x64')
+  else:
+    raise Exception('Unexpected toolchain version: %s.' % env_version)
+
+  # We need to copy 2 runtime dependencies used during the profiling step:
+  #     - pgort140.dll: runtime library required to run the instrumented image.
+  #     - pgosweep.exe: executable used to collect the profiling data
+  pgo_runtimes = ['pgort140.dll', 'pgosweep.exe']
+  for runtime in pgo_runtimes:
+    if target_cpu == 'x86':
+      source = os.path.join(pgo_x86_runtime_dir, runtime)
+    elif target_cpu == 'x64':
+      source = os.path.join(pgo_x64_runtime_dir, runtime)
+    else:
+      raise NotImplementedError("Unexpected target_cpu value: " + target_cpu)
+    if not os.path.exists(source):
+      raise Exception('Unable to find %s.' % source)
+    _CopyRuntimeImpl(os.path.join(target_dir, runtime), source)
+
+
+def _CopyRuntime(target_dir, source_dir, target_cpu, debug):
+  """Copy the VS runtime DLLs, only if the target doesn't exist, but the target
+  directory does exist. Handles VS 2015 and VS 2017."""
+  suffix = "d.dll" if debug else ".dll"
+  # VS 2017 uses the same CRT DLLs as VS 2015.
+  _CopyUCRTRuntime(target_dir, source_dir, target_cpu, '%s140' + suffix,
+                    suffix)
+
+
+def CopyDlls(target_dir, configuration, target_cpu):
+  """Copy the VS runtime DLLs into the requested directory as needed.
+
+  configuration is one of 'Debug' or 'Release'.
+  target_cpu is one of 'x86' or 'x64'.
+
+  The debug configuration gets both the debug and release DLLs; the
+  release config only the latter.
+  """
+  vs_runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
+  if not vs_runtime_dll_dirs:
+    return
+
+  x64_runtime, x86_runtime = vs_runtime_dll_dirs
+  runtime_dir = x64_runtime if target_cpu == 'x64' else x86_runtime
+  _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=False)
+  if configuration == 'Debug':
+    _CopyRuntime(target_dir, runtime_dir, target_cpu, debug=True)
+  else:
+    _CopyPGORuntime(target_dir, target_cpu)
+
+  _CopyDebugger(target_dir, target_cpu)
+
+
+def _CopyDebugger(target_dir, target_cpu):
+  """Copy dbghelp.dll into the requested directory as needed.
+
+  target_cpu is one of 'x86' or 'x64'.
+
+  dbghelp.dll is used when Chrome needs to symbolize stacks. Copying this file
+  from the SDK directory avoids using the system copy of dbghelp.dll which then
+  ensures compatibility with recent debug information formats, such as VS
+  2017 /debug:fastlink PDBs.
+  """
+  win_sdk_dir = SetEnvironmentAndGetSDKDir()
+  if not win_sdk_dir:
+    return
+
+  debug_file = 'dbghelp.dll'
+  full_path = os.path.join(win_sdk_dir, 'Debuggers', target_cpu, debug_file)
+  if not os.path.exists(full_path):
+    raise Exception('dbghelp.dll not found in "%s"\r\nYou must install the '
+                    '"Debugging Tools for Windows" feature from the Windows '
+                    '10 SDK.' % full_path)
+  target_path = os.path.join(target_dir, debug_file)
+  _CopyRuntimeImpl(target_path, full_path)
+
+
+def _GetDesiredVsToolchainHashes():
+  """Load a list of SHA1s corresponding to the toolchains that we want installed
+  to build with."""
+  env_version = GetVisualStudioVersion()
+  if env_version == '2015':
+    # Update 3 final with patches with 10.0.14393.0 SDK.
+    return ['d3cb0e37bdd120ad0ac4650b674b09e81be45616']
+  if env_version == '2017':
+    # VS 2017 Update 3 Preview 1 with 10.0.14393.0 SDK and patched statreg.h.
+    return ['3915730f76bd9c6155aed871b944b0a25c18f15e']
+  raise Exception('Unsupported VS version %s' % env_version)
+
+
+def ShouldUpdateToolchain():
+  """Check if the toolchain should be upgraded."""
+  if not os.path.exists(json_data_file):
+    return True
+  with open(json_data_file, 'r') as tempf:
+    toolchain_data = json.load(tempf)
+  version = toolchain_data['version']
+  env_version = GetVisualStudioVersion()
+  # If there's a mismatch between the version set in the environment and the one
+  # in the json file then the toolchain should be updated.
+  return version != env_version
+
+
+def Update(force=False):
+  """Requests an update of the toolchain to the specific hashes we have at
+  this revision. The update outputs a .json of the various configuration
+  information required to pass to gyp which we use in |GetToolchainDir()|.
+  """
+  if force != False and force != '--force':
+    print >>sys.stderr, 'Unknown parameter "%s"' % force
+    return 1
+  if force == '--force' or os.path.exists(json_data_file):
+    force = True
+
+  depot_tools_win_toolchain = \
+      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+  if ((sys.platform in ('win32', 'cygwin') or force) and
+        depot_tools_win_toolchain):
+    import find_depot_tools
+    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
+    # Necessary so that get_toolchain_if_necessary.py will put the VS toolkit
+    # in the correct directory.
+    os.environ['GYP_MSVS_VERSION'] = GetVisualStudioVersion()
+    get_toolchain_args = [
+        sys.executable,
+        os.path.join(depot_tools_path,
+                    'win_toolchain',
+                    'get_toolchain_if_necessary.py'),
+        '--output-json', json_data_file,
+      ] + _GetDesiredVsToolchainHashes()
+    if force:
+      get_toolchain_args.append('--force')
+    subprocess.check_call(get_toolchain_args)
+
+  return 0
+
+
+def NormalizePath(path):
+  while path.endswith("\\"):
+    path = path[:-1]
+  return path
+
+
+def SetEnvironmentAndGetSDKDir():
+  """Gets location information about the current sdk (must have been
+  previously updated by 'update'). This is used for the GN build."""
+  SetEnvironmentAndGetRuntimeDllDirs()
+
+  # If WINDOWSSDKDIR is not set, search the default SDK path and set it.
+  if not 'WINDOWSSDKDIR' in os.environ:
+    default_sdk_path = 'C:\\Program Files (x86)\\Windows Kits\\10'
+    if os.path.isdir(default_sdk_path):
+      os.environ['WINDOWSSDKDIR'] = default_sdk_path
+
+  return NormalizePath(os.environ['WINDOWSSDKDIR'])
+
+
+def GetToolchainDir():
+  """Gets location information about the current toolchain (must have been
+  previously updated by 'update'). This is used for the GN build."""
+  runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
+  win_sdk_dir = SetEnvironmentAndGetSDKDir()
+
+  print '''vs_path = "%s"
+sdk_path = "%s"
+vs_version = "%s"
+wdk_dir = "%s"
+runtime_dirs = "%s"
+''' % (
+      NormalizePath(os.environ['GYP_MSVS_OVERRIDE_PATH']),
+      win_sdk_dir,
+      GetVisualStudioVersion(),
+      NormalizePath(os.environ.get('WDK_DIR', '')),
+      os.path.pathsep.join(runtime_dll_dirs or ['None']))
+
+
+def main():
+  commands = {
+      'update': Update,
+      'get_toolchain_dir': GetToolchainDir,
+      'copy_dlls': CopyDlls,
+  }
+  if len(sys.argv) < 2 or sys.argv[1] not in commands:
+    print >>sys.stderr, 'Expected one of: %s' % ', '.join(commands)
+    return 1
+  return commands[sys.argv[1]](*sys.argv[2:])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
--- a/media/webrtc/trunk/build/whitespace_file.txt
+++ b/media/webrtc/trunk/build/whitespace_file.txt
@@ -1,69 +1,174 @@
-Copyright (c) 2012 The Chromium Authors. All rights reserved.
+Copyright 2014 The Chromium Authors. All rights reserved.
 Use of this useless file is governed by a BSD-style license that can be
 found in the LICENSE file.
 
+
 This file is used for making non-code changes to trigger buildbot cycles. Make
 any modification below this line.
 
-=========================================================================
+======================================================================
 
-Let's make a story. Add one sentence for every commit:
+Let's make a story. Add zero+ sentences for every commit:
 
-CHAPTER 1.0:
-It was a dark and stormy night; the rain fell in torrents -- except at
+CHÄPTER 1:
+It was a dark and blinky night; the rain fell in torrents -- except at
 occasional intervals, when it was checked by a violent gust of wind which
 swept up the streets (for it is in London that our scene lies), rattling along
 the housetops, and fiercely agitating the scanty flame of the lamps that
-struggled against the darkness. A hooded figure emerged.
+struggled against the elements. A hooded figure emerged.
 
-It was a Domo-Kun. "What took you so long?", inquired his wife.
+It was a Domo-Kun.
+
+"What took you so long?", inquired his wife.
+
 Silence. Oblivious to his silence, she continued, "Did Mr. Usagi enjoy the
 waffles you brought him?" "You know him, he's not one to forego a waffle,
-no matter how burnt", he snickered.
+no matter how burnt," he snickered.
 
-The pause was filled with the sound of thunder.
+The pause was filled with the sound of compile errors.
 
-
-CHAPTER 2.0:
+CHAPTER 2:
 The jelly was as dark as night, and just as runny.
 The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles
 with his fork, watching the runny jelly spread and pool across his plate,
 like the blood of a dying fawn. "It reminds me of that time --" he started, as
 his wife cut in quickly: "-- please. I can't bear to hear it.". A flury of
 images coming from the past flowed through his mind.
 
 "You recall what happened on Mulholland drive?" The ceiling fan rotated slowly
 overhead, barely disturbing the thick cigarette smoke. No doubt was left about
 when the fan was last cleaned.
 
 There was a poignant pause.
 
-CHAPTER 3.0:
+CHAPTER 3:
 Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he
 began feeling sick. He thought out loud to himself, "No, he wouldn't have done
-that to me." He considered that perhaps he shouldn't have pushed him so far.
+that to me." He considered that perhaps he shouldn't have pushed so hard.
 Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable
-horror that had occurred, just the week before.
+horror that had occurred just the week before.
 
-Next time, there won't be any sushi. Why sushis with waffles anyway?  It's like
-salmon in a cereal bowl.
+Next time, there won't be any sushi. Why sushi with waffles anyway?  It's like
+adorning breakfast cereal with halibut -- shameful.
 
 CHAPTER 4:
 The taste of stale sushi in his mouth the next morning was unbearable. He
-wondered where the sushi came from. He tries to recall the cook's face.  Purple?
+wondered where the sushi came from as he attempted to wash the taste away with
+a bottle of 3000¥ sake. He tries to recall the cook's face.  Green? Probably.
 
 CHAPTER 5:
-Many years later, Mr. Usagi would laugh at the memory of the earnest,
-well-intentioned Domo-Kun.
-Another day in the life...
+Many tears later, Mr. Usagi would laugh at the memory of the earnest,
+well-intentioned Domo-Kun. Another day in the life. That is when he realized that
+life goes on.
+
+$CHAPTER6
 
 TRUISMS (1978-1983)
 JENNY HOLZER
 A LITTLE KNOWLEDGE CAN GO A LONG WAY
 A LOT OF PROFESSIONALS ARE CRACKPOTS
 A MAN CAN'T KNOW WHAT IT IS TO BE A MOTHER
 A NAME MEANS A LOT JUST BY ITSELF
 A POSITIVE ATTITUDE MEANS ALL THE DIFFERENCE IN THE WORLD
 A RELAXED MAN IS NOT NECESSARILY A BETTER MAN
+NO ONE SHOULD EVER USE SVN
+AN INFLEXIBLE POSITION SOMETIMES IS A SIGN OF PARALYSIS
+IT IS MANS FATE TO OUTSMART HIMSELF
+BEING SURE OF YOURSELF MEANS YOU'RE A FOOL
+AM NOT
+ARE TOO
+IF AT FIRST YOU DON'T SUCCEED: TRY, EXCEPT, FINALLY
+AND THEN, TIME LEAPT BACKWARDS
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAaaaaaaaaaaaaaaaaaaaaaaaaaaaahhhh LOT
+I'm really tempted to change something above the line.
+Reeccciiiipppppeeeeeesssssss!!!!!!!!!
+PEOPLE SAY "FAILURE IS NOT AN OPTION", BUT FAILURE IS ALWAYS AN OPTION.
+WHAT GOES UP MUST HAVE A NON-ZERO VELOCITY
 
-This commit will change the world as we know it.
+I can feel the heat closing in, feel them out there making their moves...
+What could possibly go wrong? We've already ate our cake.
+
+Stand Still. Pause Clocks. We can make the World Stop.
+WUBWUBWUBWUBWUB
+
+I want a 1917 build and you will give me what I want.
+
+This sentence is false.
+
+Beauty is in the eyes of a Beholder.
+
+I'm the best at space.
+
+The first time Yossarian saw the chaplain, he fell madly in love with him.
+*
+*
+*
+Give not thyself up, then, to fire, lest it invert thee, deaden thee; as for
+the time it did me. There is a wisdom that is woe; but there is a woe that is
+madness. And there is a Catskill eagle in some souls that can alike dive down
+into the blackest gorges, and soar out of them again and become invisible in
+the sunny spaces. And even if he for ever flies within the gorge, that gorge
+is in the mountains; so that even in his lowest swoop the mountain eagle is
+still higher than other birds upon the plain, even though they soar.
+*
+*
+*
+
+I'm here to commit lines and drop rhymes
+*
+This is a line to test and try uploading a cl.
+
+And lo, in the year 2014, there was verily an attempt to upgrade to GCC 4.8 on
+the Android bots, and it was good. Except on one bot, where it was bad. And
+lo, the change was reverted, and GCC went back to 4.6, where code is slower
+and less optimized. And verily did it break the build, because artifacts had
+been created with 4.8, and alignment was no longer the same, and a great
+sadness descended upon the Android GN buildbot, and it did refuseth to build
+any more. But the sheriffs thought to themselves: Placebo! Let us clobber the
+bot, and perhaps it will rebuild with GCC 4.6, which hath worked for many many
+seasons. And so they modified the whitespace file with these immortal lines,
+and visited it upon the bots, that great destruction might be wrought upon
+their outdated binaries. In clobberus, veritas.
+
+As the git approaches, light begins to shine through the SCM thrice again...
+However, the git, is, after all, quite stupid.
+
+Suddenly Domo-Kun found itself in a room filled with dazzling mirrors. As
+Domo-Kun looked around, it realized that some of the mirrors were actually but
+pale reflections of true reality.
+
+A herd of wild gits appears!  Time for CQ :D
+And one more for sizes.py...
+
+What's an overmarketed dietary supplement expressing sadness, relief,
+tiredness, or a similar feeling.?  Ah-Sigh-ee.
+
+It was love at first sight.  The moment Yossarian first laid eyes on the chaplain, he fell madly in love with him.
+
+Cool whitespace change for git-cl land
+
+Oh god the bots are red! I'm blind! Mmmm, cronuts.
+
+If you stand on your head, you will get footprints in your hair.
+
+sigh
+sigher
+pick up cls
+
+In the BUILD we trust.
+^_^
+
+In the masters we don't.
+In the tryservers, we don't either.
+In the CQ sometimes.
+Auto-generated by git-eject-upstream (http://goo.gl/cIHsYR)
+My sandwiches are like my children: I love them all.
+No, really, I couldn't eat another bit.
+When I hunger I think of you, and a pastrami sandwich.
+Do make a terrible mistake every once in a while.
+I just made two.
+Mistakes are the best sometimes.
+\o/
+This is groovy.
+
+SECRET ENDING: IT WAS _____ ALL ALONG!
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/BUILD.gn
@@ -0,0 +1,80 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/win/manifest.gni")
+
+# Depending on this target will cause the manifests for Chrome's default
+# Windows and common control compatibility and elevation for executables.
+windows_manifest("default_exe_manifest") {
+  sources = [
+    as_invoker_manifest,
+    common_controls_manifest,
+    default_compatibility_manifest,
+  ]
+  type = "exe"
+}
+
+if (is_win) {
+  action("copy_cdb_to_output") {
+    script = "//build/win/copy_cdb_to_output.py"
+    inputs = [
+      script,
+    ]
+    outputs = [
+      "$root_out_dir/cdb/cdb.exe",
+      "$root_out_dir/cdb/dbgeng.dll",
+      "$root_out_dir/cdb/dbghelp.dll",
+      "$root_out_dir/cdb/dbgmodel.dll",
+      "$root_out_dir/cdb/winext/ext.dll",
+      "$root_out_dir/cdb/winext/uext.dll",
+      "$root_out_dir/cdb/winxp/exts.dll",
+      "$root_out_dir/cdb/winxp/ntsdexts.dll",
+      "$root_out_dir/cdb/api-ms-win-core-console-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-datetime-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-debug-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-errorhandling-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-file-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-file-l1-2-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-file-l2-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-handle-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-heap-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-interlocked-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-libraryloader-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-localization-l1-2-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-memory-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-namedpipe-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-processenvironment-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-processthreads-l1-1-1.dll",
+      "$root_out_dir/cdb/api-ms-win-core-profile-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-rtlsupport-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-string-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-synch-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-synch-l1-2-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-sysinfo-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-timezone-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-core-util-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-conio-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-convert-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-environment-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-filesystem-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-heap-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-locale-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-math-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-multibyte-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-private-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-process-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-runtime-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-stdio-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-string-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-time-l1-1-0.dll",
+      "$root_out_dir/cdb/api-ms-win-crt-utility-l1-1-0.dll",
+      "$root_out_dir/cdb/ucrtbase.dll",
+    ]
+    args = [
+      rebase_path("$root_out_dir/cdb", root_out_dir),
+      current_cpu,
+    ]
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/as_invoker.manifest
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+  <security>
+    <requestedPrivileges>
+      <requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel>
+    </requestedPrivileges>
+  </security>
+</trustInfo></assembly>
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/common_controls.manifest
@@ -0,0 +1,8 @@
+<?xml version='1.0' encoding='UTF-8' standalone='yes'?>
+<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>
+  <dependency>
+    <dependentAssembly>
+      <assemblyIdentity type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='*' publicKeyToken='6595b64144ccf1df' language='*' />
+    </dependentAssembly>
+  </dependency>
+</assembly>
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/compatibility.manifest
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+  <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+    <application>
+      <!--The ID below indicates application support for Windows Vista -->
+      <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
+      <!--The ID below indicates application support for Windows 7 -->
+      <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
+      <!--The ID below indicates application support for Windows 8 -->
+      <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
+      <!--The ID below indicates application support for Windows 8.1 -->
+      <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
+      <!--The ID below indicates application support for Windows 10 -->
+      <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
+    </application>
+  </compatibility>
+</assembly>
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/copy_cdb_to_output.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import hashlib
+import os
+import shutil
+import sys
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+src_build_dir = os.path.abspath(os.path.join(script_dir, os.pardir))
+sys.path.insert(0, src_build_dir)
+
+import vs_toolchain
+
+
+def _HexDigest(file_name):
+  hasher = hashlib.sha256()
+  afile = open(file_name, 'rb')
+  blocksize = 65536
+  buf = afile.read(blocksize)
+  while len(buf) > 0:
+    hasher.update(buf)
+    buf = afile.read(blocksize)
+  afile.close()
+  return hasher.hexdigest()
+
+
+def _CopyImpl(file_name, target_dir, source_dir, verbose=False):
+  """Copy |source| to |target| if it doesn't already exist or if it
+  needs to be updated.
+  """
+  target = os.path.join(target_dir, file_name)
+  source = os.path.join(source_dir, file_name)
+  if (os.path.isdir(os.path.dirname(target)) and
+      ((not os.path.isfile(target)) or
+       _HexDigest(source) != _HexDigest(target))):
+    if verbose:
+      print 'Copying %s to %s...' % (source, target)
+    if os.path.exists(target):
+      os.unlink(target)
+    shutil.copy(source, target)
+
+
+def _ConditionalMkdir(output_dir):
+  if not os.path.isdir(output_dir):
+    os.makedirs(output_dir)
+
+
+def _CopyCDBToOutput(output_dir, target_arch):
+  """Copies the Windows debugging executable cdb.exe to the output
+  directory, which is created if it does not exist. The output
+  directory, and target architecture that should be copied, are
+  passed. Supported values for the target architecture are the GYP
+  values "ia32" and "x64" and the GN values "x86" and "x64".
+  """
+  _ConditionalMkdir(output_dir)
+  vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
+  # If WINDOWSSDKDIR is not set use the default SDK path. This will be the case
+  # when DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run.
+  win_sdk_dir = os.path.normpath(
+      os.environ.get('WINDOWSSDKDIR',
+                     'C:\\Program Files (x86)\\Windows Kits\\10'))
+  if target_arch == 'ia32' or target_arch == 'x86':
+    src_arch = 'x86'
+  elif target_arch == 'x64':
+    src_arch = 'x64'
+  else:
+    print 'copy_cdb_to_output.py: unknown target_arch %s' % target_arch
+    sys.exit(1)
+  # We need to copy multiple files, so cache the computed source directory.
+  src_dir = os.path.join(win_sdk_dir, 'Debuggers', src_arch)
+  # We need to copy some helper DLLs to get access to the !uniqstack
+  # command to dump all threads' stacks.
+  src_winext_dir = os.path.join(src_dir, 'winext')
+  dst_winext_dir = os.path.join(output_dir, 'winext')
+  src_winxp_dir = os.path.join(src_dir, 'winxp')
+  dst_winxp_dir = os.path.join(output_dir, 'winxp')
+  src_crt_dir = os.path.join(win_sdk_dir, r'Redist\ucrt\DLLs', src_arch)
+  _ConditionalMkdir(dst_winext_dir)
+  _ConditionalMkdir(dst_winxp_dir)
+  # Note that the outputs from the "copy_cdb_to_output" target need to
+  # be kept in sync with this list.
+  _CopyImpl('cdb.exe', output_dir, src_dir)
+  _CopyImpl('dbgeng.dll', output_dir, src_dir)
+  _CopyImpl('dbghelp.dll', output_dir, src_dir)
+  _CopyImpl('dbgmodel.dll', output_dir, src_dir)
+  _CopyImpl('ext.dll', dst_winext_dir, src_winext_dir)
+  _CopyImpl('uext.dll', dst_winext_dir, src_winext_dir)
+  _CopyImpl('exts.dll', dst_winxp_dir, src_winxp_dir)
+  _CopyImpl('ntsdexts.dll', dst_winxp_dir, src_winxp_dir)
+  for dll_path in glob.glob(os.path.join(src_crt_dir, 'api-ms-win-*.dll')):
+    _CopyImpl(os.path.split(dll_path)[1], output_dir, src_crt_dir)
+  _CopyImpl('ucrtbase.dll', output_dir, src_crt_dir)
+  return 0
+
+
+def main():
+  if len(sys.argv) < 2:
+    print >>sys.stderr, 'Usage: copy_cdb_to_output.py <output_dir> ' + \
+        '<target_arch>'
+    return 1
+  return _CopyCDBToOutput(sys.argv[1], sys.argv[2])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/merge_pgc_files.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merge the PGC files generated during the profiling step to the PGD database.
+
+This is required to work around a flakiness in pgomgr.exe where it can run out
+of address space while trying to merge all the PGC files at the same time.
+"""
+
+import glob
+import json
+import optparse
+import os
+import subprocess
+import sys
+
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+sys.path.insert(0, os.path.join(script_dir, os.pardir))
+
+import vs_toolchain
+
+
+def find_pgomgr(chrome_checkout_dir):
+  """Find pgomgr.exe."""
+  win_toolchain_json_file = os.path.join(chrome_checkout_dir, 'build',
+      'win_toolchain.json')
+  if not os.path.exists(win_toolchain_json_file):
+    raise Exception('The toolchain JSON file is missing.')
+  with open(win_toolchain_json_file) as temp_f:
+    toolchain_data = json.load(temp_f)
+  if not os.path.isdir(toolchain_data['path']):
+    raise Exception('The toolchain JSON file is invalid.')
+
+  # Always use the x64 version of pgomgr (the x86 one doesn't work on the bot's
+  # environment).
+  if toolchain_data['version'] == '2015':
+    pgomgr_dir = os.path.join(toolchain_data['path'], 'VC', 'bin', 'amd64')
+  elif toolchain_data['version'] == '2017':
+    vc_tools_root = vs_toolchain.FindVCToolsRoot()
+    pgomgr_dir = os.path.join(vc_tools_root, 'HostX64', 'x64')
+
+  pgomgr_path = os.path.join(pgomgr_dir, 'pgomgr.exe')
+  if not os.path.exists(pgomgr_path):
+    raise Exception('pgomgr.exe is missing from %s.' % pgomgr_dir)
+
+  return pgomgr_path
+
+
+def main():
+  parser = optparse.OptionParser(usage='%prog [options]')
+  parser.add_option('--checkout-dir', help='The Chrome checkout directory.')
+  parser.add_option('--target-cpu', help='[DEPRECATED] The target\'s bitness.')
+  parser.add_option('--build-dir', help='Chrome build directory.')
+  parser.add_option('--binary-name', help='The binary for which the PGC files '
+                    'should be merged, without extension.')
+  options, _ = parser.parse_args()
+
+  if not options.checkout_dir:
+    parser.error('--checkout-dir is required')
+  if not options.build_dir:
+    parser.error('--build-dir is required')
+  if not options.binary_name:
+    parser.error('--binary-name is required')
+
+  # Starts by finding pgomgr.exe.
+  pgomgr_path = find_pgomgr(options.checkout_dir)
+
+  pgc_files = glob.glob(os.path.join(options.build_dir,
+                                     '%s*.pgc' % options.binary_name))
+
+  # Number of PGC files that should be merged in each iterations, merging all
+  # the files one by one is really slow but merging more to 10 at a time doesn't
+  # really seem to impact the total time.
+  #
+  # Number of pgc merged per iteration  |  Time (in min)
+  # 1                                   |  27.2
+  # 10                                  |  12.8
+  # 20                                  |  12.0
+  # 30                                  |  11.5
+  # 40                                  |  11.4
+  # 50                                  |  11.5
+  # 60                                  |  11.6
+  # 70                                  |  11.6
+  # 80                                  |  11.7
+  #
+  # TODO(sebmarchand): Measure the memory usage of pgomgr.exe to see how it get
+  #     affected by the number of pgc files.
+  pgc_per_iter = 20
+
+  def _split_in_chunks(items, chunk_size):
+    """Split |items| in chunks of size |chunk_size|.
+
+    Source: http://stackoverflow.com/a/312464
+    """
+    for i in xrange(0, len(items), chunk_size):
+      yield items[i:i + chunk_size]
+
+  for chunk in _split_in_chunks(pgc_files, pgc_per_iter):
+    merge_command = [
+        pgomgr_path,
+        '/merge'
+    ]
+    for pgc_file in chunk:
+      # Append each .pgc path as a plain string. Appending a one-element
+      # list would nest a list inside argv and make subprocess.Popen fail.
+      merge_command.append(
+          os.path.join(options.build_dir, os.path.basename(pgc_file)))
+
+    merge_command.append(
+        os.path.join(options.build_dir, '%s.pgd' % options.binary_name))
+    proc = subprocess.Popen(merge_command, stdout=subprocess.PIPE)
+    stdout, stderr = proc.communicate()
+    print stdout
+    if proc.returncode != 0:
+      raise Exception('Error while trying to merge the PGC files:\n%s' % stderr)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/message_compiler.gni
@@ -0,0 +1,89 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win, "This only runs on Windows.")
+
+# Runs mc.exe over a list of sources. The outputs (a header and rc file) are
+# placed in the target gen dir, and compiled.
+#
+# sources
+#   List of message files to process.
+#
+# user_mode_logging (optional bool)
+#   Generates user-mode logging code. Defaults to false (no logging code).
+#
+# compile_generated_code (optional, defaults to true)
+#   If unset or true, the generated code will be compiled and linked into
+#   targets that depend on it. If set to false, the .h and .rc files will only
+#   be generated.
+#
+# deps, public_deps, visibility
+#   Normal meaning.
+template("message_compiler") {
+  if (defined(invoker.compile_generated_code) &&
+      !invoker.compile_generated_code) {
+    compile_generated_code = false
+    action_name = target_name
+  } else {
+    compile_generated_code = true
+    action_name = "${target_name}_mc"
+    source_set_name = target_name
+  }
+
+  action_foreach(action_name) {
+    if (compile_generated_code) {
+      visibility = [ ":$source_set_name" ]
+    } else {
+      forward_variables_from(invoker, [ "visibility" ])
+    }
+
+    script = "//build/win/message_compiler.py"
+
+    outputs = [
+      "$target_gen_dir/{{source_name_part}}.h",
+      "$target_gen_dir/{{source_name_part}}.rc",
+    ]
+
+    args = [
+      # The first argument is the environment file saved to the build
+      # directory. This is required because the Windows toolchain setup saves
+      # the VC paths and such so that running "mc.exe" will work with the
+      # configured toolchain. This file is in the root build dir.
+      "environment.$current_cpu",
+
+      # Where to put the header.
+      "-h",
+      rebase_path(target_gen_dir, root_build_dir),
+
+      # Where to put the .rc file.
+      "-r",
+      rebase_path(target_gen_dir, root_build_dir),
+
+      # Input is Unicode.
+      "-u",
+    ]
+    if (defined(invoker.user_mode_logging) && invoker.user_mode_logging) {
+      args += [ "-um" ]
+    }
+    args += [ "{{source}}" ]
+
+    forward_variables_from(invoker,
+                           [
+                             "deps",
+                             "public_deps",
+                             "sources",
+                           ])
+  }
+
+  if (compile_generated_code) {
+    # Compile the generated rc file.
+    source_set(source_set_name) {
+      forward_variables_from(invoker, [ "visibility" ])
+      sources = get_target_outputs(":$action_name")
+      deps = [
+        ":$action_name",
+      ]
+    }
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/message_compiler.py
@@ -0,0 +1,63 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Runs the Microsoft Message Compiler (mc.exe). This Python adapter is for the
+# GN build, which can only run Python and not native binaries.
+#
+# Usage: message_compiler.py <environment_file> [<args to mc.exe>*]
+
+import os
+import subprocess
+import sys
+
+def main():
+  # Read the environment block from the file. This is stored in the format used
+  # by CreateProcess. Drop last 2 NULs, one for list terminator, one for
+  # trailing vs. separator.
+  env_pairs = open(sys.argv[1]).read()[:-2].split('\0')
+  env_dict = dict([item.split('=', 1) for item in env_pairs])
+
+  # mc writes to stderr, so this explicitly redirects to stdout and eats it.
+  try:
+    # This needs shell=True to search the path in env_dict for the mc
+    # executable.
+    rest = sys.argv[2:]
+    subprocess.check_output(['mc.exe'] + rest,
+                            env=env_dict,
+                            stderr=subprocess.STDOUT,
+                            shell=True)
+    # We require all source code (in particular, the header generated here) to
+    # be UTF-8. jinja can output the intermediate .mc file in UTF-8 or UTF-16LE.
+    # However, mc.exe only supports Unicode via the -u flag, and it assumes when
+    # that is specified that the input is UTF-16LE (and errors out on UTF-8
+    # files, assuming they're ANSI). Even with -u specified and UTF16-LE input,
+    # it generates an ANSI header, and includes broken versions of the message
+    # text in the comment before the value. To work around this, for any invalid
+    # // comment lines, we simply drop the line in the header after building it.
+    header_dir = None
+    input_file = None
+    for i, arg in enumerate(rest):
+      if arg == '-h' and len(rest) > i + 1:
+        assert header_dir == None
+        header_dir = rest[i + 1]
+      elif arg.endswith('.mc') or arg.endswith('.man'):
+        assert input_file == None
+        input_file = arg
+    if header_dir:
+      header_file = os.path.join(
+          header_dir, os.path.splitext(os.path.basename(input_file))[0] + '.h')
+      header_contents = []
+      with open(header_file, 'rb') as f:
+        for line in f.readlines():
+          if line.startswith('//') and '?' in line:
+            continue
+          header_contents.append(line)
+      with open(header_file, 'wb') as f:
+        f.write(''.join(header_contents))
+  except subprocess.CalledProcessError as e:
+    print e.output
+    sys.exit(e.returncode)
+
+if __name__ == '__main__':
+  main()
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/reorder-imports.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+
+def reorder_imports(input_dir, output_dir, architecture):
+  """Run swapimports.exe on the initial chrome.exe, and write to the output
+  directory. Also copy over any related files that might be needed
+  (pdbs, manifests etc.).
+  """
+
+  input_image = os.path.join(input_dir, 'chrome.exe')
+  output_image = os.path.join(output_dir, 'chrome.exe')
+
+  swap_exe = os.path.join(
+    __file__,
+    '..\\..\\..\\third_party\\syzygy\\binaries\\exe\\swapimport.exe')
+
+  args = [swap_exe, '--input-image=%s' % input_image,
+      '--output-image=%s' % output_image, '--overwrite', '--no-logo']
+
+  if architecture == 'x64':
+    args.append('--x64');
+
+  args.append('chrome_elf.dll');
+
+  subprocess.check_call(args)
+
+  for fname in glob.iglob(os.path.join(input_dir, 'chrome.exe.*')):
+    shutil.copy(fname, os.path.join(output_dir, os.path.basename(fname)))
+  return 0
+
+
+def main(argv):
+  usage = 'reorder_imports.py -i <input_dir> -o <output_dir> -a <target_arch>'
+  parser = optparse.OptionParser(usage=usage)
+  parser.add_option('-i', '--input', help='reorder chrome.exe in DIR',
+      metavar='DIR')
+  parser.add_option('-o', '--output', help='write new chrome.exe to DIR',
+      metavar='DIR')
+  parser.add_option('-a', '--arch', help='architecture of build (optional)',
+      default='ia32')
+  opts, args = parser.parse_args()
+
+  if not opts.input or not opts.output:
+    parser.error('Please provide an input and output directory')
+  return reorder_imports(opts.input, opts.output, opts.arch)
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv[1:]))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/require_administrator.manifest
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+  <security>
+    <requestedPrivileges>
+      <requestedExecutionLevel level="requireAdministrator" uiAccess="false"></requestedExecutionLevel>
+    </requestedPrivileges>
+  </security>
+</trustInfo></assembly>
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/run_pgo_profiling_benchmarks.py
@@ -0,0 +1,123 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility script to run the benchmarks during the profiling step of a PGO
+build.
+"""
+
+import json
+import optparse
+import os
+import subprocess
+import sys
+
+# Make sure that we're running as admin, this is required to run the Telemetry
+# benchmarks.
+from win32com.shell import shell
+if not shell.IsUserAnAdmin():
+  raise Exception('This script has to be run as admin.')
+
+
+_SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+_CHROME_BUILD_DIR = os.path.dirname(_SCRIPT_DIR)
+_CHROME_SRC_DIR = os.path.dirname(_CHROME_BUILD_DIR)
+
+
+# List of the benchmark that we run during the profiling step.
+_BENCHMARKS_TO_RUN = {
+  'blink_perf.bindings',
+  'blink_perf.canvas',
+  'blink_perf.css',
+  'blink_perf.dom',
+  'blink_perf.paint',
+  'blink_perf.svg',
+  'blink_style.top_25',
+  'dromaeo.cssqueryjquery',
+  'dromaeo.domcoreattr',
+  'dromaeo.domcoremodify',
+  'dromaeo.domcorequery',
+  'dromaeo.domcoretraverse',
+  'dromaeo.jslibattrprototype',
+  'dromaeo.jslibeventprototype',
+  'dromaeo.jslibmodifyprototype',
+  'dromaeo.jslibstyleprototype',
+  'dromaeo.jslibtraversejquery',
+  'dromaeo.jslibtraverseprototype',
+  'media.tough_video_cases',
+  'octane',
+  'smoothness.top_25_smooth',
+  'storage.indexeddb_endure_tracing',
+  'sunspider',
+}
+
+
+def RunBenchmarks(options):
+  """Run the benchmarks."""
+  # Find the run_benchmark script.
+  chrome_run_benchmark_script = os.path.join(_CHROME_SRC_DIR, 'tools',
+                                             'perf', 'run_benchmark')
+  if not os.path.exists(chrome_run_benchmark_script):
+    raise Exception('Unable to find the run_benchmark script '
+                    '(%s doesn\'t exist) ' % chrome_run_benchmark_script)
+
+  # Augment the PATH to make sure that the benchmarking script can find
+  # pgosweep.exe and its runtime libraries.
+  env = os.environ.copy()
+  env['PATH'] = str(os.pathsep.join([options.build_dir, os.environ['PATH']]))
+  env['PogoSafeMode'] = '1'
+  # Apply a scaling factor of 0.5 to the PGO profiling buffers for the 32-bit
+  # builds, without this the buffers will be too large and the process will
+  # fail to start. See crbug.com/632864#c22.
+  if options.target_cpu == 'x86':
+    env['VCPROFILE_ALLOC_SCALE'] = '0.5'
+
+  # Run all the benchmarks.
+  # TODO(sebmarchand): Make this run in parallel.
+  for benchmark in _BENCHMARKS_TO_RUN:
+    try:
+      benchmark_command = [
+          sys.executable,
+          chrome_run_benchmark_script,
+          '--browser', options.browser_type,
+        ]
+      # Automatically set the arguments to run this script on a local build.
+      if options.browser_type == 'exact':
+        benchmark_command += [
+          '--browser-executable', os.path.join(options.build_dir, 'chrome.exe')
+        ]
+      benchmark_command += [
+          '--profiler', 'win_pgo_profiler',
+          benchmark
+        ]
+      subprocess.check_call(benchmark_command, env=env)
+    except:
+      print ('Error while trying to run the %s benchmark, continuing.' %
+             benchmark)
+      continue
+
+  return 0
+
+
+def main():
+  parser = optparse.OptionParser(usage='%prog [options]')
+  parser.add_option(
+      '--browser-type', help='The browser type (to be passed to Telemetry\'s '
+                              'benchmark runner).')
+  # TODO(sebmarchand): Parse the args.gn file to automatically set this value.
+  parser.add_option('--target-cpu', help='The target\'s bitness.')
+  parser.add_option('--build-dir', help='Chrome build directory.')
+  options, _ = parser.parse_args()
+
+  if not options.target_cpu:
+    parser.error('--target-cpu is required')
+  if not options.build_dir:
+    parser.error('--build-dir is required')
+  if not options.browser_type:
+    options.browser_type = 'exact'
+
+  return RunBenchmarks(options)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/syzygy/BUILD.gn
@@ -0,0 +1,23 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+copy("copy_syzyasan_binaries") {
+  visibility = [
+    "//chrome/*",
+    "//content/*",
+  ]
+
+  source_dir = "//third_party/syzygy/binaries/exe"
+
+  sources = [
+    "$source_dir/agent_logger.exe",
+    "$source_dir/minidump_symbolizer.py",
+    "$source_dir/syzyasan_rtl.dll",
+    "$source_dir/syzyasan_rtl.dll.pdb",
+  ]
+
+  outputs = [
+    "$root_out_dir/syzygy/{{source_file_part}}",
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/syzygy/OWNERS
@@ -0,0 +1,3 @@
+chrisha@chromium.org
+sebmarchand@chromium.org
+siggi@chromium.org
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/syzygy/instrument.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A utility script to help building Syzygy-instrumented Chrome binaries."""
+
+import glob
+import logging
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+
+
+# The default directory containing the Syzygy toolchain.
+_DEFAULT_SYZYGY_DIR = os.path.abspath(os.path.join(
+    os.path.dirname(__file__), '../../..',
+    'third_party/syzygy/binaries/exe/'))
+
+# Basenames of various tools.
+_INSTRUMENT_EXE = 'instrument.exe'
+_GENFILTER_EXE = 'genfilter.exe'
+
+_LOGGER = logging.getLogger()
+
+
+def _Shell(*cmd, **kw):
+  """Shells out to "cmd". Returns a tuple of cmd's stdout, stderr."""
+  _LOGGER.info('Running command "%s".', cmd)
+  prog = subprocess.Popen(cmd, **kw)
+
+  stdout, stderr = prog.communicate()
+  if prog.returncode != 0:
+    raise RuntimeError('Command "%s" returned %d.' % (cmd, prog.returncode))
+
+  return stdout, stderr
+
+
+def _CompileFilter(syzygy_dir, executable, symbol, filter_file,
+                   output_filter_file):
+  """Compiles the provided filter writing the compiled filter file to
+  output_filter_file.
+  """
+  cmd = [os.path.abspath(os.path.join(syzygy_dir, _GENFILTER_EXE)),
+         '--action=compile',
+         '--input-image=%s' % executable,
+         '--input-pdb=%s' % symbol,
+         '--output-file=%s' % output_filter_file,
+         '--overwrite',
+         os.path.abspath(filter_file)]
+
+  _Shell(*cmd)
+  if not os.path.exists(output_filter_file):
+    raise RuntimeError('Compiled filter file missing: %s' % output_filter_file)
+  return
+
+
+def _InstrumentBinary(syzygy_dir, mode, executable, symbol, dst_dir,
+                      filter_file, allocation_filter_file):
+  """Instruments the executable found in input_dir, and writes the resultant
+  instrumented executable and symbol files to dst_dir.
+  """
+  cmd = [os.path.abspath(os.path.join(syzygy_dir, _INSTRUMENT_EXE)),
+         '--overwrite',
+         '--mode=%s' % mode,
+         '--debug-friendly',
+         '--input-image=%s' % executable,
+         '--input-pdb=%s' % symbol,
+         '--output-image=%s' % os.path.abspath(
+             os.path.join(dst_dir, os.path.basename(executable))),
+         '--output-pdb=%s' % os.path.abspath(
+             os.path.join(dst_dir, os.path.basename(symbol)))]
+
+  if mode == "asan":
+    cmd.append('--no-augment-pdb')
+    # Disable some of the new SyzyASAN features. We're seeing an increase in
+    # crash rates and are wondering if they are to blame.
+    cmd.append(
+        '--asan-rtl-options="--enable_feature_randomization '
+        '--prevent_duplicate_corruption_crashes"')
+
+  # If any filters were specified then pass them on to the instrumenter.
+  if filter_file:
+    cmd.append('--filter=%s' % os.path.abspath(filter_file))
+  if allocation_filter_file:
+    cmd.append('--allocation-filter-config-file=%s' %
+        os.path.abspath(allocation_filter_file))
+
+  return _Shell(*cmd)
+
+
+def main(options):
+  # Make sure the destination directory exists.
+  if not os.path.isdir(options.destination_dir):
+    _LOGGER.info('Creating destination directory "%s".',
+                 options.destination_dir)
+    os.makedirs(options.destination_dir)
+
+  # Compile the filter if one was provided.
+  if options.filter:
+    _CompileFilter(options.syzygy_dir,
+                   options.input_executable,
+                   options.input_symbol,
+                   options.filter,
+                   options.output_filter_file)
+
+  # Instruments the binaries into the destination directory.
+  _InstrumentBinary(options.syzygy_dir,
+                    options.mode,
+                    options.input_executable,
+                    options.input_symbol,
+                    options.destination_dir,
+                    options.output_filter_file,
+                    options.allocation_filter_file)
+
+
+def _ParseOptions():
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--input_executable',
+      help='The path to the input executable.')
+  option_parser.add_option('--input_symbol',
+      help='The path to the input symbol file.')
+  option_parser.add_option('--mode',
+      help='Specifies which instrumentation mode is to be used.')
+  option_parser.add_option('--syzygy-dir', default=_DEFAULT_SYZYGY_DIR,
+      help='Instrumenter executable to use, defaults to "%default".')
+  option_parser.add_option('-d', '--destination_dir',
+      help='Destination directory for instrumented files.')
+  option_parser.add_option('--filter',
+      help='An optional filter. This will be compiled and passed to the '
+           'instrumentation executable.')
+  option_parser.add_option('--output-filter-file',
+      help='The path where the compiled filter will be written. This is '
+           'required if --filter is specified.')
+  option_parser.add_option('--allocation-filter-file',
+      help='The path to the SyzyASAN allocation filter to use.')
+  options, args = option_parser.parse_args()
+
+  if not options.mode:
+    option_parser.error('You must provide an instrumentation mode.')
+  if not options.input_executable:
+    option_parser.error('You must provide an input executable.')
+  if not options.input_symbol:
+    option_parser.error('You must provide an input symbol file.')
+  if not options.destination_dir:
+    option_parser.error('You must provide a destination directory.')
+  if options.filter and not options.output_filter_file:
+    option_parser.error('You must provide a filter output file.')
+
+  return options
+
+
+if '__main__' == __name__:
+  logging.basicConfig(level=logging.INFO)
+  sys.exit(main(_ParseOptions()))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/syzygy/reorder.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A utility script to help building Syzygy-reordered Chrome binaries."""
+
+import logging
+import optparse
+import os
+import subprocess
+import sys
+
+
+# The default relink executable to use to reorder binaries.
+_DEFAULT_RELINKER = os.path.join(
+    os.path.join(os.path.dirname(__file__), '../../..'),
+    'third_party/syzygy/binaries/exe/relink.exe')
+
+_LOGGER = logging.getLogger()
+
+# We use the same seed for all random reorderings to get a deterministic build.
+_RANDOM_SEED = 1347344
+
+
+def _Shell(*cmd, **kw):
+  """Shells out to "cmd". Returns a tuple of cmd's stdout, stderr."""
+  _LOGGER.info('Running command "%s".', cmd)
+  prog = subprocess.Popen(cmd, **kw)
+
+  stdout, stderr = prog.communicate()
+  if prog.returncode != 0:
+    raise RuntimeError('Command "%s" returned %d.' % (cmd, prog.returncode))
+
+  return stdout, stderr
+
+
+def _ReorderBinary(relink_exe, executable, symbol, destination_dir):
+  """Reorders the given executable, and writes the resultant reordered
+  executable and symbol files to destination_dir.
+
+  If a file named <executable>-order.json exists, imposes that order on the
+  output binaries, otherwise orders them randomly.
+  """
+  cmd = [relink_exe,
+         '--overwrite',
+         '--input-image=%s' % executable,
+         '--input-pdb=%s' % symbol,
+         '--output-image=%s' % os.path.abspath(
+             os.path.join(destination_dir, os.path.basename(executable))),
+         '--output-pdb=%s' % os.path.abspath(
+             os.path.join(destination_dir, os.path.basename(symbol))),]
+
+  # Check whether there's an order file available for the executable.
+  order_file = '%s-order.json' % executable
+  if os.path.exists(order_file):
+    # The ordering file exists, let's use that.
+    _LOGGER.info('Reordering "%s" according to "%s".',
+                 os.path.basename(executable),
+                 os.path.basename(order_file))
+    cmd.append('--order-file=%s' % order_file)
+  else:
+    # No ordering file, we randomize the output.
+    _LOGGER.info('Randomly reordering "%s"', executable)
+    cmd.append('--seed=%d' % _RANDOM_SEED)
+
+  return _Shell(*cmd)
+
+
+def main(options):
+  logging.basicConfig(level=logging.INFO)
+
+  # Make sure the destination directory exists.
+  if not os.path.isdir(options.destination_dir):
+    _LOGGER.info('Creating destination directory "%s".',
+                 options.destination_dir)
+    os.makedirs(options.destination_dir)
+
+  # Reorder the binaries into the destination directory.
+  _ReorderBinary(options.relinker,
+                 options.input_executable,
+                 options.input_symbol,
+                 options.destination_dir)
+
+
+def _ParseOptions():
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--input_executable',
+      help='The path to the input executable.')
+  option_parser.add_option('--input_symbol',
+      help='The path to the input symbol file.')
+  option_parser.add_option('--relinker', default=_DEFAULT_RELINKER,
+      help='Relinker executable to use, defaults to "%s"' % _DEFAULT_RELINKER)
+  option_parser.add_option('-d', '--destination_dir',
+      help='Destination directory for reordered files, defaults to '
+           'the subdirectory "reordered" in the output_dir.')
+  options, args = option_parser.parse_args()
+
+  if not options.input_executable:
+    option_parser.error('You must provide an input executable.')
+  if not options.input_symbol:
+    option_parser.error('You must provide an input symbol file.')
+
+  if not options.destination_dir:
+    options.destination_dir = os.path.join(os.path.dirname(options.input_executable), 'reordered')
+
+  return options
+
+
+if '__main__' == __name__:
+  sys.exit(main(_ParseOptions()))
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/syzygy/syzyasan-allocation-filter.txt
@@ -0,0 +1,14 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file describes suspicious allocation sites that are to be treated
+// specially and served from the page heap. It is used at instrumentation time
+// to ensure allocations are runtime redirected from specified stack traces.
+//
+// See chrome_syzygy.gypi and instrument.py for more details.
+{
+  "hooks": {
+    "func": []
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/syzygy/syzyasan-instrumentation-filter.txt
@@ -0,0 +1,10 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This file describes filtering rules that will be applied when applying
+# Syzygy ASAN instrumentation to chrome.dll. It is intended to be used for
+# disabling instrumentation of functions with known and deferred bugs, allowing
+# functional instrumented builds of Chrome to be produced in the meantime.
+#
+# See chrome_syzygy.gypi and instrument.py for more details.
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/syzygy/syzygy.gni
@@ -0,0 +1,86 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win)
+
+# Where the output binaries will be placed.
+syzygy_dest_dir = "$root_out_dir/syzygy"
+
+# Instruments a binary with SyzyAsan.
+#
+#   binary_name (required)
+#     Name of the binary to be instrumented, with no extension or path. This
+#     binary_name is assumed to be in the output directory and must be
+#     generated by a dependency of this target.
+#
+#   dest_dir (required)
+#     The destination directory where the instrumented image should be
+#     written.
+#
+#   deps (required)
+#     Normal meaning.
+#
+#   public_deps
+#     Normal meaning.
+#
+#   data_deps
+#     Normal meaning.
+template("syzygy_asan") {
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    script = "//build/win/syzygy/instrument.py"
+
+    filter = "//build/win/syzygy/syzyasan-instrumentation-filter.txt"
+
+    binary_name = invoker.binary_name
+    dest_dir = invoker.dest_dir
+    input_image = "$root_out_dir/$binary_name"
+    input_pdb = "$root_out_dir/$binary_name.pdb"
+
+    inputs = [
+      filter,
+      input_image,
+
+      #input_pdb,
+    ]
+
+    output_filter = "$dest_dir/win-syzyasan-filter-$binary_name.txt.json"
+
+    outputs = [
+      "$dest_dir/$binary_name",
+      "$dest_dir/$binary_name.pdb",
+      output_filter,
+    ]
+
+    args = [
+      "--mode",
+      "asan",
+      "--input_executable",
+      rebase_path(input_image, root_build_dir),
+      "--input_symbol",
+      rebase_path(input_pdb, root_build_dir),
+      "--filter",
+      rebase_path(filter, root_build_dir),
+      "--output-filter-file",
+      rebase_path(output_filter, root_build_dir),
+      "--destination_dir",
+      rebase_path(dest_dir, root_build_dir),
+    ]
+
+    deps = [
+      "//build/win/syzygy:copy_syzyasan_binaries",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    forward_variables_from(invoker,
+                           [
+                             "data_deps",
+                             "public_deps",
+                             "testonly",
+                           ])
+  }
+}
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win/use_ansi_codes.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Prints if the terminal is likely to understand ANSI codes."""
+
+import os
+
+# Add more terminals here as needed.
+print 'ANSICON' in os.environ
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/win_is_xtree_patched.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Determines if the VS xtree header has been patched to disable C4702."""
+
+import os
+
+
+def IsPatched():
+  # TODO(scottmg): For now, just return if we're using the packaged toolchain
+  # script (because we know it's patched). Another case could be added here to
+  # query the active VS installation and actually check the contents of xtree.
+  # http://crbug.com/346399.
+  return int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1)) == 1
+
+
+def DoMain(_):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return "1" if IsPatched() else "0"
+
+
+if __name__ == '__main__':
+  print DoMain([])
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/write_build_date_header.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes a file that contains a define that approximates the build date.
+
+build_type impacts the timestamp generated:
+- default: the build date is set to the most recent first Sunday of a month at
+  5:00am. The reason is that it is a time where invalidating the build cache
+  shouldn't have major repercussions (due to lower load).
+- official: the build date is set to the current date at 5:00am, or the day
+  before if the current time is before 5:00am.
+Either way, it is guaranteed to be in the past and always in UTC.
+
+It is also possible to explicitly set a build date to be used.
+"""
+
+import argparse
+import calendar
+import datetime
+import doctest
+import os
+import sys
+
+
+def GetFirstSundayOfMonth(year, month):
+  """Returns the first sunday of the given month of the given year.
+
+  >>> GetFirstSundayOfMonth(2016, 2)
+  7
+  >>> GetFirstSundayOfMonth(2016, 3)
+  6
+  >>> GetFirstSundayOfMonth(2000, 1)
+  2
+  """
+  weeks = calendar.Calendar().monthdays2calendar(year, month)
+  # Return the first day in the first week that is a Sunday.
+  return [date_day[0] for date_day in weeks[0] if date_day[1] == 6][0]
+
+
+def GetBuildDate(build_type, utc_now):
+  """Gets the approximate build date given the specific build type.
+
+  >>> GetBuildDate('default', datetime.datetime(2016, 2, 6, 1, 2, 3))
+  'Jan 03 2016 01:02:03'
+  >>> GetBuildDate('default', datetime.datetime(2016, 2, 7, 5))
+  'Feb 07 2016 05:00:00'
+  >>> GetBuildDate('default', datetime.datetime(2016, 2, 8, 5))
+  'Feb 07 2016 05:00:00'
+  """
+  day = utc_now.day
+  month = utc_now.month
+  year = utc_now.year
+  if build_type != 'official':
+    first_sunday = GetFirstSundayOfMonth(year, month)
+    # If our build is after the first Sunday, we've already refreshed our build
+    # cache on a quiet day, so just use that day.
+    # Otherwise, take the first Sunday of the previous month.
+    if day >= first_sunday:
+      day = first_sunday
+    else:
+      month -= 1
+      if month == 0:
+        month = 12
+        year -= 1
+      day = GetFirstSundayOfMonth(year, month)
+  now = datetime.datetime(
+      year, month, day, utc_now.hour, utc_now.minute, utc_now.second)
+  return '{:%b %d %Y %H:%M:%S}'.format(now)
+
+
+def main():
+  if doctest.testmod()[0]:
+    return 1
+  argument_parser = argparse.ArgumentParser(
+      description=sys.modules[__name__].__doc__,
+      formatter_class=argparse.RawDescriptionHelpFormatter)
+  argument_parser.add_argument('output_file', help='The file to write to')
+  argument_parser.add_argument(
+      'build_type', help='The type of build', choices=('official', 'default'))
+  argument_parser.add_argument(
+      'build_date_override', nargs='?',
+      help='Optional override for the build date. Format must be '
+           '\'Mmm DD YYYY HH:MM:SS\'')
+  args = argument_parser.parse_args()
+
+  if args.build_date_override:
+    # Format is expected to be "Mmm DD YYYY HH:MM:SS".
+    build_date = args.build_date_override
+  else:
+    now = datetime.datetime.utcnow()
+    if now.hour < 5:
+      # The time is locked at 5:00 am in UTC to cause the build cache
+      # invalidation to not happen exactly at midnight. Use the same calculation
+      # as the day before.
+      # See //base/build_time.cc.
+      now = now - datetime.timedelta(days=1)
+    now = datetime.datetime(now.year, now.month, now.day, 5, 0, 0)
+    build_date = GetBuildDate(args.build_type, now)
+
+  output = ('// Generated by //build/write_build_date_header.py\n'
+           '#ifndef BUILD_DATE\n'
+           '#define BUILD_DATE "{}"\n'
+           '#endif // BUILD_DATE\n'.format(build_date))
+
+  current_contents = ''
+  if os.path.isfile(args.output_file):
+    with open(args.output_file, 'r') as current_file:
+      current_contents = current_file.read()
+
+  if current_contents != output:
+    with open(args.output_file, 'w') as output_file:
+      output_file.write(output)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build/write_buildflag_header.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This writes headers for build flags. See buildflag_header.gni for usage of
+# this system as a whole.
+#
+# The parameters are passed in a response file so we don't have to worry
+# about command line lengths. The name of the response file is passed on the
+# command line.
+#
+# The format of the response file is:
+#    [--flags <list of one or more flag values>]
+
+import optparse
+import os
+import shlex
+import sys
+
+
+class Options:
+  def __init__(self, output, rulename, header_guard, flags):
+    self.output = output
+    self.rulename = rulename
+    self.header_guard = header_guard
+    self.flags = flags
+
+
+def GetOptions():
+  parser = optparse.OptionParser()
+  parser.add_option('--output', help="Output header name inside --gen-dir.")
+  parser.add_option('--rulename',
+                    help="Helpful name of build rule for including in the " +
+                         "comment at the top of the file.")
+  parser.add_option('--gen-dir',
+                    help="Path to root of generated file directory tree.")
+  parser.add_option('--definitions',
+                    help="Name of the response file containing the flags.")
+  cmdline_options, cmdline_flags = parser.parse_args()
+
+  # Compute header guard by replacing some chars with _ and upper-casing.
+  header_guard = cmdline_options.output.upper()
+  header_guard = \
+      header_guard.replace('/', '_').replace('\\', '_').replace('.', '_')
+  header_guard += '_'
+
+  # The actual output file is inside the gen dir.
+  output = os.path.join(cmdline_options.gen_dir, cmdline_options.output)
+
+  # Definition file in GYP is newline separated, in GN they are shell formatted.
+  # shlex can parse both of these.
+  with open(cmdline_options.definitions, 'r') as def_file:
+    defs = shlex.split(def_file.read())
+  flags_index = defs.index('--flags')
+
+  # Everything after --flags are flags. true/false are remapped to 1/0,
+  # everything else is passed through.
+  flags = []
+  for flag in defs[flags_index + 1 :]:
+    equals_index = flag.index('=')
+    key = flag[:equals_index]
+    value = flag[equals_index + 1:]
+
+    # Canonicalize and validate the value.
+    if value == 'true':
+      value = '1'
+    elif value == 'false':
+      value = '0'
+    flags.append((key, str(value)))
+
+  return Options(output=output,
+                 rulename=cmdline_options.rulename,
+                 header_guard=header_guard,
+                 flags=flags)
+
+
+def WriteHeader(options):
+  with open(options.output, 'w') as output_file:
+    output_file.write("// Generated by build/write_buildflag_header.py\n")
+    if options.rulename:
+      output_file.write('// From "' + options.rulename + '"\n')
+
+    output_file.write('\n#ifndef %s\n' % options.header_guard)
+    output_file.write('#define %s\n\n' % options.header_guard)
+    output_file.write('#include "build/buildflag.h"\n\n')
+
+    for pair in options.flags:
+      output_file.write('#define BUILDFLAG_INTERNAL_%s() (%s)\n' % pair)
+
+    output_file.write('\n#endif  // %s\n' % options.header_guard)
+
+
+options = GetOptions()
+WriteHeader(options)
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build_overrides/OWNERS
@@ -0,0 +1,1 @@
+kjellander@webrtc.org
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build_overrides/README.md
@@ -0,0 +1,7 @@
+# Build overrides in GN
+
+This directory is used to allow us to customize variables that differ between
+WebRTC being built as standalone and as a part of Chromium.
+
+There's another build_overrides in Chromium that needs to contain the same
+set of files with the same set of variables (but with different values).
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build_overrides/build.gni
@@ -0,0 +1,44 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Some WebRTC targets require a 10.11 min SDK but those targets are only
+# used in non-Chromium builds.
+mac_sdk_min_build_override = "10.11"
+
+# Some non-Chromium builds don't support building java targets.
+enable_java_templates = true
+
+# Some non-Chromium builds don't use Chromium's third_party/binutils.
+linux_use_bundled_binutils_override = true
+
+# Variable that can be used to support multiple build scenarios, like having
+# Chromium specific targets in a client project's GN file etc.
+build_with_chromium = false
+
+# Use our own suppressions files.
+asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc"
+lsan_suppressions_file = "//webrtc/build/sanitizers/lsan_suppressions_webrtc.cc"
+tsan_suppressions_file = "//webrtc/build/sanitizers/tsan_suppressions_webrtc.cc"
+msan_blacklist_path =
+    rebase_path("//tools-webrtc/msan/blacklist.txt", root_build_dir)
+ubsan_blacklist_path =
+    rebase_path("//tools-webrtc/ubsan/blacklist.txt", root_build_dir)
+ubsan_vptr_blacklist_path =
+    rebase_path("//tools-webrtc/ubsan/vptr_blacklist.txt", root_build_dir)
+
+# Android lint suppressions file
+lint_suppressions_file = "//webrtc/build/android/suppressions.xml"
+
+# For Chromium, Android 32-bit non-component, non-clang builds hit a 4GiB size
+# limit, making them requiring symbol_level=2. WebRTC doesn't hit that problem
+# so we just ignore that assert. See https://crbug.com/648948 for more info.
+ignore_elf32_limitations = true
+
+# Use system Xcode installation instead of the Chromium bundled Mac toolchain,
+# since it contains only SDK 10.11, not 10.12 which WebRTC needs.
+use_system_xcode = true
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/build_overrides/gtest.gni
@@ -0,0 +1,19 @@
+# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Include support for registering main function in multi-process tests.
+gtest_include_multiprocess = true
+
+# Include support for platform-specific operations across unit tests.
+gtest_include_platform_test = true
+
+# Include support for testing Objective C code on OS X and iOS.
+gtest_include_objc_support = true
+
+# Include support for flushing coverage files on iOS.
+gtest_include_ios_coverage = true
new file mode 100644
--- /dev/null
+++ b/media/webrtc/trunk/webrtc/api/BUILD.gn
@@ -0,0 +1,367 @@
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../build/webrtc.gni")
+if (is_android) {
+  import("//build/config/android/config.gni")
+  import("//build/config/android/rules.gni")
+}
+
+group("api") {
+  public_deps = [
+    ":libjingle_peerconnection",
+  ]
+}
+
+rtc_source_set("call_api") {
+  sources = [
+    "call/audio_sink.h",
+  ]
+
+  deps = [
+    # TODO(kjellander): Add remaining dependencies when webrtc:4243 is done.
+    ":audio_mixer_api",
+    ":transport_api",
+    "..:webrtc_common",
+    "../base:rtc_base_approved",
+    "../modules/audio_coding:audio_decoder_factory_interface",
+    "../modules/audio_coding:audio_encoder_interface",
+  ]
+}
+
+config("libjingle_peerconnection_warnings_config") {
+  # GN orders flags on a target before flags from configs. The default config
+  # adds these flags so to cancel them out they need to come from a config and
+  # cannot be on the target directly.
+  if (!is_win && !is_clang) {
+    cflags = [ "-Wno-maybe-uninitialized" ]  # Only exists for GCC.
+  }
+}
+
+rtc_static_library("libjingle_peerconnection") {
+  check_includes = false  # TODO(kjellander): Remove (bugs.webrtc.org/6828)
+  cflags = []
+  sources = [
+    "audiotrack.cc",
+    "audiotrack.h",
+    "datachannel.cc",
+    "datachannel.h",
+    "datachannelinterface.h",
+    "dtmfsender.cc",
+    "dtmfsender.h",
+    "dtmfsenderinterface.h",
+    "jsep.h",
+    "jsepicecandidate.cc",
+    "jsepicecandidate.h",
+    "jsepsessiondescription.cc",
+    "jsepsessiondescription.h",
+    "localaudiosource.cc",
+    "localaudiosource.h",
+    "mediaconstraintsinterface.cc",
+    "mediaconstraintsinterface.h",
+    "mediacontroller.cc",
+    "mediacontroller.h",
+    "mediastream.cc",
+    "mediastream.h",
+    "mediastreaminterface.h",
+    "mediastreamobserver.cc",
+    "mediastreamobserver.h",
+    "mediastreamproxy.h",
+    "mediastreamtrack.h",
+    "mediastreamtrackproxy.h",
+    "notifier.h",
+    "peerconnection.cc",
+    "peerconnection.h",
+    "peerconnectionfactory.cc",
+    "peerconnectionfactory.h",
+    "peerconnectionfactoryproxy.h",
+    "peerconnectioninterface.h",
+    "peerconnectionproxy.h",
+    "proxy.h",
+    "remoteaudiosource.cc",
+    "remoteaudiosource.h",
+    "rtcstatscollector.cc",
+    "rtcstatscollector.h",
+    "rtpparameters.h",
+    "rtpreceiver.cc",
+    "rtpreceiver.h",
+    "rtpreceiverinterface.h",
+    "rtpsender.cc",
+    "rtpsender.h",
+    "rtpsenderinterface.h",
+    "sctputils.cc",
+    "sctputils.h",
+    "statscollector.cc",
+    "statscollector.h",
+    "statstypes.cc",
+    "statstypes.h",
+    "streamcollection.h",
+    "trackmediainfomap.cc",
+    "trackmediainfomap.h",
+    "videocapturertracksource.cc",
+    "videocapturertracksource.h",
+    "videosourceproxy.h",
+    "videotrack.cc",
+    "videotrack.h",
+    "videotracksource.cc",
+    "videotracksource.h",
+    "webrtcsdp.cc",
+    "webrtcsdp.h",
+    "webrtcsession.cc",
+    "webrtcsession.h",
+    "webrtcsessiondescriptionfactory.cc",
+    "webrtcsessiondescriptionfactory.h",
+  ]
+
+  configs += [ ":libjingle_peerconnection_warnings_config" ]
+
+  if (!build_with_chromium && is_clang) {
+    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
+    suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
+  }
+
+  deps = [
+    ":call_api",
+    ":rtc_stats_api",
+    "../call",
+    "../media",
+    "../pc",
+    "../stats",
+  ]
+
+  if (rtc_use_quic) {
+    sources += [
+      "quicdatachannel.cc",
+      "quicdatachannel.h",
+      "quicdatatransport.cc",
+      "quicdatatransport.h",
+    ]
+    deps += [ "//third_party/libquic" ]
+    public_deps = [
+      "//third_party/libquic",
+    ]
+  }
+}
+
+rtc_source_set("rtc_stats_api") {
+  cflags = []
+  sources = [
+    "stats/rtcstats.h",
+    "stats/rtcstats_objects.h",
+    "stats/rtcstatsreport.h",
+  ]
+
+  deps = [
+    "../base:rtc_base_approved",
+  ]
+}
+
+rtc_source_set("audio_mixer_api") {
+  sources = [
+    "audio/audio_mixer.h",
+  ]
+
+  deps = [
+    "../base:rtc_base_approved",
+  ]
+}
+
+rtc_source_set("transport_api") {
+  sources = [
+    "call/transport.h",
+  ]
+}
+
+rtc_source_set("video_frame_api") {
+  sources = [
+    "video/i420_buffer.cc",
+    "video/i420_buffer.h",
+    "video/video_frame.cc",
+    "video/video_frame.h",
+    "video/video_frame_buffer.h",
+    "video/video_rotation.h",
+  ]
+
+  deps = [
+    "../base:rtc_base_approved",
+    "../system_wrappers",
+  ]
+
+  # TODO(nisse): This logic is duplicated in multiple places.
+  # Define in a single place.
+  if (rtc_build_libyuv) {
+    deps += [ "$rtc_libyuv_dir" ]
+    public_deps = [
+      "$rtc_libyuv_dir",
+    ]
+  } else {
+    # Need to add a directory normally exported by libyuv.
+    include_dirs = [ "$rtc_libyuv_dir/include" ]
+  }
+}
+
+if (rtc_include_tests) {
+  config("peerconnection_unittests_config") {
+    # The warnings below are enabled by default. Since GN orders compiler flags
+    # for a target before flags from configs, the only way to disable such
+    # warnings is by having them in a separate config, loaded from the target.
+    # TODO(kjellander): Make the code compile without disabling these flags.
+    # See https://bugs.webrtc.org/3307.
+    if (is_clang && is_win) {
+      cflags = [
+        # See https://bugs.chromium.org/p/webrtc/issues/detail?id=6267
+        # for -Wno-sign-compare
+        "-Wno-sign-compare",
+        "-Wno-unused-function",
+      ]
+    }
+
+    if (!is_win) {
+      cflags = [ "-Wno-sign-compare" ]
+    }
+  }
+
+  rtc_test("peerconnection_unittests") {
+    check_includes = false  # TODO(kjellander): Remove (bugs.webrtc.org/6828)
+    testonly = true
+    sources = [
+      "datachannel_unittest.cc",
+      "dtmfsender_unittest.cc",
+      "jsepsessiondescription_unittest.cc",
+      "localaudiosource_unittest.cc",
+      "mediaconstraintsinterface_unittest.cc",
+      "mediastream_unittest.cc",
+      "peerconnection_unittest.cc",
+      "peerconnectionendtoend_unittest.cc",
+      "peerconnectionfactory_unittest.cc",
+      "peerconnectioninterface_unittest.cc",
+      "proxy_unittest.cc",
+      "rtcstats_integrationtest.cc",
+      "rtcstatscollector_unittest.cc",
+      "rtpsenderreceiver_unittest.cc",
+      "sctputils_unittest.cc",
+      "statscollector_unittest.cc",
+      "test/fakeaudiocapturemodule.cc",
+      "test/fakeaudiocapturemodule.h",
+      "test/fakeaudiocapturemodule_unittest.cc",
+      "test/fakeconstraints.h",
+      "test/fakedatachannelprovider.h",
+      "test/fakeperiodicvideocapturer.h",
+      "test/fakertccertificategenerator.h",
+      "test/fakevideotrackrenderer.h",
+      "test/mock_datachannel.h",
+      "test/mock_peerconnection.h",
+      "test/mock_rtpreceiver.h",
+      "test/mock_rtpsender.h",
+      "test/mock_webrtcsession.h",
+      "test/mockpeerconnectionobservers.h",
+      "test/peerconnectiontestwrapper.cc",
+      "test/peerconnectiontestwrapper.h",
+      "test/rtcstatsobtainer.h",
+      "test/testsdpstrings.h",
+      "trackmediainfomap_unittest.cc",
+      "videocapturertracksource_unittest.cc",
+      "videotrack_unittest.cc",
+      "webrtcsdp_unittest.cc",
+      "webrtcsession_unittest.cc",
+    ]
+
+    if (rtc_enable_sctp) {
+      defines = [ "HAVE_SCTP" ]
+    }
+
+    configs += [ ":peerconnection_unittests_config" ]
+
+    if (!build_with_chromium && is_clang) {
+      # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
+      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
+    }
+
+    # TODO(jschuh): Bug 1348: fix this warning.
+    configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
+
+    if (is_win) {
+      cflags = [
+        "/wd4245",  # conversion from int to size_t, signed/unsigned mismatch.
+        "/wd4389",  # signed/unsigned mismatch.
+      ]
+    }
+
+    if (rtc_use_quic) {
+      public_deps = [
+        "//third_party/libquic",
+      ]
+      sources += [
+        "quicdatachannel_unittest.cc",
+        "quicdatatransport_unittest.cc",
+      ]
+    }
+
+    deps = []
+    if (is_android) {
+      sources += [
+        "test/androidtestinitializer.cc",
+        "test/androidtestinitializer.h",
+      ]
+      deps += [
+        "//testing/android/native_test:native_test_support",
+        "//webrtc/sdk/android:libjingle_peerconnection_java",
+        "//webrtc/sdk/android:libjingle_peerconnection_jni",
+      ]
+    }
+
+    deps += [
+      ":fakemetricsobserver",
+      ":libjingle_peerconnection",
+      "..:webrtc_common",
+      "../base:rtc_base_tests_utils",
+      "../media:rtc_unittest_main",
+      "../pc:rtc_pc",
+      "../system_wrappers:metrics_default",
+      "//testing/gmock",
+    ]
+
+    if (is_android) {
+      deps += [ "//testing/android/native_test:native_test_support" ]
+
+      shard_timeout = 900
+    }
+  }
+
+  rtc_source_set("mock_audio_mixer") {
+    testonly = true
+    sources = [
+      "test/mock_audio_mixer.h",
+    ]
+
+    public_deps = [
+      ":audio_mixer_api",
+    ]
+
+    deps = [
+      "//testing/gmock",
+      "//webrtc/test:test_support",
+    ]
+  }
+
+  rtc_source_set("fakemetricsobserver") {
+    testonly = true
+    sources = [
+      "fakemetricsobserver.cc",
+      "fakemetricsobserver.h",
+    ]
+    deps = [
+      ":libjingle_peerconnection",
+      "../base:rtc_base_approved",
+    ]
+    if (!build_with_chromium && is_clang) {
+      # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
+      suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
+    }
+  }
+}