--- a/build/build-clang/clang-3.9-linux64.json
+++ b/build/build-clang/clang-3.9-linux64.json
@@ -5,18 +5,18 @@
"build_type": "Release",
"assertions": false,
"llvm_repo": "https://llvm.org/svn/llvm-project/llvm/tags/RELEASE_390/final",
"clang_repo": "https://llvm.org/svn/llvm-project/cfe/tags/RELEASE_390/final",
"compiler_repo": "https://llvm.org/svn/llvm-project/compiler-rt/tags/RELEASE_390/final",
"libcxx_repo": "https://llvm.org/svn/llvm-project/libcxx/tags/RELEASE_390/final",
"libcxxabi_repo": "https://llvm.org/svn/llvm-project/libcxxabi/tags/RELEASE_390/final",
"python_path": "/usr/bin/python2.7",
- "gcc_dir": "/home/worker/workspace/build/src/gcc",
- "cc": "/home/worker/workspace/build/src/gcc/bin/gcc",
- "cxx": "/home/worker/workspace/build/src/gcc/bin/g++",
- "as": "/home/worker/workspace/build/src/gcc/bin/gcc",
+ "gcc_dir": "/builds/worker/workspace/build/src/gcc",
+ "cc": "/builds/worker/workspace/build/src/gcc/bin/gcc",
+ "cxx": "/builds/worker/workspace/build/src/gcc/bin/g++",
+ "as": "/builds/worker/workspace/build/src/gcc/bin/gcc",
"patches": [
"llvm-debug-frame.patch",
"r277806.patch",
"r285657.patch"
]
}
--- a/build/build-clang/clang-4-linux64.json
+++ b/build/build-clang/clang-4-linux64.json
@@ -5,16 +5,16 @@
"build_type": "Release",
"assertions": false,
"llvm_repo": "https://llvm.org/svn/llvm-project/llvm/tags/RELEASE_401/final",
"clang_repo": "https://llvm.org/svn/llvm-project/cfe/tags/RELEASE_401/final",
"compiler_repo": "https://llvm.org/svn/llvm-project/compiler-rt/tags/RELEASE_401/final",
"libcxx_repo": "https://llvm.org/svn/llvm-project/libcxx/tags/RELEASE_401/final",
"libcxxabi_repo": "https://llvm.org/svn/llvm-project/libcxxabi/tags/RELEASE_401/final",
"python_path": "/usr/bin/python2.7",
- "gcc_dir": "/home/worker/workspace/build/src/gcc",
- "cc": "/home/worker/workspace/build/src/gcc/bin/gcc",
- "cxx": "/home/worker/workspace/build/src/gcc/bin/g++",
- "as": "/home/worker/workspace/build/src/gcc/bin/gcc",
+ "gcc_dir": "/builds/worker/workspace/build/src/gcc",
+ "cc": "/builds/worker/workspace/build/src/gcc/bin/gcc",
+ "cxx": "/builds/worker/workspace/build/src/gcc/bin/g++",
+ "as": "/builds/worker/workspace/build/src/gcc/bin/gcc",
"patches": [
"llvm-debug-frame.patch"
]
}
--- a/build/build-clang/clang-macosx64.json
+++ b/build/build-clang/clang-macosx64.json
@@ -6,24 +6,24 @@
"assertions": false,
"osx_cross_compile": true,
"llvm_repo": "https://llvm.org/svn/llvm-project/llvm/tags/RELEASE_390/final",
"clang_repo": "https://llvm.org/svn/llvm-project/cfe/tags/RELEASE_390/final",
"compiler_repo": "https://llvm.org/svn/llvm-project/compiler-rt/tags/RELEASE_390/final",
"libcxx_repo": "https://llvm.org/svn/llvm-project/libcxx/tags/RELEASE_390/final",
"libcxxabi_repo": "https://llvm.org/svn/llvm-project/libcxxabi/tags/RELEASE_390/final",
"python_path": "/usr/bin/python2.7",
- "gcc_dir": "/home/worker/workspace/build/src/gcc",
- "cc": "/home/worker/workspace/build/src/clang/bin/clang",
- "cxx": "/home/worker/workspace/build/src/clang/bin/clang++",
- "as": "/home/worker/workspace/build/src/clang/bin/clang",
- "ar": "/home/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ar",
- "ranlib": "/home/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ranlib",
- "libtool": "/home/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-libtool",
- "ld": "/home/worker/workspace/build/src/clang/bin/clang",
+ "gcc_dir": "/builds/worker/workspace/build/src/gcc",
+ "cc": "/builds/worker/workspace/build/src/clang/bin/clang",
+ "cxx": "/builds/worker/workspace/build/src/clang/bin/clang++",
+ "as": "/builds/worker/workspace/build/src/clang/bin/clang",
+ "ar": "/builds/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ar",
+ "ranlib": "/builds/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ranlib",
+ "libtool": "/builds/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-libtool",
+ "ld": "/builds/worker/workspace/build/src/clang/bin/clang",
"patches":[
"llvm-debug-frame.patch",
"compiler-rt-cross-compile.patch",
"pr28831-r280042.patch",
"r277806.patch",
"r285657.patch"
]
}
--- a/build/build-clang/clang-tidy-linux64.json
+++ b/build/build-clang/clang-tidy-linux64.json
@@ -7,13 +7,13 @@
"build_clang_tidy": true,
"llvm_repo": "https://llvm.org/svn/llvm-project/llvm/trunk",
"clang_repo": "https://llvm.org/svn/llvm-project/cfe/trunk",
"extra_repo": "https://llvm.org/svn/llvm-project/clang-tools-extra/trunk",
"compiler_repo": "https://llvm.org/svn/llvm-project/compiler-rt/trunk",
"libcxx_repo": "https://llvm.org/svn/llvm-project/libcxx/trunk",
"libcxxabi_repo": "https://llvm.org/svn/llvm-project/libcxxabi/trunk",
"python_path": "/usr/bin/python2.7",
- "gcc_dir": "/home/worker/workspace/build/src/gcc",
- "cc": "/home/worker/workspace/build/src/gcc/bin/gcc",
- "cxx": "/home/worker/workspace/build/src/gcc/bin/g++",
- "as": "/home/worker/workspace/build/src/gcc/bin/gcc"
+ "gcc_dir": "/builds/worker/workspace/build/src/gcc",
+ "cc": "/builds/worker/workspace/build/src/gcc/bin/gcc",
+ "cxx": "/builds/worker/workspace/build/src/gcc/bin/g++",
+ "as": "/builds/worker/workspace/build/src/gcc/bin/gcc"
}
--- a/build/build-clang/clang-tidy-macosx64.json
+++ b/build/build-clang/clang-tidy-macosx64.json
@@ -8,20 +8,20 @@
"osx_cross_compile": true,
"llvm_repo": "https://llvm.org/svn/llvm-project/llvm/trunk",
"clang_repo": "https://llvm.org/svn/llvm-project/cfe/trunk",
"extra_repo": "https://llvm.org/svn/llvm-project/clang-tools-extra/trunk",
"compiler_repo": "https://llvm.org/svn/llvm-project/compiler-rt/trunk",
"libcxx_repo": "https://llvm.org/svn/llvm-project/libcxx/trunk",
"libcxxabi_repo": "https://llvm.org/svn/llvm-project/libcxxabi/trunk",
"python_path": "/usr/bin/python2.7",
- "gcc_dir": "/home/worker/workspace/build/src/gcc",
- "cc": "/home/worker/workspace/build/src/clang/bin/clang",
- "cxx": "/home/worker/workspace/build/src/clang/bin/clang++",
- "as": "/home/worker/workspace/build/src/clang/bin/clang",
- "ar": "/home/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ar",
- "ranlib": "/home/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ranlib",
- "ld": "/home/worker/workspace/build/src/clang/bin/clang",
+ "gcc_dir": "/builds/worker/workspace/build/src/gcc",
+ "cc": "/builds/worker/workspace/build/src/clang/bin/clang",
+ "cxx": "/builds/worker/workspace/build/src/clang/bin/clang++",
+ "as": "/builds/worker/workspace/build/src/clang/bin/clang",
+ "ar": "/builds/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ar",
+ "ranlib": "/builds/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ranlib",
+ "ld": "/builds/worker/workspace/build/src/clang/bin/clang",
"patches": [
"llvm-debug-frame.patch",
"compiler-rt-cross-compile.patch"
]
}
--- a/build/valgrind/x86_64-redhat-linux-gnu.sup
+++ b/build/valgrind/x86_64-redhat-linux-gnu.sup
@@ -331,44 +331,44 @@
fun:sse2_composite_over_8888_8888
fun:_moz_pixman_image_composite32
fun:_clip_and_composite_boxes
fun:_cairo_image_surface_fill
}
# Conditional jump or move depends on uninitialised value(s)
-# at 0xF9D56AE: sse2_combine_over_u (in /home/worker/workspace/build/applic
-# by 0xF9D05D4: general_composite_rect (in /home/worker/workspace/build/app
-# by 0xF9F5B5F: _moz_pixman_image_composite32 (in /home/worker/workspace/bu
-# by 0xF96CF63: _clip_and_composite (in /home/worker/workspace/build/applic
-# by 0xF96D656: _clip_and_composite_boxes.part.32 (in /home/worker/workspac
-# by 0xF96E328: _clip_and_composite_boxes (in /home/worker/workspace/build/
-# by 0xF96F79D: _cairo_image_surface_fill (in /home/worker/workspace/build/
-# by 0xF98790C: _cairo_surface_fill (in /home/worker/workspace/build/applic
+# at 0xF9D56AE: sse2_combine_over_u (in /builds/worker/workspace/build/applic
+# by 0xF9D05D4: general_composite_rect (in /builds/worker/workspace/build/app
+# by 0xF9F5B5F: _moz_pixman_image_composite32 (in /builds/worker/workspace/bu
+# by 0xF96CF63: _clip_and_composite (in /builds/worker/workspace/build/applic
+# by 0xF96D656: _clip_and_composite_boxes.part.32 (in /builds/worker/workspac
+# by 0xF96E328: _clip_and_composite_boxes (in /builds/worker/workspace/build/
+# by 0xF96F79D: _cairo_image_surface_fill (in /builds/worker/workspace/build/
+# by 0xF98790C: _cairo_surface_fill (in /builds/worker/workspace/build/applic
# Uninitialised value was created by a stack allocation
-# at 0xF9D024D: general_composite_rect (in /home/worker/workspace/build/app
+# at 0xF9D024D: general_composite_rect (in /builds/worker/workspace/build/app
#
{
Bug 1248365: mochitest-libpixman-3
Memcheck:Cond
fun:sse2_combine_over_u
fun:general_composite_rect
fun:_moz_pixman_image_composite32
fun:_clip_and_composite*
}
# Conditional jump or move depends on uninitialised value(s)
-# at 0xE626A5C: mozilla::image::imgFrame::Optimize() (in /home/worker/work
+# at 0xE626A5C: mozilla::image::imgFrame::Optimize() (in /builds/worker/work
# by 0xE626C68: mozilla::image::imgFrame::UnlockImageData() (in /home/work
# by 0xE608E8F: mozilla::image::RawAccessFrameRef::~RawAccessFrameRef() (i
-# by 0xE61F5E4: mozilla::image::Decoder::~Decoder() (in /home/worker/works
+# by 0xE61F5E4: mozilla::image::Decoder::~Decoder() (in /builds/worker/works
# by 0xE630E32: mozilla::image::nsIconDecoder::~nsIconDecoder() (in /home/
-# by 0xE61A5B2: mozilla::image::Decoder::Release() (in /home/worker/worksp
+# by 0xE61A5B2: mozilla::image::Decoder::Release() (in /builds/worker/worksp
# by 0xE61DD73: mozilla::image::NotifyDecodeCompleteWorker::~NotifyDecodeC
# by 0xE61DD8F: mozilla::image::NotifyDecodeCompleteWorker::~NotifyDecodeC
# Uninitialised value was created by a stack allocation
# at 0xB8E46B0: ??? (in /usr/lib/x86_64-linux-gnu/libpixman-1.so.0.30.2)
{
Bug 1248365: mochitest-libpixman-4
Memcheck:Cond
fun:_ZN7mozilla5image8imgFrame8OptimizeEv
@@ -378,21 +378,21 @@
}
# Not sure what this. I can't reproduce it locally despite much trying.
# Syscall param sendmsg(msg.msg_iov[0]) points to uninitialised byte(s)
# at 0x4E4533D: ??? (syscall-template.S:82)
# by 0xE12C0A7: IPC::Channel::ChannelImpl::ProcessOutgoingMessages() (in /h
# by 0xE142FD0: RunnableMethod<IPC::Channel, bool (IPC::Channel::*)(IPC::Me
-# by 0xE1240EA: MessageLoop::RunTask(Task*) (in /home/worker/workspace/buil
+# by 0xE1240EA: MessageLoop::RunTask(Task*) (in /builds/worker/workspace/buil
# by 0xE128A46: MessageLoop::DeferOrRunPendingTask(MessageLoop::PendingTask
-# by 0xE128B6D: MessageLoop::DoWork() (in /home/worker/workspace/build/appl
+# by 0xE128B6D: MessageLoop::DoWork() (in /builds/worker/workspace/build/appl
# by 0xE12272C: base::MessagePumpLibevent::Run(base::MessagePump::Delegate*
-# by 0xE124155: MessageLoop::Run() (in /home/worker/workspace/build/applica
+# by 0xE124155: MessageLoop::Run() (in /builds/worker/workspace/build/applica
{
Bug 1248365: mochitest-sendmsg-1
Memcheck:Param
sendmsg(msg.msg_iov[0])
obj:/lib/x86_64-linux-gnu/libpthread-2.15.so
fun:_ZN3IPC7Channel11ChannelImpl23ProcessOutgoingMessagesEv
fun:_ZN14RunnableMethodIN3IPC7ChannelEMS1_FbPNS0_7MessageEEN7mozilla5Tuple*
}
@@ -401,24 +401,24 @@
# I can't repro this either.
# Conditional jump or move depends on uninitialised value(s)
# at 0x418E7E7C: ??? (in /usr/lib/x86_64-linux-gnu/libavcodec.so.53.35.0)
# by 0x4192D620: ??? (in /usr/lib/x86_64-linux-gnu/libavcodec.so.53.35.0)
# by 0x4192E717: ??? (in /usr/lib/x86_64-linux-gnu/libavcodec.so.53.35.0)
# by 0x41711BC4: ??? (in /usr/lib/x86_64-linux-gnu/libavcodec.so.53.35.0)
# by 0x41B08B6A: avcodec_open2 (in /usr/lib/x86_64-linux-gnu/libavcodec.so.
# by 0xEEAD89C: mozilla::FFmpegDataDecoder<53>::InitDecoder() (in /home/wor
-# by 0xEEAE42B: mozilla::FFmpegVideoDecoder<53>::Init() (in /home/worker/wo
-# by 0xEEA4C07: mozilla::H264Converter::Init() (in /home/worker/workspace/b
+# by 0xEEAE42B: mozilla::FFmpegVideoDecoder<53>::Init() (in /builds/worker/wo
+# by 0xEEA4C07: mozilla::H264Converter::Init() (in /builds/worker/workspace/b
# Uninitialised value was created by a heap allocation
# at 0x4C2D11F: realloc (vg_replace_malloc.c:785)
-# by 0x406196: moz_xrealloc (in /home/worker/workspace/build/application/fi
+# by 0x406196: moz_xrealloc (in /builds/worker/workspace/build/application/fi
# by 0xDEB43AC: nsTArrayInfallibleAllocator::ResultTypeProxy nsTArray_base<
# by 0xEEAD850: mozilla::FFmpegDataDecoder<53>::InitDecoder() (in /home/wor
-# by 0xEEAE42B: mozilla::FFmpegVideoDecoder<53>::Init() (in /home/worker/wo
+# by 0xEEAE42B: mozilla::FFmpegVideoDecoder<53>::Init() (in /builds/worker/wo
{
Bug 1248365: mochitest-libavcodec-1-c
Memcheck:Cond
obj:/*/libavcodec.so.53*
obj:/*/libavcodec.so.53*
obj:/*/libavcodec.so.53*
obj:/*/libavcodec.so.53*
}
@@ -430,17 +430,17 @@
obj:/*/libavcodec.so.53*
obj:/*/libavcodec.so.53*
}
# Not sure what this is, but I am inclined to think it is also probably a
# SSE2-induced false positive similar to mochitest-libpixman-2 above.
# Use of uninitialised value of size 8
-# at 0xE4F3E89: FastConvertYUVToRGB32Row (in /home/worker/workspace/build/a
+# at 0xE4F3E89: FastConvertYUVToRGB32Row (in /builds/worker/workspace/build/a
# by 0xE4F4A6D: mozilla::gfx::ConvertYCbCrToRGB32(unsigned char const*, uns
# by 0xE4F4B17: mozilla::gfx::ConvertYCbCrToRGB(mozilla::layers::PlanarYCbC
# by 0xE5227CB: mozilla::layers::PlanarYCbCrImage::GetAsSourceSurface() (in
# by 0xE5B2465: mozilla::layers::SharedPlanarYCbCrImage::GetAsSourceSurface
# by 0xE52FE44: mozilla::layers::BasicImageLayer::Paint(mozilla::gfx::DrawT
# by 0xE5618A1: mozilla::layers::BasicLayerManager::PaintSelfOrChildren(moz
# by 0xE560F83: mozilla::layers::BasicLayerManager::PaintLayer(gfxContext*,
# Uninitialised value was created by a stack allocation
@@ -468,21 +468,21 @@
fun:_ZN6SkScan9FillIRect*
fun:_ZN6SkScan9FillIRect*
}
# This is probably a V false positive, due to an insufficiently accurate
# description of the ioctl(SIOCETHTOOL) behavior.
# Syscall param ioctl(SIOCETHTOOL) points to uninitialised byte(s)
# at 0x5D5CBF7: ioctl (syscall-template.S:82)
-# by 0xF58EB67: nr_stun_get_addrs (in /home/worker/workspace/build/applica
-# by 0xF594791: nr_stun_find_local_addresses (in /home/worker/workspace/bu
-# by 0xF58A237: nr_ice_get_local_addresses (in /home/worker/workspace/buil
-# by 0xF58ADDE: nr_ice_gather (in /home/worker/workspace/build/application
-# by 0xE43F35F: mozilla::NrIceCtx::StartGathering() (in /home/worker/works
+# by 0xF58EB67: nr_stun_get_addrs (in /builds/worker/workspace/build/applica
+# by 0xF594791: nr_stun_find_local_addresses (in /builds/worker/workspace/bu
+# by 0xF58A237: nr_ice_get_local_addresses (in /builds/worker/workspace/buil
+# by 0xF58ADDE: nr_ice_gather (in /builds/worker/workspace/build/application
+# by 0xE43F35F: mozilla::NrIceCtx::StartGathering() (in /builds/worker/works
# by 0xE419560: mozilla::PeerConnectionMedia::EnsureIceGathering_s() (in /
# by 0xE41A11C: mozilla::runnable_args_memfn<RefPtr<mozilla::PeerConnectio
# Address 0x1cc3fb48 is on thread 6's stack
# in frame #1, created by nr_stun_get_addrs (???:)
{
Bug 1248365: mochitest-ioctl(SIOCETHTOOL)-1
Memcheck:Param
ioctl(SIOCETHTOOL)
@@ -497,26 +497,26 @@
# Syscall param write(buf) points to uninitialised byte(s)
# at 0x4E44CCD: ??? (syscall-template.S:82)
# by 0x9F1FF56: ??? (in /usr/lib/x86_64-linux-gnu/libfontconfig.so.1.4.4)
# by 0x9F2679B: ??? (in /usr/lib/x86_64-linux-gnu/libfontconfig.so.1.4.4)
# by 0x9F22B98: ??? (in /usr/lib/x86_64-linux-gnu/libfontconfig.so.1.4.4)
# by 0x9F22C5F: FcConfigAppFontAddDir (in /usr/lib/x86_64-linux-gnu/libfon
# by 0xE850173: gfxFcPlatformFontList::ActivateBundledFonts() (in /home/wo
# by 0xE852258: gfxFcPlatformFontList::InitFontListForPlatform() (in /home
-# by 0xE895E21: gfxPlatformFontList::InitFontList() (in /home/worker/works
+# by 0xE895E21: gfxPlatformFontList::InitFontList() (in /builds/worker/works
# Address 0x2316663c is 156 bytes inside a block of size 1,448 alloc'd
# at 0x4C2CF71: malloc (vg_replace_malloc.c:299)
# by 0x9F1FD1D: ??? (in /usr/lib/x86_64-linux-gnu/libfontconfig.so.1.4.4)
# by 0x9F26788: ??? (in /usr/lib/x86_64-linux-gnu/libfontconfig.so.1.4.4)
# by 0x9F22B98: ??? (in /usr/lib/x86_64-linux-gnu/libfontconfig.so.1.4.4)
# by 0x9F22C5F: FcConfigAppFontAddDir (in /usr/lib/x86_64-linux-gnu/libfon
# by 0xE850173: gfxFcPlatformFontList::ActivateBundledFonts() (in /home/wo
# by 0xE852258: gfxFcPlatformFontList::InitFontListForPlatform() (in /home
-# by 0xE895E21: gfxPlatformFontList::InitFontList() (in /home/worker/works
+# by 0xE895E21: gfxPlatformFontList::InitFontList() (in /builds/worker/works
{
Bug 1248365: libfontconfig-1
Memcheck:Param
write(buf)
obj:/*/libpthread*.so*
obj:/*/libfontconfig.so*
...
obj:/*/libfontconfig.so*
@@ -525,18 +525,18 @@
# There's nothing we can do about these short of throwing in
# --show-mismatched-frees=no, but that's a bit drastic, so for now,
# just suppress them. A typical error is:
#
# Mismatched free() / delete / delete []
# at 0x4C2BE97: free (vg_replace_malloc.c:530)
-# by 0xFCD09EC: ots::ots_post_free(ots::Font*) (in /home/worker/workspace/
-# by 0xFCC600E: ots::Font::~Font() (in /home/worker/workspace/build/applic
+# by 0xFCD09EC: ots::ots_post_free(ots::Font*) (in /builds/worker/workspace/
+# by 0xFCC600E: ots::Font::~Font() (in /builds/worker/workspace/build/applic
# by 0xFCCBFA5: ots::OTSContext::Process(ots::OTSStream*, unsigned char co
# by 0xE7D7C8D: gfxUserFontEntry::SanitizeOpenTypeData(unsigned char const
# by 0xE7E371D: gfxUserFontEntry::LoadPlatformFont(unsigned char const*, u
# by 0xE7E48AA: gfxUserFontEntry::FontDataDownloadComplete(unsigned char c
# by 0xF49D25B: nsFontFaceLoader::OnStreamComplete(nsIStreamLoader*, nsISu
# Address 0x15671f00 is 0 bytes inside a block of size 490 alloc'd
# at 0x4C2CAEE: operator new(unsigned long) (vg_replace_malloc.c:332)
# by 0xF6AB737: std::vector<unsigned short, std::allocator<unsigned short>
--- a/mobile/android/config/mozconfigs/android-api-15-gradle-dependencies/nightly
+++ b/mobile/android/config/mozconfigs/android-api-15-gradle-dependencies/nightly
@@ -46,9 +46,9 @@ export MOZ_TELEMETRY_REPORTING=1
export MOZ_ANDROID_POCKET=1
. "$topsrcdir/mobile/android/config/mozconfigs/common.override"
# End ../android-api-15-frontend/nightly.
# Populated after checking out the sources and before building the
# tree as part of the dependencies task bin/ scripts.
-ac_add_options --with-android-sdk="/home/worker/.mozbuild/android-sdk-linux"
+ac_add_options --with-android-sdk="/builds/worker/.mozbuild/android-sdk-linux"
--- a/mobile/android/config/mozconfigs/public-partner/distribution_sample/mozconfig1
+++ b/mobile/android/config/mozconfigs/public-partner/distribution_sample/mozconfig1
@@ -4,17 +4,17 @@
ac_add_options --enable-profiling
# Android
ac_add_options --with-android-min-sdk=16
ac_add_options --target=arm-linux-androideabi
ac_add_options --with-branding=mobile/android/branding/nightly
-ac_add_options --with-android-distribution-directory=/home/worker/workspace/build/partner
+ac_add_options --with-android-distribution-directory=/builds/worker/workspace/build/partner
# This will overwrite the default of stripping everything and keep the symbol table.
# This is useful for profiling with eideticker. See bug 788680
STRIP_FLAGS="--strip-debug"
export MOZILLA_OFFICIAL=1
export MOZ_TELEMETRY_REPORTING=1
--- a/taskcluster/ci/build/android-stuff.yml
+++ b/taskcluster/ci/build/android-stuff.yml
@@ -7,28 +7,29 @@ android-dependencies/opt:
platform: android-4-0-armv7-api15/opt
kind: build
tier: 2
symbol: tc(Deps)
worker-type: aws-provisioner-v1/gecko-{level}-b-android
worker:
docker-image: {in-tree: android-gradle-build}
env:
- GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle-online"
+ GRADLE_USER_HOME: "/builds/worker/workspace/build/src/dotgradle-online"
PERFHERDER_EXTRA_OPTIONS: android-dependencies
TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-gradle-dependencies/releng.manifest"
+ WORKSPACE: "/builds/worker/workspace"
artifacts:
- name: public/build
- path: /home/worker/artifacts/
+ path: /builds/worker/artifacts/
type: directory
- name: private/android-sdk
- path: /home/worker/private/android-sdk
+ path: /builds/worker/private/android-sdk
type: directory
- name: private/java_home
- path: /home/worker/private/java_home
+ path: /builds/worker/private/java_home
type: directory
max-run-time: 36000
run:
using: mozharness
actions: [get-secrets build multi-l10n update]
config:
- builds/releng_base_android_64_builds.py
- disable_signing.py
@@ -54,25 +55,25 @@ android-test/opt:
platform: android-4-0-armv7-api15/opt
kind: build
tier: 2
symbol: tc(test)
worker-type: aws-provisioner-v1/gecko-{level}-b-android
worker:
docker-image: {in-tree: desktop-build}
env:
- GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle"
+ GRADLE_USER_HOME: "/builds/worker/workspace/build/src/dotgradle"
PERFHERDER_EXTRA_OPTIONS: android-test
TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-frontend/releng.manifest"
artifacts:
- name: public/android/unittest
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/tests
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/tests
type: directory
- name: public/build
- path: /home/worker/artifacts/
+ path: /builds/worker/artifacts/
type: directory
max-run-time: 36000
run:
using: mozharness
actions: [get-secrets build multi-l10n update]
config:
- builds/releng_base_android_64_builds.py
- disable_signing.py
@@ -96,40 +97,40 @@ android-lint/opt:
platform: android-4-0-armv7-api15/opt
kind: build
tier: 2
symbol: tc(lint)
worker-type: aws-provisioner-v1/gecko-{level}-b-android
worker:
docker-image: {in-tree: desktop-build}
env:
- GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle"
+ GRADLE_USER_HOME: "/builds/worker/workspace/build/src/dotgradle"
PERFHERDER_EXTRA_OPTIONS: android-lint
TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-frontend/releng.manifest"
artifacts:
- name: public/android/lint/lint-results-officialAustralisDebug.html
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug.html
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug.html
type: file
- name: public/android/lint/lint-results-officialAustralisDebug.xml
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug.xml
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug.xml
type: file
- name: public/android/lint/lint-results-officialAustralisDebug_files
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug_files
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug_files
type: directory
- name: public/android/lint/lint-results-officialPhotonDebug.html
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug.html
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug.html
type: file
- name: public/android/lint/lint-results-officialPhotonDebug.xml
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug.xml
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug.xml
type: file
- name: public/android/lint/lint-results-officialPhotonDebug_files
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug_files
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug_files
type: directory
- name: public/build
- path: /home/worker/artifacts/
+ path: /builds/worker/artifacts/
type: directory
max-run-time: 36000
run:
using: mozharness
actions: [get-secrets build multi-l10n update]
config:
- builds/releng_base_android_64_builds.py
- disable_signing.py
@@ -159,28 +160,28 @@ android-checkstyle/opt:
platform: android-4-0-armv7-api15/opt
kind: build
tier: 2
symbol: tc(checkstyle)
worker-type: aws-provisioner-v1/gecko-{level}-b-android
worker:
docker-image: {in-tree: desktop-build}
env:
- GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle"
+ GRADLE_USER_HOME: "/builds/worker/workspace/build/src/dotgradle"
PERFHERDER_EXTRA_OPTIONS: android-checkstyle
TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-frontend/releng.manifest"
artifacts:
- name: public/android/checkstyle/checkstyle.html
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/checkstyle/checkstyle.html
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/checkstyle/checkstyle.html
type: file
- name: public/android/checkstyle/checkstyle.xml
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/checkstyle/checkstyle.xml
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/checkstyle/checkstyle.xml
type: file
- name: public/build
- path: /home/worker/artifacts/
+ path: /builds/worker/artifacts/
type: directory
max-run-time: 36000
run:
using: mozharness
actions: [get-secrets build multi-l10n update]
config:
- builds/releng_base_android_64_builds.py
- disable_signing.py
@@ -206,34 +207,34 @@ android-findbugs/opt:
platform: android-4-0-armv7-api15/opt
kind: build
tier: 2
symbol: tc(findbugs)
worker-type: aws-provisioner-v1/gecko-{level}-b-android
worker:
docker-image: {in-tree: desktop-build}
env:
- GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle"
+ GRADLE_USER_HOME: "/builds/worker/workspace/build/src/dotgradle"
PERFHERDER_EXTRA_OPTIONS: android-findbugs
TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-frontend/releng.manifest"
artifacts:
- name: public/android/findbugs/findbugs-officialAustralisDebug-output.html
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialAustralisDebug-output.html
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialAustralisDebug-output.html
type: file
- name: public/android/findbugs/findbugs-officialAustralisDebug-output.xml
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialAustralisDebug-output.xml
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialAustralisDebug-output.xml
type: file
- name: public/android/findbugs/findbugs-officialPhotonDebug-output.html
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialPhotonDebug-output.html
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialPhotonDebug-output.html
type: file
- name: public/android/findbugs/findbugs-officialPhotonDebug-output.xml
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialPhotonDebug-output.xml
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialPhotonDebug-output.xml
type: file
- name: public/build
- path: /home/worker/artifacts/
+ path: /builds/worker/artifacts/
type: directory
max-run-time: 36000
run:
using: mozharness
actions: [get-secrets build multi-l10n update]
config:
- builds/releng_base_android_64_builds.py
- disable_signing.py
--- a/taskcluster/ci/build/android.yml
+++ b/taskcluster/ci/build/android.yml
@@ -279,27 +279,27 @@ android-api-15-gradle/opt:
platform: android-api-15-gradle/opt
symbol: tc(Bg)
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-android
worker:
max-run-time: 7200
env:
# Bug 1292762 - Set GRADLE_USER_HOME to avoid sdk-manager-plugin intermittent
- GRADLE_USER_HOME: /home/worker/workspace/build/src/dotgradle
+ GRADLE_USER_HOME: /builds/worker/workspace/build/src/dotgradle
TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android/releng.manifest"
artifacts:
- name: public/android/maven
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/geckoview/maven/
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/geckoview/maven/
type: directory
- name: public/build/geckoview_example.apk
- path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/geckoview_example/outputs/apk/geckoview_example-withGeckoBinaries.apk
+ path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/geckoview_example/outputs/apk/geckoview_example-withGeckoBinaries.apk
type: file
- name: public/build
- path: /home/worker/artifacts/
+ path: /builds/worker/artifacts/
type: directory
run:
using: mozharness
actions: [get-secrets build multi-l10n update]
config:
- builds/releng_base_android_64_builds.py
- disable_signing.py
- platform_supports_post_upload_to_latest.py
--- a/taskcluster/ci/hazard/kind.yml
+++ b/taskcluster/ci/hazard/kind.yml
@@ -32,17 +32,17 @@ jobs:
platform: linux64/debug
symbol: SM-tc(H)
worker:
env:
TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/hazard.manifest"
run:
using: hazard
command: >
- cd /home/worker/checkouts/gecko/taskcluster/scripts/builder
+ cd /builds/worker/checkouts/gecko/taskcluster/scripts/builder
&& ./build-haz-linux.sh --project shell $HOME/workspace
when:
files-changed:
- js/public/**
- js/src/**
toolchains:
- linux64-clang
- linux64-gcc-4.9
@@ -57,13 +57,13 @@ jobs:
symbol: tc(H)
worker:
env:
TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/hazard.manifest"
run:
using: hazard
mozconfig: "browser/config/mozconfigs/linux64/hazards"
command: >
- cd /home/worker/checkouts/gecko/taskcluster/scripts/builder
+ cd /builds/worker/checkouts/gecko/taskcluster/scripts/builder
&& ./build-haz-linux.sh --project browser $HOME/workspace
toolchains:
- linux64-clang
- linux64-gcc-4.9
--- a/taskcluster/ci/source-test/doc.yml
+++ b/taskcluster/ci/source-test/doc.yml
@@ -7,21 +7,21 @@ doc-generate:
tier: 1
worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
worker:
docker-image: {in-tree: "lint"}
max-run-time: 1800
artifacts:
- type: file
name: public/docs.tar.gz
- path: /home/worker/checkouts/gecko/docs-out/main.tar.gz
+ path: /builds/worker/checkouts/gecko/docs-out/main.tar.gz
run:
using: run-task
command: >
- cd /home/worker/checkouts/gecko &&
+ cd /builds/worker/checkouts/gecko &&
./mach doc --outdir docs-out --no-open --archive
when:
files-changed:
- '**/*.py'
- '**/*.rst'
- 'tools/docs/**'
doc-upload:
@@ -34,16 +34,16 @@ doc-upload:
run-on-projects: [mozilla-central]
worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
worker:
docker-image: {in-tree: "lint"}
max-run-time: 1800
taskcluster-proxy: true
run:
using: run-task
- command: cd /home/worker/checkouts/gecko && ./mach doc --upload --no-open
+ command: cd /builds/worker/checkouts/gecko && ./mach doc --upload --no-open
scopes:
- secrets:get:project/releng/gecko/build/level-{level}/gecko-docs-upload
when:
files-changed:
- '**/*.py'
- '**/*.rst'
- 'tools/docs/**'
--- a/taskcluster/ci/source-test/mocha.yml
+++ b/taskcluster/ci/source-test/mocha.yml
@@ -8,14 +8,14 @@ eslint-plugin-mozilla:
worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
worker:
docker-image: {in-tree: "lint"}
max-run-time: 1800
run:
using: run-task
cache-dotcache: true
command: >
- cd /home/worker/checkouts/gecko/tools/lint/eslint/eslint-plugin-mozilla &&
+ cd /builds/worker/checkouts/gecko/tools/lint/eslint/eslint-plugin-mozilla &&
cp -r /build/node_modules_eslint-plugin-mozilla node_modules &&
npm run test
when:
files-changed:
- 'tools/lint/eslint/eslint-plugin-mozilla/**'
--- a/taskcluster/ci/source-test/mozlint.yml
+++ b/taskcluster/ci/source-test/mozlint.yml
@@ -7,17 +7,17 @@ mozlint-eslint:
tier: 1
worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
worker:
docker-image: {in-tree: "lint"}
max-run-time: 1800
run:
using: run-task
command: >
- cd /home/worker/checkouts/gecko/ &&
+ cd /builds/worker/checkouts/gecko/ &&
cp -r /build/node_modules_eslint node_modules &&
ln -s ../tools/lint/eslint/eslint-plugin-mozilla node_modules &&
ln -s ../tools/lint/eslint/eslint-plugin-spidermonkey-js node_modules &&
./mach lint -l eslint -f treeherder --quiet
when:
files-changed:
# Files that are likely audited.
- '**/*.js'
--- a/taskcluster/ci/source-test/python-tests.yml
+++ b/taskcluster/ci/source-test/python-tests.yml
@@ -61,19 +61,19 @@ mochitest-harness:
worker:
by-platform:
linux64.*:
docker-image: {in-tree: "desktop1604-test"}
max-run-time: 3600
run:
using: run-task
command: >
- source /home/worker/scripts/xvfb.sh &&
+ source /builds/worker/scripts/xvfb.sh &&
start_xvfb '1600x1200x24' 0 &&
- cd /home/worker/checkouts/gecko &&
+ cd /builds/worker/checkouts/gecko &&
./mach python-test --subsuite mochitest
when:
files-changed:
- 'config/mozunit.py'
- 'python/mach_commands.py'
- 'testing/mochitest/**'
- 'testing/mozharness/mozharness/base/log.py'
- 'testing/mozharness/mozharness/mozilla/structuredlog.py'
@@ -115,17 +115,17 @@ mozharness:
worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
worker:
docker-image: {in-tree: "lint"}
max-run-time: 1800
run:
using: run-task
cache-dotcache: true
command: >
- cd /home/worker/checkouts/gecko/testing/mozharness &&
+ cd /builds/worker/checkouts/gecko/testing/mozharness &&
/usr/local/bin/tox -e py27-hg4.3
when:
files-changed:
- 'testing/mozharness/**'
mozlint:
description: python/mozlint unit tests
platform: linux64/opt
--- a/taskcluster/docker/android-gradle-build/Dockerfile
+++ b/taskcluster/docker/android-gradle-build/Dockerfile
@@ -1,17 +1,17 @@
# TODO remove VOLUME below when the base image is updated next.
-FROM taskcluster/centos6-build-upd:0.1.6.20160329195300
+FROM taskcluster/centos6-build-upd:0.1.8.20170808150401
MAINTAINER Nick Alexander <nalexander@mozilla.com>
# BEGIN ../desktop-build/Dockerfile
-VOLUME /home/worker/checkouts
-VOLUME /home/worker/workspace
-VOLUME /home/worker/tooltool-cache
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
# %include python/mozbuild/mozbuild/action/tooltool.py
COPY topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /build/tooltool.py
COPY topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /builds/tooltool.py
COPY topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
COPY topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /tmp/tooltool.py
# %include testing/mozharness/external_tools/robustcheckout.py
@@ -37,37 +37,37 @@ COPY topsrcdir/taskcluster/docker/recipe
# TODO remove once base image doesn't install Mercurial
RUN pip uninstall -y Mercurial
RUN bash /setup/system-setup.sh
# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
# %include taskcluster/docker/recipes/xvfb.sh
-COPY topsrcdir/taskcluster/docker/recipes/xvfb.sh /home/worker/scripts/xvfb.sh
+COPY topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh
# %include taskcluster/docker/recipes/run-task
-COPY topsrcdir/taskcluster/docker/recipes/run-task /home/worker/bin/run-task
+COPY topsrcdir/taskcluster/docker/recipes/run-task /builds/worker/bin/run-task
# Add configuration
-COPY dot-config /home/worker/.config
+COPY dot-config /builds/worker/.config
# Generate machine uuid file
RUN dbus-uuidgen --ensure=/var/lib/dbus/machine-id
# Stubbed out credentials; mozharness looks for this file an issues a WARNING
# if it's not found, which causes the build to fail. Note that this needs to
# be in the parent of the workspace directory and in the directory where
# mozharness is run (not its --work-dir). See Bug 1169652.
-ADD oauth.txt /home/worker/
+ADD oauth.txt /builds/worker/
# stubbed out buildprops, which keeps mozharness from choking
# Note that this needs to be in the parent of the workspace directory and in
# the directory where mozharness is run (not its --work-dir)
-ADD buildprops.json /home/worker/
+ADD buildprops.json /builds/worker/
# END ../desktop-build/Dockerfile
# Reset user/workdir from parent image so we can install software.
WORKDIR /
USER root
# Update base.
@@ -109,12 +109,12 @@ RUN tar zxf nexus-${NEXUS_VERSION}-bundl
&& mv /tmp/nexus-${NEXUS_VERSION}/* /opt/sonatype/nexus/ \
&& rm -rf /tmp/nexus-${NEXUS_VERSION} \
&& rm -rf /tmp/nexus-${NEXUS_VERSION}-bundle.tar.gz
# So that we don't have to RUN_AS_USER=root.
RUN chown -R worker:worker /opt/sonatype/nexus/
# Back to the centos6-build workdir, matching desktop-build.
-WORKDIR /home/worker
+WORKDIR /builds/worker
# Set a default command useful for debugging
CMD ["/bin/bash", "--login"]
--- a/taskcluster/docker/android-gradle-build/VERSION
+++ b/taskcluster/docker/android-gradle-build/VERSION
@@ -1,1 +1,1 @@
-0.0.1
+0.0.2
--- a/taskcluster/docker/centos6-build-upd/Dockerfile
+++ b/taskcluster/docker/centos6-build-upd/Dockerfile
@@ -1,9 +1,9 @@
-FROM taskcluster/centos6-build:0.1.7
+FROM taskcluster/centos6-build:0.1.8
MAINTAINER Dustin J. Mitchell <dustin@mozilla.com>
### update to latest from upstream repositories
# if this becomes a long list of packages, consider bumping the
# centos6-build version
RUN yum update -y
# Set a default command useful for debugging
--- a/taskcluster/docker/centos6-build-upd/VERSION
+++ b/taskcluster/docker/centos6-build-upd/VERSION
@@ -1,1 +1,1 @@
-0.1.7.20170801103900
+0.1.8.20170808150401
--- a/taskcluster/docker/centos6-build/Dockerfile
+++ b/taskcluster/docker/centos6-build/Dockerfile
@@ -1,35 +1,37 @@
FROM centos:6
MAINTAINER Dustin J. Mitchell <dustin@mozilla.com>
+RUN mkdir /builds
+
### add worker user and setup its workspace
-RUN useradd -d /home/worker -s /bin/bash -m worker
+RUN useradd -d /builds/worker -s /bin/bash -m worker
# Declare default working folder
-WORKDIR /home/worker
+WORKDIR /builds/worker
# This will create a host mounted filesystem when the cache is stripped
# on Try. This cancels out some of the performance losses of aufs. See
# bug 1291940.
-VOLUME /home/worker/workspace
-VOLUME /home/worker/tooltool-cache
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
# %include build/valgrind/valgrind-epochs.patch
ADD topsrcdir/build/valgrind/valgrind-epochs.patch /tmp/valgrind-epochs.patch
# install non-build specific dependencies in a single layer
ADD system-setup.sh /tmp/system-setup.sh
RUN bash /tmp/system-setup.sh
# Builds need the share module enabled
-ADD hgrc /home/worker/.hgrc
-RUN chown -R worker:worker /home/worker/.hgrc
+ADD hgrc /builds/worker/.hgrc
+RUN chown -R worker:worker /builds/worker/.hgrc
# Set variable normally configured at login, by the shells parent process, these
# are taken from GNU su manual
-ENV HOME /home/worker
+ENV HOME /builds/worker
ENV SHELL /bin/bash
ENV USER worker
ENV LOGNAME worker
ENV HOSTNAME taskcluster-worker
# Set a default command useful for debugging
CMD ["/bin/bash", "--login"]
--- a/taskcluster/docker/centos6-build/VERSION
+++ b/taskcluster/docker/centos6-build/VERSION
@@ -1,1 +1,1 @@
-0.1.7
+0.1.8
--- a/taskcluster/docker/centos6-build/system-setup.sh
+++ b/taskcluster/docker/centos6-build/system-setup.sh
@@ -451,18 +451,18 @@ EOF
cd ninja-1.6.0
./configure.py --bootstrap
cp ninja /usr/local/bin/ninja
# Old versions of Cmake can only find ninja in this location!
ln -s /usr/local/bin/ninja /usr/local/bin/ninja-build
# note that TC will replace workspace with a cache mount; there's no sense
# creating anything inside there
-mkdir -p /home/worker/workspace
-chown worker:worker /home/worker/workspace
+mkdir -p /builds/worker/workspace
+chown worker:worker /builds/worker/workspace
# /builds is *not* replaced with a mount in the docker container. The worker
# user writes to lots of subdirectories, though, so it's owned by that user
mkdir -p /builds
chown worker:worker /builds
# remove packages installed for the builds above
yum shell -y <<'EOF'
--- a/taskcluster/docker/desktop-build/Dockerfile
+++ b/taskcluster/docker/desktop-build/Dockerfile
@@ -1,21 +1,21 @@
# TODO remove VOLUME below when the base image is updated next.
-FROM taskcluster/centos6-build-upd:0.1.7.20170801103900
+FROM taskcluster/centos6-build-upd:0.1.8.20170808150401
MAINTAINER Dustin J. Mitchell <dustin@mozilla.com>
-VOLUME /home/worker/workspace
-VOLUME /home/worker/tooltool-cache
-VOLUME /home/worker/checkouts
-VOLUME /home/worker/.tc-vcs
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/.tc-vcs
# Add build scripts; these are the entry points from the taskcluster worker, and
# operate on environment variables
-ADD bin /home/worker/bin
-RUN chmod +x /home/worker/bin/*
+ADD bin /builds/worker/bin
+RUN chmod +x /builds/worker/bin/*
# %include python/mozbuild/mozbuild/action/tooltool.py
ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /builds/tooltool.py
ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
# %include testing/mozharness/external_tools/robustcheckout.py
ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
@@ -36,37 +36,37 @@ ADD topsrcdir/taskcluster/docker/recipes
# TODO remove once base image doesn't install Mercurial
RUN pip uninstall -y Mercurial
RUN bash /setup/system-setup.sh
# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
# %include taskcluster/docker/recipes/xvfb.sh
-ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /home/worker/scripts/xvfb.sh
+ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh
# %include taskcluster/docker/recipes/run-task
-ADD topsrcdir/taskcluster/docker/recipes/run-task /home/worker/bin/run-task
+ADD topsrcdir/taskcluster/docker/recipes/run-task /builds/worker/bin/run-task
# Add configuration
-COPY dot-config /home/worker/.config
+COPY dot-config /builds/worker/.config
# Generate machine uuid file
RUN dbus-uuidgen --ensure=/var/lib/dbus/machine-id
# Stubbed out credentials; mozharness looks for this file an issues a WARNING
# if it's not found, which causes the build to fail. Note that this needs to
# be in the parent of the workspace directory and in the directory where
# mozharness is run (not its --work-dir). See Bug 1169652.
-ADD oauth.txt /home/worker/
+ADD oauth.txt /builds/worker/
# stubbed out buildprops, which keeps mozharness from choking
# Note that this needs to be in the parent of the workspace directory and in
# the directory where mozharness is run (not its --work-dir)
-ADD buildprops.json /home/worker/
+ADD buildprops.json /builds/worker/
# Move installation to base centos6-build image once Bug 1272629 is fixed
# Install the screen package here to use with xvfb.
# Install bison to build binutils.
RUN yum install -y bison screen
# Install libtool.
RUN yum install -y libtool
--- a/taskcluster/docker/desktop-build/bin/build.sh
+++ b/taskcluster/docker/desktop-build/bin/build.sh
@@ -7,26 +7,26 @@ set -x -e -v
script_args="${@}"
# TODO: when bug 1093833 is solved and tasks can run as non-root, reduce this
# to a simple fail-if-root check
if [ $(id -u) = 0 ]; then
# each of the caches we have mounted are owned by root, so update that ownership
# to 'worker'
- for cache in /home/worker/.tc-vcs /home/worker/workspace /home/worker/tooltool-cache; do
+ for cache in /builds/worker/.tc-vcs /builds/worker/workspace /builds/worker/tooltool-cache; do
if [ -d $cache ]; then
# -R probably isn't necessary forever, but it fixes some poisoned
# caches for now
chown -R worker:worker $cache
fi
done
# ..then drop privileges by re-running this script
- exec su worker -c "/home/worker/bin/build.sh $script_args"
+ exec su worker -c "/builds/worker/bin/build.sh $script_args"
fi
####
# The default build works for any fx_desktop_build based mozharness job:
# via build-linux.sh
####
. $HOME/bin/checkout-sources.sh
--- a/taskcluster/docker/desktop-build/bin/checkout-sources.sh
+++ b/taskcluster/docker/desktop-build/bin/checkout-sources.sh
@@ -20,17 +20,17 @@ set -x -e
: TOOLS_REPOSITORY ${TOOLS_REPOSITORY:=https://hg.mozilla.org/build/tools}
: TOOLS_BASE_REPOSITORY ${TOOLS_BASE_REPOSITORY:=${TOOLS_REPOSITORY}}
: TOOLS_HEAD_REPOSITORY ${TOOLS_HEAD_REPOSITORY:=${TOOLS_REPOSITORY}}
: TOOLS_HEAD_REV ${TOOLS_HEAD_REV:=default}
: TOOLS_HEAD_REF ${TOOLS_HEAD_REF:=${TOOLS_HEAD_REV}}
: TOOLS_DISABLE ${TOOLS_DISABLE:=false}
-: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE ${WORKSPACE:=/builds/worker/workspace}
set -v
# check out tools where mozharness expects it to be ($PWD/build/tools and $WORKSPACE/build/tools)
if [ ! "$TOOLS_DISABLE" = true ]
then
tc-vcs checkout $WORKSPACE/build/tools $TOOLS_BASE_REPOSITORY $TOOLS_HEAD_REPOSITORY $TOOLS_HEAD_REV $TOOLS_HEAD_REF
--- a/taskcluster/docker/desktop1604-test/Dockerfile
+++ b/taskcluster/docker/desktop1604-test/Dockerfile
@@ -1,21 +1,22 @@
FROM ubuntu:16.04
MAINTAINER Joel Maher <joel.maher@gmail.com>
-RUN useradd -d /home/worker -s /bin/bash -m worker
-WORKDIR /home/worker
+RUN mkdir /builds
+RUN useradd -d /builds/worker -s /bin/bash -m worker
+WORKDIR /builds/worker
# We need to declare all potentially cache volumes as caches. Also,
# making high I/O paths volumes increase I/O throughput because of
# AUFS slowness.
-VOLUME /home/worker/.cache
-VOLUME /home/worker/checkouts
-VOLUME /home/worker/tooltool-cache
-VOLUME /home/worker/workspace
+VOLUME /builds/worker/.cache
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/tooltool-cache
+VOLUME /builds/worker/workspace
# %include python/mozbuild/mozbuild/action/tooltool.py
ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
# %include testing/mozharness/external_tools/robustcheckout.py
ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
# %include taskcluster/docker/recipes/common.sh
@@ -32,56 +33,56 @@ ADD topsrcdir/taskcluster/docker/recipes
ADD topsrcdir/testing/config/tooltool-manifests/linux64/releng.manifest /tmp/minidump_stackwalk.manifest
# %include taskcluster/docker/recipes/ubuntu1604-test-system-setup.sh
ADD topsrcdir/taskcluster/docker/recipes/ubuntu1604-test-system-setup.sh /setup/system-setup.sh
RUN bash /setup/system-setup.sh
# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
# %include taskcluster/docker/recipes/xvfb.sh
-ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /home/worker/scripts/xvfb.sh
+ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh
# %include taskcluster/docker/recipes/run-task
-ADD topsrcdir/taskcluster/docker/recipes/run-task /home/worker/bin/run-task
+ADD topsrcdir/taskcluster/docker/recipes/run-task /builds/worker/bin/run-task
# %include taskcluster/scripts/tester/test-linux.sh
-ADD topsrcdir/taskcluster/scripts/tester/test-linux.sh /home/worker/bin/test-linux.sh
+ADD topsrcdir/taskcluster/scripts/tester/test-linux.sh /builds/worker/bin/test-linux.sh
# Set variable normally configured at login, by the shells parent process, these
# are taken from GNU su manual
-ENV HOME /home/worker
+ENV HOME /builds/worker
ENV SHELL /bin/bash
ENV USER worker
ENV LOGNAME worker
ENV HOSTNAME taskcluster-worker
ENV LANG en_US.UTF-8
ENV LC_ALL en_US.UTF-8
# Add utilities and configuration
-COPY dot-files/config /home/worker/.config
-COPY dot-files/pulse /home/worker/.pulse
+COPY dot-files/config /builds/worker/.config
+COPY dot-files/pulse /builds/worker/.pulse
RUN chmod +x bin/*
# TODO: remove this when buildbot is gone
-COPY buildprops.json /home/worker/buildprops.json
+COPY buildprops.json /builds/worker/buildprops.json
COPY tc-vcs-config.yml /etc/taskcluster-vcs.yml
# TODO: remove
-ADD https://raw.githubusercontent.com/taskcluster/buildbot-step/master/buildbot_step /home/worker/bin/buildbot_step
-RUN chmod u+x /home/worker/bin/buildbot_step
+ADD https://raw.githubusercontent.com/taskcluster/buildbot-step/master/buildbot_step /builds/worker/bin/buildbot_step
+RUN chmod u+x /builds/worker/bin/buildbot_step
# allow the worker user to access video devices
RUN usermod -a -G video worker
RUN mkdir Documents; mkdir Pictures; mkdir Music; mkdir Videos; mkdir artifacts
# install tc-vcs and tc-npm-cache
RUN npm install -g taskcluster-vcs@2.3.12 \
&& npm install -g taskcluster-npm-cache@1.1.14 \
&& rm -rf ~/.npm
-ENV PATH $PATH:/home/worker/bin
+ENV PATH $PATH:/builds/worker/bin
# TODO Re-enable worker when bug 1093833 lands
#USER worker
# Disable Ubuntu update prompt
# http://askubuntu.com/questions/515161/ubuntu-12-04-disable-release-notification-of-14-04-in-update-manager
ADD release-upgrades /etc/update-manager/release-upgrades
@@ -98,19 +99,19 @@ EXPOSE 5900
# This helps not forgetting setting DISPLAY=:0 when running
# tests outside of test.sh
ENV DISPLAY :0
# Disable apport (Ubuntu app crash reporter) to avoid stealing focus from test runs
ADD apport /etc/default/apport
# Disable font antialiasing for now to match releng's setup
-ADD fonts.conf /home/worker/.fonts.conf
+ADD fonts.conf /builds/worker/.fonts.conf
# Set up first-run experience for interactive mode
ADD motd /etc/taskcluster-motd
ADD taskcluster-interactive-shell /bin/taskcluster-interactive-shell
RUN chmod +x /bin/taskcluster-interactive-shell
-RUN chown -R worker:worker /home/worker
+RUN chown -R worker:worker /builds/worker
# Set a default command useful for debugging
CMD ["/bin/bash", "--login"]
--- a/taskcluster/docker/lint/Dockerfile
+++ b/taskcluster/docker/lint/Dockerfile
@@ -1,16 +1,17 @@
FROM ubuntu:16.04
MAINTAINER Andrew Halberstadt <ahalberstadt@mozilla.com>
-RUN useradd -d /home/worker -s /bin/bash -m worker
-WORKDIR /home/worker
+RUN mkdir /builds
+RUN useradd -d /builds/worker -s /bin/bash -m worker
+WORKDIR /builds/worker
-VOLUME /home/worker/.cache
-VOLUME /home/worker/checkouts
+VOLUME /builds/worker/.cache
+VOLUME /builds/worker/checkouts
RUN mkdir /build
# %include python/mozbuild/mozbuild/action/tooltool.py
ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /build/tooltool.py
# %include testing/mozharness/external_tools/robustcheckout.py
ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
@@ -26,22 +27,22 @@ ADD topsrcdir/tools/lint/eslint/manifest
ADD topsrcdir/tools/lint/eslint/eslint-plugin-mozilla/manifest.tt /tmp/eslint-plugin-mozilla.tt
# %include tools/lint/flake8_/flake8_requirements.txt
ADD topsrcdir/tools/lint/flake8_/flake8_requirements.txt /tmp/flake8_requirements.txt
# %include tools/lint/tox/tox_requirements.txt
ADD topsrcdir/tools/lint/tox/tox_requirements.txt /tmp/tox_requirements.txt
RUN bash /tmp/system-setup.sh
# %include taskcluster/docker/recipes/run-task
-ADD topsrcdir/taskcluster/docker/recipes/run-task /home/worker/bin/run-task
-RUN chown -R worker:worker /home/worker/bin && chmod 755 /home/worker/bin/*
+ADD topsrcdir/taskcluster/docker/recipes/run-task /builds/worker/bin/run-task
+RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/*
# Set variable normally configured at login, by the shells parent process, these
# are taken from GNU su manual
-ENV HOME /home/worker
+ENV HOME /builds/worker
ENV SHELL /bin/bash
ENV USER worker
ENV LOGNAME worker
ENV HOSTNAME taskcluster-worker
ENV LANG en_US.UTF-8
ENV LC_ALL en_US.UTF-8
# Set a default command useful for debugging
--- a/taskcluster/docker/valgrind-build/Dockerfile
+++ b/taskcluster/docker/valgrind-build/Dockerfile
@@ -1,23 +1,23 @@
# TODO remove VOLUME below when the base image is updated next.
-FROM taskcluster/centos6-build-upd:0.1.7.20170801103900
+FROM taskcluster/centos6-build-upd:0.1.8.20170808150401
MAINTAINER Dustin J. Mitchell <dustin@mozilla.com>
-VOLUME /home/worker/checkouts
-VOLUME /home/worker/workspace
-VOLUME /home/worker/tooltool-cache
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
# Add build scripts; these are the entry points from the taskcluster worker, and
# operate on environment variables
# %include taskcluster/docker/desktop-build/bin
-ADD topsrcdir/taskcluster/docker/desktop-build/bin /home/worker/bin
+ADD topsrcdir/taskcluster/docker/desktop-build/bin /builds/worker/bin
-RUN chmod +x /home/worker/bin/*
+RUN chmod +x /builds/worker/bin/*
# %include python/mozbuild/mozbuild/action/tooltool.py
ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /builds/tooltool.py
ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
# %include testing/mozharness/external_tools/robustcheckout.py
ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
@@ -41,40 +41,40 @@ ADD topsrcdir/taskcluster/docker/recipes
# TODO remove once base image doesn't install Mercurial
RUN pip uninstall -y Mercurial
RUN bash /setup/system-setup.sh
# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
# %include taskcluster/docker/recipes/xvfb.sh
-ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /home/worker/scripts/xvfb.sh
+ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh
# %include taskcluster/docker/recipes/run-task
-ADD topsrcdir/taskcluster/docker/recipes/run-task /home/worker/bin/run-task
+ADD topsrcdir/taskcluster/docker/recipes/run-task /builds/worker/bin/run-task
# Add configuration
# %include taskcluster/docker/desktop-build/dot-config
-ADD topsrcdir/taskcluster/docker/desktop-build/dot-config /home/worker/.config
+ADD topsrcdir/taskcluster/docker/desktop-build/dot-config /builds/worker/.config
# Generate machine uuid file
RUN dbus-uuidgen --ensure=/var/lib/dbus/machine-id
# Stubbed out credentials; mozharness looks for this file an issues a WARNING
# if it's not found, which causes the build to fail. Note that this needs to
# be in the parent of the workspace directory and in the directory where
# mozharness is run (not its --work-dir). See Bug 1169652.
# %include taskcluster/docker/desktop-build/oauth.txt
-ADD topsrcdir/taskcluster/docker/desktop-build/oauth.txt /home/worker/
+ADD topsrcdir/taskcluster/docker/desktop-build/oauth.txt /builds/worker/
# stubbed out buildprops, which keeps mozharness from choking
# Note that this needs to be in the parent of the workspace directory and in
# the directory where mozharness is run (not its --work-dir)
# %include taskcluster/docker/desktop-build/buildprops.json
-ADD topsrcdir/taskcluster/docker/desktop-build/buildprops.json /home/worker/
+ADD topsrcdir/taskcluster/docker/desktop-build/buildprops.json /builds/worker/
# Move installation to base centos6-build image once Bug 1272629 is fixed
# Install the screen package here to use with xvfb.
# Install bison to build binutils.
RUN yum install -y bison screen
# Install libtool.
RUN yum install -y libtool
--- a/taskcluster/scripts/builder/build-android-dependencies.sh
+++ b/taskcluster/scripts/builder/build-android-dependencies.sh
@@ -1,15 +1,15 @@
#!/bin/bash -vex
set -x -e
echo "running as" $(id)
-: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE ${WORKSPACE:=/builds/worker/workspace}
set -v
. $WORKSPACE/build/src/taskcluster/scripts/builder/build-android-dependencies/before.sh
. $WORKSPACE/build/src/taskcluster/scripts/builder/build-linux.sh
. $WORKSPACE/build/src/taskcluster/scripts/builder/build-android-dependencies/after.sh
--- a/taskcluster/scripts/builder/build-android-dependencies/after.sh
+++ b/taskcluster/scripts/builder/build-android-dependencies/after.sh
@@ -1,35 +1,35 @@
#!/bin/bash -vex
set -x -e
echo "running as" $(id)
-: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE ${WORKSPACE:=/builds/worker/workspace}
: GRADLE_VERSION ${GRADLE_VERSION:=2.14.1}
set -v
# Package everything up.
pushd $WORKSPACE
-cp -R /home/worker/.mozbuild/android-sdk-linux android-sdk-linux
+cp -R /builds/worker/.mozbuild/android-sdk-linux android-sdk-linux
tar cJf android-sdk-linux.tar.xz android-sdk-linux
# We can't redistribute the Android SDK publicly.
-mkdir -p /home/worker/private/android-sdk
-mv android-sdk-linux.tar.xz /home/worker/private/android-sdk
+mkdir -p /builds/worker/private/android-sdk
+mv android-sdk-linux.tar.xz /builds/worker/private/android-sdk
cp -R $WORKSPACE/build/src/java_home java_home
tar cJf java_home.tar.xz java_home
# We can't redistribute Java publicly.
-mkdir -p /home/worker/private/java_home
-mv java_home.tar.xz /home/worker/private/java_home
+mkdir -p /builds/worker/private/java_home
+mv java_home.tar.xz /builds/worker/private/java_home
cp -R $WORKSPACE/nexus/storage/jcenter jcenter
tar cJf jcenter.tar.xz jcenter
cp -R $WORKSPACE/nexus/storage/google google
tar cJf google.tar.xz google
# The Gradle wrapper will have downloaded and verified the hash of exactly one
@@ -37,13 +37,13 @@ tar cJf google.tar.xz google
# ~/.gradle/wrapper/dists/gradle-2.7-all/$PROJECT_HASH/gradle-2.7-all.zip. We
# want to remove the version from the internal directory for use via tooltool in
# a mozconfig.
cp $GRADLE_USER_HOME/wrapper/dists/gradle-${GRADLE_VERSION}-all/*/gradle-${GRADLE_VERSION}-all.zip gradle-${GRADLE_VERSION}-all.zip
unzip -q gradle-${GRADLE_VERSION}-all.zip
mv gradle-${GRADLE_VERSION} gradle-dist
tar cJf gradle-dist.tar.xz gradle-dist
-mkdir -p /home/worker/artifacts
-mv jcenter.tar.xz /home/worker/artifacts
-mv google.tar.xz /home/worker/artifacts
-mv gradle-dist.tar.xz /home/worker/artifacts
+mkdir -p /builds/worker/artifacts
+mv jcenter.tar.xz /builds/worker/artifacts
+mv google.tar.xz /builds/worker/artifacts
+mv gradle-dist.tar.xz /builds/worker/artifacts
popd
--- a/taskcluster/scripts/builder/build-android-dependencies/before.sh
+++ b/taskcluster/scripts/builder/build-android-dependencies/before.sh
@@ -1,31 +1,31 @@
#!/bin/bash -vex
set -x -e
echo "running as" $(id)
-: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE ${WORKSPACE:=/builds/worker/workspace}
set -v
mkdir -p ${WORKSPACE}/nexus/conf
-cp /home/worker/workspace/build/src/taskcluster/scripts/builder/build-android-dependencies/nexus.xml ${WORKSPACE}/nexus/conf/nexus.xml
+cp /builds/worker/workspace/build/src/taskcluster/scripts/builder/build-android-dependencies/nexus.xml ${WORKSPACE}/nexus/conf/nexus.xml
-# Populate /home/worker/workspace/build/src/java_home.
+# Populate /builds/worker/workspace/build/src/java_home.
. $WORKSPACE/build/src/taskcluster/scripts/builder/build-android-dependencies/repackage-jdk-centos.sh
mv $WORKSPACE/java/usr/lib/jvm/java_home $WORKSPACE/build/src/java_home
export JAVA_HOME=$WORKSPACE/build/src/java_home
export PATH=$PATH:$JAVA_HOME/bin
-# Populate /home/worker/.mozbuild/android-sdk-linux.
-python2.7 /home/worker/workspace/build/src/python/mozboot/mozboot/android.py --artifact-mode --no-interactive
+# Populate /builds/worker/.mozbuild/android-sdk-linux.
+python2.7 $WORKSPACE/build/src/python/mozboot/mozboot/android.py --artifact-mode --no-interactive
RUN_AS_USER=worker NEXUS_WORK=$WORKSPACE/nexus /opt/sonatype/nexus/bin/nexus restart
# Wait "a while" for Nexus to actually start. Don't fail if this fails.
wget --quiet --retry-connrefused --waitretry=2 --tries=100 \
http://localhost:8081/nexus/service/local/status || true
rm -rf status
--- a/taskcluster/scripts/builder/build-android-dependencies/repackage-jdk-centos.sh
+++ b/taskcluster/scripts/builder/build-android-dependencies/repackage-jdk-centos.sh
@@ -1,13 +1,13 @@
#!/bin/bash -vex
set -e -x
-: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE ${WORKSPACE:=/builds/worker/workspace}
set -v
mkdir -p $WORKSPACE/java
pushd $WORKSPACE/java
# change these variables when updating java version
mirror_url_base="http://mirror.centos.org/centos/6/os/x86_64/Packages"
--- a/taskcluster/scripts/builder/build-l10n.sh
+++ b/taskcluster/scripts/builder/build-l10n.sh
@@ -1,33 +1,33 @@
#! /bin/bash -vex
set -x -e
echo "running as" $(id)
-. /home/worker/scripts/xvfb.sh
+. /builds/worker/scripts/xvfb.sh
####
# Taskcluster friendly wrapper for performing fx desktop l10n repacks via mozharness.
# Based on ./build-linux.sh
####
# Inputs, with defaults
: MOZHARNESS_SCRIPT ${MOZHARNESS_SCRIPT}
: MOZHARNESS_CONFIG ${MOZHARNESS_CONFIG}
: MOZHARNESS_ACTIONS ${MOZHARNESS_ACTIONS}
: MOZHARNESS_OPTIONS ${MOZHARNESS_OPTIONS}
-: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
: NEED_XVFB ${NEED_XVFB:=false}
-: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE ${WORKSPACE:=/builds/worker/workspace}
set -v
fail() {
echo # make sure error message is on a new line
echo "[build-l10n.sh:error]" "${@}"
exit 1
}
@@ -81,17 +81,17 @@ fi
# e.g. enable-pgo
if [ -n "$MOZHARNESS_OPTIONS" ]; then
options=""
for option in $MOZHARNESS_OPTIONS; do
options="$options --$option"
done
fi
-cd /home/worker
+cd /builds/worker
python2.7 $WORKSPACE/build/src/testing/${MOZHARNESS_SCRIPT} \
--disable-mock \
--revision ${GECKO_HEAD_REV} \
$actions \
$options \
${config_cmds} \
--log-level=debug \
--- a/taskcluster/scripts/builder/build-linux.sh
+++ b/taskcluster/scripts/builder/build-linux.sh
@@ -1,37 +1,37 @@
#! /bin/bash -vex
set -x -e
echo "running as" $(id)
-. /home/worker/scripts/xvfb.sh
+. /builds/worker/scripts/xvfb.sh
####
# Taskcluster friendly wrapper for performing fx desktop builds via mozharness.
####
# Inputs, with defaults
: MOZHARNESS_SCRIPT ${MOZHARNESS_SCRIPT}
: MOZHARNESS_CONFIG ${MOZHARNESS_CONFIG}
: MOZHARNESS_ACTIONS ${MOZHARNESS_ACTIONS}
: MOZHARNESS_OPTIONS ${MOZHARNESS_OPTIONS}
-: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
: NEED_XVFB ${NEED_XVFB:=false}
: MH_CUSTOM_BUILD_VARIANT_CFG ${MH_CUSTOM_BUILD_VARIANT_CFG}
: MH_BRANCH ${MH_BRANCH:=mozilla-central}
: MH_BUILD_POOL ${MH_BUILD_POOL:=staging}
: MOZ_SCM_LEVEL ${MOZ_SCM_LEVEL:=1}
-: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE ${WORKSPACE:=/builds/worker/workspace}
set -v
fail() {
echo # make sure error message is on a new line
echo "[build-linux.sh:error]" "${@}"
exit 1
}
@@ -107,17 +107,17 @@ fi
# e.g. enable-pgo
if [ -n "$MOZHARNESS_OPTIONS" ]; then
options=""
for option in $MOZHARNESS_OPTIONS; do
options="$options --$option"
done
fi
-cd /home/worker
+cd /builds/worker
python2.7 $WORKSPACE/build/src/testing/${MOZHARNESS_SCRIPT} ${config_cmds} \
$debug_flag \
$custom_build_variant_cfg_flag \
--disable-mock \
$actions \
$options \
--log-level=debug \
--- a/taskcluster/scripts/builder/repackage.sh
+++ b/taskcluster/scripts/builder/repackage.sh
@@ -1,30 +1,30 @@
#! /bin/bash -vex
set -x -e
echo "running as" $(id)
-. /home/worker/scripts/xvfb.sh
+. /builds/worker/scripts/xvfb.sh
####
# Taskcluster friendly wrapper for performing fx desktop builds via mozharness.
####
# Inputs, with defaults
: MOZHARNESS_SCRIPT ${MOZHARNESS_SCRIPT}
: MOZHARNESS_CONFIG ${MOZHARNESS_CONFIG}
: MOZHARNESS_ACTIONS ${MOZHARNESS_ACTIONS}
: MOZHARNESS_OPTIONS ${MOZHARNESS_OPTIONS}
-: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
-: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE ${WORKSPACE:=/builds/worker/workspace}
set -v
fail() {
echo # make sure error message is on a new line
echo "[build-linux.sh:error]" "${@}"
exit 1
}
@@ -79,15 +79,15 @@ fi
# e.g. enable-pgo
if [ -n "$MOZHARNESS_OPTIONS" ]; then
options=""
for option in $MOZHARNESS_OPTIONS; do
options="$options --$option"
done
fi
-cd /home/worker
+cd /builds/worker
python2.7 $WORKSPACE/build/src/testing/${MOZHARNESS_SCRIPT} ${config_cmds} \
$actions \
$options \
--log-level=debug \
--work-dir=$WORKSPACE/build \
--- a/taskcluster/scripts/misc/build-clang-windows-helper32.sh
+++ b/taskcluster/scripts/misc/build-clang-windows-helper32.sh
@@ -1,15 +1,15 @@
#!/bin/bash
set -x -e -v
# This script is for building clang-cl on Windows.
-: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
export TOOLTOOL_CACHE
TOOLTOOL_AUTH_FILE=/c/builds/relengapi.tok
if [ ! -e ${TOOLTOOL_AUTH_FILE} ]; then
echo cannot find ${TOOLTOOL_AUTH_FILE}
exit 1
fi
--- a/taskcluster/scripts/misc/build-clang-windows-helper64.sh
+++ b/taskcluster/scripts/misc/build-clang-windows-helper64.sh
@@ -1,15 +1,15 @@
#!/bin/bash
set -x -e -v
# This script is for building clang-cl on Windows.
-: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
export TOOLTOOL_CACHE
TOOLTOOL_AUTH_FILE=/c/builds/relengapi.tok
if [ ! -e ${TOOLTOOL_AUTH_FILE} ]; then
echo cannot find ${TOOLTOOL_AUTH_FILE}
exit 1
fi
--- a/taskcluster/scripts/misc/tooltool-download.sh
+++ b/taskcluster/scripts/misc/tooltool-download.sh
@@ -22,14 +22,14 @@ if [ -n "$RELENGAPI_PORT" ]; then
# When the worker has the relengapi proxy setup, use it.
TOOLTOOL_DL_FLAGS="${TOOLTOOL_DL_FLAGS=} --tooltool-url=http://relengapi/tooltool/"
fi
if [ -n "$UPLOAD_DIR" ]; then
TOOLTOOL_DL_FLAGS="${TOOLTOOL_DL_FLAGS=} --artifact-manifest $UPLOAD_DIR/toolchains.json"
fi
-: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
export TOOLTOOL_CACHE
./mach artifact toolchain -v${TOOLTOOL_DL_FLAGS}${TOOLTOOL_MANIFEST:+ --tooltool-manifest "${TOOLTOOL_MANIFEST}"}${TOOLTOOL_CACHE:+ --cache-dir ${TOOLTOOL_CACHE}} --retry 5${MOZ_TOOLCHAINS:+ ${MOZ_TOOLCHAINS}}
cd $OLDPWD
--- a/taskcluster/taskgraph/action.yml
+++ b/taskcluster/taskgraph/action.yml
@@ -23,46 +23,46 @@ routes:
- "tc-treeherder-stage.v2.{{project}}.{{head_rev}}.{{pushlog_id}}"
payload:
env:
GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-unified'
GECKO_HEAD_REPOSITORY: '{{{head_repository}}}'
GECKO_HEAD_REF: '{{head_ref}}'
GECKO_HEAD_REV: '{{head_rev}}'
- HG_STORE_PATH: /home/worker/checkouts/hg-store
+ HG_STORE_PATH: /builds/worker/checkouts/hg-store
cache:
- level-{{level}}-checkouts: /home/worker/checkouts
+ level-{{level}}-checkouts: /builds/worker/checkouts
features:
taskclusterProxy: true
# Note: This task is built server side without the context or tooling that
# exist in tree so we must hard code the version
image: 'taskcluster/decision:0.1.7'
# Virtually no network or other potentially risky operations happen as part
# of the task timeout aside from the initial clone. We intentionally have
# set this to a lower value _all_ decision tasks should use a root
# repository which is cached.
maxRunTime: 1800
command:
- - /home/worker/bin/run-task
- - '--vcs-checkout=/home/worker/checkouts/gecko'
+ - /builds/worker/bin/run-task
+ - '--vcs-checkout=/builds/worker/checkouts/gecko'
- '--'
- bash
- -cx
- >
- cd /home/worker/checkouts/gecko &&
- ln -s /home/worker/artifacts artifacts &&
+ cd /builds/worker/checkouts/gecko &&
+ ln -s /builds/worker/artifacts artifacts &&
./mach --log-no-times taskgraph {{action}} {{action_args}}
artifacts:
'public':
type: 'directory'
- path: '/home/worker/artifacts'
+ path: '/builds/worker/artifacts'
expires: '{{#from_now}}7 days{{/from_now}}'
extra:
treeherder:
symbol: A
--- a/taskcluster/taskgraph/actions/registry.py
+++ b/taskcluster/taskgraph/actions/registry.py
@@ -216,40 +216,41 @@ def register_callback_action(name, title
parameters['project'], parameters['head_rev'], parameters['pushlog_id']),
],
'payload': {
'env': {
'GECKO_BASE_REPOSITORY': 'https://hg.mozilla.org/mozilla-unified',
'GECKO_HEAD_REPOSITORY': parameters['head_repository'],
'GECKO_HEAD_REF': parameters['head_ref'],
'GECKO_HEAD_REV': parameters['head_rev'],
- 'HG_STORE_PATH': '/home/worker/checkouts/hg-store',
+ 'HG_STORE_PATH': '/builds/worker/checkouts/hg-store',
'ACTION_TASK_GROUP_ID': task_group_id,
'ACTION_TASK_ID': {'$json': {'$eval': 'taskId'}},
'ACTION_TASK': {'$json': {'$eval': 'task'}},
'ACTION_INPUT': {'$json': {'$eval': 'input'}},
'ACTION_CALLBACK': cb.__name__,
'ACTION_PARAMETERS': {'$json': {'$eval': 'parameters'}},
},
'cache': {
'level-{}-checkouts'.format(parameters['level']):
- '/home/worker/checkouts',
+ '/builds/worker/checkouts',
},
'features': {
'taskclusterProxy': True,
'chainOfTrust': True,
},
'image': docker_image('decision'),
'maxRunTime': 1800,
'command': [
- '/home/worker/bin/run-task', '--vcs-checkout=/home/worker/checkouts/gecko',
+ '/builds/worker/bin/run-task',
+ '--vcs-checkout=/builds/worker/checkouts/gecko',
'--', 'bash', '-cx',
"""\
-cd /home/worker/checkouts/gecko &&
-ln -s /home/worker/artifacts artifacts &&
+cd /builds/worker/checkouts/gecko &&
+ln -s /builds/worker/artifacts artifacts &&
./mach --log-no-times taskgraph action-callback""",
],
},
'extra': {
'treeherder': {
'groupName': 'action-callback',
'groupSymbol': 'AC',
'symbol': symbol,
--- a/taskcluster/taskgraph/transforms/job/common.py
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -19,47 +19,47 @@ def docker_worker_add_workspace_cache(co
key name to avoid undesired conflicts with other caches."""
taskdesc['worker'].setdefault('caches', []).append({
'type': 'persistent',
'name': 'level-{}-{}-build-{}-{}-workspace'.format(
config.params['level'], config.params['project'],
taskdesc['attributes']['build_platform'],
taskdesc['attributes']['build_type'],
),
- 'mount-point': "/home/worker/workspace",
+ 'mount-point': "/builds/worker/workspace",
# Don't enable the workspace cache when we can't guarantee its
# behavior, like on Try.
'skip-untrusted': True,
})
if extra:
taskdesc['worker']['caches'][-1]['name'] += '-{}'.format(
extra
)
def docker_worker_add_tc_vcs_cache(config, job, taskdesc):
taskdesc['worker'].setdefault('caches', []).append({
'type': 'persistent',
'name': 'level-{}-{}-tc-vcs'.format(
config.params['level'], config.params['project']),
- 'mount-point': "/home/worker/.tc-vcs",
+ 'mount-point': "/builds/worker/.tc-vcs",
})
def add_public_artifacts(config, job, taskdesc, path):
taskdesc['worker'].setdefault('artifacts', []).append({
'name': 'public/build',
'path': path,
'type': 'directory',
})
def docker_worker_add_public_artifacts(config, job, taskdesc):
""" Adds a public artifact directory to the task """
- add_public_artifacts(config, job, taskdesc, path='/home/worker/artifacts/')
+ add_public_artifacts(config, job, taskdesc, path='/builds/worker/artifacts/')
def generic_worker_add_public_artifacts(config, job, taskdesc):
""" Adds a public artifact directory to the task """
add_public_artifacts(config, job, taskdesc, path=r'public/build')
def docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc):
@@ -89,24 +89,24 @@ def support_vcs_checkout(config, job, ta
# Sparse checkouts need their own cache because they can interfere
# with clients that aren't sparse aware.
if sparse:
name += '-sparse'
taskdesc['worker'].setdefault('caches', []).append({
'type': 'persistent',
'name': name,
- 'mount-point': '/home/worker/checkouts',
+ 'mount-point': '/builds/worker/checkouts',
})
taskdesc['worker'].setdefault('env', {}).update({
'GECKO_BASE_REPOSITORY': config.params['base_repository'],
'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
'GECKO_HEAD_REV': config.params['head_rev'],
- 'HG_STORE_PATH': '/home/worker/checkouts/hg-store',
+ 'HG_STORE_PATH': '/builds/worker/checkouts/hg-store',
})
# Give task access to hgfingerprint secret so it can pin the certificate
# for hg.mozilla.org.
taskdesc['scopes'].append('secrets:get:project/taskcluster/gecko/hgfingerprint')
# only some worker platforms have taskcluster-proxy enabled
if job['worker']['implementation'] in ('docker-worker', 'docker-engine'):
@@ -143,21 +143,21 @@ def docker_worker_add_tooltool(config, j
assert job['worker']['implementation'] in ('docker-worker', 'docker-engine')
level = config.params['level']
taskdesc['worker'].setdefault('caches', []).append({
'type': 'persistent',
'name': 'level-%s-tooltool-cache' % level,
- 'mount-point': '/home/worker/tooltool-cache',
+ 'mount-point': '/builds/worker/tooltool-cache',
})
taskdesc['worker'].setdefault('env', {}).update({
- 'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+ 'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
})
taskdesc['worker']['relengapi-proxy'] = True
taskdesc['scopes'].extend([
'docker-worker:relengapi-proxy:tooltool.download.public',
])
if internal:
--- a/taskcluster/taskgraph/transforms/job/hazard.py
+++ b/taskcluster/taskgraph/transforms/job/hazard.py
@@ -57,16 +57,16 @@ def docker_worker_hazard(config, job, ta
})
# script parameters
if run.get('mozconfig'):
env['MOZCONFIG'] = run['mozconfig']
# build-haz-linux.sh needs this otherwise it assumes the checkout is in
# the workspace.
- env['GECKO_DIR'] = '/home/worker/checkouts/gecko'
+ env['GECKO_DIR'] = '/builds/worker/checkouts/gecko'
worker['command'] = [
- '/home/worker/bin/run-task',
- '--vcs-checkout', '/home/worker/checkouts/gecko',
+ '/builds/worker/bin/run-task',
+ '--vcs-checkout', '/builds/worker/checkouts/gecko',
'--',
'/bin/bash', '-c', run['command']
]
--- a/taskcluster/taskgraph/transforms/job/mach.py
+++ b/taskcluster/taskgraph/transforms/job/mach.py
@@ -24,16 +24,16 @@ mach_schema = Schema({
@run_job_using("docker-worker", "mach", schema=mach_schema)
@run_job_using("native-engine", "mach", schema=mach_schema)
def docker_worker_mach(config, job, taskdesc):
run = job['run']
# defer to the run_task implementation
- run['command'] = 'cd /home/worker/checkouts/gecko && ./mach ' + run['mach']
+ run['command'] = 'cd /builds/worker/checkouts/gecko && ./mach ' + run['mach']
run['checkout'] = True
run['sparse-profile'] = None
del run['mach']
if job['worker']['implementation'] == 'docker-worker':
docker_worker_run_task(config, job, taskdesc)
else:
native_engine_run_task(config, job, taskdesc)
--- a/taskcluster/taskgraph/transforms/job/mozharness.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness.py
@@ -163,21 +163,21 @@ def mozharness_on_docker_worker_setup(co
docker_worker_add_tooltool(config, job, taskdesc, internal=internal)
# Retry if mozharness returns TBPL_RETRY
worker['retry-exit-status'] = 4
docker_worker_setup_secrets(config, job, taskdesc)
command = [
- '/home/worker/bin/run-task',
- '--vcs-checkout', '/home/worker/workspace/build/src',
- '--tools-checkout', '/home/worker/workspace/build/tools',
+ '/builds/worker/bin/run-task',
+ '--vcs-checkout', '/builds/worker/workspace/build/src',
+ '--tools-checkout', '/builds/worker/workspace/build/tools',
'--',
- '/home/worker/workspace/build/src/{}'.format(
+ '/builds/worker/workspace/build/src/{}'.format(
run.get('job-script', 'taskcluster/scripts/builder/build-linux.sh')
),
]
worker['command'] = command
@run_job_using("generic-worker", "mozharness", schema=mozharness_run_schema)
--- a/taskcluster/taskgraph/transforms/job/mozharness_test.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness_test.py
@@ -97,36 +97,36 @@ def mozharness_test_on_docker(config, jo
worker['allow-ptrace'] = True # required for all tests, for crashreporter
worker['loopback-video'] = test['loopback-video']
worker['loopback-audio'] = test['loopback-audio']
worker['max-run-time'] = test['max-run-time']
worker['retry-exit-status'] = test['retry-exit-status']
artifacts = [
# (artifact name prefix, in-image path)
- ("public/logs/", "/home/worker/workspace/build/upload/logs/"),
- ("public/test", "/home/worker/artifacts/"),
- ("public/test_info/", "/home/worker/workspace/build/blobber_upload_dir/"),
+ ("public/logs/", "/builds/worker/workspace/build/upload/logs/"),
+ ("public/test", "/builds/worker/artifacts/"),
+ ("public/test_info/", "/builds/worker/workspace/build/blobber_upload_dir/"),
]
installer_url = get_artifact_url('<build>', mozharness['build-artifact-name'])
mozharness_url = get_artifact_url('<build>',
'public/build/mozharness.zip')
worker['artifacts'] = [{
'name': prefix,
- 'path': os.path.join('/home/worker/workspace', path),
+ 'path': os.path.join('/builds/worker/workspace', path),
'type': 'directory',
} for (prefix, path) in artifacts]
worker['caches'] = [{
'type': 'persistent',
'name': 'level-{}-{}-test-workspace'.format(
config.params['level'], config.params['project']),
- 'mount-point': "/home/worker/workspace",
+ 'mount-point': "/builds/worker/workspace",
}]
env = worker['env'] = {
'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
'MOZHARNESS_SCRIPT': mozharness['script'],
'MOZILLA_BUILD_URL': {'task-reference': installer_url},
'NEED_PULSEAUDIO': 'true',
'NEED_WINDOW_MANAGER': 'true',
@@ -151,34 +151,34 @@ def mozharness_test_on_docker(config, jo
if mozharness['tooltool-downloads']:
docker_worker_add_tooltool(config, job, taskdesc, internal=True)
if test['reboot']:
raise Exception('reboot: {} not supported on generic-worker'.format(test['reboot']))
# assemble the command line
command = [
- '/home/worker/bin/run-task',
+ '/builds/worker/bin/run-task',
]
# Support vcs checkouts regardless of whether the task runs from
# source or not in case it is needed on an interactive loaner.
support_vcs_checkout(config, job, taskdesc)
# If we have a source checkout, run mozharness from it instead of
# downloading a zip file with the same content.
if test['checkout']:
- command.extend(['--vcs-checkout', '/home/worker/checkouts/gecko'])
- env['MOZHARNESS_PATH'] = '/home/worker/checkouts/gecko/testing/mozharness'
+ command.extend(['--vcs-checkout', '/builds/worker/checkouts/gecko'])
+ env['MOZHARNESS_PATH'] = '/builds/worker/checkouts/gecko/testing/mozharness'
else:
env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}
command.extend([
'--',
- '/home/worker/bin/test-linux.sh',
+ '/builds/worker/bin/test-linux.sh',
])
if mozharness.get('no-read-buildbot-config'):
command.append("--no-read-buildbot-config")
command.extend([
{"task-reference": "--installer-url=" + installer_url},
{"task-reference": "--test-packages-url=" + test_packages_url(taskdesc)},
])
--- a/taskcluster/taskgraph/transforms/job/run_task.py
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -14,17 +14,17 @@ from voluptuous import Required, Any
run_task_schema = Schema({
Required('using'): 'run-task',
# if true, add a cache at ~worker/.cache, which is where things like pip
# tend to hide their caches. This cache is never added for level-1 jobs.
Required('cache-dotcache', default=False): bool,
- # if true (the default), perform a checkout in /home/worker/checkouts/gecko
+ # if true (the default), perform a checkout in /builds/worker/checkouts/gecko
Required('checkout', default=True): bool,
# The sparse checkout profile to use. Value is the filename relative to the
# directory where sparse profiles are defined (build/sparse-profiles/).
Required('sparse-profile', default=None): basestring,
# The command arguments to pass to the `run-task` script, after the
# checkout arguments. If a list, it will be passed directly; otherwise
@@ -41,40 +41,40 @@ def common_setup(config, job, taskdesc):
taskdesc['worker'].setdefault('env', {})['MOZ_SCM_LEVEL'] = config.params['level']
def add_checkout_to_command(run, command):
if not run['checkout']:
return
- command.append('--vcs-checkout=/home/worker/checkouts/gecko')
+ command.append('--vcs-checkout=/builds/worker/checkouts/gecko')
if run['sparse-profile']:
command.append('--sparse-profile=build/sparse-profiles/%s' %
run['sparse-profile'])
@run_job_using("docker-worker", "run-task", schema=run_task_schema)
def docker_worker_run_task(config, job, taskdesc):
run = job['run']
worker = taskdesc['worker'] = job['worker']
common_setup(config, job, taskdesc)
worker['caches'].append({
'type': 'persistent',
'name': 'level-{level}-{project}-dotcache'.format(**config.params),
- 'mount-point': '/home/worker/.cache',
+ 'mount-point': '/builds/worker/.cache',
'skip-untrusted': True,
})
run_command = run['command']
if isinstance(run_command, basestring):
run_command = ['bash', '-cx', run_command]
- command = ['/home/worker/bin/run-task']
+ command = ['/builds/worker/bin/run-task']
add_checkout_to_command(run, command)
command.append('--fetch-hgfingerprint')
command.append('--')
command.extend(run_command)
worker['command'] = command
@run_job_using("native-engine", "run-task", schema=run_task_schema)
--- a/taskcluster/taskgraph/transforms/job/spidermonkey.py
+++ b/taskcluster/taskgraph/transforms/job/spidermonkey.py
@@ -35,17 +35,17 @@ def docker_worker_spidermonkey(config, j
run = job['run']
worker = taskdesc['worker']
worker['artifacts'] = []
worker.setdefault('caches', []).append({
'type': 'persistent',
'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
config.params['level'], config.params['project']),
- 'mount-point': "/home/worker/workspace",
+ 'mount-point': "/builds/worker/workspace",
'skip-untrusted': True,
})
docker_worker_add_public_artifacts(config, job, taskdesc)
docker_worker_add_tooltool(config, job, taskdesc)
env = worker.setdefault('env', {})
env.update({
@@ -59,22 +59,22 @@ def docker_worker_spidermonkey(config, j
script = "build-sm.sh"
if run['using'] == 'spidermonkey-package':
script = "build-sm-package.sh"
elif run['using'] == 'spidermonkey-mozjs-crate':
script = "build-sm-mozjs-crate.sh"
worker['command'] = [
- '/home/worker/bin/run-task',
- '--vcs-checkout', '/home/worker/workspace/build/src',
+ '/builds/worker/bin/run-task',
+ '--vcs-checkout', '/builds/worker/workspace/build/src',
'--',
'/bin/bash',
'-c',
- 'cd /home/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
+ 'cd /builds/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
]
@run_job_using("generic-worker", "spidermonkey", schema=sm_run_schema)
def generic_worker_spidermonkey(config, job, taskdesc):
assert job['worker']['os'] == 'windows', 'only supports windows right now'
run = job['run']
--- a/taskcluster/taskgraph/transforms/job/toolchain.py
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -116,22 +116,22 @@ def docker_worker_toolchain(config, job,
'MOZ_AUTOMATION': '1',
})
if run['tooltool-downloads']:
internal = run['tooltool-downloads'] == 'internal'
docker_worker_add_tooltool(config, job, taskdesc, internal=internal)
worker['command'] = [
- '/home/worker/bin/run-task',
- '--vcs-checkout=/home/worker/workspace/build/src',
+ '/builds/worker/bin/run-task',
+ '--vcs-checkout=/builds/worker/workspace/build/src',
'--',
'bash',
'-c',
- 'cd /home/worker && '
+ 'cd /builds/worker && '
'./workspace/build/src/taskcluster/scripts/misc/{}'.format(
run['script'])
]
attributes = taskdesc.setdefault('attributes', {})
attributes['toolchain-artifact'] = run['toolchain-artifact']
if 'toolchain-alias' in run:
attributes['toolchain-alias'] = run['toolchain-alias']
--- a/taskcluster/taskgraph/transforms/marionette_harness.py
+++ b/taskcluster/taskgraph/transforms/marionette_harness.py
@@ -26,12 +26,12 @@ def setup_task(config, tasks):
'MOZ_BUILD_DATE': config.params['moz_build_date'],
'MOZ_SCM_LEVEL': config.params['level'],
})
task['worker']['caches'] = [{
'type': 'persistent',
'name': 'level-{}-{}-tc-vcs'.format(
config.params['level'], config.params['project']),
- 'mount-point': "/home/worker/.tc-vcs",
+ 'mount-point': "/builds/worker/.tc-vcs",
}]
yield task
--- a/taskcluster/taskgraph/transforms/repackage.py
+++ b/taskcluster/taskgraph/transforms/repackage.py
@@ -239,25 +239,25 @@ def _generate_taskcluster_prefix(task_id
def _generate_task_output_files(build_platform, locale=None):
locale_output_path = '{}/'.format(locale) if locale else ''
if build_platform.startswith('linux') or build_platform.startswith('macosx'):
output_files = [{
'type': 'file',
- 'path': '/home/worker/workspace/build/artifacts/{}target.complete.mar'
+ 'path': '/builds/worker/workspace/build/artifacts/{}target.complete.mar'
.format(locale_output_path),
'name': 'public/build/{}target.complete.mar'.format(locale_output_path),
}]
if build_platform.startswith('macosx'):
output_files.append({
'type': 'file',
- 'path': '/home/worker/workspace/build/artifacts/{}target.dmg'
+ 'path': '/builds/worker/workspace/build/artifacts/{}target.dmg'
.format(locale_output_path),
'name': 'public/build/{}target.dmg'.format(locale_output_path),
})
elif build_platform.startswith('win'):
output_files = [{
'type': 'file',
'path': 'public/build/{}target.installer.exe'.format(locale_output_path),
--- a/testing/mozharness/configs/android/androidarm_4_3.py
+++ b/testing/mozharness/configs/android/androidarm_4_3.py
@@ -4,18 +4,18 @@ config = {
"buildbot_json_path": "buildprops.json",
"hostutils_manifest_path": "testing/config/tooltool-manifests/linux64/hostutils.manifest",
"robocop_package_name": "org.mozilla.roboexample.test",
"marionette_address": "localhost:2828",
"marionette_test_manifest": "unit-tests.ini",
"download_tooltool": True,
"tooltool_servers": ['http://relengapi/tooltool/'],
"tooltool_manifest_path": "testing/config/tooltool-manifests/androidarm_4_3/releng.manifest",
- "tooltool_cache": "/home/worker/tooltool_cache",
- "avds_dir": "/home/worker/workspace/build/.android",
+ "tooltool_cache": "/builds/worker/tooltool_cache",
+ "avds_dir": "/builds/worker/workspace/build/.android",
"emulator_manifest": """
[
{
"size": 140097024,
"digest": "51781032335c09103e8509b1a558bf22a7119392cf1ea301c49c01bdf21ff0ceb37d260bc1c322cd9b903252429fb01830fc27e4632be30cd345c95bf4b1a39b",
"algorithm": "sha512",
"filename": "android-sdk_r24.0.2-linux.tgz",
"unpack": "True"
--- a/testing/mozharness/configs/android/androidx86.py
+++ b/testing/mozharness/configs/android/androidx86.py
@@ -1,18 +1,18 @@
import os
config = {
"buildbot_json_path": "buildprops.json",
"hostutils_manifest_path": "testing/config/tooltool-manifests/linux64/hostutils.manifest",
"tooltool_manifest_path": "testing/config/tooltool-manifests/androidx86/releng.manifest",
- "tooltool_cache": "/home/worker/tooltool_cache",
+ "tooltool_cache": "/builds/worker/tooltool_cache",
"download_tooltool": True,
"tooltool_servers": ['http://relengapi/tooltool/'],
- "avds_dir": "/home/worker/workspace/build/.android",
+ "avds_dir": "/builds/worker/workspace/build/.android",
"emulator_manifest": """
[
{
"size": 193383673,
"digest": "6609e8b95db59c6a3ad60fc3dcfc358b2c8ec8b4dda4c2780eb439e1c5dcc5d550f2e47ce56ba14309363070078d09b5287e372f6e95686110ff8a2ef1838221",
"algorithm": "sha512",
"filename": "android-sdk18_0.r18moz1.orig.tar.gz",
"unpack": "True"
--- a/testing/mozharness/configs/builds/build_pool_specifics.py
+++ b/testing/mozharness/configs/builds/build_pool_specifics.py
@@ -33,12 +33,12 @@ config = {
"taskcluster": {
'graph_server': 'graphs.mozilla.org',
'stage_server': 'ignored',
# use the relengapi proxy to talk to tooltool
"tooltool_servers": ['http://relengapi/tooltool/'],
"tooltool_url": 'http://relengapi/tooltool/',
'upload_env': {
'UPLOAD_HOST': 'localhost',
- 'UPLOAD_PATH': '/home/worker/artifacts',
+ 'UPLOAD_PATH': '/builds/worker/artifacts',
},
},
}
--- a/testing/mozharness/configs/builds/releng_base_android_64_builds.py
+++ b/testing/mozharness/configs/builds/releng_base_android_64_builds.py
@@ -89,17 +89,17 @@ config = {
'enable_max_vsize': False,
'use_package_as_marfile': True,
'env': {
'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
'DISPLAY': ':2',
'HG_SHARE_BASE_DIR': '/builds/hg-shared',
'MOZ_OBJDIR': 'obj-firefox',
'TINDERBOX_OUTPUT': '1',
- 'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+ 'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
'TOOLTOOL_HOME': '/builds',
'CCACHE_DIR': '/builds/ccache',
'CCACHE_COMPRESS': '1',
'CCACHE_UMASK': '002',
'LC_ALL': 'C',
'PATH': '/usr/local/bin:/bin:/usr/bin',
'SHIP_LICENSED_FONTS': '1',
},
--- a/testing/mozharness/configs/builds/releng_base_linux_32_builds.py
+++ b/testing/mozharness/configs/builds/releng_base_linux_32_builds.py
@@ -71,17 +71,17 @@ config = {
'stage_platform': 'linux',
'publish_nightly_en_US_routes': True,
'env': {
'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
'DISPLAY': ':2',
'HG_SHARE_BASE_DIR': '/builds/hg-shared',
'MOZ_OBJDIR': 'obj-firefox',
'TINDERBOX_OUTPUT': '1',
- 'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+ 'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
'TOOLTOOL_HOME': '/builds',
'MOZ_CRASHREPORTER_NO_REPORT': '1',
'CCACHE_DIR': '/builds/ccache',
'CCACHE_COMPRESS': '1',
'CCACHE_UMASK': '002',
'LC_ALL': 'C',
# 32 bit specific
'PATH': '/usr/local/bin:/usr/lib/ccache:\
--- a/testing/mozharness/configs/builds/releng_base_linux_64_builds.py
+++ b/testing/mozharness/configs/builds/releng_base_linux_64_builds.py
@@ -70,17 +70,17 @@ config = {
'stage_platform': 'linux64',
'publish_nightly_en_US_routes': True,
'env': {
'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
'DISPLAY': ':2',
'HG_SHARE_BASE_DIR': '/builds/hg-shared',
'MOZ_OBJDIR': 'obj-firefox',
'TINDERBOX_OUTPUT': '1',
- 'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+ 'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
'TOOLTOOL_HOME': '/builds',
'MOZ_CRASHREPORTER_NO_REPORT': '1',
'CCACHE_DIR': '/builds/ccache',
'CCACHE_COMPRESS': '1',
'CCACHE_UMASK': '002',
'LC_ALL': 'C',
## 64 bit specific
'PATH': '/usr/local/bin:/usr/lib64/ccache:/bin:\
--- a/testing/mozharness/configs/builds/releng_base_mac_64_cross_builds.py
+++ b/testing/mozharness/configs/builds/releng_base_mac_64_cross_builds.py
@@ -41,17 +41,17 @@ config = {
'base_name': 'OS X 10.7 %(branch)s',
'platform': 'macosx64',
'stage_platform': 'macosx64',
'env': {
'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
'HG_SHARE_BASE_DIR': '/builds/hg-shared',
'MOZ_OBJDIR': 'obj-firefox',
'TINDERBOX_OUTPUT': '1',
- 'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+ 'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
'TOOLTOOL_HOME': '/builds',
'MOZ_CRASHREPORTER_NO_REPORT': '1',
'CCACHE_DIR': '/builds/ccache',
'CCACHE_COMPRESS': '1',
'CCACHE_UMASK': '002',
'LC_ALL': 'C',
## 64 bit specific
'PATH': '/usr/local/bin:/usr/lib64/ccache:/bin:\
--- a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_add-on-devel.py
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_add-on-devel.py
@@ -27,15 +27,15 @@ config = {
'TOOLTOOL_CACHE': '/builds/tooltool_cache',
'TOOLTOOL_HOME': '/builds',
'MOZ_CRASHREPORTER_NO_REPORT': '1',
'CCACHE_DIR': '/builds/ccache',
'CCACHE_COMPRESS': '1',
'CCACHE_UMASK': '002',
'LC_ALL': 'C',
## 64 bit specific
- 'PATH': '/home/worker/workspace/build/src/gcc/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+ 'PATH': '/builds/worker/workspace/build/src/gcc/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
/usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
/tools/python27-mercurial/bin:/home/cltbld/bin',
},
'src_mozconfig': 'browser/config/mozconfigs/linux64/add-on-devel',
#######################
}
--- a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_artifact.py
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_artifact.py
@@ -41,17 +41,17 @@ config = {
'stage_platform': 'linux64',
'publish_nightly_en_US_routes': False,
'env': {
'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
'DISPLAY': ':2',
'HG_SHARE_BASE_DIR': '/builds/hg-shared',
'MOZ_OBJDIR': 'obj-firefox',
'TINDERBOX_OUTPUT': '1',
- 'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+ 'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
'TOOLTOOL_HOME': '/builds',
'MOZ_CRASHREPORTER_NO_REPORT': '1',
'CCACHE_DIR': '/builds/ccache',
'CCACHE_COMPRESS': '1',
'CCACHE_UMASK': '002',
'LC_ALL': 'C',
## 64 bit specific
'PATH': '/usr/local/bin:/usr/lib64/ccache:/bin:\
--- a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_debug.py
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_debug.py
@@ -31,17 +31,17 @@ config = {
'stage_platform': 'linux64-st-an-opt',
'publish_nightly_en_US_routes': False,
'env': {
'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
'DISPLAY': ':2',
'HG_SHARE_BASE_DIR': '/builds/hg-shared',
'MOZ_OBJDIR': 'obj-firefox',
'TINDERBOX_OUTPUT': '1',
- 'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+ 'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
'TOOLTOOL_HOME': '/builds',
'MOZ_CRASHREPORTER_NO_REPORT': '1',
'CCACHE_DIR': '/builds/ccache',
'CCACHE_COMPRESS': '1',
'CCACHE_UMASK': '002',
'LC_ALL': 'C',
## 64 bit specific
'PATH': '/usr/local/bin:/usr/lib64/ccache:/bin:\
--- a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_opt.py
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_opt.py
@@ -31,17 +31,17 @@ config = {
'stage_platform': 'linux64-st-an',
'publish_nightly_en_US_routes': False,
'env': {
'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
'DISPLAY': ':2',
'HG_SHARE_BASE_DIR': '/builds/hg-shared',
'MOZ_OBJDIR': 'obj-firefox',
'TINDERBOX_OUTPUT': '1',
- 'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+ 'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
'TOOLTOOL_HOME': '/builds',
'MOZ_CRASHREPORTER_NO_REPORT': '1',
'CCACHE_DIR': '/builds/ccache',
'CCACHE_COMPRESS': '1',
'CCACHE_UMASK': '002',
'LC_ALL': 'C',
## 64 bit specific
'PATH': '/usr/local/bin:/usr/lib64/ccache:/bin:\
--- a/testing/mozharness/configs/firefox_ui_tests/taskcluster.py
+++ b/testing/mozharness/configs/firefox_ui_tests/taskcluster.py
@@ -10,10 +10,10 @@ config = {
},
"find_links": [
"http://pypi.pub.build.mozilla.org/pub",
],
"pip_index": False,
"download_minidump_stackwalk": True,
- "tooltool_cache": "/home/worker/tooltool-cache",
+ "tooltool_cache": "/builds/worker/tooltool-cache",
}
--- a/testing/mozharness/configs/marionette/prod_config.py
+++ b/testing/mozharness/configs/marionette/prod_config.py
@@ -31,17 +31,17 @@ config = {
'run-tests',
],
"default_blob_upload_servers": [
"https://blobupload.elasticbeanstalk.com",
],
"blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
"download_symbols": "ondemand",
"download_minidump_stackwalk": True,
- "tooltool_cache": "/home/worker/tooltool-cache",
+ "tooltool_cache": "/builds/worker/tooltool-cache",
"suite_definitions": {
"marionette_desktop": {
"options": [
"-vv",
"--log-raw=%(raw_log_file)s",
"--log-errorsummary=%(error_summary_file)s",
"--log-html=%(html_report_file)s",
"--binary=%(binary)s",
--- a/testing/mozharness/configs/single_locale/tc_android-api-15.py
+++ b/testing/mozharness/configs/single_locale/tc_android-api-15.py
@@ -6,13 +6,13 @@ config = {
"tooltool_config": {
"manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest",
"output_dir": "%(abs_work_dir)s/src",
},
"tooltool_servers": ['http://relengapi/tooltool/'],
"upload_env": {
'UPLOAD_HOST': 'localhost',
- 'UPLOAD_PATH': '/home/worker/artifacts/',
+ 'UPLOAD_PATH': '/builds/worker/artifacts/',
},
"mozilla_dir": "src/",
"simple_name_move": True,
}
--- a/testing/mozharness/configs/single_locale/tc_linux32.py
+++ b/testing/mozharness/configs/single_locale/tc_linux32.py
@@ -15,13 +15,13 @@ config = {
"DIST": "%(abs_objdir)s",
"LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
"L10NBASEDIR": "../../l10n",
"MOZ_MAKE_COMPLETE_MAR": "1",
'TOOLTOOL_CACHE': os.environ.get('TOOLTOOL_CACHE'),
},
"upload_env": {
'UPLOAD_HOST': 'localhost',
- 'UPLOAD_PATH': '/home/worker/artifacts/',
+ 'UPLOAD_PATH': '/builds/worker/artifacts/',
},
"mozilla_dir": "src/",
"simple_name_move": True,
}
--- a/testing/mozharness/configs/single_locale/tc_linux64.py
+++ b/testing/mozharness/configs/single_locale/tc_linux64.py
@@ -15,13 +15,13 @@ config = {
"DIST": "%(abs_objdir)s",
"LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
"L10NBASEDIR": "../../l10n",
"MOZ_MAKE_COMPLETE_MAR": "1",
'TOOLTOOL_CACHE': os.environ.get('TOOLTOOL_CACHE'),
},
"upload_env": {
'UPLOAD_HOST': 'localhost',
- 'UPLOAD_PATH': '/home/worker/artifacts/',
+ 'UPLOAD_PATH': '/builds/worker/artifacts/',
},
"mozilla_dir": "src/",
"simple_name_move": True,
}
--- a/testing/mozharness/configs/single_locale/tc_macosx64.py
+++ b/testing/mozharness/configs/single_locale/tc_macosx64.py
@@ -15,17 +15,17 @@ config = {
"DIST": "%(abs_objdir)s",
"LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
"L10NBASEDIR": "../../l10n",
"MOZ_MAKE_COMPLETE_MAR": "1",
'TOOLTOOL_CACHE': os.environ.get('TOOLTOOL_CACHE'),
},
"upload_env": {
'UPLOAD_HOST': 'localhost',
- 'UPLOAD_PATH': '/home/worker/artifacts/',
+ 'UPLOAD_PATH': '/builds/worker/artifacts/',
},
"tooltool_url": 'http://relengapi/tooltool/',
'tooltool_manifest_src': "browser/config/tooltool-manifests/macosx64/cross-l10n.manifest",
"mozilla_dir": "src/",
"simple_name_move": True,
}
--- a/testing/mozharness/configs/talos/linux64_config_taskcluster.py
+++ b/testing/mozharness/configs/talos/linux64_config_taskcluster.py
@@ -45,10 +45,10 @@ config = {
],
"default_blob_upload_servers": [
"https://blobupload.elasticbeanstalk.com",
],
"blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
"download_minidump_stackwalk": True,
"minidump_stackwalk_path": MINIDUMP_STACKWALK_PATH,
"minidump_tooltool_manifest_path": TOOLTOOL_MANIFEST_PATH,
- "tooltool_cache": "/home/worker/tooltool-cache",
+ "tooltool_cache": "/builds/worker/tooltool-cache",
}
--- a/testing/mozharness/configs/unittests/linux_unittest.py
+++ b/testing/mozharness/configs/unittests/linux_unittest.py
@@ -304,13 +304,13 @@ config = {
"cppunittest": [],
"jittest": [],
"mozbase": [],
},
"blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
"download_minidump_stackwalk": True,
"minidump_stackwalk_path": MINIDUMP_STACKWALK_PATH,
"minidump_tooltool_manifest_path": TOOLTOOL_MANIFEST_PATH,
- "tooltool_cache": "/home/worker/tooltool-cache",
+ "tooltool_cache": "/builds/worker/tooltool-cache",
"download_nodejs": True,
"nodejs_path": NODEJS_PATH,
"nodejs_tooltool_manifest_path": NODEJS_TOOLTOOL_MANIFEST_PATH,
}
--- a/testing/mozharness/mozharness/mozilla/testing/codecoverage.py
+++ b/testing/mozharness/mozharness/mozilla/testing/codecoverage.py
@@ -77,17 +77,17 @@ class CodeCoverageMixin(object):
# Create the grcov directory, get the tooltool manifest, and finally
# download and unpack the grcov binary.
self.grcov_dir = tempfile.mkdtemp()
manifest = os.path.join(dirs.get('abs_test_install_dir', os.path.join(dirs['abs_work_dir'], 'tests')), \
'config/tooltool-manifests/linux64/ccov.manifest')
tooltool_path = self._fetch_tooltool_py()
cmd = [tooltool_path, '--url', 'https://tooltool.mozilla-releng.net/', 'fetch', \
- '-m', manifest, '-o', '-c', '/home/worker/tooltool-cache']
+ '-m', manifest, '-o', '-c', '/builds/worker/tooltool-cache']
self.run_command(cmd, cwd=self.grcov_dir)
self.run_command(['tar', '-jxvf', os.path.join(self.grcov_dir, 'grcov-linux-standalone-x86_64.tar.bz2'), \
'-C', self.grcov_dir], cwd=self.grcov_dir)
@PostScriptAction('run-tests')
def _package_coverage_data(self, action, success=None):
if not self.code_coverage_enabled:
return
@@ -128,17 +128,17 @@ class CodeCoverageMixin(object):
# GRCOV post-processing
# Download the gcno fom the build machine.
self.download_file(self.url_to_gcno, file_name=None, parent_dir=self.grcov_dir)
# Run grcov on the zipped .gcno and .gcda files.
grcov_command = [
os.path.join(self.grcov_dir, 'grcov'),
'-t', 'lcov',
- '-p', '/home/worker/workspace/build/src/',
+ '-p', '/builds/worker/workspace/build/src/',
'--ignore-dir', 'gcc',
os.path.join(self.grcov_dir, 'target.code-coverage-gcno.zip'), file_path_gcda
]
# 'grcov_output' will be a tuple, the first variable is the path to the lcov output,
# the other is the path to the standard error output.
grcov_output = self.get_output_from_command(grcov_command, cwd=self.grcov_dir, \
silent=True, tmpfile_base_path=os.path.join(self.grcov_dir, 'grcov_lcov_output'), \