From 5b7e7525f0529ead6275fe37af1e5e875136c621 Mon Sep 17 00:00:00 2001 From: Dylan Conway Date: Wed, 20 Nov 2024 21:26:25 -0800 Subject: [PATCH 001/176] span and __bit_cast --- src/bun.js/bindings/Base64Helpers.cpp | 8 ++++---- src/bun.js/bindings/BunString.cpp | 20 +++++++++---------- src/bun.js/bindings/JSBuffer.cpp | 12 +++++------ src/bun.js/bindings/JSBundlerPlugin.cpp | 4 ++-- src/bun.js/bindings/JSFFIFunction.cpp | 2 +- src/bun.js/bindings/NodeHTTP.cpp | 4 ++-- src/bun.js/bindings/ZigGlobalObject.cpp | 10 +++++----- src/bun.js/bindings/bindings.cpp | 8 ++++---- src/bun.js/bindings/helpers.h | 8 ++++---- src/bun.js/bindings/webcore/HTTPHeaderMap.cpp | 4 ++-- .../webcore/SerializedScriptValue.cpp | 6 +++--- src/bun.js/bindings/wtf-bindings.cpp | 9 +++------ 12 files changed, 46 insertions(+), 49 deletions(-) diff --git a/src/bun.js/bindings/Base64Helpers.cpp b/src/bun.js/bindings/Base64Helpers.cpp index 1e13730f47b568..b502ef32cf4638 100644 --- a/src/bun.js/bindings/Base64Helpers.cpp +++ b/src/bun.js/bindings/Base64Helpers.cpp @@ -18,13 +18,13 @@ ExceptionOr atob(const String& encodedString) if (!encodedString.is8Bit()) { const auto span = encodedString.span16(); size_t expected_length = simdutf::latin1_length_from_utf16(span.size()); - LChar* ptr; + std::span ptr; WTF::String convertedString = WTF::String::createUninitialized(expected_length, ptr); if (UNLIKELY(convertedString.isNull())) { return WebCore::Exception { OutOfMemoryError }; } - auto result = simdutf::convert_utf16le_to_latin1_with_errors(span.data(), span.size(), reinterpret_cast(ptr)); + auto result = simdutf::convert_utf16le_to_latin1_with_errors(span.data(), span.size(), reinterpret_cast(ptr.data())); if (result.error) { return WebCore::Exception { InvalidCharacterError }; @@ -34,12 +34,12 @@ ExceptionOr atob(const String& encodedString) const auto span = encodedString.span8(); size_t result_length = simdutf::maximal_binary_length_from_base64(reinterpret_cast(span.data()), encodedString.length()); - LChar* ptr; + std::span ptr; WTF::String outString = WTF::String::createUninitialized(result_length, ptr); if (UNLIKELY(outString.isNull())) { return WebCore::Exception { OutOfMemoryError }; } - auto result = simdutf::base64_to_binary(reinterpret_cast(span.data()), span.size(), reinterpret_cast(ptr), simdutf::base64_default); + auto result = simdutf::base64_to_binary(reinterpret_cast(span.data()), span.size(), reinterpret_cast(ptr.data()), simdutf::base64_default); if (result.error != simdutf::error_code::SUCCESS) { return WebCore::Exception { InvalidCharacterError }; } diff --git a/src/bun.js/bindings/BunString.cpp b/src/bun.js/bindings/BunString.cpp index 4d9ce102f3fb02..17200b35c30552 100644 --- a/src/bun.js/bindings/BunString.cpp +++ b/src/bun.js/bindings/BunString.cpp @@ -226,7 +226,7 @@ extern "C" JSC::EncodedJSValue BunString__toJS(JSC::JSGlobalObject* globalObject extern "C" BunString BunString__fromUTF16Unitialized(size_t length) { ASSERT(length > 0); - UChar* ptr; + std::span ptr; auto impl = WTF::StringImpl::tryCreateUninitialized(length, ptr); if (UNLIKELY(!impl)) { return { .tag = BunStringTag::Dead }; @@ -237,7 +237,7 @@ extern "C" BunString BunString__fromUTF16Unitialized(size_t length) extern "C" BunString BunString__fromLatin1Unitialized(size_t length) { ASSERT(length > 0); - LChar* ptr; + std::span ptr; auto impl = WTF::StringImpl::tryCreateUninitialized(length, ptr); if (UNLIKELY(!impl)) { return { .tag = BunStringTag::Dead }; @@ -250,12 +250,12 @@ extern "C" BunString BunString__fromUTF8(const char* 
bytes, size_t length) ASSERT(length > 0); if (simdutf::validate_utf8(bytes, length)) { size_t u16Length = simdutf::utf16_length_from_utf8(bytes, length); - UChar* ptr; + std::span ptr; auto impl = WTF::StringImpl::tryCreateUninitialized(static_cast(u16Length), ptr); if (UNLIKELY(!impl)) { return { .tag = BunStringTag::Dead }; } - RELEASE_ASSERT(simdutf::convert_utf8_to_utf16(bytes, length, ptr) == u16Length); + RELEASE_ASSERT(simdutf::convert_utf8_to_utf16(bytes, length, ptr.data()) == u16Length); impl->ref(); return { BunStringTag::WTFStringImpl, { .wtf = impl.leakRef() } }; } @@ -271,12 +271,12 @@ extern "C" BunString BunString__fromUTF8(const char* bytes, size_t length) extern "C" BunString BunString__fromLatin1(const char* bytes, size_t length) { ASSERT(length > 0); - LChar* ptr; + std::span ptr; auto impl = WTF::StringImpl::tryCreateUninitialized(length, ptr); if (UNLIKELY(!impl)) { return { .tag = BunStringTag::Dead }; } - memcpy(ptr, bytes, length); + memcpy(ptr.data(), bytes, length); return { BunStringTag::WTFStringImpl, { .wtf = impl.leakRef() } }; } @@ -286,13 +286,13 @@ extern "C" BunString BunString__fromUTF16ToLatin1(const char16_t* bytes, size_t ASSERT(length > 0); ASSERT_WITH_MESSAGE(simdutf::validate_utf16le(bytes, length), "This function only accepts ascii UTF16 strings"); size_t outLength = simdutf::latin1_length_from_utf16(length); - LChar* ptr = nullptr; + std::span ptr; auto impl = WTF::StringImpl::tryCreateUninitialized(outLength, ptr); if (UNLIKELY(!impl)) { return { BunStringTag::Dead }; } - size_t latin1_length = simdutf::convert_valid_utf16le_to_latin1(bytes, length, reinterpret_cast(ptr)); + size_t latin1_length = simdutf::convert_valid_utf16le_to_latin1(bytes, length, reinterpret_cast(ptr.data())); ASSERT_WITH_MESSAGE(latin1_length == outLength, "Failed to convert UTF16 to Latin1"); return { BunStringTag::WTFStringImpl, { .wtf = impl.leakRef() } }; } @@ -300,12 +300,12 @@ extern "C" BunString BunString__fromUTF16ToLatin1(const char16_t* bytes, size_t extern "C" BunString BunString__fromUTF16(const char16_t* bytes, size_t length) { ASSERT(length > 0); - UChar* ptr; + std::span ptr; auto impl = WTF::StringImpl::tryCreateUninitialized(length, ptr); if (UNLIKELY(!impl)) { return { .tag = BunStringTag::Dead }; } - memcpy(ptr, bytes, length * sizeof(char16_t)); + memcpy(ptr.data(), bytes, length * sizeof(char16_t)); return { BunStringTag::WTFStringImpl, { .wtf = impl.leakRef() } }; } diff --git a/src/bun.js/bindings/JSBuffer.cpp b/src/bun.js/bindings/JSBuffer.cpp index d29fbbc0957660..fb0c0b5fe9dfb9 100644 --- a/src/bun.js/bindings/JSBuffer.cpp +++ b/src/bun.js/bindings/JSBuffer.cpp @@ -1547,21 +1547,21 @@ static inline JSC::EncodedJSValue jsBufferToString(JSC::VM& vm, JSC::JSGlobalObj switch (encoding) { case WebCore::BufferEncodingType::latin1: { - LChar* data = nullptr; + std::span data; auto str = String::createUninitialized(length, data); - memcpy(data, reinterpret_cast(castedThis->vector()) + offset, length); + memcpy(data.data(), reinterpret_cast(castedThis->vector()) + offset, length); return JSC::JSValue::encode(JSC::jsString(vm, WTFMove(str))); } case WebCore::BufferEncodingType::ucs2: case WebCore::BufferEncodingType::utf16le: { - UChar* data = nullptr; + std::span data; size_t u16length = length / 2; if (u16length == 0) { return JSC::JSValue::encode(JSC::jsEmptyString(vm)); } else { auto str = String::createUninitialized(u16length, data); - memcpy(reinterpret_cast(data), reinterpret_cast(castedThis->vector()) + offset, u16length * 2); + 
memcpy(reinterpret_cast(data.data()), reinterpret_cast(castedThis->vector()) + offset, u16length * 2); return JSC::JSValue::encode(JSC::jsString(vm, str)); } @@ -1571,9 +1571,9 @@ static inline JSC::EncodedJSValue jsBufferToString(JSC::VM& vm, JSC::JSGlobalObj case WebCore::BufferEncodingType::ascii: { // ascii: we always know the length // so we might as well allocate upfront - LChar* data = nullptr; + std::span data; auto str = String::createUninitialized(length, data); - Bun__encoding__writeLatin1(reinterpret_cast(castedThis->vector()) + offset, length, data, length, static_cast(encoding)); + Bun__encoding__writeLatin1(reinterpret_cast(castedThis->vector()) + offset, length, data.data(), length, static_cast(encoding)); return JSC::JSValue::encode(JSC::jsString(vm, WTFMove(str))); } diff --git a/src/bun.js/bindings/JSBundlerPlugin.cpp b/src/bun.js/bindings/JSBundlerPlugin.cpp index ff48b069180ce7..3fbe66f9bdc0c4 100644 --- a/src/bun.js/bindings/JSBundlerPlugin.cpp +++ b/src/bun.js/bindings/JSBundlerPlugin.cpp @@ -27,8 +27,8 @@ #include namespace Bun { -#define WRAP_BUNDLER_PLUGIN(argName) jsNumber(bitwise_cast(reinterpret_cast(argName))) -#define UNWRAP_BUNDLER_PLUGIN(callFrame) reinterpret_cast(bitwise_cast(callFrame->argument(0).asDouble())) +#define WRAP_BUNDLER_PLUGIN(argName) jsNumber(__bit_cast(reinterpret_cast(argName))) +#define UNWRAP_BUNDLER_PLUGIN(callFrame) reinterpret_cast(__bit_cast(callFrame->argument(0).asDouble())) /// These are callbacks defined in Zig and to be run after their associated JS version is run extern "C" void JSBundlerPlugin__addError(void*, void*, JSC::EncodedJSValue, JSC::EncodedJSValue); diff --git a/src/bun.js/bindings/JSFFIFunction.cpp b/src/bun.js/bindings/JSFFIFunction.cpp index 6f5d9dcf4c3b89..4436f86ca314b8 100644 --- a/src/bun.js/bindings/JSFFIFunction.cpp +++ b/src/bun.js/bindings/JSFFIFunction.cpp @@ -121,7 +121,7 @@ extern "C" JSC::EncodedJSValue Bun__CreateFFIFunctionValue(Zig::GlobalObject* gl // We should only expose the "ptr" field when it's a JSCallback for bun:ffi. // Not for internal usages of this function type. // We should also consider a separate JSFunction type for our usage to not have this branch in the first place... 
- function->putDirect(vm, JSC::Identifier::fromString(vm, String(MAKE_STATIC_STRING_IMPL("ptr"))), JSC::jsNumber(bitwise_cast(functionPointer)), JSC::PropertyAttribute::ReadOnly | 0); + function->putDirect(vm, JSC::Identifier::fromString(vm, String(MAKE_STATIC_STRING_IMPL("ptr"))), JSC::jsNumber(__bit_cast(functionPointer)), JSC::PropertyAttribute::ReadOnly | 0); return JSC::JSValue::encode(function); } diff --git a/src/bun.js/bindings/NodeHTTP.cpp b/src/bun.js/bindings/NodeHTTP.cpp index 2ff078e67d62ed..1bef336c82c3cf 100644 --- a/src/bun.js/bindings/NodeHTTP.cpp +++ b/src/bun.js/bindings/NodeHTTP.cpp @@ -212,10 +212,10 @@ static EncodedJSValue assignHeadersFromUWebSockets(uWS::HttpRequest* request, JS for (auto it = request->begin(); it != request->end(); ++it) { auto pair = *it; StringView nameView = StringView(std::span { reinterpret_cast(pair.first.data()), pair.first.length() }); - LChar* data = nullptr; + std::span data; auto value = String::createUninitialized(pair.second.length(), data); if (pair.second.length() > 0) - memcpy(data, pair.second.data(), pair.second.length()); + memcpy(data.data(), pair.second.data(), pair.second.length()); HTTPHeaderName name; WTF::String nameString; diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index cd2d6c43baec3b..c23890b7bfd6db 100644 --- a/src/bun.js/bindings/ZigGlobalObject.cpp +++ b/src/bun.js/bindings/ZigGlobalObject.cpp @@ -1455,8 +1455,8 @@ JSC_DEFINE_HOST_FUNCTION(functionNativeMicrotaskTrampoline, double cellPtr = callFrame->uncheckedArgument(0).asNumber(); double callbackPtr = callFrame->uncheckedArgument(1).asNumber(); - void* cell = reinterpret_cast(bitwise_cast(cellPtr)); - auto* callback = reinterpret_cast(bitwise_cast(callbackPtr)); + void* cell = reinterpret_cast(__bit_cast(cellPtr)); + auto* callback = reinterpret_cast(__bit_cast(callbackPtr)); callback(cell); return JSValue::encode(jsUndefined()); } @@ -1703,14 +1703,14 @@ JSC_DEFINE_HOST_FUNCTION(functionBTOA, // That means even though this looks like the wrong thing to do, // we should be converting to latin1, not utf8. if (!encodedString.is8Bit()) { - LChar* ptr; + std::span ptr; unsigned length = encodedString.length(); auto dest = WTF::String::createUninitialized(length, ptr); if (UNLIKELY(dest.isNull())) { throwOutOfMemoryError(globalObject, throwScope); return {}; } - WTF::StringImpl::copyCharacters(ptr, encodedString.span16()); + WTF::StringImpl::copyCharacters(ptr.data(), encodedString.span16()); encodedString = WTFMove(dest); } @@ -4020,7 +4020,7 @@ extern "C" void JSC__JSGlobalObject__queueMicrotaskCallback(Zig::GlobalObject* g JSFunction* function = globalObject->nativeMicrotaskTrampoline(); // Do not use JSCell* here because the GC will try to visit it. 
- globalObject->queueMicrotask(function, JSValue(bitwise_cast(reinterpret_cast(ptr))), JSValue(bitwise_cast(reinterpret_cast(callback))), jsUndefined(), jsUndefined()); + globalObject->queueMicrotask(function, JSValue(__bit_cast(reinterpret_cast(ptr))), JSValue(__bit_cast(reinterpret_cast(callback))), jsUndefined(), jsUndefined()); } JSC::Identifier GlobalObject::moduleLoaderResolve(JSGlobalObject* jsGlobalObject, diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index f80dd361480077..be13efaf22567c 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -1674,9 +1674,9 @@ WebCore::FetchHeaders* WebCore__FetchHeaders__createFromPicoHeaders_(const void* StringView nameView = StringView(std::span { reinterpret_cast(header.name.ptr), header.name.len }); - LChar* data = nullptr; + std::span data; auto value = String::createUninitialized(header.value.len, data); - memcpy(data, header.value.ptr, header.value.len); + memcpy(data.data(), header.value.ptr, header.value.len); HTTPHeaderName name; @@ -1708,10 +1708,10 @@ WebCore::FetchHeaders* WebCore__FetchHeaders__createFromUWS(void* arg1) for (const auto& header : req) { StringView nameView = StringView(std::span { reinterpret_cast(header.first.data()), header.first.length() }); - LChar* data = nullptr; + std::span data; auto value = String::createUninitialized(header.second.length(), data); if (header.second.length() > 0) - memcpy(data, header.second.data(), header.second.length()); + memcpy(data.data(), header.second.data(), header.second.length()); HTTPHeaderName name; diff --git a/src/bun.js/bindings/helpers.h b/src/bun.js/bindings/helpers.h index 5aeca386957d2c..cf5a5e875fbd4b 100644 --- a/src/bun.js/bindings/helpers.h +++ b/src/bun.js/bindings/helpers.h @@ -168,18 +168,18 @@ static const WTF::String toStringCopy(ZigString str) } if (isTaggedUTF16Ptr(str.ptr)) { - UChar* out = nullptr; + std::span out; auto impl = WTF::StringImpl::tryCreateUninitialized(str.len, out); if (UNLIKELY(!impl)) return WTF::String(); - memcpy(out, untag(str.ptr), str.len * sizeof(UChar)); + memcpy(out.data(), untag(str.ptr), str.len * sizeof(UChar)); return WTF::String(WTFMove(impl)); } else { - LChar* out = nullptr; + std::span out; auto impl = WTF::StringImpl::tryCreateUninitialized(str.len, out); if (UNLIKELY(!impl)) return WTF::String(); - memcpy(out, untag(str.ptr), str.len * sizeof(LChar)); + memcpy(out.data(), untag(str.ptr), str.len * sizeof(LChar)); return WTF::String(WTFMove(impl)); } } diff --git a/src/bun.js/bindings/webcore/HTTPHeaderMap.cpp b/src/bun.js/bindings/webcore/HTTPHeaderMap.cpp index d2765a7b8f6122..577485117be0ef 100644 --- a/src/bun.js/bindings/webcore/HTTPHeaderMap.cpp +++ b/src/bun.js/bindings/webcore/HTTPHeaderMap.cpp @@ -153,9 +153,9 @@ void HTTPHeaderMap::setUncommonHeaderCloneName(const StringView name, const Stri return equalIgnoringASCIICase(header.key, name); }); if (index == notFound) { - LChar* ptr = nullptr; + std::span ptr; auto nameCopy = WTF::String::createUninitialized(name.length(), ptr); - memcpy(ptr, name.span8().data(), name.length()); + memcpy(ptr.data(), name.span8().data(), name.length()); m_uncommonHeaders.append(UncommonHeader { nameCopy, value }); } else m_uncommonHeaders[index].value = value; diff --git a/src/bun.js/bindings/webcore/SerializedScriptValue.cpp b/src/bun.js/bindings/webcore/SerializedScriptValue.cpp index c3b936459e8fd1..f23ce53d6beb71 100644 --- a/src/bun.js/bindings/webcore/SerializedScriptValue.cpp +++ 
b/src/bun.js/bindings/webcore/SerializedScriptValue.cpp @@ -3223,7 +3223,7 @@ class CloneDeserializer : CloneBase { str = String({ reinterpret_cast(ptr), length }); ptr += length * sizeof(UChar); #else - UChar* characters; + std::span characters; str = String::createUninitialized(length, characters); for (unsigned i = 0; i < length; ++i) { uint16_t c; @@ -3269,7 +3269,7 @@ class CloneDeserializer : CloneBase { str = Identifier::fromString(vm, { reinterpret_cast(ptr), length }); ptr += length * sizeof(UChar); #else - UChar* characters; + std::span characters; str = String::createUninitialized(length, characters); for (unsigned i = 0; i < length; ++i) { uint16_t c; @@ -5595,7 +5595,7 @@ ExceptionOr> SerializedScriptValue::create(JSGlobalOb if (arrayBufferContentsArray.hasException()) return arrayBufferContentsArray.releaseException(); - // auto backingStores = ImageBitmap::detachBitmaps(WTFMove(imageBitmaps)); + // auto backingStores = ImageBitmap::detachBitmaps(WTFMove(imageBitmaps)); #if ENABLE(OFFSCREEN_CANVAS_IN_WORKERS) Vector> detachedCanvases; diff --git a/src/bun.js/bindings/wtf-bindings.cpp b/src/bun.js/bindings/wtf-bindings.cpp index 848e7d143d1e72..52de7d5a7c0b0c 100644 --- a/src/bun.js/bindings/wtf-bindings.cpp +++ b/src/bun.js/bindings/wtf-bindings.cpp @@ -195,13 +195,10 @@ String base64URLEncodeToString(Vector data) if (!encodedLength) return String(); - LChar* ptr; + std::span ptr; auto result = String::createUninitialized(encodedLength, ptr); - if (UNLIKELY(!ptr)) { - RELEASE_ASSERT_NOT_REACHED(); - return String(); - } - encodedLength = WTF__base64URLEncode(reinterpret_cast(data.data()), data.size(), reinterpret_cast(ptr), encodedLength); + + encodedLength = WTF__base64URLEncode(reinterpret_cast(data.data()), data.size(), reinterpret_cast(ptr.data()), encodedLength); if (result.length() != encodedLength) { return result.substringSharingImpl(0, encodedLength); } From e7d539630810a4adbe8a1f03328dd1e2f85aeeb6 Mon Sep 17 00:00:00 2001 From: Dylan Conway <35280289+dylan-conway@users.noreply.github.com> Date: Thu, 21 Nov 2024 18:29:13 -0800 Subject: [PATCH 002/176] Update SetupWebKit.cmake --- cmake/tools/SetupWebKit.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmake/tools/SetupWebKit.cmake b/cmake/tools/SetupWebKit.cmake index 2cdea17edc99b7..733ef6a2095cae 100644 --- a/cmake/tools/SetupWebKit.cmake +++ b/cmake/tools/SetupWebKit.cmake @@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use") option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading") if(NOT WEBKIT_VERSION) - set(WEBKIT_VERSION 3bc4abf2d5875baf500b4687ef869987f6d19e00) + set(WEBKIT_VERSION 3b86b8ca1bb784d706215c87ef1d94d8e3103532) endif() if(WEBKIT_LOCAL) From b2ba8d418e811397823d14c1fa8db20513683c90 Mon Sep 17 00:00:00 2001 From: dylan-conway Date: Fri, 22 Nov 2024 02:31:22 +0000 Subject: [PATCH 003/176] `bun run clang-format` --- src/bun.js/bindings/webcore/SerializedScriptValue.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bun.js/bindings/webcore/SerializedScriptValue.cpp b/src/bun.js/bindings/webcore/SerializedScriptValue.cpp index f23ce53d6beb71..c749770c3328df 100644 --- a/src/bun.js/bindings/webcore/SerializedScriptValue.cpp +++ b/src/bun.js/bindings/webcore/SerializedScriptValue.cpp @@ -5595,7 +5595,7 @@ ExceptionOr> SerializedScriptValue::create(JSGlobalOb if (arrayBufferContentsArray.hasException()) return arrayBufferContentsArray.releaseException(); - // auto backingStores = 
ImageBitmap::detachBitmaps(WTFMove(imageBitmaps)); + // auto backingStores = ImageBitmap::detachBitmaps(WTFMove(imageBitmaps)); #if ENABLE(OFFSCREEN_CANVAS_IN_WORKERS) Vector> detachedCanvases; From e485341ee636df53c6368decb3780d7f9b9d2c21 Mon Sep 17 00:00:00 2001 From: Dylan Conway Date: Fri, 22 Nov 2024 02:00:05 -0800 Subject: [PATCH 004/176] update --- cmake/tools/SetupWebKit.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmake/tools/SetupWebKit.cmake b/cmake/tools/SetupWebKit.cmake index 733ef6a2095cae..cf34f92f2c433f 100644 --- a/cmake/tools/SetupWebKit.cmake +++ b/cmake/tools/SetupWebKit.cmake @@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use") option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading") if(NOT WEBKIT_VERSION) - set(WEBKIT_VERSION 3b86b8ca1bb784d706215c87ef1d94d8e3103532) + set(WEBKIT_VERSION 64fa224c2aeafbe735462a9c19d5581763d81c0b) endif() if(WEBKIT_LOCAL) From e3e5e1e43da16efb50e7bf528eaf8f6123a155fb Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 1 Dec 2024 15:04:01 +0100 Subject: [PATCH 005/176] Use LLVM 18 --- cmake/targets/BuildBun.cmake | 45 ++ src/bun.js/bindings/JSBufferEncodingType.cpp | 5 +- src/bun.js/bindings/c-bindings.cpp | 6 +- .../webcore/PerformanceUserTiming.cpp | 42 +- src/bun.js/bindings/webcrypto/JSCryptoKey.cpp | 6 +- .../bindings/webcrypto/JSCryptoKeyUsage.cpp | 16 +- .../bindings/webcrypto/JSSubtleCrypto.cpp | 8 +- .../bindings/workaround-missing-symbols.cpp | 413 +++++++++++++++++- 8 files changed, 500 insertions(+), 41 deletions(-) diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 20cbb8293e91de..b61296990618ad 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -860,16 +860,61 @@ if(LINUX) if(ARCH STREQUAL "x64") target_link_options(${bun} PUBLIC + -Wl,--wrap=__isoc23_sscanf + -Wl,--wrap=__isoc23_strtol + -Wl,--wrap=__isoc23_strtoul + -Wl,--wrap=__isoc23_strtoull + -Wl,--wrap=__isoc23_vfscanf + -Wl,--wrap=__isoc23_vscanf + -Wl,--wrap=__isoc23_vsscanf + -Wl,--wrap=__libc_single_threaded + -Wl,--wrap=__libc_start_main + -Wl,--wrap=_dl_find_object + -Wl,--wrap=arc4random_buf + -Wl,--wrap=dladdr + -Wl,--wrap=dlclose + -Wl,--wrap=dlerror + -Wl,--wrap=dlopen + -Wl,--wrap=dlsym + -Wl,--wrap=dlvsym -Wl,--wrap=fcntl -Wl,--wrap=fcntl64 -Wl,--wrap=fstat -Wl,--wrap=fstat64 -Wl,--wrap=fstatat -Wl,--wrap=fstatat64 + -Wl,--wrap=getrandom -Wl,--wrap=lstat -Wl,--wrap=lstat64 -Wl,--wrap=mknod -Wl,--wrap=mknodat + -Wl,--wrap=pthread_attr_getstack + -Wl,--wrap=pthread_attr_setguardsize + -Wl,--wrap=pthread_attr_setstacksize + -Wl,--wrap=pthread_create + -Wl,--wrap=pthread_detach + -Wl,--wrap=pthread_getattr_np + -Wl,--wrap=pthread_getspecific + -Wl,--wrap=pthread_join + -Wl,--wrap=pthread_key_create + -Wl,--wrap=pthread_key_delete + -Wl,--wrap=pthread_kill + -Wl,--wrap=pthread_mutex_trylock + -Wl,--wrap=pthread_mutexattr_destroy + -Wl,--wrap=pthread_mutexattr_init + -Wl,--wrap=pthread_mutexattr_settype + -Wl,--wrap=pthread_once + -Wl,--wrap=pthread_rwlock_destroy + -Wl,--wrap=pthread_rwlock_init + -Wl,--wrap=pthread_rwlock_rdlock + -Wl,--wrap=pthread_rwlock_unlock + -Wl,--wrap=pthread_rwlock_wrlock + -Wl,--wrap=pthread_setspecific + -Wl,--wrap=pthread_sigmask + -Wl,--wrap=quick_exit + -Wl,--wrap=sem_init + -Wl,--wrap=sem_post + -Wl,--wrap=sem_wait -Wl,--wrap=stat -Wl,--wrap=stat64 -Wl,--wrap=statx diff --git a/src/bun.js/bindings/JSBufferEncodingType.cpp b/src/bun.js/bindings/JSBufferEncodingType.cpp index 
e97504fb707072..903ba949518e6c 100644 --- a/src/bun.js/bindings/JSBufferEncodingType.cpp +++ b/src/bun.js/bindings/JSBufferEncodingType.cpp @@ -31,7 +31,8 @@ using namespace JSC; String convertEnumerationToString(BufferEncodingType enumerationValue) { - static const NeverDestroyed values[] = { + + static const std::array, 8> values = { MAKE_STATIC_STRING_IMPL("utf8"), MAKE_STATIC_STRING_IMPL("ucs2"), MAKE_STATIC_STRING_IMPL("utf16le"), @@ -56,7 +57,7 @@ template<> std::optional parseEnumerationgetString(&lexicalGlobalObject)); + return parseEnumeration2(lexicalGlobalObject, arg.toWTFString(&lexicalGlobalObject)); } std::optional parseEnumeration2(JSGlobalObject& lexicalGlobalObject, WTF::String encoding) diff --git a/src/bun.js/bindings/c-bindings.cpp b/src/bun.js/bindings/c-bindings.cpp index 906fc01e812373..b952248edc5301 100644 --- a/src/bun.js/bindings/c-bindings.cpp +++ b/src/bun.js/bindings/c-bindings.cpp @@ -688,7 +688,11 @@ extern "C" int ffi_fscanf(FILE* stream, const char* fmt, ...) extern "C" int ffi_vsscanf(const char* str, const char* fmt, va_list ap) { - return vsscanf(str, fmt, ap); + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vsscanf(str, fmt, ap_copy); + va_end(ap_copy); + return result; } extern "C" int ffi_sscanf(const char* str, const char* fmt, ...) diff --git a/src/bun.js/bindings/webcore/PerformanceUserTiming.cpp b/src/bun.js/bindings/webcore/PerformanceUserTiming.cpp index 0ea5967ec1f4a5..81061abfb22b4c 100644 --- a/src/bun.js/bindings/webcore/PerformanceUserTiming.cpp +++ b/src/bun.js/bindings/webcore/PerformanceUserTiming.cpp @@ -44,27 +44,27 @@ namespace WebCore { using NavigationTimingFunction = unsigned long long (PerformanceTiming::*)() const; static constexpr std::pair restrictedMarkMappings[] = { - { "connectEnd", &PerformanceTiming::connectEnd }, - { "connectStart", &PerformanceTiming::connectStart }, - { "domComplete", &PerformanceTiming::domComplete }, - { "domContentLoadedEventEnd", &PerformanceTiming::domContentLoadedEventEnd }, - { "domContentLoadedEventStart", &PerformanceTiming::domContentLoadedEventStart }, - { "domInteractive", &PerformanceTiming::domInteractive }, - { "domLoading", &PerformanceTiming::domLoading }, - { "domainLookupEnd", &PerformanceTiming::domainLookupEnd }, - { "domainLookupStart", &PerformanceTiming::domainLookupStart }, - { "fetchStart", &PerformanceTiming::fetchStart }, - { "loadEventEnd", &PerformanceTiming::loadEventEnd }, - { "loadEventStart", &PerformanceTiming::loadEventStart }, - { "navigationStart", &PerformanceTiming::navigationStart }, - { "redirectEnd", &PerformanceTiming::redirectEnd }, - { "redirectStart", &PerformanceTiming::redirectStart }, - { "requestStart", &PerformanceTiming::requestStart }, - { "responseEnd", &PerformanceTiming::responseEnd }, - { "responseStart", &PerformanceTiming::responseStart }, - { "secureConnectionStart", &PerformanceTiming::secureConnectionStart }, - { "unloadEventEnd", &PerformanceTiming::unloadEventEnd }, - { "unloadEventStart", &PerformanceTiming::unloadEventStart }, + { "connectEnd"_s, &PerformanceTiming::connectEnd }, + { "connectStart"_s, &PerformanceTiming::connectStart }, + { "domComplete"_s, &PerformanceTiming::domComplete }, + { "domContentLoadedEventEnd"_s, &PerformanceTiming::domContentLoadedEventEnd }, + { "domContentLoadedEventStart"_s, &PerformanceTiming::domContentLoadedEventStart }, + { "domInteractive"_s, &PerformanceTiming::domInteractive }, + { "domLoading"_s, &PerformanceTiming::domLoading }, + { "domainLookupEnd"_s, 
&PerformanceTiming::domainLookupEnd }, + { "domainLookupStart"_s, &PerformanceTiming::domainLookupStart }, + { "fetchStart"_s, &PerformanceTiming::fetchStart }, + { "loadEventEnd"_s, &PerformanceTiming::loadEventEnd }, + { "loadEventStart"_s, &PerformanceTiming::loadEventStart }, + { "navigationStart"_s, &PerformanceTiming::navigationStart }, + { "redirectEnd"_s, &PerformanceTiming::redirectEnd }, + { "redirectStart"_s, &PerformanceTiming::redirectStart }, + { "requestStart"_s, &PerformanceTiming::requestStart }, + { "responseEnd"_s, &PerformanceTiming::responseEnd }, + { "responseStart"_s, &PerformanceTiming::responseStart }, + { "secureConnectionStart"_s, &PerformanceTiming::secureConnectionStart }, + { "unloadEventEnd"_s, &PerformanceTiming::unloadEventEnd }, + { "unloadEventStart"_s, &PerformanceTiming::unloadEventStart }, }; static constexpr SortedArrayMap restrictedMarkFunctions { restrictedMarkMappings }; diff --git a/src/bun.js/bindings/webcrypto/JSCryptoKey.cpp b/src/bun.js/bindings/webcrypto/JSCryptoKey.cpp index 1d3d32e6683bb5..ff2140d0d7fccf 100644 --- a/src/bun.js/bindings/webcrypto/JSCryptoKey.cpp +++ b/src/bun.js/bindings/webcrypto/JSCryptoKey.cpp @@ -88,9 +88,9 @@ template<> std::optional parseEnumeration(JSGl { auto stringValue = value.toWTFString(&lexicalGlobalObject); static constexpr std::pair mappings[] = { - { "private", CryptoKey::Type::Private }, - { "public", CryptoKey::Type::Public }, - { "secret", CryptoKey::Type::Secret }, + { "private"_s, CryptoKey::Type::Private }, + { "public"_s, CryptoKey::Type::Public }, + { "secret"_s, CryptoKey::Type::Secret }, }; static constexpr SortedArrayMap enumerationMapping { mappings }; if (auto* enumerationValue = enumerationMapping.tryGet(stringValue); LIKELY(enumerationValue)) diff --git a/src/bun.js/bindings/webcrypto/JSCryptoKeyUsage.cpp b/src/bun.js/bindings/webcrypto/JSCryptoKeyUsage.cpp index d749d879c2f0db..742f63ea6e62e4 100644 --- a/src/bun.js/bindings/webcrypto/JSCryptoKeyUsage.cpp +++ b/src/bun.js/bindings/webcrypto/JSCryptoKeyUsage.cpp @@ -65,14 +65,14 @@ template<> std::optional parseEnumeration(JSGlob { auto stringValue = value.toWTFString(&lexicalGlobalObject); static constexpr std::pair mappings[] = { - { "decrypt", CryptoKeyUsage::Decrypt }, - { "deriveBits", CryptoKeyUsage::DeriveBits }, - { "deriveKey", CryptoKeyUsage::DeriveKey }, - { "encrypt", CryptoKeyUsage::Encrypt }, - { "sign", CryptoKeyUsage::Sign }, - { "unwrapKey", CryptoKeyUsage::UnwrapKey }, - { "verify", CryptoKeyUsage::Verify }, - { "wrapKey", CryptoKeyUsage::WrapKey }, + { "decrypt"_s, CryptoKeyUsage::Decrypt }, + { "deriveBits"_s, CryptoKeyUsage::DeriveBits }, + { "deriveKey"_s, CryptoKeyUsage::DeriveKey }, + { "encrypt"_s, CryptoKeyUsage::Encrypt }, + { "sign"_s, CryptoKeyUsage::Sign }, + { "unwrapKey"_s, CryptoKeyUsage::UnwrapKey }, + { "verify"_s, CryptoKeyUsage::Verify }, + { "wrapKey"_s, CryptoKeyUsage::WrapKey }, }; static constexpr SortedArrayMap enumerationMapping { mappings }; if (auto* enumerationValue = enumerationMapping.tryGet(stringValue); LIKELY(enumerationValue)) diff --git a/src/bun.js/bindings/webcrypto/JSSubtleCrypto.cpp b/src/bun.js/bindings/webcrypto/JSSubtleCrypto.cpp index edd11f56ab2abd..92780d31add505 100644 --- a/src/bun.js/bindings/webcrypto/JSSubtleCrypto.cpp +++ b/src/bun.js/bindings/webcrypto/JSSubtleCrypto.cpp @@ -96,10 +96,10 @@ template<> std::optional parseEnumeration mappings[] = { - { "jwk", SubtleCrypto::KeyFormat::Jwk }, - { "pkcs8", SubtleCrypto::KeyFormat::Pkcs8 }, - { "raw", SubtleCrypto::KeyFormat::Raw 
}, - { "spki", SubtleCrypto::KeyFormat::Spki }, + { "jwk"_s, SubtleCrypto::KeyFormat::Jwk }, + { "pkcs8"_s, SubtleCrypto::KeyFormat::Pkcs8 }, + { "raw"_s, SubtleCrypto::KeyFormat::Raw }, + { "spki"_s, SubtleCrypto::KeyFormat::Spki }, }; static constexpr SortedArrayMap enumerationMapping { mappings }; if (auto* enumerationValue = enumerationMapping.tryGet(stringValue); LIKELY(enumerationValue)) diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index b1fcc12a637fdc..9bf30fe2fab7d6 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -1,5 +1,3 @@ - - #if defined(WIN32) #include @@ -66,6 +64,11 @@ extern "C" int kill(int pid, int sig) #include #include #include +#include +#include +#include +#include +#include #ifndef _STAT_VER #if defined(__aarch64__) @@ -94,6 +97,48 @@ __asm__(".symver powf,powf@GLIBC_2.2.5"); __asm__(".symver sincosf,sincosf@GLIBC_2.2.5"); __asm__(".symver sinf,sinf@GLIBC_2.2.5"); __asm__(".symver tanf,tanf@GLIBC_2.2.5"); + +// Add symbol versions for libc and threading functions +__asm__(".symver __libc_single_threaded,__libc_single_threaded@GLIBC_2.2.5"); +__asm__(".symver __libc_start_main,__libc_start_main@GLIBC_2.2.5"); +__asm__(".symver dladdr,dladdr@GLIBC_2.2.5"); +__asm__(".symver dlclose,dlclose@GLIBC_2.2.5"); +__asm__(".symver dlerror,dlerror@GLIBC_2.2.5"); +__asm__(".symver dlopen,dlopen@GLIBC_2.2.5"); +__asm__(".symver dlsym,dlsym@GLIBC_2.2.5"); +__asm__(".symver dlvsym,dlvsym@GLIBC_2.2.5"); +__asm__(".symver getrandom,getrandom@GLIBC_2.25"); + +// Add symbol versions for pthread functions +__asm__(".symver pthread_attr_getstack,pthread_attr_getstack@GLIBC_2.2.5"); +__asm__(".symver pthread_attr_setguardsize,pthread_attr_setguardsize@GLIBC_2.2.5"); +__asm__(".symver pthread_attr_setstacksize,pthread_attr_setstacksize@GLIBC_2.2.5"); +__asm__(".symver pthread_create,pthread_create@GLIBC_2.2.5"); +__asm__(".symver pthread_detach,pthread_detach@GLIBC_2.2.5"); +__asm__(".symver pthread_getattr_np,pthread_getattr_np@GLIBC_2.2.5"); +__asm__(".symver pthread_getspecific,pthread_getspecific@GLIBC_2.2.5"); +__asm__(".symver pthread_join,pthread_join@GLIBC_2.2.5"); +__asm__(".symver pthread_key_create,pthread_key_create@GLIBC_2.2.5"); +__asm__(".symver pthread_key_delete,pthread_key_delete@GLIBC_2.2.5"); +__asm__(".symver pthread_kill,pthread_kill@GLIBC_2.2.5"); +__asm__(".symver pthread_mutex_trylock,pthread_mutex_trylock@GLIBC_2.2.5"); +__asm__(".symver pthread_mutexattr_destroy,pthread_mutexattr_destroy@GLIBC_2.2.5"); +__asm__(".symver pthread_mutexattr_init,pthread_mutexattr_init@GLIBC_2.2.5"); +__asm__(".symver pthread_mutexattr_settype,pthread_mutexattr_settype@GLIBC_2.2.5"); +__asm__(".symver pthread_once,pthread_once@GLIBC_2.2.5"); +__asm__(".symver pthread_rwlock_destroy,pthread_rwlock_destroy@GLIBC_2.2.5"); +__asm__(".symver pthread_rwlock_init,pthread_rwlock_init@GLIBC_2.2.5"); +__asm__(".symver pthread_rwlock_rdlock,pthread_rwlock_rdlock@GLIBC_2.2.5"); +__asm__(".symver pthread_rwlock_unlock,pthread_rwlock_unlock@GLIBC_2.2.5"); +__asm__(".symver pthread_rwlock_wrlock,pthread_rwlock_wrlock@GLIBC_2.2.5"); +__asm__(".symver pthread_setspecific,pthread_setspecific@GLIBC_2.2.5"); +__asm__(".symver pthread_sigmask,pthread_sigmask@GLIBC_2.2.5"); +__asm__(".symver quick_exit,quick_exit@GLIBC_2.2.5"); +__asm__(".symver sem_init,sem_init@GLIBC_2.2.5"); +__asm__(".symver sem_post,sem_post@GLIBC_2.2.5"); +__asm__(".symver 
sem_wait,sem_wait@GLIBC_2.2.5"); +__asm__(".symver __libc_start_main,__libc_start_main@GLIBC_2.2.5"); + #elif defined(__aarch64__) __asm__(".symver cosf,cosf@GLIBC_2.17"); __asm__(".symver exp,exp@GLIBC_2.17"); @@ -135,6 +180,58 @@ float BUN_WRAP_GLIBC_SYMBOL(tanf)(float); int BUN_WRAP_GLIBC_SYMBOL(fcntl)(int, int, ...); int BUN_WRAP_GLIBC_SYMBOL(fcntl64)(int, int, ...); void BUN_WRAP_GLIBC_SYMBOL(sincosf)(float, float*, float*); + +// Add new declarations for scanning/conversion functions +int BUN_WRAP_GLIBC_SYMBOL(sscanf)(const char*, const char*, ...); +long int BUN_WRAP_GLIBC_SYMBOL(strtol)(const char*, char**, int); +unsigned long int BUN_WRAP_GLIBC_SYMBOL(strtoul)(const char*, char**, int); +unsigned long long int BUN_WRAP_GLIBC_SYMBOL(strtoull)(const char*, char**, int); +int BUN_WRAP_GLIBC_SYMBOL(vfscanf)(FILE*, const char*, va_list); +int BUN_WRAP_GLIBC_SYMBOL(vscanf)(const char*, va_list); +int BUN_WRAP_GLIBC_SYMBOL(vsscanf)(const char*, const char*, va_list); + +// Add declarations for pthread functions +int BUN_WRAP_GLIBC_SYMBOL(pthread_attr_getstack)(pthread_attr_t*, void**, size_t*); +int BUN_WRAP_GLIBC_SYMBOL(pthread_attr_setguardsize)(pthread_attr_t*, size_t); +int BUN_WRAP_GLIBC_SYMBOL(pthread_attr_setstacksize)(pthread_attr_t*, size_t); +int BUN_WRAP_GLIBC_SYMBOL(pthread_create)(pthread_t*, const pthread_attr_t*, void* (*)(void*), void*); +int BUN_WRAP_GLIBC_SYMBOL(pthread_detach)(pthread_t); +int BUN_WRAP_GLIBC_SYMBOL(pthread_getattr_np)(pthread_t, pthread_attr_t*); +void* BUN_WRAP_GLIBC_SYMBOL(pthread_getspecific)(pthread_key_t); +int BUN_WRAP_GLIBC_SYMBOL(pthread_join)(pthread_t, void**); +int BUN_WRAP_GLIBC_SYMBOL(pthread_key_create)(pthread_key_t*, void (*)(void*)); +int BUN_WRAP_GLIBC_SYMBOL(pthread_key_delete)(pthread_key_t); +int BUN_WRAP_GLIBC_SYMBOL(pthread_kill)(pthread_t, int); +int BUN_WRAP_GLIBC_SYMBOL(pthread_mutex_trylock)(pthread_mutex_t*); +int BUN_WRAP_GLIBC_SYMBOL(pthread_mutexattr_destroy)(pthread_mutexattr_t*); +int BUN_WRAP_GLIBC_SYMBOL(pthread_mutexattr_init)(pthread_mutexattr_t*); +int BUN_WRAP_GLIBC_SYMBOL(pthread_mutexattr_settype)(pthread_mutexattr_t*, int); +int BUN_WRAP_GLIBC_SYMBOL(pthread_once)(pthread_once_t*, void (*)(void)); +int BUN_WRAP_GLIBC_SYMBOL(pthread_rwlock_destroy)(pthread_rwlock_t*); +int BUN_WRAP_GLIBC_SYMBOL(pthread_rwlock_init)(pthread_rwlock_t*, const pthread_rwlockattr_t*); +int BUN_WRAP_GLIBC_SYMBOL(pthread_rwlock_rdlock)(pthread_rwlock_t*); +int BUN_WRAP_GLIBC_SYMBOL(pthread_rwlock_unlock)(pthread_rwlock_t*); +int BUN_WRAP_GLIBC_SYMBOL(pthread_rwlock_wrlock)(pthread_rwlock_t*); +int BUN_WRAP_GLIBC_SYMBOL(pthread_setspecific)(pthread_key_t, const void*); +int BUN_WRAP_GLIBC_SYMBOL(pthread_sigmask)(int, const sigset_t*, sigset_t*); + +// Add declarations for other system functions +void BUN_WRAP_GLIBC_SYMBOL(arc4random_buf)(void*, size_t); +ssize_t BUN_WRAP_GLIBC_SYMBOL(getrandom)(void*, size_t, unsigned int); +_Noreturn void BUN_WRAP_GLIBC_SYMBOL(quick_exit)(int); +int BUN_WRAP_GLIBC_SYMBOL(sem_init)(sem_t*, int, unsigned int); +int BUN_WRAP_GLIBC_SYMBOL(sem_post)(sem_t*); +int BUN_WRAP_GLIBC_SYMBOL(sem_wait)(sem_t*); + +// Add declarations for dynamic linking functions +int BUN_WRAP_GLIBC_SYMBOL(dladdr)(const void*, Dl_info*); +int BUN_WRAP_GLIBC_SYMBOL(dlclose)(void*); +char* BUN_WRAP_GLIBC_SYMBOL(dlerror)(void); +void* BUN_WRAP_GLIBC_SYMBOL(dlopen)(const char*, int); +void* BUN_WRAP_GLIBC_SYMBOL(dlsym)(void*, const char*); +void* BUN_WRAP_GLIBC_SYMBOL(dlvsym)(void*, const char*, const char*); + +int 
BUN_WRAP_GLIBC_SYMBOL(__libc_start_main)(int (*main)(int, char**, char**), int argc, char** argv, int (*init)(void), void (*fini)(void), void (*rtld_fini)(void), void* stack_end); } extern "C" { @@ -308,6 +405,318 @@ extern "C" int __wrap_mknodat(int dirfd, const char* path, mode_t mode, dev_t de return __xmknodat(_MKNOD_VER, dirfd, path, mode, dev); } +extern "C" { + +// Scanning/conversion function wrappers +int __wrap_sscanf(const char* str, const char* format, ...) +{ + va_list ap; + va_start(ap, format); + int result = vsscanf(str, format, ap); + va_end(ap); + return result; +} + +long int __wrap_strtol(const char* nptr, char** endptr, int base) +{ + return strtol(nptr, endptr, base); +} + +unsigned long int __wrap_strtoul(const char* nptr, char** endptr, int base) +{ + return strtoul(nptr, endptr, base); +} + +unsigned long long int __wrap_strtoull(const char* nptr, char** endptr, int base) +{ + return strtoull(nptr, endptr, base); +} + +unsigned long int __wrap___isoc23_strtoul(const char* nptr, char** endptr, int base) +{ + return strtoul(nptr, endptr, base); +} + +long int __wrap___isoc23_strtol(const char* nptr, char** endptr, int base) +{ + return strtol(nptr, endptr, base); +} + +unsigned long long int __wrap___isoc23_strtoull(const char* nptr, char** endptr, int base) +{ + return strtoull(nptr, endptr, base); +} + +int __wrap___isoc23_sscanf(const char* str, const char* format, ...) +{ + va_list ap; + va_start(ap, format); + int result = vsscanf(str, format, ap); + va_end(ap); + return result; +} + +int __wrap___isoc23_vscanf(const char* format, va_list ap) +{ + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vscanf(format, ap_copy); + va_end(ap_copy); + return result; +} + +int __wrap_vfscanf(FILE* stream, const char* format, va_list ap) +{ + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vfscanf(stream, format, ap_copy); + va_end(ap_copy); + return result; +} + +int __wrap_vscanf(const char* format, va_list ap) +{ + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vscanf(format, ap_copy); + va_end(ap_copy); + return result; +} + +int __wrap_vsscanf(const char* str, const char* format, va_list ap) +{ + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vsscanf(str, format, ap_copy); + va_end(ap_copy); + return result; +} + +int __wrap___isoc23_vfscanf(FILE* stream, const char* format, va_list ap) +{ + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vfscanf(stream, format, ap_copy); + va_end(ap_copy); + return result; +} + +int __wrap___isoc23_vsscanf(const char* str, const char* format, va_list ap) +{ + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vsscanf(str, format, ap_copy); + va_end(ap_copy); + return result; +} + +extern "C" int __libc_start_main(int (*main)(int, char**, char**), int argc, char** argv, int (*init)(void), void (*fini)(void), void (*rtld_fini)(void), void* stack_end); + +int __wrap___libc_start_main(int (*main)(int, char**, char**), int argc, char** argv, int (*init)(void), void (*fini)(void), void (*rtld_fini)(void), void* stack_end) +{ + return __libc_start_main(main, argc, argv, init, fini, rtld_fini, stack_end); +} + +// pthread function wrappers +int __wrap_pthread_attr_getstack(pthread_attr_t* attr, void** stackaddr, size_t* stacksize) +{ + return pthread_attr_getstack(attr, stackaddr, stacksize); +} + +int __wrap_pthread_attr_setguardsize(pthread_attr_t* attr, size_t guardsize) +{ + return pthread_attr_setguardsize(attr, guardsize); +} + +int __wrap_pthread_attr_setstacksize(pthread_attr_t* attr, 
size_t stacksize) +{ + return pthread_attr_setstacksize(attr, stacksize); +} + +int __wrap_pthread_create(pthread_t* thread, const pthread_attr_t* attr, void* (*start_routine)(void*), void* arg) +{ + return pthread_create(thread, attr, start_routine, arg); +} + +int __wrap_pthread_detach(pthread_t thread) +{ + return pthread_detach(thread); +} + +int __wrap_pthread_getattr_np(pthread_t thread, pthread_attr_t* attr) +{ + return pthread_getattr_np(thread, attr); +} + +void* __wrap_pthread_getspecific(pthread_key_t key) +{ + return pthread_getspecific(key); +} + +int __wrap_pthread_join(pthread_t thread, void** retval) +{ + return pthread_join(thread, retval); +} + +int __wrap_pthread_key_create(pthread_key_t* key, void (*destructor)(void*)) +{ + return pthread_key_create(key, destructor); +} + +int __wrap_pthread_key_delete(pthread_key_t key) +{ + return pthread_key_delete(key); +} + +int __wrap_pthread_kill(pthread_t thread, int sig) +{ + return pthread_kill(thread, sig); +} + +int __wrap_pthread_mutex_trylock(pthread_mutex_t* mutex) +{ + return pthread_mutex_trylock(mutex); +} + +int __wrap_pthread_mutexattr_destroy(pthread_mutexattr_t* attr) +{ + return pthread_mutexattr_destroy(attr); +} + +int __wrap_pthread_mutexattr_init(pthread_mutexattr_t* attr) +{ + return pthread_mutexattr_init(attr); +} + +int __wrap_pthread_mutexattr_settype(pthread_mutexattr_t* attr, int type) +{ + return pthread_mutexattr_settype(attr, type); +} + +int __wrap_pthread_once(pthread_once_t* once_control, void (*init_routine)(void)) +{ + return pthread_once(once_control, init_routine); +} + +int __wrap_pthread_rwlock_destroy(pthread_rwlock_t* rwlock) +{ + return pthread_rwlock_destroy(rwlock); +} + +int __wrap_pthread_rwlock_init(pthread_rwlock_t* rwlock, const pthread_rwlockattr_t* attr) +{ + return pthread_rwlock_init(rwlock, attr); +} + +int __wrap_pthread_rwlock_rdlock(pthread_rwlock_t* rwlock) +{ + return pthread_rwlock_rdlock(rwlock); +} + +int __wrap_pthread_rwlock_unlock(pthread_rwlock_t* rwlock) +{ + return pthread_rwlock_unlock(rwlock); +} + +int __wrap_pthread_rwlock_wrlock(pthread_rwlock_t* rwlock) +{ + return pthread_rwlock_wrlock(rwlock); +} + +int __wrap_pthread_setspecific(pthread_key_t key, const void* value) +{ + return pthread_setspecific(key, value); +} + +int __wrap_pthread_sigmask(int how, const sigset_t* set, sigset_t* oldset) +{ + return pthread_sigmask(how, set, oldset); +} + +int __wrap___libc_single_threaded() +{ + return 0; +} + +// Dynamic linking function wrappers +int __wrap_dladdr(const void* addr, Dl_info* info) +{ + return dladdr(addr, info); +} + +int __wrap_dlclose(void* handle) +{ + return dlclose(handle); +} + +char* __wrap_dlerror(void) +{ + return dlerror(); +} + +int __wrap__dl_find_object(void* address, struct dl_find_object* result) +{ + return _dl_find_object(address, result); +} + +void* __wrap_dlopen(const char* filename, int flags) +{ + return dlopen(filename, flags); +} + +void* __wrap_dlsym(void* handle, const char* symbol) +{ + return dlsym(handle, symbol); +} + +void* __wrap_dlvsym(void* handle, const char* symbol, const char* version) +{ + return dlvsym(handle, symbol, version); +} + +// Other system function wrappers +void __wrap_arc4random_buf(void* buf, size_t nbytes) +{ + getrandom(buf, nbytes, 0); +} + +ssize_t __wrap_getrandom(void* buffer, size_t length, unsigned int flags) +{ + return getrandom(buffer, length, flags); +} + +_Noreturn void __wrap_quick_exit(int status) +{ + typedef void (*quick_exit_func)(int) __attribute__((noreturn)); + static 
std::once_flag quick_exit_initialized; + static quick_exit_func quick_exit; + std::call_once(quick_exit_initialized, []() { + quick_exit = (quick_exit_func)dlsym(RTLD_NEXT, "quick_exit"); + if (UNLIKELY(!quick_exit)) { + quick_exit = _exit; + } + }); + + quick_exit(status); +} + +int __wrap_sem_init(sem_t* sem, int pshared, unsigned int value) +{ + return sem_init(sem, pshared, value); +} + +int __wrap_sem_post(sem_t* sem) +{ + return sem_post(sem); +} + +int __wrap_sem_wait(sem_t* sem) +{ + return sem_wait(sem); +} + +} // extern "C" + #endif double __wrap_exp(double x) From 59f147261f01ed173022a9f13405f6cdd3b4f1e7 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 1 Dec 2024 15:10:43 +0100 Subject: [PATCH 006/176] Bump LLVM --- scripts/bootstrap.sh | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 871340f1fc4d36..d1bf965fad0596 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -714,14 +714,7 @@ install_build_essentials() { } llvm_version_exact() { - case "$os-$abi" in - darwin-* | windows-* | linux-musl) - print "18.1.8" - ;; - linux-*) - print "16.0.6" - ;; - esac + print "18.1.8" } llvm_version() { @@ -994,7 +987,7 @@ main() { install_common_software install_build_essentials install_chrome_dependencies - raise_file_descriptor_limit # XXX: temporary + raise_file_descriptor_limit } main "$@" From ed68e1f3e3f347456cd3a9787c379010c38a3c2b Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 1 Dec 2024 15:25:02 +0100 Subject: [PATCH 007/176] Set file descriptor limit in more places --- scripts/bootstrap.sh | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index d1bf965fad0596..88d6ece59214a4 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -973,8 +973,21 @@ install_chrome_dependencies() { } raise_file_descriptor_limit() { - append_to_file_sudo /etc/security/limits.conf '* soft nofile 262144' - append_to_file_sudo /etc/security/limits.conf '* hard nofile 262144' + if [ -d /etc/security ]; then + append_to_file_sudo /etc/security/limits.conf '* soft nofile 262144' + append_to_file_sudo /etc/security/limits.conf '* hard nofile 262144' + fi + + # Always add to /etc/profile + append_to_file_sudo /etc/profile 'ulimit -n 262144' + + if [ -d /etc/systemd ]; then + append_to_file_sudo /etc/systemd/user.conf 'DefaultLimitNOFILE=262144' + fi + + if [ -d /etc/sysctl.d ]; then + append_to_file_sudo /etc/sysctl.d/99-file-max.conf 'fs.file-max=262144' + fi } main() { From e150c9a8c56f0f6d168970ba826d7d8241599e13 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 1 Dec 2024 15:26:38 +0100 Subject: [PATCH 008/176] Bump --- cmake/tools/SetupLLVM.cmake | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/cmake/tools/SetupLLVM.cmake b/cmake/tools/SetupLLVM.cmake index 9db637b60d5fce..a8b00e30e44494 100644 --- a/cmake/tools/SetupLLVM.cmake +++ b/cmake/tools/SetupLLVM.cmake @@ -4,11 +4,7 @@ if(NOT ENABLE_LLVM) return() endif() -if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE OR EXISTS "/etc/alpine-release") - set(DEFAULT_LLVM_VERSION "18.1.8") -else() - set(DEFAULT_LLVM_VERSION "16.0.6") -endif() +set(DEFAULT_LLVM_VERSION "18.1.8") optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION}) From 29f337b77abb8f67557a0744749f0770e25baf41 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 1 Dec 2024 15:30:48 +0100 Subject: [PATCH 009/176] Remove comment --- 
scripts/bootstrap.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 88d6ece59214a4..ed59a1c5cce22b 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -978,7 +978,6 @@ raise_file_descriptor_limit() { append_to_file_sudo /etc/security/limits.conf '* hard nofile 262144' fi - # Always add to /etc/profile append_to_file_sudo /etc/profile 'ulimit -n 262144' if [ -d /etc/systemd ]; then From b91db5f31bab29b3ba01d5ec433849d83070edd3 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 1 Dec 2024 16:01:03 +0100 Subject: [PATCH 010/176] Don't bother setup llvm for zig only builds --- cmake/tools/SetupLLVM.cmake | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/cmake/tools/SetupLLVM.cmake b/cmake/tools/SetupLLVM.cmake index a8b00e30e44494..a76cf16fe88964 100644 --- a/cmake/tools/SetupLLVM.cmake +++ b/cmake/tools/SetupLLVM.cmake @@ -1,4 +1,12 @@ -optionx(ENABLE_LLVM BOOL "If LLVM should be used for compilation" DEFAULT ON) + +set(DEFAULT_ENABLE_LLVM ON) + +# if target is bun-zig, set ENABLE_LLVM to OFF +if(TARGET bun-zig) + set(DEFAULT_ENABLE_LLVM OFF) +endif() + +optionx(ENABLE_LLVM BOOL "If LLVM should be used for compilation" DEFAULT ${DEFAULT_ENABLE_LLVM}) if(NOT ENABLE_LLVM) return() From bab451c1b67e395143abf440bea8632e0ec98f5c Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 1 Dec 2024 16:26:53 +0100 Subject: [PATCH 011/176] Fix --- .buildkite/ci.mjs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 70d4e44c1d70b7..97b525ef4e6aae 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -453,7 +453,10 @@ function getPipeline(options) { agents: getZigAgent(platform), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), - env: getBuildEnv(platform), + env: { + ...getBuildEnv(platform), + ENABLE_LLVM: "OFF", + }, command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`, }; }; @@ -545,9 +548,9 @@ function getPipeline(options) { const buildPlatforms = [ { os: "darwin", arch: "aarch64", release: "14" }, { os: "darwin", arch: "x64", release: "14" }, - { os: "linux", arch: "aarch64", distro: "debian", release: "11" }, - { os: "linux", arch: "x64", distro: "debian", release: "11" }, - { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "11" }, + { os: "linux", arch: "aarch64", distro: "debian", release: "12" }, + { os: "linux", arch: "x64", distro: "debian", release: "12" }, + { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12" }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" }, From a9d172bf02fa7a3a5efcd163a1c0a9144e3c81c3 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 2 Dec 2024 16:24:04 +0100 Subject: [PATCH 012/176] override libc stat --- cmake/targets/BuildBun.cmake | 12 +--- .../bindings/workaround-missing-symbols.cpp | 66 ------------------- src/c.zig | 18 +---- src/darwin_c.zig | 14 ++++ src/linux_c.zig | 58 ++++++++++++++++ src/windows_c.zig | 14 ++++ 6 files changed, 88 insertions(+), 94 deletions(-) diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index b61296990618ad..66879b25d446e7 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -854,7 +854,6 @@ if(LINUX) if(ARCH STREQUAL "aarch64") 
target_link_options(${bun} PUBLIC -Wl,--wrap=fcntl64 - -Wl,--wrap=statx ) endif() @@ -879,13 +878,7 @@ if(LINUX) -Wl,--wrap=dlvsym -Wl,--wrap=fcntl -Wl,--wrap=fcntl64 - -Wl,--wrap=fstat - -Wl,--wrap=fstat64 - -Wl,--wrap=fstatat - -Wl,--wrap=fstatat64 -Wl,--wrap=getrandom - -Wl,--wrap=lstat - -Wl,--wrap=lstat64 -Wl,--wrap=mknod -Wl,--wrap=mknodat -Wl,--wrap=pthread_attr_getstack @@ -915,9 +908,6 @@ if(LINUX) -Wl,--wrap=sem_init -Wl,--wrap=sem_post -Wl,--wrap=sem_wait - -Wl,--wrap=stat - -Wl,--wrap=stat64 - -Wl,--wrap=statx ) endif() @@ -966,7 +956,7 @@ if(LINUX) -Wl,-z,combreloc -Wl,--no-eh-frame-hdr -Wl,--sort-section=name - -Wl,--hash-style=gnu + -Wl,--hash-style=both -Wl,--build-id=sha1 # Better for debugging than default -Wl,-Map=${bun}.linker-map ) diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index 9bf30fe2fab7d6..02d294c59c9492 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -99,7 +99,6 @@ __asm__(".symver sinf,sinf@GLIBC_2.2.5"); __asm__(".symver tanf,tanf@GLIBC_2.2.5"); // Add symbol versions for libc and threading functions -__asm__(".symver __libc_single_threaded,__libc_single_threaded@GLIBC_2.2.5"); __asm__(".symver __libc_start_main,__libc_start_main@GLIBC_2.2.5"); __asm__(".symver dladdr,dladdr@GLIBC_2.2.5"); __asm__(".symver dlclose,dlclose@GLIBC_2.2.5"); @@ -137,7 +136,6 @@ __asm__(".symver quick_exit,quick_exit@GLIBC_2.2.5"); __asm__(".symver sem_init,sem_init@GLIBC_2.2.5"); __asm__(".symver sem_post,sem_post@GLIBC_2.2.5"); __asm__(".symver sem_wait,sem_wait@GLIBC_2.2.5"); -__asm__(".symver __libc_start_main,__libc_start_main@GLIBC_2.2.5"); #elif defined(__aarch64__) __asm__(".symver cosf,cosf@GLIBC_2.17"); @@ -341,70 +339,6 @@ extern "C" int __wrap_fcntl64(int fd, int cmd, ...) 
#if defined(__x86_64__) -#ifndef _MKNOD_VER -#define _MKNOD_VER 1 -#endif - -extern "C" int __lxstat(int ver, const char* filename, struct stat* stat); -extern "C" int __wrap_lstat(const char* filename, struct stat* stat) -{ - return __lxstat(_STAT_VER, filename, stat); -} - -extern "C" int __xstat(int ver, const char* filename, struct stat* stat); -extern "C" int __wrap_stat(const char* filename, struct stat* stat) -{ - return __xstat(_STAT_VER, filename, stat); -} - -extern "C" int __fxstat(int ver, int fd, struct stat* stat); -extern "C" int __wrap_fstat(int fd, struct stat* stat) -{ - return __fxstat(_STAT_VER, fd, stat); -} - -extern "C" int __fxstatat(int ver, int dirfd, const char* path, struct stat* stat, int flags); -extern "C" int __wrap_fstatat(int dirfd, const char* path, struct stat* stat, int flags) -{ - return __fxstatat(_STAT_VER, dirfd, path, stat, flags); -} - -extern "C" int __lxstat64(int ver, const char* filename, struct stat64* stat); -extern "C" int __wrap_lstat64(const char* filename, struct stat64* stat) -{ - return __lxstat64(_STAT_VER, filename, stat); -} - -extern "C" int __xstat64(int ver, const char* filename, struct stat64* stat); -extern "C" int __wrap_stat64(const char* filename, struct stat64* stat) -{ - return __xstat64(_STAT_VER, filename, stat); -} - -extern "C" int __fxstat64(int ver, int fd, struct stat64* stat); -extern "C" int __wrap_fstat64(int fd, struct stat64* stat) -{ - return __fxstat64(_STAT_VER, fd, stat); -} - -extern "C" int __fxstatat64(int ver, int dirfd, const char* path, struct stat64* stat, int flags); -extern "C" int __wrap_fstatat64(int dirfd, const char* path, struct stat64* stat, int flags) -{ - return __fxstatat64(_STAT_VER, dirfd, path, stat, flags); -} - -extern "C" int __xmknod(int ver, const char* path, mode_t mode, dev_t dev); -extern "C" int __wrap_mknod(const char* path, mode_t mode, dev_t dev) -{ - return __xmknod(_MKNOD_VER, path, mode, dev); -} - -extern "C" int __xmknodat(int ver, int dirfd, const char* path, mode_t mode, dev_t dev); -extern "C" int __wrap_mknodat(int dirfd, const char* path, mode_t mode, dev_t dev) -{ - return __xmknodat(_MKNOD_VER, dirfd, path, mode, dev); -} - extern "C" { // Scanning/conversion function wrappers diff --git a/src/c.zig b/src/c.zig index 1579667ab9f0e5..e0b8238fca1227 100644 --- a/src/c.zig +++ b/src/c.zig @@ -42,29 +42,13 @@ pub extern "c" fn memchr(s: [*]const u8, c: u8, n: usize) ?[*]const u8; pub extern "c" fn strchr(str: [*]const u8, char: u8) ?[*]const u8; -pub const lstat = blk: { - const T = *const fn ([*c]const u8, [*c]libc_stat) callconv(.C) c_int; // TODO: this is wrong on Windows - if (bun.Environment.isMusl) break :blk @extern(T, .{ .library_name = "c", .name = "lstat" }); - break :blk @extern(T, .{ .name = "lstat64" }); -}; -pub const fstat = blk: { - const T = *const fn (c_int, [*c]libc_stat) callconv(.C) c_int; // TODO: this is wrong on Windows - if (bun.Environment.isMusl) break :blk @extern(T, .{ .library_name = "c", .name = "fstat" }); - break :blk @extern(T, .{ .name = "fstat64" }); -}; -pub const stat = blk: { - const T = *const fn ([*c]const u8, [*c]libc_stat) callconv(.C) c_int; // TODO: this is wrong on Windows - if (bun.Environment.isMusl) break :blk @extern(T, .{ .library_name = "c", .name = "stat" }); - break :blk @extern(T, .{ .name = "stat64" }); -}; - pub fn lstat_absolute(path: [:0]const u8) !Stat { if (builtin.os.tag == .windows) { @compileError("Not implemented yet, conside using bun.sys.lstat()"); } var st = zeroes(libc_stat); - switch 
(errno(lstat(path.ptr, &st))) { + switch (errno(bun.C.lstat(path.ptr, &st))) { .SUCCESS => {}, .NOENT => return error.FileNotFound, // .EINVAL => unreachable, diff --git a/src/darwin_c.zig b/src/darwin_c.zig index 2c0268058f8eab..12fefde4853086 100644 --- a/src/darwin_c.zig +++ b/src/darwin_c.zig @@ -73,6 +73,20 @@ pub extern "c" fn fclonefileat(c_int, c_int, [*:0]const u8, uint32_t: c_int) c_i // int clonefile(const char * src, const char * dst, int flags); pub extern "c" fn clonefile(src: [*:0]const u8, dest: [*:0]const u8, flags: c_int) c_int; +pub const lstat = blk: { + const T = *const fn (c_int, [*c]std.c.stat) callconv(.C) c_int; + break :blk @extern(T, .{ .name = "lstat64" }); +}; + +pub const fstat = blk: { + const T = *const fn (c_int, [*c]std.c.stat) callconv(.C) c_int; + break :blk @extern(T, .{ .name = "fstat64" }); +}; +pub const stat = blk: { + const T = *const fn ([*c]const u8, [*c]std.c.stat) callconv(.C) c_int; + break :blk @extern(T, .{ .name = "stat64" }); +}; + // pub fn stat_absolute(path: [:0]const u8) StatError!Stat { // if (builtin.os.tag == .windows) { // var io_status_block: windows.IO_STATUS_BLOCK = undefined; diff --git a/src/linux_c.zig b/src/linux_c.zig index 32292c6426cf66..e6f804e852c6e2 100644 --- a/src/linux_c.zig +++ b/src/linux_c.zig @@ -785,3 +785,61 @@ export fn sys_epoll_pwait2(epfd: i32, events: ?[*]std.os.linux.epoll_event, maxe ), ); } + +// ********************************************************************************* +// libc overrides +// ********************************************************************************* + +fn simulateLibcErrno(rc: usize) c_int { + const signed: isize = @bitCast(rc); + const int: c_int = @intCast(if (signed > -4096 and signed < 0) -signed else 0); + std.c._errno().* = int; + return int; +} + +pub export fn stat(path: [*:0]const u8, buf: *std.os.linux.Stat) c_int { + const rc = std.os.linux.stat(path, buf); + return simulateLibcErrno(rc); +} + +pub const stat64 = stat; +pub const lstat64 = lstat; +pub const fstat64 = fstat; +pub const fstatat64 = fstatat; + +pub export fn lstat(path: [*:0]const u8, buf: *std.os.linux.Stat) c_int { + const rc = std.os.linux.lstat(path, buf); + return simulateLibcErrno(rc); +} + +pub export fn fstat(fd: c_int, buf: *std.os.linux.Stat) c_int { + const rc = std.os.linux.fstat(fd, buf); + return simulateLibcErrno(rc); +} + +pub export fn fstatat(dirfd: i32, path: [*:0]const u8, buf: *std.os.linux.Stat, flags: u32) c_int { + const rc = std.os.linux.fstatat(dirfd, path, buf, flags); + return simulateLibcErrno(rc); +} + +pub export fn statx(dirfd: i32, path: [*:0]const u8, flags: u32, mask: u32, buf: *std.os.linux.Statx) c_int { + const rc = std.os.linux.statx(dirfd, path, flags, mask, buf); + return simulateLibcErrno(rc); +} + +comptime { + _ = stat; + _ = stat64; + _ = lstat; + _ = lstat64; + _ = fstat; + _ = fstat64; + _ = fstatat; + _ = statx; + @export(stat, .{ .name = "stat64" }); + @export(lstat, .{ .name = "lstat64" }); + @export(fstat, .{ .name = "fstat64" }); + @export(fstatat, .{ .name = "fstatat64" }); +} + +// ********************************************************************************* diff --git a/src/windows_c.zig b/src/windows_c.zig index 7c0c5d0d9eea11..78b0dbfd5e220d 100644 --- a/src/windows_c.zig +++ b/src/windows_c.zig @@ -8,6 +8,20 @@ const Stat = std.fs.File.Stat; const Kind = std.fs.File.Kind; const StatError = std.fs.File.StatError; +pub const lstat = blk: { + const T = *const fn (c_int, [*c]std.c.stat) callconv(.C) c_int; + break :blk @extern(T, .{ .name = 
"lstat64" }); +}; + +pub const fstat = blk: { + const T = *const fn (c_int, [*c]std.c.stat) callconv(.C) c_int; + break :blk @extern(T, .{ .name = "fstat64" }); +}; +pub const stat = blk: { + const T = *const fn ([*c]const u8, [*c]std.c.stat) callconv(.C) c_int; + break :blk @extern(T, .{ .name = "stat64" }); +}; + pub fn getTotalMemory() usize { return uv.uv_get_total_memory(); } From df0fa4a53da14e626bc41bd03a898d1b06bdfb35 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 2 Dec 2024 16:35:44 +0100 Subject: [PATCH 013/176] fix --- src/bun.js/bindings/workaround-missing-symbols.cpp | 2 ++ src/darwin_c.zig | 4 ++-- src/windows_c.zig | 4 ++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index 02d294c59c9492..ff2a1651002bdc 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -153,6 +153,8 @@ __asm__(".symver powf,powf@GLIBC_2.17"); __asm__(".symver sincosf,sincosf@GLIBC_2.17"); __asm__(".symver sinf,sinf@GLIBC_2.17"); __asm__(".symver tanf,tanf@GLIBC_2.17"); + +__asm__(".symver pthread_sigmask,pthread_sigmask@GLIBC_2.2.5" #endif #if defined(__x86_64__) || defined(__aarch64__) diff --git a/src/darwin_c.zig b/src/darwin_c.zig index 12fefde4853086..0f9bc4df00a28c 100644 --- a/src/darwin_c.zig +++ b/src/darwin_c.zig @@ -74,12 +74,12 @@ pub extern "c" fn fclonefileat(c_int, c_int, [*:0]const u8, uint32_t: c_int) c_i pub extern "c" fn clonefile(src: [*:0]const u8, dest: [*:0]const u8, flags: c_int) c_int; pub const lstat = blk: { - const T = *const fn (c_int, [*c]std.c.stat) callconv(.C) c_int; + const T = *const fn ([*c]const u8, [*c]std.c.stat) callconv(.C) c_int; break :blk @extern(T, .{ .name = "lstat64" }); }; pub const fstat = blk: { - const T = *const fn (c_int, [*c]std.c.stat) callconv(.C) c_int; + const T = *const fn ([*c]const u8, [*c]std.c.stat) callconv(.C) c_int; break :blk @extern(T, .{ .name = "fstat64" }); }; pub const stat = blk: { diff --git a/src/windows_c.zig b/src/windows_c.zig index 78b0dbfd5e220d..ea19213d7613c7 100644 --- a/src/windows_c.zig +++ b/src/windows_c.zig @@ -9,12 +9,12 @@ const Kind = std.fs.File.Kind; const StatError = std.fs.File.StatError; pub const lstat = blk: { - const T = *const fn (c_int, [*c]std.c.stat) callconv(.C) c_int; + const T = *const fn ([*c]const u8, [*c]std.c.stat) callconv(.C) c_int; break :blk @extern(T, .{ .name = "lstat64" }); }; pub const fstat = blk: { - const T = *const fn (c_int, [*c]std.c.stat) callconv(.C) c_int; + const T = *const fn ([*c]const u8, [*c]std.c.stat) callconv(.C) c_int; break :blk @extern(T, .{ .name = "fstat64" }); }; pub const stat = blk: { From 5c420d972f3d3314631c3c7fb5a43a33553ec9ac Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 2 Dec 2024 16:36:34 +0100 Subject: [PATCH 014/176] Fix --- src/darwin_c.zig | 6 +++--- src/windows_c.zig | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/darwin_c.zig b/src/darwin_c.zig index 0f9bc4df00a28c..7fb07e64d9e340 100644 --- a/src/darwin_c.zig +++ b/src/darwin_c.zig @@ -74,16 +74,16 @@ pub extern "c" fn fclonefileat(c_int, c_int, [*:0]const u8, uint32_t: c_int) c_i pub extern "c" fn clonefile(src: [*:0]const u8, dest: [*:0]const u8, flags: c_int) c_int; pub const lstat = blk: { - const T = *const fn ([*c]const u8, [*c]std.c.stat) callconv(.C) c_int; + const T = *const fn ([*c]const u8, [*c]std.c.Stat) callconv(.C) c_int; break :blk @extern(T, .{ 
.name = "lstat64" }); }; pub const fstat = blk: { - const T = *const fn ([*c]const u8, [*c]std.c.stat) callconv(.C) c_int; + const T = *const fn ([*c]const u8, [*c]std.c.Stat) callconv(.C) c_int; break :blk @extern(T, .{ .name = "fstat64" }); }; pub const stat = blk: { - const T = *const fn ([*c]const u8, [*c]std.c.stat) callconv(.C) c_int; + const T = *const fn ([*c]const u8, [*c]std.c.Stat) callconv(.C) c_int; break :blk @extern(T, .{ .name = "stat64" }); }; diff --git a/src/windows_c.zig b/src/windows_c.zig index ea19213d7613c7..86d3c32f95311e 100644 --- a/src/windows_c.zig +++ b/src/windows_c.zig @@ -9,16 +9,16 @@ const Kind = std.fs.File.Kind; const StatError = std.fs.File.StatError; pub const lstat = blk: { - const T = *const fn ([*c]const u8, [*c]std.c.stat) callconv(.C) c_int; + const T = *const fn ([*c]const u8, [*c]std.c.Stat) callconv(.C) c_int; break :blk @extern(T, .{ .name = "lstat64" }); }; pub const fstat = blk: { - const T = *const fn ([*c]const u8, [*c]std.c.stat) callconv(.C) c_int; + const T = *const fn ([*c]const u8, [*c]std.c.Stat) callconv(.C) c_int; break :blk @extern(T, .{ .name = "fstat64" }); }; pub const stat = blk: { - const T = *const fn ([*c]const u8, [*c]std.c.stat) callconv(.C) c_int; + const T = *const fn ([*c]const u8, [*c]std.c.Stat) callconv(.C) c_int; break :blk @extern(T, .{ .name = "stat64" }); }; From 3f7627e98c914d35caffc42b83fbede7b7d89f4f Mon Sep 17 00:00:00 2001 From: Dylan Conway Date: Wed, 4 Dec 2024 15:17:12 -0800 Subject: [PATCH 015/176] case sensitive --- cmake/targets/BuildLibArchive.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmake/targets/BuildLibArchive.cmake b/cmake/targets/BuildLibArchive.cmake index e0cffd020be574..da8bfcb7cd5c01 100644 --- a/cmake/targets/BuildLibArchive.cmake +++ b/cmake/targets/BuildLibArchive.cmake @@ -18,7 +18,7 @@ register_cmake_command( -DENABLE_INSTALL=OFF -DENABLE_TEST=OFF -DENABLE_WERROR=OFF - -DENABLE_BZIP2=OFF + -DENABLE_BZip2=OFF -DENABLE_CAT=OFF -DENABLE_EXPAT=OFF -DENABLE_ICONV=OFF From b5862d01d2c3948c86f1811daba7ec60b026765d Mon Sep 17 00:00:00 2001 From: Dylan Conway Date: Wed, 4 Dec 2024 20:36:00 -0800 Subject: [PATCH 016/176] maybe fix build --- src/bun.js/bindings/workaround-missing-symbols.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index ff2a1651002bdc..047af7d0b30985 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -154,7 +154,7 @@ __asm__(".symver sincosf,sincosf@GLIBC_2.17"); __asm__(".symver sinf,sinf@GLIBC_2.17"); __asm__(".symver tanf,tanf@GLIBC_2.17"); -__asm__(".symver pthread_sigmask,pthread_sigmask@GLIBC_2.2.5" +__asm__(".symver pthread_sigmask,pthread_sigmask@GLIBC_2.2.5"); #endif #if defined(__x86_64__) || defined(__aarch64__) From e7b4a8f69ce0f88a9fdaf8648b4c77ca339f3cbb Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 4 Dec 2024 21:55:22 -0800 Subject: [PATCH 017/176] Update src/linux_c.zig --- src/linux_c.zig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/linux_c.zig b/src/linux_c.zig index e6f804e852c6e2..9f194017fb07b2 100644 --- a/src/linux_c.zig +++ b/src/linux_c.zig @@ -794,7 +794,7 @@ fn simulateLibcErrno(rc: usize) c_int { const signed: isize = @bitCast(rc); const int: c_int = @intCast(if (signed > -4096 and signed < 0) -signed else 0); std.c._errno().* = int; - return int; + return if (signed > -4096 and signed < 
0) -1 else int; } pub export fn stat(path: [*:0]const u8, buf: *std.os.linux.Stat) c_int { From ab71d5ca1f530624822d377b5c97bb6a8d81d192 Mon Sep 17 00:00:00 2001 From: Dylan Conway <35280289+dylan-conway@users.noreply.github.com> Date: Wed, 4 Dec 2024 21:58:38 -0800 Subject: [PATCH 018/176] Update cmake/tools/SetupWebKit.cmake --- cmake/tools/SetupWebKit.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmake/tools/SetupWebKit.cmake b/cmake/tools/SetupWebKit.cmake index 0c09983a98b435..50c1e2c7b93224 100644 --- a/cmake/tools/SetupWebKit.cmake +++ b/cmake/tools/SetupWebKit.cmake @@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use") option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading") if(NOT WEBKIT_VERSION) - set(WEBKIT_VERSION 8bbd4ed494f66395f9ae42aed1beb57e998265ca) + set(WEBKIT_VERSION e17d16e0060b3d80ae40e78353d19575c9a8f3af) endif() if(WEBKIT_LOCAL) From d7df0ce2640afb4c45920500338fa1261416107f Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Thu, 5 Dec 2024 13:38:00 -0800 Subject: [PATCH 019/176] Fix stat and lstat calling non-existent arm64 syscalls --- src/linux_c.zig | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/linux_c.zig b/src/linux_c.zig index 9f194017fb07b2..bf99c082710ee5 100644 --- a/src/linux_c.zig +++ b/src/linux_c.zig @@ -798,7 +798,8 @@ fn simulateLibcErrno(rc: usize) c_int { } pub export fn stat(path: [*:0]const u8, buf: *std.os.linux.Stat) c_int { - const rc = std.os.linux.stat(path, buf); + // https://git.musl-libc.org/cgit/musl/tree/src/stat/stat.c + const rc = std.os.linux.fstatat(std.os.linux.AT.FDCWD, path, buf, 0); return simulateLibcErrno(rc); } @@ -808,7 +809,8 @@ pub const fstat64 = fstat; pub const fstatat64 = fstatat; pub export fn lstat(path: [*:0]const u8, buf: *std.os.linux.Stat) c_int { - const rc = std.os.linux.lstat(path, buf); + // https://git.musl-libc.org/cgit/musl/tree/src/stat/lstat.c + const rc = std.os.linux.fstatat(std.os.linux.AT.FDCWD, path, buf, std.os.linux.AT.SYMLINK_NOFOLLOW); return simulateLibcErrno(rc); } From 333ba0f02a9b819dc35d7e9675308ff4343e1cce Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Thu, 5 Dec 2024 14:46:17 -0800 Subject: [PATCH 020/176] Clarify symbols.test.ts --- test/js/bun/symbols.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/js/bun/symbols.test.ts b/test/js/bun/symbols.test.ts index 67127e3555b3b1..510c5f68da1297 100644 --- a/test/js/bun/symbols.test.ts +++ b/test/js/bun/symbols.test.ts @@ -6,7 +6,7 @@ import { semver } from "bun"; const BUN_EXE = bunExe(); if (process.platform === "linux") { - test("objdump -T does not include symbols from glibc > 2.27", async () => { + test("objdump -T does not include symbols from glibc >= 2.27", async () => { const objdump = Bun.which("objdump") || Bun.which("llvm-objdump"); if (!objdump) { throw new Error("objdump executable not found. 
Please install it."); From 46140e9a0391d7db8f0580933412c011fe3d69ac Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Thu, 5 Dec 2024 14:46:29 -0800 Subject: [PATCH 021/176] Fix pthread_sigmask version on aarch64 --- src/bun.js/bindings/workaround-missing-symbols.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index 047af7d0b30985..61cba9310454a8 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -153,8 +153,7 @@ __asm__(".symver powf,powf@GLIBC_2.17"); __asm__(".symver sincosf,sincosf@GLIBC_2.17"); __asm__(".symver sinf,sinf@GLIBC_2.17"); __asm__(".symver tanf,tanf@GLIBC_2.17"); - -__asm__(".symver pthread_sigmask,pthread_sigmask@GLIBC_2.2.5"); +__asm__(".symver pthread_sigmask,pthread_sigmask@GLIBC_2.17"); #endif #if defined(__x86_64__) || defined(__aarch64__) From 819e64c5e94b0b90322073c94ca109e1e41477d5 Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Thu, 5 Dec 2024 18:30:07 -0800 Subject: [PATCH 022/176] Wrap more symbols on ARM --- cmake/targets/BuildBun.cmake | 84 ++-- .../bindings/workaround-missing-symbols.cpp | 364 ++++++++++-------- 2 files changed, 243 insertions(+), 205 deletions(-) diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 66879b25d446e7..1cdb6d0fe91240 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -850,13 +850,7 @@ if(APPLE) endif() if(LINUX) - if(NOT ABI STREQUAL "musl") - if(ARCH STREQUAL "aarch64") - target_link_options(${bun} PUBLIC - -Wl,--wrap=fcntl64 - ) - endif() - + if(NOT ABI STREQUAL "musl") if(ARCH STREQUAL "x64") target_link_options(${bun} PUBLIC -Wl,--wrap=__isoc23_sscanf @@ -866,48 +860,19 @@ if(LINUX) -Wl,--wrap=__isoc23_vfscanf -Wl,--wrap=__isoc23_vscanf -Wl,--wrap=__isoc23_vsscanf - -Wl,--wrap=__libc_single_threaded - -Wl,--wrap=__libc_start_main - -Wl,--wrap=_dl_find_object - -Wl,--wrap=arc4random_buf - -Wl,--wrap=dladdr - -Wl,--wrap=dlclose - -Wl,--wrap=dlerror - -Wl,--wrap=dlopen - -Wl,--wrap=dlsym -Wl,--wrap=dlvsym -Wl,--wrap=fcntl - -Wl,--wrap=fcntl64 -Wl,--wrap=getrandom -Wl,--wrap=mknod -Wl,--wrap=mknodat - -Wl,--wrap=pthread_attr_getstack - -Wl,--wrap=pthread_attr_setguardsize - -Wl,--wrap=pthread_attr_setstacksize - -Wl,--wrap=pthread_create - -Wl,--wrap=pthread_detach - -Wl,--wrap=pthread_getattr_np - -Wl,--wrap=pthread_getspecific - -Wl,--wrap=pthread_join - -Wl,--wrap=pthread_key_create - -Wl,--wrap=pthread_key_delete - -Wl,--wrap=pthread_kill - -Wl,--wrap=pthread_mutex_trylock - -Wl,--wrap=pthread_mutexattr_destroy - -Wl,--wrap=pthread_mutexattr_init - -Wl,--wrap=pthread_mutexattr_settype - -Wl,--wrap=pthread_once - -Wl,--wrap=pthread_rwlock_destroy - -Wl,--wrap=pthread_rwlock_init - -Wl,--wrap=pthread_rwlock_rdlock - -Wl,--wrap=pthread_rwlock_unlock - -Wl,--wrap=pthread_rwlock_wrlock - -Wl,--wrap=pthread_setspecific -Wl,--wrap=pthread_sigmask -Wl,--wrap=quick_exit - -Wl,--wrap=sem_init - -Wl,--wrap=sem_post - -Wl,--wrap=sem_wait + ) + endif() + + if (ARCH STREQUAL "aarch64") + target_link_options(${bun} PUBLIC + -Wl,--wrap=__pthread_key_create ) endif() @@ -927,6 +892,41 @@ if(LINUX) -Wl,--wrap=sincosf -Wl,--wrap=sinf -Wl,--wrap=tanf + -Wl,--wrap=arc4random_buf + -Wl,--wrap=sem_post + -Wl,--wrap=sem_wait + -Wl,--wrap=sem_init + -Wl,--wrap=__libc_single_threaded + -Wl,--wrap=__libc_start_main + -Wl,--wrap=_dl_find_object + -Wl,--wrap=dladdr + -Wl,--wrap=dlclose + -Wl,--wrap=dlerror 
+ -Wl,--wrap=dlopen + -Wl,--wrap=dlsym + -Wl,--wrap=pthread_attr_getstack + -Wl,--wrap=pthread_attr_setguardsize + -Wl,--wrap=pthread_attr_setstacksize + -Wl,--wrap=pthread_create + -Wl,--wrap=pthread_detach + -Wl,--wrap=pthread_getattr_np + -Wl,--wrap=pthread_getspecific + -Wl,--wrap=pthread_join + -Wl,--wrap=pthread_key_create + -Wl,--wrap=pthread_key_delete + -Wl,--wrap=pthread_kill + -Wl,--wrap=pthread_mutex_trylock + -Wl,--wrap=pthread_mutexattr_destroy + -Wl,--wrap=pthread_mutexattr_init + -Wl,--wrap=pthread_mutexattr_settype + -Wl,--wrap=pthread_once + -Wl,--wrap=pthread_rwlock_destroy + -Wl,--wrap=pthread_rwlock_init + -Wl,--wrap=pthread_rwlock_rdlock + -Wl,--wrap=pthread_rwlock_unlock + -Wl,--wrap=pthread_rwlock_wrlock + -Wl,--wrap=pthread_setspecific + -Wl,--wrap=fcntl64 ) endif() diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index 61cba9310454a8..541aa492408027 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -136,6 +136,7 @@ __asm__(".symver quick_exit,quick_exit@GLIBC_2.2.5"); __asm__(".symver sem_init,sem_init@GLIBC_2.2.5"); __asm__(".symver sem_post,sem_post@GLIBC_2.2.5"); __asm__(".symver sem_wait,sem_wait@GLIBC_2.2.5"); +__asm__(".symver __pthread_key_create,__pthread_key_create@GLIBC_2.2.5"); #elif defined(__aarch64__) __asm__(".symver cosf,cosf@GLIBC_2.17"); @@ -154,7 +155,42 @@ __asm__(".symver sincosf,sincosf@GLIBC_2.17"); __asm__(".symver sinf,sinf@GLIBC_2.17"); __asm__(".symver tanf,tanf@GLIBC_2.17"); __asm__(".symver pthread_sigmask,pthread_sigmask@GLIBC_2.17"); -#endif +__asm__(".symver __libc_start_main,__libc_start_main@GLIBC_2.17"); +__asm__(".symver dlsym,dlsym@GLIBC_2.17"); +__asm__(".symver dlopen,dlopen@GLIBC_2.17"); +__asm__(".symver pthread_detach,pthread_detach@GLIBC_2.17"); +__asm__(".symver pthread_attr_setstacksize,pthread_attr_setstacksize@GLIBC_2.17"); +__asm__(".symver pthread_attr_setguardsize,pthread_attr_setguardsize@GLIBC_2.17"); +__asm__(".symver pthread_create,pthread_create@GLIBC_2.17"); +__asm__(".symver pthread_join,pthread_join@GLIBC_2.17"); +__asm__(".symver dlclose,dlclose@GLIBC_2.17"); +__asm__(".symver dlerror,dlerror@GLIBC_2.17"); +__asm__(".symver pthread_key_create,pthread_key_create@GLIBC_2.17"); +__asm__(".symver pthread_key_delete,pthread_key_delete@GLIBC_2.17"); +__asm__(".symver pthread_setspecific,pthread_setspecific@GLIBC_2.17"); +__asm__(".symver pthread_mutex_trylock,pthread_mutex_trylock@GLIBC_2.17"); +__asm__(".symver pthread_once,pthread_once@GLIBC_2.17"); +__asm__(".symver pthread_getspecific,pthread_getspecific@GLIBC_2.17"); +__asm__(".symver dladdr,dladdr@GLIBC_2.17"); +__asm__(".symver pthread_getattr_np,pthread_getattr_np@GLIBC_2.17"); +__asm__(".symver pthread_attr_getstack,pthread_attr_getstack@GLIBC_2.17"); +__asm__(".symver __libc_single_threaded,__libc_single_threaded@GLIBC_2.17"); +__asm__(".symver pthread_kill,pthread_kill@GLIBC_2.17"); +__asm__(".symver sem_post,sem_post@GLIBC_2.17"); +__asm__(".symver sem_wait,sem_wait@GLIBC_2.17"); +__asm__(".symver sem_init,sem_init@GLIBC_2.17"); +__asm__(".symver pthread_rwlock_init,pthread_rwlock_init@GLIBC_2.17"); +__asm__(".symver pthread_rwlock_rdlock,pthread_rwlock_rdlock@GLIBC_2.17"); +__asm__(".symver pthread_rwlock_wrlock,pthread_rwlock_wrlock@GLIBC_2.17"); +__asm__(".symver pthread_rwlock_unlock,pthread_rwlock_unlock@GLIBC_2.17"); +__asm__(".symver pthread_rwlock_destroy,pthread_rwlock_destroy@GLIBC_2.17"); 
+__asm__(".symver pthread_mutexattr_init,pthread_mutexattr_init@GLIBC_2.17"); +__asm__(".symver pthread_mutexattr_settype,pthread_mutexattr_settype@GLIBC_2.17"); +__asm__(".symver pthread_mutexattr_destroy,pthread_mutexattr_destroy@GLIBC_2.17"); +__asm__(".symver _dl_find_object,_dl_find_object@GLIBC_2.17"); +__asm__(".symver __pthread_key_create,__pthread_key_create@GLIBC_2.17"); + +#endif // aarch64 #if defined(__x86_64__) || defined(__aarch64__) #define BUN_WRAP_GLIBC_SYMBOL(symbol) __wrap_##symbol @@ -163,6 +199,7 @@ __asm__(".symver pthread_sigmask,pthread_sigmask@GLIBC_2.17"); #endif extern "C" { + double BUN_WRAP_GLIBC_SYMBOL(exp)(double); double BUN_WRAP_GLIBC_SYMBOL(fmod)(double, double); double BUN_WRAP_GLIBC_SYMBOL(log)(double); @@ -190,7 +227,7 @@ int BUN_WRAP_GLIBC_SYMBOL(vscanf)(const char*, va_list); int BUN_WRAP_GLIBC_SYMBOL(vsscanf)(const char*, const char*, va_list); // Add declarations for pthread functions -int BUN_WRAP_GLIBC_SYMBOL(pthread_attr_getstack)(pthread_attr_t*, void**, size_t*); +int BUN_WRAP_GLIBC_SYMBOL(pthread_attr_getstack)(const pthread_attr_t*, void**, size_t*); int BUN_WRAP_GLIBC_SYMBOL(pthread_attr_setguardsize)(pthread_attr_t*, size_t); int BUN_WRAP_GLIBC_SYMBOL(pthread_attr_setstacksize)(pthread_attr_t*, size_t); int BUN_WRAP_GLIBC_SYMBOL(pthread_create)(pthread_t*, const pthread_attr_t*, void* (*)(void*), void*); @@ -213,6 +250,7 @@ int BUN_WRAP_GLIBC_SYMBOL(pthread_rwlock_unlock)(pthread_rwlock_t*); int BUN_WRAP_GLIBC_SYMBOL(pthread_rwlock_wrlock)(pthread_rwlock_t*); int BUN_WRAP_GLIBC_SYMBOL(pthread_setspecific)(pthread_key_t, const void*); int BUN_WRAP_GLIBC_SYMBOL(pthread_sigmask)(int, const sigset_t*, sigset_t*); +void* BUN_WRAP_GLIBC_SYMBOL(pthread_getspecific)(pthread_key_t key); // Add declarations for other system functions void BUN_WRAP_GLIBC_SYMBOL(arc4random_buf)(void*, size_t); @@ -231,19 +269,34 @@ void* BUN_WRAP_GLIBC_SYMBOL(dlsym)(void*, const char*); void* BUN_WRAP_GLIBC_SYMBOL(dlvsym)(void*, const char*, const char*); int BUN_WRAP_GLIBC_SYMBOL(__libc_start_main)(int (*main)(int, char**, char**), int argc, char** argv, int (*init)(void), void (*fini)(void), void (*rtld_fini)(void), void* stack_end); -} - -extern "C" { #if defined(__x86_64__) || defined(__aarch64__) -int __wrap_fcntl(int fd, int cmd, ...) 
+double __wrap_exp(double x) { return exp(x); } +double __wrap_fmod(double x, double y) { return fmod(x, y); } +double __wrap_log(double x) { return log(x); } +double __wrap_log2(double x) { return log2(x); } +double __wrap_pow(double x, double y) { return pow(x, y); } +float __wrap_powf(float x, float y) { return powf(x, y); } +float __wrap_cosf(float x) { return cosf(x); } +float __wrap_expf(float x) { return expf(x); } +float __wrap_fmodf(float x, float y) { return fmodf(x, y); } +float __wrap_log10f(float x) { return log10f(x); } +float __wrap_log2f(float x) { return log2f(x); } +float __wrap_logf(float x) { return logf(x); } +float __wrap_sinf(float x) { return sinf(x); } +float __wrap_tanf(float x) { return tanf(x); } +void __wrap_sincosf(float x, float* sin_x, float* cos_x) { sincosf(x, sin_x, cos_x); } + +// ban statx, for now +int __wrap_statx(int fd, const char* path, int flags, + unsigned int mask, struct statx* buf) { - va_list args; - va_start(args, cmd); - void* arg = va_arg(args, void*); - va_end(args); - return fcntl(fd, cmd, arg); + errno = ENOSYS; +#ifdef BUN_DEBUG + abort(); +#endif + return -1; } typedef int (*fcntl64_func)(int fd, int cmd, ...); @@ -297,7 +350,7 @@ static enum arg_type get_arg_type(int cmd) } } -extern "C" int __wrap_fcntl64(int fd, int cmd, ...) +int __wrap_fcntl64(int fd, int cmd, ...) { va_list ap; enum arg_type type = get_arg_type(cmd); @@ -336,116 +389,30 @@ extern "C" int __wrap_fcntl64(int fd, int cmd, ...) } } -#endif - -#if defined(__x86_64__) - -extern "C" { - -// Scanning/conversion function wrappers -int __wrap_sscanf(const char* str, const char* format, ...) -{ - va_list ap; - va_start(ap, format); - int result = vsscanf(str, format, ap); - va_end(ap); - return result; -} - -long int __wrap_strtol(const char* nptr, char** endptr, int base) -{ - return strtol(nptr, endptr, base); -} - -unsigned long int __wrap_strtoul(const char* nptr, char** endptr, int base) -{ - return strtoul(nptr, endptr, base); -} - -unsigned long long int __wrap_strtoull(const char* nptr, char** endptr, int base) -{ - return strtoull(nptr, endptr, base); -} - -unsigned long int __wrap___isoc23_strtoul(const char* nptr, char** endptr, int base) -{ - return strtoul(nptr, endptr, base); -} - -long int __wrap___isoc23_strtol(const char* nptr, char** endptr, int base) -{ - return strtol(nptr, endptr, base); -} - -unsigned long long int __wrap___isoc23_strtoull(const char* nptr, char** endptr, int base) -{ - return strtoull(nptr, endptr, base); -} - -int __wrap___isoc23_sscanf(const char* str, const char* format, ...) 
-{ - va_list ap; - va_start(ap, format); - int result = vsscanf(str, format, ap); - va_end(ap); - return result; -} - -int __wrap___isoc23_vscanf(const char* format, va_list ap) -{ - va_list ap_copy; - va_copy(ap_copy, ap); - int result = vscanf(format, ap_copy); - va_end(ap_copy); - return result; -} - -int __wrap_vfscanf(FILE* stream, const char* format, va_list ap) +void __wrap_arc4random_buf(void* buf, size_t nbytes) { - va_list ap_copy; - va_copy(ap_copy, ap); - int result = vfscanf(stream, format, ap_copy); - va_end(ap_copy); - return result; + getrandom(buf, nbytes, 0); } -int __wrap_vscanf(const char* format, va_list ap) +int __wrap_sem_init(sem_t* sem, int pshared, unsigned int value) { - va_list ap_copy; - va_copy(ap_copy, ap); - int result = vscanf(format, ap_copy); - va_end(ap_copy); - return result; + return sem_init(sem, pshared, value); } -int __wrap_vsscanf(const char* str, const char* format, va_list ap) +int __wrap_sem_post(sem_t* sem) { - va_list ap_copy; - va_copy(ap_copy, ap); - int result = vsscanf(str, format, ap_copy); - va_end(ap_copy); - return result; + return sem_post(sem); } -int __wrap___isoc23_vfscanf(FILE* stream, const char* format, va_list ap) +int __wrap_sem_wait(sem_t* sem) { - va_list ap_copy; - va_copy(ap_copy, ap); - int result = vfscanf(stream, format, ap_copy); - va_end(ap_copy); - return result; + return sem_wait(sem); } -int __wrap___isoc23_vsscanf(const char* str, const char* format, va_list ap) -{ - va_list ap_copy; - va_copy(ap_copy, ap); - int result = vsscanf(str, format, ap_copy); - va_end(ap_copy); - return result; -} +// https://www.gnu.org/software/libc/manual/html_node/Single_002dThreaded.html +char __wrap___libc_single_threaded = 0; -extern "C" int __libc_start_main(int (*main)(int, char**, char**), int argc, char** argv, int (*init)(void), void (*fini)(void), void (*rtld_fini)(void), void* stack_end); +int __libc_start_main(int (*main)(int, char**, char**), int argc, char** argv, int (*init)(void), void (*fini)(void), void (*rtld_fini)(void), void* stack_end); int __wrap___libc_start_main(int (*main)(int, char**, char**), int argc, char** argv, int (*init)(void), void (*fini)(void), void (*rtld_fini)(void), void* stack_end) { @@ -453,7 +420,7 @@ int __wrap___libc_start_main(int (*main)(int, char**, char**), int argc, char** } // pthread function wrappers -int __wrap_pthread_attr_getstack(pthread_attr_t* attr, void** stackaddr, size_t* stacksize) +int __wrap_pthread_attr_getstack(const pthread_attr_t* attr, void** stackaddr, size_t* stacksize) { return pthread_attr_getstack(attr, stackaddr, stacksize); } @@ -568,11 +535,6 @@ int __wrap_pthread_sigmask(int how, const sigset_t* set, sigset_t* oldset) return pthread_sigmask(how, set, oldset); } -int __wrap___libc_single_threaded() -{ - return 0; -} - // Dynamic linking function wrappers int __wrap_dladdr(const void* addr, Dl_info* info) { @@ -589,11 +551,6 @@ char* __wrap_dlerror(void) return dlerror(); } -int __wrap__dl_find_object(void* address, struct dl_find_object* result) -{ - return _dl_find_object(address, result); -} - void* __wrap_dlopen(const char* filename, int flags) { return dlopen(filename, flags); @@ -604,17 +561,119 @@ void* __wrap_dlsym(void* handle, const char* symbol) return dlsym(handle, symbol); } -void* __wrap_dlvsym(void* handle, const char* symbol, const char* version) +#endif // x86_64 or aarch64 + +#if defined(__x86_64__) + +// Scanning/conversion function wrappers +int __wrap_sscanf(const char* str, const char* format, ...) 
{ - return dlvsym(handle, symbol, version); + va_list ap; + va_start(ap, format); + int result = vsscanf(str, format, ap); + va_end(ap); + return result; } -// Other system function wrappers -void __wrap_arc4random_buf(void* buf, size_t nbytes) +long int __wrap_strtol(const char* nptr, char** endptr, int base) { - getrandom(buf, nbytes, 0); + return strtol(nptr, endptr, base); +} + +unsigned long int __wrap_strtoul(const char* nptr, char** endptr, int base) +{ + return strtoul(nptr, endptr, base); +} + +unsigned long long int __wrap_strtoull(const char* nptr, char** endptr, int base) +{ + return strtoull(nptr, endptr, base); +} + +unsigned long int __wrap___isoc23_strtoul(const char* nptr, char** endptr, int base) +{ + return strtoul(nptr, endptr, base); +} + +long int __wrap___isoc23_strtol(const char* nptr, char** endptr, int base) +{ + return strtol(nptr, endptr, base); +} + +unsigned long long int __wrap___isoc23_strtoull(const char* nptr, char** endptr, int base) +{ + return strtoull(nptr, endptr, base); +} + +int __wrap___isoc23_sscanf(const char* str, const char* format, ...) +{ + va_list ap; + va_start(ap, format); + int result = vsscanf(str, format, ap); + va_end(ap); + return result; +} + +int __wrap___isoc23_vscanf(const char* format, va_list ap) +{ + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vscanf(format, ap_copy); + va_end(ap_copy); + return result; +} + +int __wrap_vfscanf(FILE* stream, const char* format, va_list ap) +{ + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vfscanf(stream, format, ap_copy); + va_end(ap_copy); + return result; +} + +int __wrap_vscanf(const char* format, va_list ap) +{ + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vscanf(format, ap_copy); + va_end(ap_copy); + return result; +} + +int __wrap_vsscanf(const char* str, const char* format, va_list ap) +{ + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vsscanf(str, format, ap_copy); + va_end(ap_copy); + return result; +} + +int __wrap___isoc23_vfscanf(FILE* stream, const char* format, va_list ap) +{ + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vfscanf(stream, format, ap_copy); + va_end(ap_copy); + return result; +} + +int __wrap___isoc23_vsscanf(const char* str, const char* format, va_list ap) +{ + va_list ap_copy; + va_copy(ap_copy, ap); + int result = vsscanf(str, format, ap_copy); + va_end(ap_copy); + return result; +} + +void* __wrap_dlvsym(void* handle, const char* symbol, const char* version) +{ + return dlvsym(handle, symbol, version); } +// Other system function wrappers ssize_t __wrap_getrandom(void* buffer, size_t length, unsigned int flags) { return getrandom(buffer, length, flags); @@ -635,56 +694,35 @@ _Noreturn void __wrap_quick_exit(int status) quick_exit(status); } -int __wrap_sem_init(sem_t* sem, int pshared, unsigned int value) -{ - return sem_init(sem, pshared, value); -} - -int __wrap_sem_post(sem_t* sem) +int __wrap_fcntl(int fd, int cmd, ...) 
{ - return sem_post(sem); + va_list args; + va_start(args, cmd); + void* arg = va_arg(args, void*); + va_end(args); + return fcntl(fd, cmd, arg); } -int __wrap_sem_wait(sem_t* sem) +int __wrap__dl_find_object(void* address, struct dl_find_object* result) { - return sem_wait(sem); + return _dl_find_object(address, result); } -} // extern "C" - -#endif +#endif // x86_64 -double __wrap_exp(double x) -{ - return exp(x); -} -double __wrap_fmod(double x, double y) { return fmod(x, y); } -double __wrap_log(double x) { return log(x); } -double __wrap_log2(double x) { return log2(x); } -double __wrap_pow(double x, double y) { return pow(x, y); } -float __wrap_powf(float x, float y) { return powf(x, y); } -float __wrap_cosf(float x) { return cosf(x); } -float __wrap_expf(float x) { return expf(x); } -float __wrap_fmodf(float x, float y) { return fmodf(x, y); } -float __wrap_log10f(float x) { return log10f(x); } -float __wrap_log2f(float x) { return log2f(x); } -float __wrap_logf(float x) { return logf(x); } -float __wrap_sinf(float x) { return sinf(x); } -float __wrap_tanf(float x) { return tanf(x); } -void __wrap_sincosf(float x, float* sin_x, float* cos_x) { sincosf(x, sin_x, cos_x); } -} +#if defined(__aarch64__) -// ban statx, for now -extern "C" int __wrap_statx(int fd, const char* path, int flags, - unsigned int mask, struct statx* buf) +// This function is only called by the unwind implementation, which won't be run in the first place +// since we don't allow C++ exceptions (any thrown will just go to the crash handler) +int __wrap__dl_find_object(void* address, struct dl_find_object* result) { - errno = ENOSYS; -#ifdef BUN_DEBUG abort(); -#endif - return -1; } +#endif // aarch64 + +} // extern "C" + #endif // glibc // musl From 7eb58b3d394a26f6e7c224efb687e129c4963774 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 19:14:58 -0800 Subject: [PATCH 023/176] Add __wrap___pthread_key_create --- cmake/targets/BuildBun.cmake | 7 +-- .../bindings/workaround-missing-symbols.cpp | 62 ++++++++++--------- 2 files changed, 35 insertions(+), 34 deletions(-) diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 1cdb6d0fe91240..0f625a4fd9c8b7 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -870,12 +870,6 @@ if(LINUX) ) endif() - if (ARCH STREQUAL "aarch64") - target_link_options(${bun} PUBLIC - -Wl,--wrap=__pthread_key_create - ) - endif() - target_link_options(${bun} PUBLIC -Wl,--wrap=cosf -Wl,--wrap=exp @@ -898,6 +892,7 @@ if(LINUX) -Wl,--wrap=sem_init -Wl,--wrap=__libc_single_threaded -Wl,--wrap=__libc_start_main + -Wl,--wrap=__pthread_key_create -Wl,--wrap=_dl_find_object -Wl,--wrap=dladdr -Wl,--wrap=dlclose diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index 541aa492408027..a7b06eedb1000a 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -139,7 +139,16 @@ __asm__(".symver sem_wait,sem_wait@GLIBC_2.2.5"); __asm__(".symver __pthread_key_create,__pthread_key_create@GLIBC_2.2.5"); #elif defined(__aarch64__) +__asm__(".symver __libc_single_threaded,__libc_single_threaded@GLIBC_2.17"); +__asm__(".symver __libc_start_main,__libc_start_main@GLIBC_2.17"); +__asm__(".symver __pthread_key_create,__pthread_key_create@GLIBC_2.17"); +__asm__(".symver _dl_find_object,_dl_find_object@GLIBC_2.17"); __asm__(".symver cosf,cosf@GLIBC_2.17"); +__asm__(".symver dladdr,dladdr@GLIBC_2.17"); +__asm__(".symver 
dlclose,dlclose@GLIBC_2.17"); +__asm__(".symver dlerror,dlerror@GLIBC_2.17"); +__asm__(".symver dlopen,dlopen@GLIBC_2.17"); +__asm__(".symver dlsym,dlsym@GLIBC_2.17"); __asm__(".symver exp,exp@GLIBC_2.17"); __asm__(".symver expf,expf@GLIBC_2.17"); __asm__(".symver fmod,fmod@GLIBC_2.17"); @@ -151,44 +160,35 @@ __asm__(".symver log2f,log2f@GLIBC_2.17"); __asm__(".symver logf,logf@GLIBC_2.17"); __asm__(".symver pow,pow@GLIBC_2.17"); __asm__(".symver powf,powf@GLIBC_2.17"); -__asm__(".symver sincosf,sincosf@GLIBC_2.17"); -__asm__(".symver sinf,sinf@GLIBC_2.17"); -__asm__(".symver tanf,tanf@GLIBC_2.17"); -__asm__(".symver pthread_sigmask,pthread_sigmask@GLIBC_2.17"); -__asm__(".symver __libc_start_main,__libc_start_main@GLIBC_2.17"); -__asm__(".symver dlsym,dlsym@GLIBC_2.17"); -__asm__(".symver dlopen,dlopen@GLIBC_2.17"); -__asm__(".symver pthread_detach,pthread_detach@GLIBC_2.17"); -__asm__(".symver pthread_attr_setstacksize,pthread_attr_setstacksize@GLIBC_2.17"); +__asm__(".symver pthread_attr_getstack,pthread_attr_getstack@GLIBC_2.17"); __asm__(".symver pthread_attr_setguardsize,pthread_attr_setguardsize@GLIBC_2.17"); +__asm__(".symver pthread_attr_setstacksize,pthread_attr_setstacksize@GLIBC_2.17"); __asm__(".symver pthread_create,pthread_create@GLIBC_2.17"); +__asm__(".symver pthread_detach,pthread_detach@GLIBC_2.17"); +__asm__(".symver pthread_getattr_np,pthread_getattr_np@GLIBC_2.17"); +__asm__(".symver pthread_getspecific,pthread_getspecific@GLIBC_2.17"); __asm__(".symver pthread_join,pthread_join@GLIBC_2.17"); -__asm__(".symver dlclose,dlclose@GLIBC_2.17"); -__asm__(".symver dlerror,dlerror@GLIBC_2.17"); __asm__(".symver pthread_key_create,pthread_key_create@GLIBC_2.17"); __asm__(".symver pthread_key_delete,pthread_key_delete@GLIBC_2.17"); -__asm__(".symver pthread_setspecific,pthread_setspecific@GLIBC_2.17"); +__asm__(".symver pthread_kill,pthread_kill@GLIBC_2.17"); __asm__(".symver pthread_mutex_trylock,pthread_mutex_trylock@GLIBC_2.17"); +__asm__(".symver pthread_mutexattr_destroy,pthread_mutexattr_destroy@GLIBC_2.17"); +__asm__(".symver pthread_mutexattr_init,pthread_mutexattr_init@GLIBC_2.17"); +__asm__(".symver pthread_mutexattr_settype,pthread_mutexattr_settype@GLIBC_2.17"); __asm__(".symver pthread_once,pthread_once@GLIBC_2.17"); -__asm__(".symver pthread_getspecific,pthread_getspecific@GLIBC_2.17"); -__asm__(".symver dladdr,dladdr@GLIBC_2.17"); -__asm__(".symver pthread_getattr_np,pthread_getattr_np@GLIBC_2.17"); -__asm__(".symver pthread_attr_getstack,pthread_attr_getstack@GLIBC_2.17"); -__asm__(".symver __libc_single_threaded,__libc_single_threaded@GLIBC_2.17"); -__asm__(".symver pthread_kill,pthread_kill@GLIBC_2.17"); -__asm__(".symver sem_post,sem_post@GLIBC_2.17"); -__asm__(".symver sem_wait,sem_wait@GLIBC_2.17"); -__asm__(".symver sem_init,sem_init@GLIBC_2.17"); +__asm__(".symver pthread_rwlock_destroy,pthread_rwlock_destroy@GLIBC_2.17"); __asm__(".symver pthread_rwlock_init,pthread_rwlock_init@GLIBC_2.17"); __asm__(".symver pthread_rwlock_rdlock,pthread_rwlock_rdlock@GLIBC_2.17"); -__asm__(".symver pthread_rwlock_wrlock,pthread_rwlock_wrlock@GLIBC_2.17"); __asm__(".symver pthread_rwlock_unlock,pthread_rwlock_unlock@GLIBC_2.17"); -__asm__(".symver pthread_rwlock_destroy,pthread_rwlock_destroy@GLIBC_2.17"); -__asm__(".symver pthread_mutexattr_init,pthread_mutexattr_init@GLIBC_2.17"); -__asm__(".symver pthread_mutexattr_settype,pthread_mutexattr_settype@GLIBC_2.17"); -__asm__(".symver pthread_mutexattr_destroy,pthread_mutexattr_destroy@GLIBC_2.17"); -__asm__(".symver 
_dl_find_object,_dl_find_object@GLIBC_2.17"); -__asm__(".symver __pthread_key_create,__pthread_key_create@GLIBC_2.17"); +__asm__(".symver pthread_rwlock_wrlock,pthread_rwlock_wrlock@GLIBC_2.17"); +__asm__(".symver pthread_setspecific,pthread_setspecific@GLIBC_2.17"); +__asm__(".symver pthread_sigmask,pthread_sigmask@GLIBC_2.17"); +__asm__(".symver sem_init,sem_init@GLIBC_2.17"); +__asm__(".symver sem_post,sem_post@GLIBC_2.17"); +__asm__(".symver sem_wait,sem_wait@GLIBC_2.17"); +__asm__(".symver sincosf,sincosf@GLIBC_2.17"); +__asm__(".symver sinf,sinf@GLIBC_2.17"); +__asm__(".symver tanf,tanf@GLIBC_2.17"); #endif // aarch64 @@ -236,6 +236,7 @@ int BUN_WRAP_GLIBC_SYMBOL(pthread_getattr_np)(pthread_t, pthread_attr_t*); void* BUN_WRAP_GLIBC_SYMBOL(pthread_getspecific)(pthread_key_t); int BUN_WRAP_GLIBC_SYMBOL(pthread_join)(pthread_t, void**); int BUN_WRAP_GLIBC_SYMBOL(pthread_key_create)(pthread_key_t*, void (*)(void*)); +int BUN_WRAP_GLIBC_SYMBOL(__pthread_key_create)(pthread_key_t*, void (*)(void*)); int BUN_WRAP_GLIBC_SYMBOL(pthread_key_delete)(pthread_key_t); int BUN_WRAP_GLIBC_SYMBOL(pthread_kill)(pthread_t, int); int BUN_WRAP_GLIBC_SYMBOL(pthread_mutex_trylock)(pthread_mutex_t*); @@ -465,6 +466,11 @@ int __wrap_pthread_key_create(pthread_key_t* key, void (*destructor)(void*)) return pthread_key_create(key, destructor); } +int __wrap___pthread_key_create(pthread_key_t* key, void (*destructor)(void*)) +{ + return __pthread_key_create(key, destructor); +} + int __wrap_pthread_key_delete(pthread_key_t key) { return pthread_key_delete(key); From e07563f1a0733701eedc33c987895dcb413b0d9f Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 20:11:14 -0800 Subject: [PATCH 024/176] This is probably incorrect --- src/bun.js/bindings/workaround-missing-symbols.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index a7b06eedb1000a..75b2d4e6de26ef 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -468,7 +468,7 @@ int __wrap_pthread_key_create(pthread_key_t* key, void (*destructor)(void*)) int __wrap___pthread_key_create(pthread_key_t* key, void (*destructor)(void*)) { - return __pthread_key_create(key, destructor); + return pthread_key_create(key, destructor); } int __wrap_pthread_key_delete(pthread_key_t key) From 7e3f887d2e7d51e66ba32d55ea22693999d5772f Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 20:47:27 -0800 Subject: [PATCH 025/176] [build images] --- .buildkite/ci.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 0eca46768fe156..47fc2ed11971d1 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -460,7 +460,7 @@ function getBuildZigStep(platform) { agents: getZigAgent(platform), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), - env: { ...getBuildEnv(platform), ENABLE_LLVM: "OFF" }, + env: getBuildEnv(platform), command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`, }; } From d5fc7ad92cf44acdab7173c1f730fbfd42868d4f Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 20:50:04 -0800 Subject: [PATCH 026/176] [build images] --- .buildkite/ci.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 47fc2ed11971d1..0eca46768fe156 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -460,7 +460,7 @@ 
function getBuildZigStep(platform) { agents: getZigAgent(platform), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), - env: getBuildEnv(platform), + env: { ...getBuildEnv(platform), ENABLE_LLVM: "OFF" }, command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`, }; } From ac707a1ed54cca5f79a508a9d00da406e88ff2df Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 20:50:44 -0800 Subject: [PATCH 027/176] Update ci.mjs --- .buildkite/ci.mjs | 1 + 1 file changed, 1 insertion(+) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 0eca46768fe156..eecc5d7ad9b1bb 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -394,6 +394,7 @@ function getBuildEnv(target) { ENABLE_ASSERTIONS: release ? "OFF" : "ON", ENABLE_LOGS: release ? "OFF" : "ON", ABI: abi === "musl" ? "musl" : undefined, + LLVM_VERSION: "18.1.8", }; } From 8c3a024b9b59099355c662909d8fb514ea8dcae0 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 21:28:41 -0800 Subject: [PATCH 028/176] [build images] --- .buildkite/ci.mjs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index eecc5d7ad9b1bb..be22d2f36461f3 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -931,9 +931,9 @@ async function getPipelineOptions() { skipBuilds: parseBoolean(options["skip-builds"]), forceBuilds: parseBoolean(options["force-builds"]), skipTests: parseBoolean(options["skip-tests"]), + buildImages: parseOption(/\[(build images?)\]/i), + publishImages: parseOption(/\[(publish images?)\]/i), testFiles: parseArray(options["test-files"]), - buildImages: parseBoolean(options["build-images"]), - publishImages: parseBoolean(options["publish-images"]), unifiedBuilds: parseBoolean(options["unified-builds"]), unifiedTests: parseBoolean(options["unified-tests"]), buildProfiles: parseArray(options["build-profiles"]), From 970775701d7b821a99a14ff86cfc4864f90fd900 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 21:33:50 -0800 Subject: [PATCH 029/176] Update ci.mjs --- .buildkite/ci.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index be22d2f36461f3..dac90783e4084a 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -461,7 +461,7 @@ function getBuildZigStep(platform) { agents: getZigAgent(platform), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), - env: { ...getBuildEnv(platform), ENABLE_LLVM: "OFF" }, + env: getBuildEnv(platform), command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`, }; } From 0fd79283c691faf31ca9f5cef501f6d5ee7968ff Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Thu, 5 Dec 2024 21:44:53 -0800 Subject: [PATCH 030/176] [build images] --- .buildkite/ci.mjs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index dac90783e4084a..6fceaa93e75d4d 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -931,8 +931,8 @@ async function getPipelineOptions() { skipBuilds: parseBoolean(options["skip-builds"]), forceBuilds: parseBoolean(options["force-builds"]), skipTests: parseBoolean(options["skip-tests"]), - buildImages: parseOption(/\[(build images?)\]/i), - publishImages: parseOption(/\[(publish images?)\]/i), + buildImages: parseBoolean(options["build-images"]), + publishImages: parseBoolean(options["publish-images"]), testFiles: parseArray(options["test-files"]), unifiedBuilds: parseBoolean(options["unified-builds"]), unifiedTests: 
parseBoolean(options["unified-tests"]), @@ -969,6 +969,8 @@ async function getPipelineOptions() { skipBuilds: parseOption(/\[(skip builds?|no builds?|only tests?)\]/i), forceBuilds: parseOption(/\[(force builds?)\]/i), skipTests: parseOption(/\[(skip tests?|no tests?|only builds?)\]/i), + buildImages: parseOption(/\[(build images?)\]/i), + publishImages: parseOption(/\[(publish images?)\]/i), buildPlatforms: Array.from(buildPlatformsMap.values()), testPlatforms: Array.from(testPlatformsMap.values()), buildProfiles: ["release"], From eccf03f07e90ed45aa3855eb6b834f1170a736a2 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 22:19:23 -0800 Subject: [PATCH 031/176] [build images] --- scripts/bootstrap.sh | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index a3b58ec0986d77..eb9478fe1ab4f6 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -815,16 +815,7 @@ llvm_version() { install_llvm() { case "$pm" in apt) - bash="$(require bash)" - llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")" - case "$distro-$release" in - ubuntu-24*) - execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all -njammy - ;; - *) - execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all - ;; - esac + install_packages "llvm-$(llvm_version)" "clang-$(llvm_version)" "lld-$(llvm_version)" "libllvm-$(llvm_version)" ;; brew) install_packages "llvm@$(llvm_version)" From 0ef42f9331f44547ff0c6723ffe15d4423d785a8 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 22:37:00 -0800 Subject: [PATCH 032/176] [build images] --- scripts/bootstrap.sh | 36 +++++++++++++++++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index eb9478fe1ab4f6..0d662bc9dee6cc 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -815,7 +815,41 @@ llvm_version() { install_llvm() { case "$pm" in apt) - install_packages "llvm-$(llvm_version)" "clang-$(llvm_version)" "lld-$(llvm_version)" "libllvm-$(llvm_version)" + bash="$(require bash)" + llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")" + case "$distro-$release" in + ubuntu-24*) + execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all -njammy + ;; + *) + execute_sudo "$bash" "$llvm_script" "$(llvm_version)" + ;; + esac + + for f in "/usr/lib/llvm-$(llvm_version)/bin/"*; do + execute_sudo ln -sf "$f" "/usr/bin/" + done + + execute_sudo ln -sf "/usr/bin/clang-$(llvm_version)" "/usr/bin/clang" + execute_sudo ln -sf "/usr/bin/clang++-$(llvm_version)" "/usr/bin/clang++" + execute_sudo ln -sf "/usr/bin/lld-$(llvm_version)" "/usr/bin/lld" + execute_sudo ln -sf "/usr/bin/lldb-$(llvm_version)" "/usr/bin/lldb" + execute_sudo ln -sf "/usr/bin/clangd-$(llvm_version)" "/usr/bin/clangd" + execute_sudo ln -sf "/usr/bin/llvm-ar-$(llvm_version)" "/usr/bin/llvm-ar" + execute_sudo ln -sf "/usr/bin/ld.lld" "/usr/bin/ld" + execute_sudo ln -sf "/usr/bin/clang" "/usr/bin/cc" + execute_sudo ln -sf "/usr/bin/clang" "/usr/bin/c89" + execute_sudo ln -sf "/usr/bin/clang" "/usr/bin/c99" + execute_sudo ln -sf "/usr/bin/clang++" "/usr/bin/c++" + execute_sudo ln -sf "/usr/bin/clang++" "/usr/bin/g++" + execute_sudo ln -sf "/usr/bin/llvm-ar" "/usr/bin/ar" + execute_sudo ln -sf "/usr/bin/clang" "/usr/bin/gcc" + + append_to_profile "export CC=clang-$(llvm_version)" + append_to_profile "export CXX=clang++-$(llvm_version)" + append_to_profile "export AR=llvm-ar-$(llvm_version)" + append_to_profile "export 
RANLIB=llvm-ranlib-$(llvm_version)" + append_to_profile "export LD=lld-$(llvm_version)" ;; brew) install_packages "llvm@$(llvm_version)" From dd4c3594b69425a09636ee4638783b5d2960127b Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 22:47:29 -0800 Subject: [PATCH 033/176] Update bootstrap.sh --- scripts/bootstrap.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 0d662bc9dee6cc..b76bef48638ed7 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -816,13 +816,14 @@ install_llvm() { case "$pm" in apt) bash="$(require bash)" + execute_sudo apt update -y -qq llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")" case "$distro-$release" in ubuntu-24*) execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all -njammy ;; *) - execute_sudo "$bash" "$llvm_script" "$(llvm_version)" + execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all ;; esac From 5ddfde77740415c4af68a9b04c1dd79357383805 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 22:49:22 -0800 Subject: [PATCH 034/176] [build image] --- scripts/bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index b76bef48638ed7..dc3c3cc69ff708 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -816,7 +816,7 @@ install_llvm() { case "$pm" in apt) bash="$(require bash)" - execute_sudo apt update -y -qq + execute_sudo apt-get update -y -qq llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")" case "$distro-$release" in ubuntu-24*) From 84859c20df16c194f31a935766783caa47c0da05 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 22:55:31 -0800 Subject: [PATCH 035/176] Update bootstrap.sh --- scripts/bootstrap.sh | 28 +++++++++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index dc3c3cc69ff708..87e0319aa1b253 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -820,10 +820,33 @@ install_llvm() { llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")" case "$distro-$release" in ubuntu-24*) - execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all -njammy + execute_sudo "$bash" "$llvm_script" "$(llvm_version)" -njammy + ;; + debian-12*) + # Add LLVM repository for Debian 12 (bookworm) + execute_sudo bash -c 'echo "deb http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm-18 main" >> /etc/apt/sources.list.d/llvm.list' + execute_sudo bash -c 'echo "deb-src http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm-18 main" >> /etc/apt/sources.list.d/llvm.list' + + # Download and add LLVM GPG key + execute_sudo wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | execute_sudo apt-key add - + + # Update package lists + execute_sudo apt-get update -y + + # Install LLVM packages + execute_sudo apt-get install -y \ + clang-18 \ + lld-18 \ + lldb-18 \ + llvm-18 \ + llvm-18-dev \ + llvm-18-tools \ + libc++-18-dev \ + libc++abi-18-dev \ + libunwind-18-dev ;; *) - execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all + execute_sudo "$bash" "$llvm_script" "$(llvm_version)" ;; esac @@ -835,7 +858,6 @@ install_llvm() { execute_sudo ln -sf "/usr/bin/clang++-$(llvm_version)" "/usr/bin/clang++" execute_sudo ln -sf "/usr/bin/lld-$(llvm_version)" "/usr/bin/lld" execute_sudo ln -sf "/usr/bin/lldb-$(llvm_version)" "/usr/bin/lldb" - execute_sudo ln -sf "/usr/bin/clangd-$(llvm_version)" "/usr/bin/clangd" execute_sudo ln -sf 
"/usr/bin/llvm-ar-$(llvm_version)" "/usr/bin/llvm-ar" execute_sudo ln -sf "/usr/bin/ld.lld" "/usr/bin/ld" execute_sudo ln -sf "/usr/bin/clang" "/usr/bin/cc" From aa962f520217ee334d20c8898d4d1087ab44c2b9 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 5 Dec 2024 22:56:25 -0800 Subject: [PATCH 036/176] [build images] --- scripts/bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 87e0319aa1b253..77ae90ec173715 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -823,7 +823,7 @@ install_llvm() { execute_sudo "$bash" "$llvm_script" "$(llvm_version)" -njammy ;; debian-12*) - # Add LLVM repository for Debian 12 (bookworm) + # Add LLVM repository for Debian 12 (bookworm) specially execute_sudo bash -c 'echo "deb http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm-18 main" >> /etc/apt/sources.list.d/llvm.list' execute_sudo bash -c 'echo "deb-src http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm-18 main" >> /etc/apt/sources.list.d/llvm.list' From 78206677903904d2a6adf487001c89b9c9bb0c61 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Thu, 5 Dec 2024 23:49:13 -0800 Subject: [PATCH 037/176] when buildImages is set use it --- .buildkite/ci.mjs | 55 ++++++++++++++++++++++++++++------------------- 1 file changed, 33 insertions(+), 22 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 6fceaa93e75d4d..f3deed3fb9b1ae 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -259,6 +259,7 @@ function getPriority() { * @property {string} instanceType * @property {number} cpuCount * @property {number} threadsPerCore + * @property {boolean} dryRun */ /** @@ -279,7 +280,7 @@ function getEc2Agent(platform, options) { // https://github.com/oven-sh/robobun/blob/d46c07e0ac5ac0f9ffe1012f0e98b59e1a0d387a/src/robobun.ts#L1707 robobun: true, robobun2: true, - "image-name": getImageName(platform), + "image-name": getImageName(platform, dryRun), "instance-type": instanceType, "cpu-count": cpuCount, "threads-per-core": threadsPerCore, @@ -291,7 +292,7 @@ function getEc2Agent(platform, options) { * @param {Platform} platform * @returns {string} */ -function getCppAgent(platform) { +function getCppAgent(platform, dryRun) { const { os, arch } = platform; if (os === "darwin") { @@ -306,6 +307,7 @@ function getCppAgent(platform) { instanceType: arch === "aarch64" ? 
"c8g.16xlarge" : "c7i.16xlarge", cpuCount: 32, threadsPerCore: 1, + dryRun, }); } @@ -313,7 +315,7 @@ function getCppAgent(platform) { * @param {Platform} platform * @returns {Agent} */ -function getZigAgent(platform) { +function getZigAgent(platform, dryRun) { const { arch } = platform; return { @@ -339,7 +341,7 @@ function getZigAgent(platform) { * @param {Platform} platform * @returns {Agent} */ -function getTestAgent(platform) { +function getTestAgent(platform, dryRun) { const { os, arch } = platform; if (os === "darwin") { @@ -357,6 +359,7 @@ function getTestAgent(platform) { instanceType: "c7i.2xlarge", cpuCount: 1, threadsPerCore: 1, + dryRun, }); } @@ -365,6 +368,7 @@ function getTestAgent(platform) { instanceType: "c8g.xlarge", cpuCount: 1, threadsPerCore: 1, + dryRun, }); } @@ -372,6 +376,7 @@ function getTestAgent(platform) { instanceType: "c7i.xlarge", cpuCount: 1, threadsPerCore: 1, + dryRun, }); } @@ -400,13 +405,14 @@ function getBuildEnv(target) { /** * @param {Platform} platform + * @param {boolean} dryRun * @returns {Step} */ -function getBuildVendorStep(platform) { +function getBuildVendorStep(platform, dryRun) { return { key: `${getTargetKey(platform)}-build-vendor`, label: `${getTargetLabel(platform)} - build-vendor`, - agents: getCppAgent(platform), + agents: getCppAgent(platform, dryRun), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), env: getBuildEnv(platform), @@ -416,13 +422,14 @@ function getBuildVendorStep(platform) { /** * @param {Platform} platform + * @param {boolean} dryRun * @returns {Step} */ -function getBuildCppStep(platform) { +function getBuildCppStep(platform, dryRun) { return { key: `${getTargetKey(platform)}-build-cpp`, label: `${getTargetLabel(platform)} - build-cpp`, - agents: getCppAgent(platform), + agents: getCppAgent(platform, dryRun), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), env: { @@ -453,12 +460,12 @@ function getBuildToolchain(target) { * @param {Platform} platform * @returns {Step} */ -function getBuildZigStep(platform) { +function getBuildZigStep(platform, dryRun) { const toolchain = getBuildToolchain(platform); return { key: `${getTargetKey(platform)}-build-zig`, label: `${getTargetLabel(platform)} - build-zig`, - agents: getZigAgent(platform), + agents: getZigAgent(platform, dryRun), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), env: getBuildEnv(platform), @@ -468,9 +475,10 @@ function getBuildZigStep(platform) { /** * @param {Platform} platform + * @param {boolean} dryRun * @returns {Step} */ -function getLinkBunStep(platform) { +function getLinkBunStep(platform, dryRun) { return { key: `${getTargetKey(platform)}-build-bun`, label: `${getTargetLabel(platform)} - build-bun`, @@ -479,7 +487,7 @@ function getLinkBunStep(platform) { `${getTargetKey(platform)}-build-cpp`, `${getTargetKey(platform)}-build-zig`, ], - agents: getCppAgent(platform), + agents: getCppAgent(platform, dryRun), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), env: { @@ -492,13 +500,14 @@ function getLinkBunStep(platform) { /** * @param {Platform} platform + * @param {boolean} dryRun * @returns {Step} */ -function getBuildBunStep(platform) { +function getBuildBunStep(platform, dryRun) { return { key: `${getTargetKey(platform)}-build-bun`, label: `${getTargetLabel(platform)} - build-bun`, - agents: getCppAgent(platform), + agents: getCppAgent(platform, dryRun), retry: getRetry(), cancel_on_build_failing: isMergeQueue(), env: getBuildEnv(platform), @@ -511,6 +520,7 @@ function getBuildBunStep(platform) { * 
@property {string} [buildId] * @property {boolean} [unifiedTests] * @property {string[]} [testFiles] + * @property {boolean} [dryRun] */ /** @@ -520,7 +530,7 @@ function getBuildBunStep(platform) { */ function getTestBunStep(platform, options = {}) { const { os } = platform; - const { buildId, unifiedTests, testFiles } = options; + const { buildId, unifiedTests, testFiles, dryRun } = options; const args = [`--step=${getTargetKey(platform)}-build-bun`]; if (buildId) { @@ -539,7 +549,7 @@ function getTestBunStep(platform, options = {}) { key: `${getPlatformKey(platform)}-test-bun`, label: `${getPlatformLabel(platform)} - test-bun`, depends_on: depends, - agents: getTestAgent(platform), + agents: getTestAgent(platform, dryRun), cancel_on_build_failing: isMergeQueue(), retry: getRetry(), soft_fail: isMainBranch() ? true : [{ exit_status: 2 }], @@ -1043,12 +1053,12 @@ async function getPipeline(options = {}) { key: getTargetKey(target), group: getTargetLabel(target), steps: unifiedBuilds - ? [getBuildBunStep(target)] + ? [getBuildBunStep(target, !!buildImages)] : [ - getBuildVendorStep(target), - getBuildCppStep(target), - getBuildZigStep(target), - getLinkBunStep(target), + getBuildVendorStep(target, !!buildImages), + getBuildCppStep(target, !!buildImages), + getBuildZigStep(target, !!buildImages), + getLinkBunStep(target, !!buildImages), ], }, imagePlatform ? `${imageKey}-build-image` : undefined, @@ -1059,13 +1069,14 @@ async function getPipeline(options = {}) { const { skipTests, forceTests, unifiedTests, testFiles } = options; if (!skipTests || forceTests) { + // steps.push( ...testPlatforms .flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile }))) .map(target => ({ key: getTargetKey(target), group: getTargetLabel(target), - steps: [getTestBunStep(target, { unifiedTests, testFiles, buildId })], + steps: [getTestBunStep(target, { unifiedTests, testFiles, buildId, dryRun: !!buildImages })], })), ); } From 560e48e944dc8d446ef7e48c01fba481c26af9e2 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Thu, 5 Dec 2024 23:51:29 -0800 Subject: [PATCH 038/176] forgot to pull dryRun out of the options object there [build images] --- .buildkite/ci.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index f3deed3fb9b1ae..786b351a3a8eb6 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -269,7 +269,7 @@ function getPriority() { */ function getEc2Agent(platform, options) { const { os, arch, abi, distro, release } = platform; - const { instanceType, cpuCount, threadsPerCore } = options; + const { instanceType, cpuCount, threadsPerCore, dryRun } = options; return { os, arch, From bfd2e80fa1629846f5c8c754770f3535a43947f5 Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Fri, 6 Dec 2024 11:28:55 -0800 Subject: [PATCH 039/176] Do not reference glibc's version of __libc_single_threaded --- src/bun.js/bindings/workaround-missing-symbols.cpp | 1 - 1 file changed, 1 deletion(-) diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index 75b2d4e6de26ef..c7461ed8b1f18c 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -139,7 +139,6 @@ __asm__(".symver sem_wait,sem_wait@GLIBC_2.2.5"); __asm__(".symver __pthread_key_create,__pthread_key_create@GLIBC_2.2.5"); #elif defined(__aarch64__) -__asm__(".symver __libc_single_threaded,__libc_single_threaded@GLIBC_2.17"); __asm__(".symver 
__libc_start_main,__libc_start_main@GLIBC_2.17"); __asm__(".symver __pthread_key_create,__pthread_key_create@GLIBC_2.17"); __asm__(".symver _dl_find_object,_dl_find_object@GLIBC_2.17"); From f96c22ddf9b15cd852d1db47b71da82591e65741 Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Fri, 6 Dec 2024 11:45:37 -0800 Subject: [PATCH 040/176] [build images] From 70ca52de122d786f4c7199e7068d5296b8ddfefd Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Fri, 6 Dec 2024 12:08:15 -0800 Subject: [PATCH 041/176] Fix image [build image] --- scripts/bootstrap.sh | 48 -------------------------------------------- 1 file changed, 48 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 77ae90ec173715..2603d7fa7f8f65 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -816,63 +816,15 @@ install_llvm() { case "$pm" in apt) bash="$(require bash)" - execute_sudo apt-get update -y -qq llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")" case "$distro-$release" in ubuntu-24*) execute_sudo "$bash" "$llvm_script" "$(llvm_version)" -njammy ;; - debian-12*) - # Add LLVM repository for Debian 12 (bookworm) specially - execute_sudo bash -c 'echo "deb http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm-18 main" >> /etc/apt/sources.list.d/llvm.list' - execute_sudo bash -c 'echo "deb-src http://apt.llvm.org/bookworm/ llvm-toolchain-bookworm-18 main" >> /etc/apt/sources.list.d/llvm.list' - - # Download and add LLVM GPG key - execute_sudo wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | execute_sudo apt-key add - - - # Update package lists - execute_sudo apt-get update -y - - # Install LLVM packages - execute_sudo apt-get install -y \ - clang-18 \ - lld-18 \ - lldb-18 \ - llvm-18 \ - llvm-18-dev \ - llvm-18-tools \ - libc++-18-dev \ - libc++abi-18-dev \ - libunwind-18-dev - ;; *) execute_sudo "$bash" "$llvm_script" "$(llvm_version)" ;; esac - - for f in "/usr/lib/llvm-$(llvm_version)/bin/"*; do - execute_sudo ln -sf "$f" "/usr/bin/" - done - - execute_sudo ln -sf "/usr/bin/clang-$(llvm_version)" "/usr/bin/clang" - execute_sudo ln -sf "/usr/bin/clang++-$(llvm_version)" "/usr/bin/clang++" - execute_sudo ln -sf "/usr/bin/lld-$(llvm_version)" "/usr/bin/lld" - execute_sudo ln -sf "/usr/bin/lldb-$(llvm_version)" "/usr/bin/lldb" - execute_sudo ln -sf "/usr/bin/llvm-ar-$(llvm_version)" "/usr/bin/llvm-ar" - execute_sudo ln -sf "/usr/bin/ld.lld" "/usr/bin/ld" - execute_sudo ln -sf "/usr/bin/clang" "/usr/bin/cc" - execute_sudo ln -sf "/usr/bin/clang" "/usr/bin/c89" - execute_sudo ln -sf "/usr/bin/clang" "/usr/bin/c99" - execute_sudo ln -sf "/usr/bin/clang++" "/usr/bin/c++" - execute_sudo ln -sf "/usr/bin/clang++" "/usr/bin/g++" - execute_sudo ln -sf "/usr/bin/llvm-ar" "/usr/bin/ar" - execute_sudo ln -sf "/usr/bin/clang" "/usr/bin/gcc" - - append_to_profile "export CC=clang-$(llvm_version)" - append_to_profile "export CXX=clang++-$(llvm_version)" - append_to_profile "export AR=llvm-ar-$(llvm_version)" - append_to_profile "export RANLIB=llvm-ranlib-$(llvm_version)" - append_to_profile "export LD=lld-$(llvm_version)" ;; brew) install_packages "llvm@$(llvm_version)" From 4362acf9c5d6788e60240f2929a527c07f1943cf Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Fri, 6 Dec 2024 12:22:08 -0800 Subject: [PATCH 042/176] Fix ubuntu 24.04 [build image] --- scripts/bootstrap.sh | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 2603d7fa7f8f65..4380968fe097e8 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ 
-817,14 +817,7 @@ install_llvm() { apt) bash="$(require bash)" llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")" - case "$distro-$release" in - ubuntu-24*) - execute_sudo "$bash" "$llvm_script" "$(llvm_version)" -njammy - ;; - *) - execute_sudo "$bash" "$llvm_script" "$(llvm_version)" - ;; - esac + execute_sudo "$bash" "$llvm_script" "$(llvm_version)" ;; brew) install_packages "llvm@$(llvm_version)" From 7c2c5580410e7967b5726fa550827a57a13eaf1a Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Fri, 6 Dec 2024 12:24:50 -0800 Subject: [PATCH 043/176] Bump WebKit to include oven-sh/WebKit#71 --- cmake/tools/SetupWebKit.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmake/tools/SetupWebKit.cmake b/cmake/tools/SetupWebKit.cmake index 50c1e2c7b93224..d21a5543113538 100644 --- a/cmake/tools/SetupWebKit.cmake +++ b/cmake/tools/SetupWebKit.cmake @@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use") option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading") if(NOT WEBKIT_VERSION) - set(WEBKIT_VERSION e17d16e0060b3d80ae40e78353d19575c9a8f3af) + set(WEBKIT_VERSION 2776bf2834aa773fe0d77f00018a7cf819c730ec) endif() if(WEBKIT_LOCAL) From ca495d36df3470b49c8e974be1019ff06cfee4fc Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Fri, 6 Dec 2024 12:26:22 -0800 Subject: [PATCH 044/176] [build image] From 6759e64a0b0df99161d2995c25558c2e77f45c29 Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Fri, 6 Dec 2024 12:40:04 -0800 Subject: [PATCH 045/176] Fix WTF::constantTimeMemcmp calls for new signature [build images] --- src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp b/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp index 2caf8149e852f6..2e29328bea0b83 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp @@ -95,7 +95,7 @@ ExceptionOr CryptoAlgorithmHMAC::platformVerifyWithAlgorithm(const CryptoK if (!expectedSignature) return Exception { OperationError }; // Using a constant time comparison to prevent timing attacks. - return signature.size() == expectedSignature->size() && !constantTimeMemcmp(expectedSignature->data(), signature.data(), expectedSignature->size()); + return signature.size() == expectedSignature->size() && !constantTimeMemcmp(expectedSignature->span(), signature.span()); } ExceptionOr CryptoAlgorithmHMAC::platformVerify(const CryptoKeyHMAC& key, const Vector& signature, const Vector& data) { @@ -107,7 +107,7 @@ ExceptionOr CryptoAlgorithmHMAC::platformVerify(const CryptoKeyHMAC& key, if (!expectedSignature) return Exception { OperationError }; // Using a constant time comparison to prevent timing attacks. 
- return signature.size() == expectedSignature->size() && !constantTimeMemcmp(expectedSignature->data(), signature.data(), expectedSignature->size()); + return signature.size() == expectedSignature->size() && !constantTimeMemcmp(expectedSignature->span(), signature.span()); } } // namespace WebCore From 7e658a3e4faa00d76e23d95b9bad33bafc496846 Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Fri, 6 Dec 2024 12:47:01 -0800 Subject: [PATCH 046/176] Revert: Bump WebKit to include oven-sh/WebKit#71 [build images] --- cmake/tools/SetupWebKit.cmake | 2 +- src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/cmake/tools/SetupWebKit.cmake b/cmake/tools/SetupWebKit.cmake index d21a5543113538..50c1e2c7b93224 100644 --- a/cmake/tools/SetupWebKit.cmake +++ b/cmake/tools/SetupWebKit.cmake @@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use") option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading") if(NOT WEBKIT_VERSION) - set(WEBKIT_VERSION 2776bf2834aa773fe0d77f00018a7cf819c730ec) + set(WEBKIT_VERSION e17d16e0060b3d80ae40e78353d19575c9a8f3af) endif() if(WEBKIT_LOCAL) diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp b/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp index 2e29328bea0b83..2caf8149e852f6 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp @@ -95,7 +95,7 @@ ExceptionOr CryptoAlgorithmHMAC::platformVerifyWithAlgorithm(const CryptoK if (!expectedSignature) return Exception { OperationError }; // Using a constant time comparison to prevent timing attacks. - return signature.size() == expectedSignature->size() && !constantTimeMemcmp(expectedSignature->span(), signature.span()); + return signature.size() == expectedSignature->size() && !constantTimeMemcmp(expectedSignature->data(), signature.data(), expectedSignature->size()); } ExceptionOr CryptoAlgorithmHMAC::platformVerify(const CryptoKeyHMAC& key, const Vector& signature, const Vector& data) { @@ -107,7 +107,7 @@ ExceptionOr CryptoAlgorithmHMAC::platformVerify(const CryptoKeyHMAC& key, if (!expectedSignature) return Exception { OperationError }; // Using a constant time comparison to prevent timing attacks. 
- return signature.size() == expectedSignature->size() && !constantTimeMemcmp(expectedSignature->span(), signature.span()); + return signature.size() == expectedSignature->size() && !constantTimeMemcmp(expectedSignature->data(), signature.data(), expectedSignature->size()); } } // namespace WebCore From fbdbcabf6ea5b4956caf314c7e5f8a45afdd45ad Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Fri, 6 Dec 2024 15:13:14 -0800 Subject: [PATCH 047/176] Link pthread --- cmake/targets/BuildBun.cmake | 1 + 1 file changed, 1 insertion(+) diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 0f625a4fd9c8b7..f3a37c502b67f9 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -871,6 +871,7 @@ if(LINUX) endif() target_link_options(${bun} PUBLIC + -lpthread -Wl,--wrap=cosf -Wl,--wrap=exp -Wl,--wrap=expf From e0235c5b02102bac23b1e596faefb2d4bb2c7a30 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Fri, 6 Dec 2024 15:21:34 -0800 Subject: [PATCH 048/176] [build images] --- src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp b/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp index 2caf8149e852f6..78cbc6f8e86740 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp @@ -97,6 +97,7 @@ ExceptionOr CryptoAlgorithmHMAC::platformVerifyWithAlgorithm(const CryptoK // Using a constant time comparison to prevent timing attacks. return signature.size() == expectedSignature->size() && !constantTimeMemcmp(expectedSignature->data(), signature.data(), expectedSignature->size()); } + ExceptionOr CryptoAlgorithmHMAC::platformVerify(const CryptoKeyHMAC& key, const Vector& signature, const Vector& data) { auto algorithm = digestAlgorithm(key.hashAlgorithmIdentifier()); From 0377bafc6579e94cb40eacd74868663122aa5b53 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Fri, 6 Dec 2024 16:07:54 -0800 Subject: [PATCH 049/176] `-fno-semantic-interposition` [build images] --- cmake/CompilerFlags.cmake | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/cmake/CompilerFlags.cmake b/cmake/CompilerFlags.cmake index 31d738134a0af1..847b365ddae467 100644 --- a/cmake/CompilerFlags.cmake +++ b/cmake/CompilerFlags.cmake @@ -176,6 +176,10 @@ if(LINUX) DESCRIPTION "Disable relocation read-only (RELRO)" -Wl,-z,norelro ) + register_compiler_flags( + DESCRIPTION "Disable semantic interposition" + -fno-semantic-interposition + ) endif() # --- Assertions --- From 54fe995557fbf38890d7e465e20109304ceb4603 Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Fri, 6 Dec 2024 17:39:41 -0800 Subject: [PATCH 050/176] Clearer glibc version requirement in symbols.test.ts --- test/js/bun/symbols.test.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/js/bun/symbols.test.ts b/test/js/bun/symbols.test.ts index 510c5f68da1297..cc563e5e4ea7d1 100644 --- a/test/js/bun/symbols.test.ts +++ b/test/js/bun/symbols.test.ts @@ -6,7 +6,7 @@ import { semver } from "bun"; const BUN_EXE = bunExe(); if (process.platform === "linux") { - test("objdump -T does not include symbols from glibc >= 2.27", async () => { + test("objdump -T does not include symbols from glibc > 2.26", async () => { const objdump = Bun.which("objdump") || Bun.which("llvm-objdump"); if (!objdump) { throw new Error("objdump executable not found. 
Please install it."); @@ -31,7 +31,7 @@ if (process.platform === "linux") { } } if (errors.length) { - throw new Error(`Found glibc symbols >= 2.27. This breaks Amazon Linux 2 and Vercel. + throw new Error(`Found glibc symbols > 2.26. This breaks Amazon Linux 2 and Vercel. ${Bun.inspect.table(errors, { colors: true })} To fix this, add it to -Wl,-wrap=symbol in the linker flags and update workaround-missing-symbols.cpp.`); From 0eb43bc0c714ac64e3155335a5dfa36e461f7330 Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Fri, 6 Dec 2024 17:39:54 -0800 Subject: [PATCH 051/176] [build image] From b6d2d32aa887287216dc62d50089b94f4e58bb2e Mon Sep 17 00:00:00 2001 From: Ben Grant Date: Fri, 6 Dec 2024 17:40:32 -0800 Subject: [PATCH 052/176] Clearer glibc version requirement in symbols.test.ts [build image] --- test/js/bun/symbols.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/js/bun/symbols.test.ts b/test/js/bun/symbols.test.ts index cc563e5e4ea7d1..e255954ec606cb 100644 --- a/test/js/bun/symbols.test.ts +++ b/test/js/bun/symbols.test.ts @@ -22,7 +22,7 @@ if (process.platform === "linux") { if (version.startsWith("2..")) { version = "2." + version.slice(3); } - if (semver.order(version, "2.27.0") >= 0) { + if (semver.order(version, "2.26.0") > 0) { errors.push({ symbol: line.slice(line.lastIndexOf(")") + 1).trim(), "glibc version": version, From 88ddef4514f54a3923fd74429e5c15cb16d7d18e Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Fri, 6 Dec 2024 18:19:27 -0800 Subject: [PATCH 053/176] [build images] --- .buildkite/ci.mjs | 7 +- cmake/targets/BuildBun.cmake | 72 +- scripts/bootstrap.sh | 67 +- .../bindings/workaround-missing-symbols.cpp | 626 +----------------- 4 files changed, 67 insertions(+), 705 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 786b351a3a8eb6..a3009fd9bec55a 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -104,9 +104,9 @@ function getTargetLabel(target) { const buildPlatforms = [ { os: "darwin", arch: "aarch64", release: "14" }, { os: "darwin", arch: "x64", release: "14" }, - { os: "linux", arch: "aarch64", distro: "debian", release: "12" }, - { os: "linux", arch: "x64", distro: "debian", release: "12" }, - { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12" }, + { os: "linux", arch: "aarch64", distro: "ubuntu", release: "18.04" }, + { os: "linux", arch: "x64", distro: "ubuntu", release: "18.04" }, + { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "18.04" }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" }, @@ -399,7 +399,6 @@ function getBuildEnv(target) { ENABLE_ASSERTIONS: release ? "OFF" : "ON", ENABLE_LOGS: release ? "OFF" : "ON", ABI: abi === "musl" ? 
"musl" : undefined, - LLVM_VERSION: "18.1.8", }; } diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index f3a37c502b67f9..60c51277b6a4fb 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -850,79 +850,9 @@ if(APPLE) endif() if(LINUX) - if(NOT ABI STREQUAL "musl") - if(ARCH STREQUAL "x64") - target_link_options(${bun} PUBLIC - -Wl,--wrap=__isoc23_sscanf - -Wl,--wrap=__isoc23_strtol - -Wl,--wrap=__isoc23_strtoul - -Wl,--wrap=__isoc23_strtoull - -Wl,--wrap=__isoc23_vfscanf - -Wl,--wrap=__isoc23_vscanf - -Wl,--wrap=__isoc23_vsscanf - -Wl,--wrap=dlvsym - -Wl,--wrap=fcntl - -Wl,--wrap=getrandom - -Wl,--wrap=mknod - -Wl,--wrap=mknodat - -Wl,--wrap=pthread_sigmask - -Wl,--wrap=quick_exit - ) - endif() - + if(NOT ABI STREQUAL "musl") target_link_options(${bun} PUBLIC - -lpthread - -Wl,--wrap=cosf - -Wl,--wrap=exp -Wl,--wrap=expf - -Wl,--wrap=fmod - -Wl,--wrap=fmodf - -Wl,--wrap=log - -Wl,--wrap=log10f - -Wl,--wrap=log2 - -Wl,--wrap=log2f - -Wl,--wrap=logf - -Wl,--wrap=pow - -Wl,--wrap=powf - -Wl,--wrap=sincosf - -Wl,--wrap=sinf - -Wl,--wrap=tanf - -Wl,--wrap=arc4random_buf - -Wl,--wrap=sem_post - -Wl,--wrap=sem_wait - -Wl,--wrap=sem_init - -Wl,--wrap=__libc_single_threaded - -Wl,--wrap=__libc_start_main - -Wl,--wrap=__pthread_key_create - -Wl,--wrap=_dl_find_object - -Wl,--wrap=dladdr - -Wl,--wrap=dlclose - -Wl,--wrap=dlerror - -Wl,--wrap=dlopen - -Wl,--wrap=dlsym - -Wl,--wrap=pthread_attr_getstack - -Wl,--wrap=pthread_attr_setguardsize - -Wl,--wrap=pthread_attr_setstacksize - -Wl,--wrap=pthread_create - -Wl,--wrap=pthread_detach - -Wl,--wrap=pthread_getattr_np - -Wl,--wrap=pthread_getspecific - -Wl,--wrap=pthread_join - -Wl,--wrap=pthread_key_create - -Wl,--wrap=pthread_key_delete - -Wl,--wrap=pthread_kill - -Wl,--wrap=pthread_mutex_trylock - -Wl,--wrap=pthread_mutexattr_destroy - -Wl,--wrap=pthread_mutexattr_init - -Wl,--wrap=pthread_mutexattr_settype - -Wl,--wrap=pthread_once - -Wl,--wrap=pthread_rwlock_destroy - -Wl,--wrap=pthread_rwlock_init - -Wl,--wrap=pthread_rwlock_rdlock - -Wl,--wrap=pthread_rwlock_unlock - -Wl,--wrap=pthread_rwlock_wrlock - -Wl,--wrap=pthread_setspecific - -Wl,--wrap=fcntl64 ) endif() diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 4380968fe097e8..095c1c7a9f237d 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -85,6 +85,55 @@ fetch() { fi } + +install_gcc13_ubuntu18() { + if ! [ "$distro" = "ubuntu" ] || ! [ "$release" = "18.04" ]; then + return + fi + + print "Installing GCC 13 toolchain for Ubuntu 18.04..." 
+ + # Add the Ubuntu Toolchain PPA + execute_sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test + execute_sudo apt-get update + + # Install GCC 13 and related packages + install_packages \ + gcc-13 \ + g++-13 \ + libgcc-13-dev \ + libstdc++-13-dev \ + libasan6 \ + libubsan1 \ + libatomic1 \ + libtsan0 \ + liblsan0 \ + libgfortran5 \ + libc6-dev + + # Set up GCC 13 as the default compiler + execute_sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \ + --slave /usr/bin/g++ g++ /usr/bin/g++-13 \ + --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \ + --slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-13 \ + --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-13 + + # Configure library paths for Ubuntu 18.04 + execute_sudo mkdir -p /usr/lib/gcc/x86_64-linux-gnu/13 + execute_sudo ln -sf /usr/lib/x86_64-linux-gnu/libstdc++.so.6 /usr/lib/gcc/x86_64-linux-gnu/13/ + + # Update library paths configuration + execute_sudo sh -c 'echo "/usr/lib/gcc/x86_64-linux-gnu/13" > /etc/ld.so.conf.d/gcc-13.conf' + execute_sudo sh -c 'echo "/usr/lib/x86_64-linux-gnu" >> /etc/ld.so.conf.d/gcc-13.conf' + execute_sudo ldconfig + + # Set environment variables for the toolchain + append_to_profile 'export LD_LIBRARY_PATH="/usr/lib/gcc/x86_64-linux-gnu/13:/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH"' + append_to_profile 'export LIBRARY_PATH="/usr/lib/gcc/x86_64-linux-gnu/13:/usr/lib/x86_64-linux-gnu:$LIBRARY_PATH"' + append_to_profile 'export CPLUS_INCLUDE_PATH="/usr/include/c++/13:/usr/include/x86_64-linux-gnu/c++/13:$CPLUS_INCLUDE_PATH"' + append_to_profile 'export C_INCLUDE_PATH="/usr/lib/gcc/x86_64-linux-gnu/13/include:$C_INCLUDE_PATH"' +} + download_file() { url="$1" filename="${2:-$(basename "$url")}" @@ -630,11 +679,7 @@ install_common_software() { nodejs_version_exact() { # https://unofficial-builds.nodejs.org/download/release/ - if ! [ "$abi" = "musl" ] && [ -n "$abi_version" ] && ! 
[ "$(compare_version "$abi_version" "2.27")" = "1" ]; then - print "16.9.1" - else - print "22.9.0" - fi + print "22.9.0" } nodejs_version() { @@ -748,6 +793,15 @@ install_rosetta() { install_build_essentials() { case "$pm" in apt) + + # Install modern CMake for Ubuntu 18.04 + if [ "$distro" = "ubuntu" ] && [ "$release" = "18.04" ]; then + # Add Kitware's CMake repository + wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | execute_sudo tee /etc/apt/trusted.gpg.d/kitware.gpg >/dev/null + execute_sudo apt-add-repository "deb https://apt.kitware.com/ubuntu/ bionic main" + execute_sudo apt-get update + fi + install_packages \ build-essential \ ninja-build \ @@ -797,6 +851,9 @@ install_build_essentials() { ruby \ perl + # Install GCC 13 specifically for Ubuntu 18.04 + install_gcc13_ubuntu18 + install_cmake install_llvm install_ccache diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index c7461ed8b1f18c..bd244cf1674c00 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -81,115 +81,10 @@ extern "C" int kill(int pid, int sig) #endif #if defined(__x86_64__) -__asm__(".symver cosf,cosf@GLIBC_2.2.5"); -__asm__(".symver exp,exp@GLIBC_2.2.5"); __asm__(".symver expf,expf@GLIBC_2.2.5"); -__asm__(".symver fcntl,fcntl@GLIBC_2.2.5"); -__asm__(".symver fmod,fmod@GLIBC_2.2.5"); -__asm__(".symver fmodf,fmodf@GLIBC_2.2.5"); -__asm__(".symver log,log@GLIBC_2.2.5"); -__asm__(".symver log10f,log10f@GLIBC_2.2.5"); -__asm__(".symver log2,log2@GLIBC_2.2.5"); -__asm__(".symver log2f,log2f@GLIBC_2.2.5"); -__asm__(".symver logf,logf@GLIBC_2.2.5"); -__asm__(".symver pow,pow@GLIBC_2.2.5"); -__asm__(".symver powf,powf@GLIBC_2.2.5"); -__asm__(".symver sincosf,sincosf@GLIBC_2.2.5"); -__asm__(".symver sinf,sinf@GLIBC_2.2.5"); -__asm__(".symver tanf,tanf@GLIBC_2.2.5"); - -// Add symbol versions for libc and threading functions -__asm__(".symver __libc_start_main,__libc_start_main@GLIBC_2.2.5"); -__asm__(".symver dladdr,dladdr@GLIBC_2.2.5"); -__asm__(".symver dlclose,dlclose@GLIBC_2.2.5"); -__asm__(".symver dlerror,dlerror@GLIBC_2.2.5"); -__asm__(".symver dlopen,dlopen@GLIBC_2.2.5"); -__asm__(".symver dlsym,dlsym@GLIBC_2.2.5"); -__asm__(".symver dlvsym,dlvsym@GLIBC_2.2.5"); -__asm__(".symver getrandom,getrandom@GLIBC_2.25"); - -// Add symbol versions for pthread functions -__asm__(".symver pthread_attr_getstack,pthread_attr_getstack@GLIBC_2.2.5"); -__asm__(".symver pthread_attr_setguardsize,pthread_attr_setguardsize@GLIBC_2.2.5"); -__asm__(".symver pthread_attr_setstacksize,pthread_attr_setstacksize@GLIBC_2.2.5"); -__asm__(".symver pthread_create,pthread_create@GLIBC_2.2.5"); -__asm__(".symver pthread_detach,pthread_detach@GLIBC_2.2.5"); -__asm__(".symver pthread_getattr_np,pthread_getattr_np@GLIBC_2.2.5"); -__asm__(".symver pthread_getspecific,pthread_getspecific@GLIBC_2.2.5"); -__asm__(".symver pthread_join,pthread_join@GLIBC_2.2.5"); -__asm__(".symver pthread_key_create,pthread_key_create@GLIBC_2.2.5"); -__asm__(".symver pthread_key_delete,pthread_key_delete@GLIBC_2.2.5"); -__asm__(".symver pthread_kill,pthread_kill@GLIBC_2.2.5"); -__asm__(".symver pthread_mutex_trylock,pthread_mutex_trylock@GLIBC_2.2.5"); -__asm__(".symver pthread_mutexattr_destroy,pthread_mutexattr_destroy@GLIBC_2.2.5"); -__asm__(".symver pthread_mutexattr_init,pthread_mutexattr_init@GLIBC_2.2.5"); -__asm__(".symver 
pthread_mutexattr_settype,pthread_mutexattr_settype@GLIBC_2.2.5"); -__asm__(".symver pthread_once,pthread_once@GLIBC_2.2.5"); -__asm__(".symver pthread_rwlock_destroy,pthread_rwlock_destroy@GLIBC_2.2.5"); -__asm__(".symver pthread_rwlock_init,pthread_rwlock_init@GLIBC_2.2.5"); -__asm__(".symver pthread_rwlock_rdlock,pthread_rwlock_rdlock@GLIBC_2.2.5"); -__asm__(".symver pthread_rwlock_unlock,pthread_rwlock_unlock@GLIBC_2.2.5"); -__asm__(".symver pthread_rwlock_wrlock,pthread_rwlock_wrlock@GLIBC_2.2.5"); -__asm__(".symver pthread_setspecific,pthread_setspecific@GLIBC_2.2.5"); -__asm__(".symver pthread_sigmask,pthread_sigmask@GLIBC_2.2.5"); -__asm__(".symver quick_exit,quick_exit@GLIBC_2.2.5"); -__asm__(".symver sem_init,sem_init@GLIBC_2.2.5"); -__asm__(".symver sem_post,sem_post@GLIBC_2.2.5"); -__asm__(".symver sem_wait,sem_wait@GLIBC_2.2.5"); -__asm__(".symver __pthread_key_create,__pthread_key_create@GLIBC_2.2.5"); - #elif defined(__aarch64__) -__asm__(".symver __libc_start_main,__libc_start_main@GLIBC_2.17"); -__asm__(".symver __pthread_key_create,__pthread_key_create@GLIBC_2.17"); -__asm__(".symver _dl_find_object,_dl_find_object@GLIBC_2.17"); -__asm__(".symver cosf,cosf@GLIBC_2.17"); -__asm__(".symver dladdr,dladdr@GLIBC_2.17"); -__asm__(".symver dlclose,dlclose@GLIBC_2.17"); -__asm__(".symver dlerror,dlerror@GLIBC_2.17"); -__asm__(".symver dlopen,dlopen@GLIBC_2.17"); -__asm__(".symver dlsym,dlsym@GLIBC_2.17"); -__asm__(".symver exp,exp@GLIBC_2.17"); __asm__(".symver expf,expf@GLIBC_2.17"); -__asm__(".symver fmod,fmod@GLIBC_2.17"); -__asm__(".symver fmodf,fmodf@GLIBC_2.17"); -__asm__(".symver log,log@GLIBC_2.17"); -__asm__(".symver log10f,log10f@GLIBC_2.17"); -__asm__(".symver log2,log2@GLIBC_2.17"); -__asm__(".symver log2f,log2f@GLIBC_2.17"); -__asm__(".symver logf,logf@GLIBC_2.17"); -__asm__(".symver pow,pow@GLIBC_2.17"); -__asm__(".symver powf,powf@GLIBC_2.17"); -__asm__(".symver pthread_attr_getstack,pthread_attr_getstack@GLIBC_2.17"); -__asm__(".symver pthread_attr_setguardsize,pthread_attr_setguardsize@GLIBC_2.17"); -__asm__(".symver pthread_attr_setstacksize,pthread_attr_setstacksize@GLIBC_2.17"); -__asm__(".symver pthread_create,pthread_create@GLIBC_2.17"); -__asm__(".symver pthread_detach,pthread_detach@GLIBC_2.17"); -__asm__(".symver pthread_getattr_np,pthread_getattr_np@GLIBC_2.17"); -__asm__(".symver pthread_getspecific,pthread_getspecific@GLIBC_2.17"); -__asm__(".symver pthread_join,pthread_join@GLIBC_2.17"); -__asm__(".symver pthread_key_create,pthread_key_create@GLIBC_2.17"); -__asm__(".symver pthread_key_delete,pthread_key_delete@GLIBC_2.17"); -__asm__(".symver pthread_kill,pthread_kill@GLIBC_2.17"); -__asm__(".symver pthread_mutex_trylock,pthread_mutex_trylock@GLIBC_2.17"); -__asm__(".symver pthread_mutexattr_destroy,pthread_mutexattr_destroy@GLIBC_2.17"); -__asm__(".symver pthread_mutexattr_init,pthread_mutexattr_init@GLIBC_2.17"); -__asm__(".symver pthread_mutexattr_settype,pthread_mutexattr_settype@GLIBC_2.17"); -__asm__(".symver pthread_once,pthread_once@GLIBC_2.17"); -__asm__(".symver pthread_rwlock_destroy,pthread_rwlock_destroy@GLIBC_2.17"); -__asm__(".symver pthread_rwlock_init,pthread_rwlock_init@GLIBC_2.17"); -__asm__(".symver pthread_rwlock_rdlock,pthread_rwlock_rdlock@GLIBC_2.17"); -__asm__(".symver pthread_rwlock_unlock,pthread_rwlock_unlock@GLIBC_2.17"); -__asm__(".symver pthread_rwlock_wrlock,pthread_rwlock_wrlock@GLIBC_2.17"); -__asm__(".symver pthread_setspecific,pthread_setspecific@GLIBC_2.17"); -__asm__(".symver 
pthread_sigmask,pthread_sigmask@GLIBC_2.17"); -__asm__(".symver sem_init,sem_init@GLIBC_2.17"); -__asm__(".symver sem_post,sem_post@GLIBC_2.17"); -__asm__(".symver sem_wait,sem_wait@GLIBC_2.17"); -__asm__(".symver sincosf,sincosf@GLIBC_2.17"); -__asm__(".symver sinf,sinf@GLIBC_2.17"); -__asm__(".symver tanf,tanf@GLIBC_2.17"); - -#endif // aarch64 +#endif #if defined(__x86_64__) || defined(__aarch64__) #define BUN_WRAP_GLIBC_SYMBOL(symbol) __wrap_##symbol @@ -199,533 +94,14 @@ __asm__(".symver tanf,tanf@GLIBC_2.17"); extern "C" { -double BUN_WRAP_GLIBC_SYMBOL(exp)(double); -double BUN_WRAP_GLIBC_SYMBOL(fmod)(double, double); -double BUN_WRAP_GLIBC_SYMBOL(log)(double); -double BUN_WRAP_GLIBC_SYMBOL(log2)(double); -double BUN_WRAP_GLIBC_SYMBOL(pow)(double, double); -float BUN_WRAP_GLIBC_SYMBOL(cosf)(float); float BUN_WRAP_GLIBC_SYMBOL(expf)(float); -float BUN_WRAP_GLIBC_SYMBOL(fmodf)(float, float); -float BUN_WRAP_GLIBC_SYMBOL(log10f)(float); -float BUN_WRAP_GLIBC_SYMBOL(log2f)(float); -float BUN_WRAP_GLIBC_SYMBOL(logf)(float); -float BUN_WRAP_GLIBC_SYMBOL(sinf)(float); -float BUN_WRAP_GLIBC_SYMBOL(tanf)(float); -int BUN_WRAP_GLIBC_SYMBOL(fcntl)(int, int, ...); -int BUN_WRAP_GLIBC_SYMBOL(fcntl64)(int, int, ...); -void BUN_WRAP_GLIBC_SYMBOL(sincosf)(float, float*, float*); - -// Add new declarations for scanning/conversion functions -int BUN_WRAP_GLIBC_SYMBOL(sscanf)(const char*, const char*, ...); -long int BUN_WRAP_GLIBC_SYMBOL(strtol)(const char*, char**, int); -unsigned long int BUN_WRAP_GLIBC_SYMBOL(strtoul)(const char*, char**, int); -unsigned long long int BUN_WRAP_GLIBC_SYMBOL(strtoull)(const char*, char**, int); -int BUN_WRAP_GLIBC_SYMBOL(vfscanf)(FILE*, const char*, va_list); -int BUN_WRAP_GLIBC_SYMBOL(vscanf)(const char*, va_list); -int BUN_WRAP_GLIBC_SYMBOL(vsscanf)(const char*, const char*, va_list); - -// Add declarations for pthread functions -int BUN_WRAP_GLIBC_SYMBOL(pthread_attr_getstack)(const pthread_attr_t*, void**, size_t*); -int BUN_WRAP_GLIBC_SYMBOL(pthread_attr_setguardsize)(pthread_attr_t*, size_t); -int BUN_WRAP_GLIBC_SYMBOL(pthread_attr_setstacksize)(pthread_attr_t*, size_t); -int BUN_WRAP_GLIBC_SYMBOL(pthread_create)(pthread_t*, const pthread_attr_t*, void* (*)(void*), void*); -int BUN_WRAP_GLIBC_SYMBOL(pthread_detach)(pthread_t); -int BUN_WRAP_GLIBC_SYMBOL(pthread_getattr_np)(pthread_t, pthread_attr_t*); -void* BUN_WRAP_GLIBC_SYMBOL(pthread_getspecific)(pthread_key_t); -int BUN_WRAP_GLIBC_SYMBOL(pthread_join)(pthread_t, void**); -int BUN_WRAP_GLIBC_SYMBOL(pthread_key_create)(pthread_key_t*, void (*)(void*)); -int BUN_WRAP_GLIBC_SYMBOL(__pthread_key_create)(pthread_key_t*, void (*)(void*)); -int BUN_WRAP_GLIBC_SYMBOL(pthread_key_delete)(pthread_key_t); -int BUN_WRAP_GLIBC_SYMBOL(pthread_kill)(pthread_t, int); -int BUN_WRAP_GLIBC_SYMBOL(pthread_mutex_trylock)(pthread_mutex_t*); -int BUN_WRAP_GLIBC_SYMBOL(pthread_mutexattr_destroy)(pthread_mutexattr_t*); -int BUN_WRAP_GLIBC_SYMBOL(pthread_mutexattr_init)(pthread_mutexattr_t*); -int BUN_WRAP_GLIBC_SYMBOL(pthread_mutexattr_settype)(pthread_mutexattr_t*, int); -int BUN_WRAP_GLIBC_SYMBOL(pthread_once)(pthread_once_t*, void (*)(void)); -int BUN_WRAP_GLIBC_SYMBOL(pthread_rwlock_destroy)(pthread_rwlock_t*); -int BUN_WRAP_GLIBC_SYMBOL(pthread_rwlock_init)(pthread_rwlock_t*, const pthread_rwlockattr_t*); -int BUN_WRAP_GLIBC_SYMBOL(pthread_rwlock_rdlock)(pthread_rwlock_t*); -int BUN_WRAP_GLIBC_SYMBOL(pthread_rwlock_unlock)(pthread_rwlock_t*); -int BUN_WRAP_GLIBC_SYMBOL(pthread_rwlock_wrlock)(pthread_rwlock_t*); -int 
BUN_WRAP_GLIBC_SYMBOL(pthread_setspecific)(pthread_key_t, const void*); -int BUN_WRAP_GLIBC_SYMBOL(pthread_sigmask)(int, const sigset_t*, sigset_t*); -void* BUN_WRAP_GLIBC_SYMBOL(pthread_getspecific)(pthread_key_t key); - -// Add declarations for other system functions -void BUN_WRAP_GLIBC_SYMBOL(arc4random_buf)(void*, size_t); -ssize_t BUN_WRAP_GLIBC_SYMBOL(getrandom)(void*, size_t, unsigned int); -_Noreturn void BUN_WRAP_GLIBC_SYMBOL(quick_exit)(int); -int BUN_WRAP_GLIBC_SYMBOL(sem_init)(sem_t*, int, unsigned int); -int BUN_WRAP_GLIBC_SYMBOL(sem_post)(sem_t*); -int BUN_WRAP_GLIBC_SYMBOL(sem_wait)(sem_t*); - -// Add declarations for dynamic linking functions -int BUN_WRAP_GLIBC_SYMBOL(dladdr)(const void*, Dl_info*); -int BUN_WRAP_GLIBC_SYMBOL(dlclose)(void*); -char* BUN_WRAP_GLIBC_SYMBOL(dlerror)(void); -void* BUN_WRAP_GLIBC_SYMBOL(dlopen)(const char*, int); -void* BUN_WRAP_GLIBC_SYMBOL(dlsym)(void*, const char*); -void* BUN_WRAP_GLIBC_SYMBOL(dlvsym)(void*, const char*, const char*); - -int BUN_WRAP_GLIBC_SYMBOL(__libc_start_main)(int (*main)(int, char**, char**), int argc, char** argv, int (*init)(void), void (*fini)(void), void (*rtld_fini)(void), void* stack_end); #if defined(__x86_64__) || defined(__aarch64__) -double __wrap_exp(double x) { return exp(x); } -double __wrap_fmod(double x, double y) { return fmod(x, y); } -double __wrap_log(double x) { return log(x); } -double __wrap_log2(double x) { return log2(x); } -double __wrap_pow(double x, double y) { return pow(x, y); } -float __wrap_powf(float x, float y) { return powf(x, y); } -float __wrap_cosf(float x) { return cosf(x); } float __wrap_expf(float x) { return expf(x); } -float __wrap_fmodf(float x, float y) { return fmodf(x, y); } -float __wrap_log10f(float x) { return log10f(x); } -float __wrap_log2f(float x) { return log2f(x); } -float __wrap_logf(float x) { return logf(x); } -float __wrap_sinf(float x) { return sinf(x); } -float __wrap_tanf(float x) { return tanf(x); } -void __wrap_sincosf(float x, float* sin_x, float* cos_x) { sincosf(x, sin_x, cos_x); } - -// ban statx, for now -int __wrap_statx(int fd, const char* path, int flags, - unsigned int mask, struct statx* buf) -{ - errno = ENOSYS; -#ifdef BUN_DEBUG - abort(); -#endif - return -1; -} - -typedef int (*fcntl64_func)(int fd, int cmd, ...); - -enum arg_type { - NO_ARG, - INT_ARG, - PTR_ARG -}; - -static enum arg_type get_arg_type(int cmd) -{ - switch (cmd) { - // Commands that take no argument - case F_GETFD: - case F_GETFL: - case F_GETOWN: - case F_GETSIG: - case F_GETLEASE: - case F_GETPIPE_SZ: -#ifdef F_GET_SEALS - case F_GET_SEALS: -#endif - return NO_ARG; - - // Commands that take an integer argument - case F_DUPFD: - case F_DUPFD_CLOEXEC: - case F_SETFD: - case F_SETFL: - case F_SETOWN: - case F_SETSIG: - case F_SETLEASE: - case F_NOTIFY: - case F_SETPIPE_SZ: -#ifdef F_ADD_SEALS - case F_ADD_SEALS: -#endif - return INT_ARG; - - // Commands that take a pointer argument - case F_GETLK: - case F_SETLK: - case F_SETLKW: - case F_GETOWN_EX: - case F_SETOWN_EX: - return PTR_ARG; - - default: - return PTR_ARG; // Default to pointer for unknown commands - } -} - -int __wrap_fcntl64(int fd, int cmd, ...) 
-{ - va_list ap; - enum arg_type type = get_arg_type(cmd); - - static fcntl64_func real_fcntl64; - static std::once_flag real_fcntl64_initialized; - std::call_once(real_fcntl64_initialized, []() { - real_fcntl64 = (fcntl64_func)dlsym(RTLD_NEXT, "fcntl64"); - if (!real_fcntl64) { - real_fcntl64 = (fcntl64_func)dlsym(RTLD_NEXT, "fcntl"); - } - }); - - switch (type) { - case NO_ARG: - return real_fcntl64(fd, cmd); - - case INT_ARG: { - va_start(ap, cmd); - int arg = va_arg(ap, int); - va_end(ap); - return real_fcntl64(fd, cmd, arg); - } - - case PTR_ARG: { - va_start(ap, cmd); - void* arg = va_arg(ap, void*); - va_end(ap); - return real_fcntl64(fd, cmd, arg); - } - - default: - va_end(ap); - errno = EINVAL; - return -1; - } -} - -void __wrap_arc4random_buf(void* buf, size_t nbytes) -{ - getrandom(buf, nbytes, 0); -} - -int __wrap_sem_init(sem_t* sem, int pshared, unsigned int value) -{ - return sem_init(sem, pshared, value); -} - -int __wrap_sem_post(sem_t* sem) -{ - return sem_post(sem); -} - -int __wrap_sem_wait(sem_t* sem) -{ - return sem_wait(sem); -} - -// https://www.gnu.org/software/libc/manual/html_node/Single_002dThreaded.html -char __wrap___libc_single_threaded = 0; - -int __libc_start_main(int (*main)(int, char**, char**), int argc, char** argv, int (*init)(void), void (*fini)(void), void (*rtld_fini)(void), void* stack_end); - -int __wrap___libc_start_main(int (*main)(int, char**, char**), int argc, char** argv, int (*init)(void), void (*fini)(void), void (*rtld_fini)(void), void* stack_end) -{ - return __libc_start_main(main, argc, argv, init, fini, rtld_fini, stack_end); -} - -// pthread function wrappers -int __wrap_pthread_attr_getstack(const pthread_attr_t* attr, void** stackaddr, size_t* stacksize) -{ - return pthread_attr_getstack(attr, stackaddr, stacksize); -} - -int __wrap_pthread_attr_setguardsize(pthread_attr_t* attr, size_t guardsize) -{ - return pthread_attr_setguardsize(attr, guardsize); -} - -int __wrap_pthread_attr_setstacksize(pthread_attr_t* attr, size_t stacksize) -{ - return pthread_attr_setstacksize(attr, stacksize); -} - -int __wrap_pthread_create(pthread_t* thread, const pthread_attr_t* attr, void* (*start_routine)(void*), void* arg) -{ - return pthread_create(thread, attr, start_routine, arg); -} - -int __wrap_pthread_detach(pthread_t thread) -{ - return pthread_detach(thread); -} - -int __wrap_pthread_getattr_np(pthread_t thread, pthread_attr_t* attr) -{ - return pthread_getattr_np(thread, attr); -} - -void* __wrap_pthread_getspecific(pthread_key_t key) -{ - return pthread_getspecific(key); -} - -int __wrap_pthread_join(pthread_t thread, void** retval) -{ - return pthread_join(thread, retval); -} - -int __wrap_pthread_key_create(pthread_key_t* key, void (*destructor)(void*)) -{ - return pthread_key_create(key, destructor); -} - -int __wrap___pthread_key_create(pthread_key_t* key, void (*destructor)(void*)) -{ - return pthread_key_create(key, destructor); -} - -int __wrap_pthread_key_delete(pthread_key_t key) -{ - return pthread_key_delete(key); -} - -int __wrap_pthread_kill(pthread_t thread, int sig) -{ - return pthread_kill(thread, sig); -} - -int __wrap_pthread_mutex_trylock(pthread_mutex_t* mutex) -{ - return pthread_mutex_trylock(mutex); -} - -int __wrap_pthread_mutexattr_destroy(pthread_mutexattr_t* attr) -{ - return pthread_mutexattr_destroy(attr); -} - -int __wrap_pthread_mutexattr_init(pthread_mutexattr_t* attr) -{ - return pthread_mutexattr_init(attr); -} - -int __wrap_pthread_mutexattr_settype(pthread_mutexattr_t* attr, int type) -{ - return 
pthread_mutexattr_settype(attr, type); -} - -int __wrap_pthread_once(pthread_once_t* once_control, void (*init_routine)(void)) -{ - return pthread_once(once_control, init_routine); -} - -int __wrap_pthread_rwlock_destroy(pthread_rwlock_t* rwlock) -{ - return pthread_rwlock_destroy(rwlock); -} - -int __wrap_pthread_rwlock_init(pthread_rwlock_t* rwlock, const pthread_rwlockattr_t* attr) -{ - return pthread_rwlock_init(rwlock, attr); -} - -int __wrap_pthread_rwlock_rdlock(pthread_rwlock_t* rwlock) -{ - return pthread_rwlock_rdlock(rwlock); -} - -int __wrap_pthread_rwlock_unlock(pthread_rwlock_t* rwlock) -{ - return pthread_rwlock_unlock(rwlock); -} - -int __wrap_pthread_rwlock_wrlock(pthread_rwlock_t* rwlock) -{ - return pthread_rwlock_wrlock(rwlock); -} - -int __wrap_pthread_setspecific(pthread_key_t key, const void* value) -{ - return pthread_setspecific(key, value); -} - -int __wrap_pthread_sigmask(int how, const sigset_t* set, sigset_t* oldset) -{ - return pthread_sigmask(how, set, oldset); -} - -// Dynamic linking function wrappers -int __wrap_dladdr(const void* addr, Dl_info* info) -{ - return dladdr(addr, info); -} - -int __wrap_dlclose(void* handle) -{ - return dlclose(handle); -} - -char* __wrap_dlerror(void) -{ - return dlerror(); -} - -void* __wrap_dlopen(const char* filename, int flags) -{ - return dlopen(filename, flags); -} - -void* __wrap_dlsym(void* handle, const char* symbol) -{ - return dlsym(handle, symbol); -} #endif // x86_64 or aarch64 -#if defined(__x86_64__) - -// Scanning/conversion function wrappers -int __wrap_sscanf(const char* str, const char* format, ...) -{ - va_list ap; - va_start(ap, format); - int result = vsscanf(str, format, ap); - va_end(ap); - return result; -} - -long int __wrap_strtol(const char* nptr, char** endptr, int base) -{ - return strtol(nptr, endptr, base); -} - -unsigned long int __wrap_strtoul(const char* nptr, char** endptr, int base) -{ - return strtoul(nptr, endptr, base); -} - -unsigned long long int __wrap_strtoull(const char* nptr, char** endptr, int base) -{ - return strtoull(nptr, endptr, base); -} - -unsigned long int __wrap___isoc23_strtoul(const char* nptr, char** endptr, int base) -{ - return strtoul(nptr, endptr, base); -} - -long int __wrap___isoc23_strtol(const char* nptr, char** endptr, int base) -{ - return strtol(nptr, endptr, base); -} - -unsigned long long int __wrap___isoc23_strtoull(const char* nptr, char** endptr, int base) -{ - return strtoull(nptr, endptr, base); -} - -int __wrap___isoc23_sscanf(const char* str, const char* format, ...) 
-{ - va_list ap; - va_start(ap, format); - int result = vsscanf(str, format, ap); - va_end(ap); - return result; -} - -int __wrap___isoc23_vscanf(const char* format, va_list ap) -{ - va_list ap_copy; - va_copy(ap_copy, ap); - int result = vscanf(format, ap_copy); - va_end(ap_copy); - return result; -} - -int __wrap_vfscanf(FILE* stream, const char* format, va_list ap) -{ - va_list ap_copy; - va_copy(ap_copy, ap); - int result = vfscanf(stream, format, ap_copy); - va_end(ap_copy); - return result; -} - -int __wrap_vscanf(const char* format, va_list ap) -{ - va_list ap_copy; - va_copy(ap_copy, ap); - int result = vscanf(format, ap_copy); - va_end(ap_copy); - return result; -} - -int __wrap_vsscanf(const char* str, const char* format, va_list ap) -{ - va_list ap_copy; - va_copy(ap_copy, ap); - int result = vsscanf(str, format, ap_copy); - va_end(ap_copy); - return result; -} - -int __wrap___isoc23_vfscanf(FILE* stream, const char* format, va_list ap) -{ - va_list ap_copy; - va_copy(ap_copy, ap); - int result = vfscanf(stream, format, ap_copy); - va_end(ap_copy); - return result; -} - -int __wrap___isoc23_vsscanf(const char* str, const char* format, va_list ap) -{ - va_list ap_copy; - va_copy(ap_copy, ap); - int result = vsscanf(str, format, ap_copy); - va_end(ap_copy); - return result; -} - -void* __wrap_dlvsym(void* handle, const char* symbol, const char* version) -{ - return dlvsym(handle, symbol, version); -} - -// Other system function wrappers -ssize_t __wrap_getrandom(void* buffer, size_t length, unsigned int flags) -{ - return getrandom(buffer, length, flags); -} - -_Noreturn void __wrap_quick_exit(int status) -{ - typedef void (*quick_exit_func)(int) __attribute__((noreturn)); - static std::once_flag quick_exit_initialized; - static quick_exit_func quick_exit; - std::call_once(quick_exit_initialized, []() { - quick_exit = (quick_exit_func)dlsym(RTLD_NEXT, "quick_exit"); - if (UNLIKELY(!quick_exit)) { - quick_exit = _exit; - } - }); - - quick_exit(status); -} - -int __wrap_fcntl(int fd, int cmd, ...) 
-{ - va_list args; - va_start(args, cmd); - void* arg = va_arg(args, void*); - va_end(args); - return fcntl(fd, cmd, arg); -} - -int __wrap__dl_find_object(void* address, struct dl_find_object* result) -{ - return _dl_find_object(address, result); -} - -#endif // x86_64 - -#if defined(__aarch64__) - -// This function is only called by the unwind implementation, which won't be run in the first place -// since we don't allow C++ exceptions (any thrown will just go to the crash handler) -int __wrap__dl_find_object(void* address, struct dl_find_object* result) -{ - abort(); -} - -#endif // aarch64 - } // extern "C" #endif // glibc From 79518aee4644365b37eefe9975cd4f6b3cdebd96 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Fri, 6 Dec 2024 18:48:32 -0800 Subject: [PATCH 054/176] [build images] --- .buildkite/ci.mjs | 2 +- scripts/bootstrap.sh | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index a3009fd9bec55a..c5f8aa90566daf 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -104,7 +104,7 @@ function getTargetLabel(target) { const buildPlatforms = [ { os: "darwin", arch: "aarch64", release: "14" }, { os: "darwin", arch: "x64", release: "14" }, - { os: "linux", arch: "aarch64", distro: "ubuntu", release: "18.04" }, + { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04" }, { os: "linux", arch: "x64", distro: "ubuntu", release: "18.04" }, { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "18.04" }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 095c1c7a9f237d..8509da62ca2d74 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -679,7 +679,11 @@ install_common_software() { nodejs_version_exact() { # https://unofficial-builds.nodejs.org/download/release/ - print "22.9.0" + if ! [ "$abi" = "musl" ] && [ -n "$abi_version" ] && ! 
[ "$(compare_version "$abi_version" "2.27")" = "1" ]; then + print "16.9.1" + else + print "22.9.0" + fi } nodejs_version() { From 0a7e2b2a870948b55bb55faa138cfc9756bc3747 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Fri, 6 Dec 2024 18:56:12 -0800 Subject: [PATCH 055/176] [build images] --- scripts/bootstrap.sh | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 8509da62ca2d74..1b7d9063e35a60 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -118,20 +118,23 @@ install_gcc13_ubuntu18() { --slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-13 \ --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-13 + # Get system triplet dynamically + triplet=$(gcc -dumpmachine | sed 's/-pc-/-/') # Remove -pc- if present + # Configure library paths for Ubuntu 18.04 - execute_sudo mkdir -p /usr/lib/gcc/x86_64-linux-gnu/13 - execute_sudo ln -sf /usr/lib/x86_64-linux-gnu/libstdc++.so.6 /usr/lib/gcc/x86_64-linux-gnu/13/ + execute_sudo mkdir -p "/usr/lib/gcc/${triplet}/13" + execute_sudo ln -sf "/usr/lib/${triplet}/libstdc++.so.6" "/usr/lib/gcc/${triplet}/13/" # Update library paths configuration - execute_sudo sh -c 'echo "/usr/lib/gcc/x86_64-linux-gnu/13" > /etc/ld.so.conf.d/gcc-13.conf' - execute_sudo sh -c 'echo "/usr/lib/x86_64-linux-gnu" >> /etc/ld.so.conf.d/gcc-13.conf' + execute_sudo sh -c "echo '/usr/lib/gcc/${triplet}/13' > /etc/ld.so.conf.d/gcc-13.conf" + execute_sudo sh -c "echo '/usr/lib/${triplet}' >> /etc/ld.so.conf.d/gcc-13.conf" execute_sudo ldconfig # Set environment variables for the toolchain - append_to_profile 'export LD_LIBRARY_PATH="/usr/lib/gcc/x86_64-linux-gnu/13:/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH"' - append_to_profile 'export LIBRARY_PATH="/usr/lib/gcc/x86_64-linux-gnu/13:/usr/lib/x86_64-linux-gnu:$LIBRARY_PATH"' - append_to_profile 'export CPLUS_INCLUDE_PATH="/usr/include/c++/13:/usr/include/x86_64-linux-gnu/c++/13:$CPLUS_INCLUDE_PATH"' - append_to_profile 'export C_INCLUDE_PATH="/usr/lib/gcc/x86_64-linux-gnu/13/include:$C_INCLUDE_PATH"' + append_to_profile "export LD_LIBRARY_PATH=\"/usr/lib/gcc/${triplet}/13:/usr/lib/${triplet}:\$LD_LIBRARY_PATH\"" + append_to_profile "export LIBRARY_PATH=\"/usr/lib/gcc/${triplet}/13:/usr/lib/${triplet}:\$LIBRARY_PATH\"" + append_to_profile "export CPLUS_INCLUDE_PATH=\"/usr/include/c++/13:/usr/include/${triplet}/c++/13:\$CPLUS_INCLUDE_PATH\"" + append_to_profile "export C_INCLUDE_PATH=\"/usr/lib/gcc/${triplet}/13/include:\$C_INCLUDE_PATH\"" } download_file() { @@ -741,6 +744,10 @@ install_bun() { bash="$(require bash)" script=$(download_file "https://bun.sh/install") + export BUN_INSTALL="$home/.bun" + rm -rf "$BUN_INSTALL" + mkdir -p "$BUN_INSTALL" + chown -R "$user:$group" "$BUN_INSTALL" version="${1:-"latest"}" case "$version" in @@ -799,11 +806,13 @@ install_build_essentials() { apt) # Install modern CMake for Ubuntu 18.04 - if [ "$distro" = "ubuntu" ] && [ "$release" = "18.04" ]; then + if [ "$distro" = "ubuntu" ]; then # Add Kitware's CMake repository wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | execute_sudo tee /etc/apt/trusted.gpg.d/kitware.gpg >/dev/null - execute_sudo apt-add-repository "deb https://apt.kitware.com/ubuntu/ bionic main" + execute_sudo apt-add-repository "deb https://apt.kitware.com/ubuntu/ $(lsb_release -cs) main" execute_sudo apt-get update + + append_to_profile "export DEBIAN_FRONTEND=noninteractive" fi install_packages \ From 
348f7fcbb6d0074eaad2400bc551ac325463513c Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Fri, 6 Dec 2024 19:00:52 -0800 Subject: [PATCH 056/176] [build images] --- scripts/bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 1b7d9063e35a60..537892b0f5d8c3 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -675,9 +675,9 @@ install_common_software() { install_rosetta install_nodejs - install_bun install_tailscale install_buildkite + install_bun } nodejs_version_exact() { From 1123e040c182971f2c32192fd0cbf083fa77a6c0 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 6 Dec 2024 22:35:36 -0800 Subject: [PATCH 057/176] add -H to sudo, [build images] --- scripts/bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 537892b0f5d8c3..abe7832ce7f0f4 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -41,7 +41,7 @@ execute_sudo() { execute_as_user() { if [ "$sudo" = "1" ] || [ "$can_sudo" = "1" ]; then if [ -f "$(which sudo)" ]; then - execute sudo -n -u "$user" /bin/sh -c "$*" + execute sudo -H -n -u "$user" /bin/sh -c "$*" elif [ -f "$(which doas)" ]; then execute doas -u "$user" /bin/sh -c "$*" elif [ -f "$(which su)" ]; then From 950fbcfe7f4c42059ca419060ac3c6abd88d0214 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 6 Dec 2024 22:49:06 -0800 Subject: [PATCH 058/176] [builds images] --- scripts/bootstrap.sh | 51 +++++++++++++++++++++++++++++--------------- 1 file changed, 34 insertions(+), 17 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index abe7832ce7f0f4..0d0f76d93de5ee 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -120,11 +120,11 @@ install_gcc13_ubuntu18() { # Get system triplet dynamically triplet=$(gcc -dumpmachine | sed 's/-pc-/-/') # Remove -pc- if present - + # Configure library paths for Ubuntu 18.04 execute_sudo mkdir -p "/usr/lib/gcc/${triplet}/13" execute_sudo ln -sf "/usr/lib/${triplet}/libstdc++.so.6" "/usr/lib/gcc/${triplet}/13/" - + # Update library paths configuration execute_sudo sh -c "echo '/usr/lib/gcc/${triplet}/13' > /etc/ld.so.conf.d/gcc-13.conf" execute_sudo sh -c "echo '/usr/lib/${triplet}' >> /etc/ld.so.conf.d/gcc-13.conf" @@ -743,23 +743,40 @@ install_bun() { esac bash="$(require bash)" - script=$(download_file "https://bun.sh/install") - export BUN_INSTALL="$home/.bun" - rm -rf "$BUN_INSTALL" - mkdir -p "$BUN_INSTALL" - chown -R "$user:$group" "$BUN_INSTALL" + # script=$(download_file "https://bun.sh/install") + # export BUN_INSTALL="$home/.bun" + # rm -rf "$BUN_INSTALL" + # mkdir -p "$BUN_INSTALL" + # chown -R "$user:$group" "$BUN_INSTALL" + + # version="${1:-"latest"}" + # case "$version" in + # latest) + # execute_as_user "$bash" "$script" + # ;; + # *) + # execute_as_user "$bash" "$script" -s "$version" + # ;; + # esac + + # move_to_bin "$home/.bun/bin/bun" + + bunabi="" + if [ "$abi" = "musl" ]; then + bunabi="-musl" + fi + buntarget="bun-${os}-${arch}${bunabi}" + sudo chown -R $user:$group $home + curl -LO "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest/${buntarget}.zip" --retry 5 + unzip ${buntarget}.zip + sudo mkdir -p "$home/.bun/bin" + sudo mv ${buntarget}/bun "$home/.bun/bin" + sudo chmod +x $home/.bun/bin/bun + sudo chown -R $user:$group $home/.bun + # append_to_path "$home/.bun/bin" + echo "export PATH=\$PATH:$home/.bun/bin" | sudo tee $home/.profile - version="${1:-"latest"}" - case "$version" in - 
latest) - execute_as_user "$bash" "$script" - ;; - *) - execute_as_user "$bash" "$script" -s "$version" - ;; - esac - move_to_bin "$home/.bun/bin/bun" bun_path="$(which bun)" bunx_path="$(dirname "$bun_path")/bunx" execute_sudo ln -sf "$bun_path" "$bunx_path" From 660661a46115319d7417f406d054237f92e1683f Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 6 Dec 2024 23:05:47 -0800 Subject: [PATCH 059/176] [build images] --- scripts/bootstrap.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 0d0f76d93de5ee..8b29949fdd7b58 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -203,7 +203,7 @@ append_to_file_sudo() { append_to_profile() { content="$1" - profiles=".profile .zprofile .bash_profile .bashrc .zshrc" + profiles=".profile" for profile in $profiles; do file="$home/$profile" if [ "$ci" = "1" ] || [ -f "$file" ]; then @@ -1052,7 +1052,7 @@ create_buildkite_user() { execute_sudo chown -R "$user:$group" "$path" done - buildkite_files="/var/run/buildkite-agent/buildkite-agent.pid" + buildkite_files="/var/run/buildkite-agent/buildkite-agent.pid /var/run/buildkite-agent/.profile" for file in $buildkite_files; do execute_sudo touch "$file" execute_sudo chown "$user:$group" "$file" From c7a49818bd6575858356add6035df3ec59267218 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 6 Dec 2024 23:09:47 -0800 Subject: [PATCH 060/176] [build images] --- scripts/bootstrap.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 8b29949fdd7b58..71a1add4fb5c2c 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -773,9 +773,8 @@ install_bun() { sudo mv ${buntarget}/bun "$home/.bun/bin" sudo chmod +x $home/.bun/bin/bun sudo chown -R $user:$group $home/.bun - # append_to_path "$home/.bun/bin" echo "export PATH=\$PATH:$home/.bun/bin" | sudo tee $home/.profile - + export PATH=$PATH:$home/.bun/bin bun_path="$(which bun)" bunx_path="$(dirname "$bun_path")/bunx" From 774d13952d28c7ca546db4e06496ac536f5b7fff Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 6 Dec 2024 23:20:25 -0800 Subject: [PATCH 061/176] [build images] --- scripts/bootstrap.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 71a1add4fb5c2c..488db65518850b 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -773,7 +773,8 @@ install_bun() { sudo mv ${buntarget}/bun "$home/.bun/bin" sudo chmod +x $home/.bun/bin/bun sudo chown -R $user:$group $home/.bun - echo "export PATH=\$PATH:$home/.bun/bin" | sudo tee $home/.profile + # echo "export PATH=\$PATH:$home/.bun/bin" | sudo tee $home/.profile + append_to_path "$home/.profile" export PATH=$PATH:$home/.bun/bin bun_path="$(which bun)" From 8a3a085696e1ac78c62f5daac06654b7505b669d Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 6 Dec 2024 23:27:20 -0800 Subject: [PATCH 062/176] [build images] --- scripts/bootstrap.sh | 56 ++++++++++++++++++++++---------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 488db65518850b..38daffbb534f23 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -743,39 +743,39 @@ install_bun() { esac bash="$(require bash)" - # script=$(download_file "https://bun.sh/install") + script=$(download_file "https://bun.sh/install") # export BUN_INSTALL="$home/.bun" # rm -rf "$BUN_INSTALL" # mkdir -p "$BUN_INSTALL" # chown -R "$user:$group" 
"$BUN_INSTALL" - # version="${1:-"latest"}" - # case "$version" in - # latest) - # execute_as_user "$bash" "$script" - # ;; - # *) - # execute_as_user "$bash" "$script" -s "$version" - # ;; - # esac - - # move_to_bin "$home/.bun/bin/bun" - - bunabi="" - if [ "$abi" = "musl" ]; then - bunabi="-musl" - fi - buntarget="bun-${os}-${arch}${bunabi}" - sudo chown -R $user:$group $home - curl -LO "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest/${buntarget}.zip" --retry 5 - unzip ${buntarget}.zip - sudo mkdir -p "$home/.bun/bin" - sudo mv ${buntarget}/bun "$home/.bun/bin" - sudo chmod +x $home/.bun/bin/bun - sudo chown -R $user:$group $home/.bun - # echo "export PATH=\$PATH:$home/.bun/bin" | sudo tee $home/.profile - append_to_path "$home/.profile" - export PATH=$PATH:$home/.bun/bin + version="${1:-"latest"}" + case "$version" in + latest) + execute_as_user "$bash" "$script" + ;; + *) + execute_as_user "$bash" "$script" -s "$version" + ;; + esac + + move_to_bin "$home/.bun/bin/bun" + + # bunabi="" + # if [ "$abi" = "musl" ]; then + # bunabi="-musl" + # fi + # buntarget="bun-${os}-${arch}${bunabi}" + # sudo chown -R $user:$group $home + # curl -LO "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest/${buntarget}.zip" --retry 5 + # unzip ${buntarget}.zip + # sudo mkdir -p "$home/.bun/bin" + # sudo mv ${buntarget}/bun "$home/.bun/bin" + # sudo chmod +x $home/.bun/bin/bun + # sudo chown -R $user:$group $home/.bun + # # echo "export PATH=\$PATH:$home/.bun/bin" | sudo tee $home/.profile + # append_to_path "$home/.profile" + # export PATH=$PATH:$home/.bun/bin bun_path="$(which bun)" bunx_path="$(dirname "$bun_path")/bunx" From 948c42d1154ed070cacc1b046851ca8542a0d073 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 6 Dec 2024 23:49:25 -0800 Subject: [PATCH 063/176] [dry run] --- .buildkite/ci.mjs | 15 +++++++++------ cmake/tools/SetupLLVM.cmake | 2 +- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index c5f8aa90566daf..096f6ebf30aba6 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -952,6 +952,7 @@ async function getPipelineOptions() { testPlatforms: testPlatformKeys?.length ? testPlatformKeys.map(key => testPlatformsMap.get(key)) : Array.from(testPlatformsMap.values()), + dryRun: parseBoolean(options["dry-run"]), }; } @@ -979,6 +980,7 @@ async function getPipelineOptions() { forceBuilds: parseOption(/\[(force builds?)\]/i), skipTests: parseOption(/\[(skip tests?|no tests?|only builds?)\]/i), buildImages: parseOption(/\[(build images?)\]/i), + dryRun: parseOption(/\[(dry run)\]/i), publishImages: parseOption(/\[(publish images?)\]/i), buildPlatforms: Array.from(buildPlatformsMap.values()), testPlatforms: Array.from(testPlatformsMap.values()), @@ -1025,7 +1027,8 @@ async function getPipeline(options = {}) { }); } - const { skipBuilds, forceBuilds, unifiedBuilds } = options; + let { skipBuilds, forceBuilds, unifiedBuilds, dryRun } = options; + dryRun = dryRun || !!buildImages; /** @type {string | undefined} */ let buildId; @@ -1052,12 +1055,12 @@ async function getPipeline(options = {}) { key: getTargetKey(target), group: getTargetLabel(target), steps: unifiedBuilds - ? [getBuildBunStep(target, !!buildImages)] + ? 
[getBuildBunStep(target, dryRun)] : [ - getBuildVendorStep(target, !!buildImages), - getBuildCppStep(target, !!buildImages), - getBuildZigStep(target, !!buildImages), - getLinkBunStep(target, !!buildImages), + getBuildVendorStep(target, dryRun), + getBuildCppStep(target, dryRun), + getBuildZigStep(target, dryRun), + getLinkBunStep(target, dryRun), ], }, imagePlatform ? `${imageKey}-build-image` : undefined, diff --git a/cmake/tools/SetupLLVM.cmake b/cmake/tools/SetupLLVM.cmake index a76cf16fe88964..16e03732813b8c 100644 --- a/cmake/tools/SetupLLVM.cmake +++ b/cmake/tools/SetupLLVM.cmake @@ -77,7 +77,7 @@ macro(find_llvm_command variable command) VERSION_VARIABLE LLVM_VERSION COMMAND ${commands} PATHS ${LLVM_PATHS} - VERSION ${LLVM_VERSION} + VERSION >${LLVM_VERSION_MAJOR}.1.0 ) list(APPEND CMAKE_ARGS -D${variable}=${${variable}}) endmacro() From 1d553e2bc1e08436f76fa81b44830b13ce4bac64 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Fri, 6 Dec 2024 23:51:02 -0800 Subject: [PATCH 064/176] [build images] From e4f4c71b7f5c1ba9955eb3ff5b10c1a7f4c46f2c Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Sat, 7 Dec 2024 00:17:45 -0800 Subject: [PATCH 065/176] [build images] --- cmake/Globals.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmake/Globals.cmake b/cmake/Globals.cmake index 3066bb2033dc67..320b05747f2138 100644 --- a/cmake/Globals.cmake +++ b/cmake/Globals.cmake @@ -291,7 +291,7 @@ function(find_command) set_property(GLOBAL PROPERTY ${FIND_NAME} "${exe}: ${reason}" APPEND) if(version) - satisfies_range(${version} ${${FIND_VERSION_VARIABLE}} ${variable}) + satisfies_range(${version} ${${FIND_VERSION}} ${variable}) set(${variable} ${${variable}} PARENT_SCOPE) endif() endfunction() From ac089d2d8c58091e00948554cde930c8c429971c Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Sat, 7 Dec 2024 00:39:46 -0800 Subject: [PATCH 066/176] [build images] --- cmake/Globals.cmake | 2 +- cmake/tools/SetupLLVM.cmake | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/cmake/Globals.cmake b/cmake/Globals.cmake index 320b05747f2138..af66b00f081762 100644 --- a/cmake/Globals.cmake +++ b/cmake/Globals.cmake @@ -291,7 +291,7 @@ function(find_command) set_property(GLOBAL PROPERTY ${FIND_NAME} "${exe}: ${reason}" APPEND) if(version) - satisfies_range(${version} ${${FIND_VERSION}} ${variable}) + satisfies_range(${version} ${FIND_VERSION} ${variable}) set(${variable} ${${variable}} PARENT_SCOPE) endif() endfunction() diff --git a/cmake/tools/SetupLLVM.cmake b/cmake/tools/SetupLLVM.cmake index 16e03732813b8c..2bcc97ceed0443 100644 --- a/cmake/tools/SetupLLVM.cmake +++ b/cmake/tools/SetupLLVM.cmake @@ -77,7 +77,7 @@ macro(find_llvm_command variable command) VERSION_VARIABLE LLVM_VERSION COMMAND ${commands} PATHS ${LLVM_PATHS} - VERSION >${LLVM_VERSION_MAJOR}.1.0 + VERSION >=${LLVM_VERSION_MAJOR}.1.0 ) list(APPEND CMAKE_ARGS -D${variable}=${${variable}}) endmacro() From 247241cd314feb36a758252d0b47c31561609b33 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Sat, 7 Dec 2024 01:17:34 -0800 Subject: [PATCH 067/176] [build images] --- cmake/tools/SetupCcache.cmake | 4 ++++ scripts/bootstrap.sh | 5 ++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/cmake/tools/SetupCcache.cmake b/cmake/tools/SetupCcache.cmake index d2367205c87d72..91379ec1db8819 100644 --- a/cmake/tools/SetupCcache.cmake +++ b/cmake/tools/SetupCcache.cmake @@ -42,3 +42,7 @@ if(CI) else() setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,random_seed,clang_index_store,gcno_cwd") endif() 
+ +if (CI AND NOT APPLE) + set(ENABLE_CCACHE OFF) +endif() diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 38daffbb534f23..94aaa1354faffa 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -87,7 +87,10 @@ fetch() { install_gcc13_ubuntu18() { - if ! [ "$distro" = "ubuntu" ] || ! [ "$release" = "18.04" ]; then + if ! [ "$distro" = "ubuntu" ]; then + return + fi + if ! { [ "$release" = "18.04" ] && [ "$arch" = "x64" ] || [ "$release" = "20.04" ] && [ "$arch" = "aarch64" ]; }; then return fi From 64f221e6a487949051aff77ff0b2ca7985b54973 Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Sat, 7 Dec 2024 01:38:08 -0800 Subject: [PATCH 068/176] [build images] --- cmake/tools/SetupCcache.cmake | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/cmake/tools/SetupCcache.cmake b/cmake/tools/SetupCcache.cmake index 91379ec1db8819..720a7acc53feba 100644 --- a/cmake/tools/SetupCcache.cmake +++ b/cmake/tools/SetupCcache.cmake @@ -5,6 +5,11 @@ if(NOT ENABLE_CCACHE OR CACHE_STRATEGY STREQUAL "none") return() endif() +if (CI AND NOT APPLE) + setenv(CCACHE_DISABLE 1) + return() +endif() + find_command( VARIABLE CCACHE_PROGRAM @@ -42,7 +47,3 @@ if(CI) else() setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,random_seed,clang_index_store,gcno_cwd") endif() - -if (CI AND NOT APPLE) - set(ENABLE_CCACHE OFF) -endif() From 11ba45146768a23395da26fcc9eaa3d51fcf34ea Mon Sep 17 00:00:00 2001 From: Meghan Denny Date: Sat, 7 Dec 2024 02:04:25 -0800 Subject: [PATCH 069/176] [build images] --- scripts/bootstrap.sh | 53 ++++++++++++++++++++++++++++++++++++++++---- scripts/machine.mjs | 1 + 2 files changed, 50 insertions(+), 4 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 94aaa1354faffa..5d99ffbc481c11 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -134,10 +134,26 @@ install_gcc13_ubuntu18() { execute_sudo ldconfig # Set environment variables for the toolchain - append_to_profile "export LD_LIBRARY_PATH=\"/usr/lib/gcc/${triplet}/13:/usr/lib/${triplet}:\$LD_LIBRARY_PATH\"" - append_to_profile "export LIBRARY_PATH=\"/usr/lib/gcc/${triplet}/13:/usr/lib/${triplet}:\$LIBRARY_PATH\"" - append_to_profile "export CPLUS_INCLUDE_PATH=\"/usr/include/c++/13:/usr/include/${triplet}/c++/13:\$CPLUS_INCLUDE_PATH\"" - append_to_profile "export C_INCLUDE_PATH=\"/usr/lib/gcc/${triplet}/13/include:\$C_INCLUDE_PATH\"" + # append_to_profile "export LD_LIBRARY_PATH=\"/usr/lib/gcc/${triplet}/13:/usr/lib/${triplet}:\$LD_LIBRARY_PATH\"" + # append_to_profile "export LIBRARY_PATH=\"/usr/lib/gcc/${triplet}/13:/usr/lib/${triplet}:\$LIBRARY_PATH\"" + # append_to_profile "export CPLUS_INCLUDE_PATH=\"/usr/include/c++/13:/usr/include/${triplet}/c++/13:\$CPLUS_INCLUDE_PATH\"" + # append_to_profile "export C_INCLUDE_PATH=\"/usr/lib/gcc/${triplet}/13/include:\$C_INCLUDE_PATH\"" + + append_to_profile "CC=clang-$(llvm_version)" + append_to_profile "CXX=clang++-$(llvm_version)" + append_to_profile "AR=llvm-ar-$(llvm_version)" + append_to_profile "RANLIB=llvm-ranlib-$(llvm_version)" + append_to_profile "LD=lld-$(llvm_version)" + append_to_profile "LTO_FLAG=\"-flto=full -fwhole-program-vtables -fforce-emit-vtables\"" + append_to_profile "LD_LIBRARY_PATH=/usr/lib/gcc/${triplet}/13:/usr/lib/${triplet}:\$LD_LIBRARY_PATH" + append_to_profile "LIBRARY_PATH=/usr/lib/gcc/${triplet}/13:/usr/lib/${triplet}:\$LIBRARY_PATH" + append_to_profile "CPLUS_INCLUDE_PATH=/usr/include/c++/13:/usr/include/${triplet}/c++/13:\$CPLUS_INCLUDE_PATH" + append_to_profile 
"C_INCLUDE_PATH=/usr/lib/gcc/${triplet}/13/include:\$C_INCLUDE_PATH" + append_to_profile "DEFAULT_CFLAGS=\"-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables -DU_STATIC_IMPLEMENTATION=1\"" + append_to_profile "CFLAGS=\"\$DEFAULT_CFLAGS \$CFLAGS -stdlib=libstdc++\"" + append_to_profile "CXXFLAGS=\"\$DEFAULT_CFLAGS \$CXXFLAGS -stdlib=libstdc++\"" + append_to_profile "LDFLAGS=\"-fuse-ld=lld -L/usr/lib/gcc/x86_64-linux-gnu/13 -L/usr/lib/x86_64-linux-gnu\"" + } download_file() { @@ -607,6 +623,24 @@ install_packages() { esac } +clean_packagemanager() { + case "$pm" in + apt) + package_manager autoremove + package_manager clean + ;; + apk) + package_manager cache clean + ;; + brew) + package_manager cleanup + ;; + dnf) + package_manager clean all + ;; + esac +} + install_brew() { print "Installing Homebrew..." @@ -1179,6 +1213,15 @@ install_chromium() { esac } +shrink_filesystem() { + clean_packagemanager +} + +zero_free_space() { + sudo dd if=/dev/zero of=/zero bs=1M || true + execute_sudo rm -f /zero +} + main() { check_features "$@" check_operating_system @@ -1190,6 +1233,8 @@ main() { install_common_software install_build_essentials install_chromium + shrink_filesystem + zero_free_space } main "$@" diff --git a/scripts/machine.mjs b/scripts/machine.mjs index 479dbb4cfd7844..ff592bcaefba21 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -777,6 +777,7 @@ const docker = { spawnSafe: execSafe, upload, attach, + snapshot, close: kill, [Symbol.asyncDispose]: kill, }; From 9a6f355f9696a727da4c3ea3c16761a42cadbced Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 13:34:17 +0100 Subject: [PATCH 070/176] wip --- .buildkite/ci.mjs | 69 ++++++++++++-- flake.nix | 180 ++++++++++++++++++++++++++++++++++++ scripts/create-nix-amis.mjs | 130 ++++++++++++++++++++++++++ 3 files changed, 372 insertions(+), 7 deletions(-) create mode 100644 flake.nix create mode 100755 scripts/create-nix-amis.mjs diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 096f6ebf30aba6..de0b421e5899dd 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -104,9 +104,9 @@ function getTargetLabel(target) { const buildPlatforms = [ { os: "darwin", arch: "aarch64", release: "14" }, { os: "darwin", arch: "x64", release: "14" }, - { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04" }, - { os: "linux", arch: "x64", distro: "ubuntu", release: "18.04" }, - { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "18.04" }, + { os: "linux", arch: "aarch64", distro: "nix", release: "latest" }, + { os: "linux", arch: "x64", distro: "nix", release: "latest" }, + { os: "linux", arch: "x64", baseline: true, distro: "nix", release: "latest" }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" }, @@ -290,10 +290,10 @@ function getEc2Agent(platform, options) { /** * @param {Platform} platform - * @returns {string} + * @returns {Agent} */ function getCppAgent(platform, dryRun) { - const { os, arch } = platform; + const { os, arch, distro } = platform; if (os === "darwin") { return { @@ -303,6 +303,15 @@ function getCppAgent(platform, dryRun) { }; } + if (distro === "nix") { + return { + queue: "linux-nix", + os: "linux", + arch, + nix: "true", + }; + } + return getEc2Agent(platform, { 
instanceType: arch === "aarch64" ? "c8g.16xlarge" : "c7i.16xlarge", cpuCount: 32, @@ -342,7 +351,7 @@ function getZigAgent(platform, dryRun) { * @returns {Agent} */ function getTestAgent(platform, dryRun) { - const { os, arch } = platform; + const { os, arch, distro } = platform; if (os === "darwin") { return { @@ -988,6 +997,47 @@ async function getPipelineOptions() { }; } +/** + * @param {Record} [options] + * @returns {Step} + */ +function getCreateNixAmisStep(options = {}) { + return { + key: "create-nix-amis", + group: getBuildkiteEmoji("nix"), + steps: [ + { + key: "create-nix-ami-x64", + label: `${getBuildkiteEmoji("nix")} Create Nix AMI (x64)`, + command: ["node", "./scripts/create-nix-amis.mjs", "--arch=x64", "--ci"].join(" "), + agents: { + queue: "build-image", + arch: "x64", + }, + env: { + DEBUG: "1", + }, + retry: getRetry(), + timeout_in_minutes: 3 * 60, + }, + { + key: "create-nix-ami-arm64", + label: `${getBuildkiteEmoji("nix")} Create Nix AMI (arm64)`, + command: ["node", "./scripts/create-nix-amis.mjs", "--arch=arm64", "--ci"].join(" "), + agents: { + queue: "build-image", + arch: "arm64", + }, + env: { + DEBUG: "1", + }, + retry: getRetry(), + timeout_in_minutes: 3 * 60, + }, + ], + }; +} + /** * @param {PipelineOptions} [options] * @returns {Promise} @@ -1023,7 +1073,12 @@ async function getPipeline(options = {}) { steps.push({ key: "build-images", group: getBuildkiteEmoji("aws"), - steps: [...imagePlatforms.values()].map(platform => getBuildImageStep(platform, !publishImages)), + steps: [ + getCreateNixAmisStep(), + ...[...imagePlatforms.values()] + .filter(platform => platform.distro !== "nix") + .map(platform => getBuildImageStep(platform, !publishImages)), + ], }); } diff --git a/flake.nix b/flake.nix new file mode 100644 index 00000000000000..76c1e1ad1c2ed2 --- /dev/null +++ b/flake.nix @@ -0,0 +1,180 @@ +{ + description = "Bun build environment"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + flake-utils.url = "github:numtide/flake-utils"; + rust-overlay = { + url = "github:oxalica/rust-overlay"; + inputs.nixpkgs.follows = "nixpkgs"; + }; + }; + + outputs = { self, nixpkgs, flake-utils, rust-overlay }: + flake-utils.lib.eachDefaultSystem (system: + let + overlays = [ (import rust-overlay) ]; + pkgs = import nixpkgs { + inherit system overlays; + config = { + allowUnfree = true; + permittedInsecurePackages = [ + "nodejs-16.20.2" + ]; + }; + }; + + # Function to create build environment for a specific architecture + makeBuildEnv = arch: pkgs.buildEnv { + name = "bun-build-tools-${arch}"; + paths = with pkgs; [ + # Core build tools + bash + coreutils + gcc13 + # Full LLVM 18 toolchain + llvmPackages_18.llvm + llvmPackages_18.clang + llvmPackages_18.lld + llvmPackages_18.lldb + llvmPackages_18.bintools + cmake + ninja + pkg-config + gnumake + binutils + file + + # Languages needed for build + nodejs_22 + python3 + go + (rust-bin.stable.latest.default.override { + extensions = [ "rust-src" "rust-analysis" ]; + }) + (perl.withPackages (p: with p; [ + MathBigInt + JSON + DataDumper + FileSlurp + ])) + + # Development tools + git + curl + wget + unzip + xz + ccache + + # SSL Certificates + cacert + + # Libraries + zlib + openssl + libffi + ]; + + + # Download arm64 binary for linux arm64, x64 binary for linux x64 + preFixup = '' + ${pkgs.curl}/bin/curl -L "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-${arch}.zip" + unzip $out/bun-linux-${arch}.zip + cp $out/bun-linux-${arch}/bun $out/bin/bun + chmod +x 
$out/bin/bun + rm -rf $out/bun-linux-${arch} $out/bun-linux-${arch}.zip + ''; + + pathsToLink = [ "/bin" "/lib" "/lib64" "/include" "/share" "/etc/ssl" ]; + extraOutputsToInstall = [ "dev" "out" "bin" ]; + ignoreCollisions = true; + }; + + # Create both x64 and arm64 environments + buildEnvX64 = makeBuildEnv "x64"; + buildEnvArm64 = makeBuildEnv "arm64"; + + in + { + packages = { + default = buildEnvX64; + x64 = buildEnvX64; + arm64 = buildEnvArm64; + }; + + devShells = { + default = pkgs.mkShell { + buildInputs = with pkgs; [ + packer + awscli2 + ]; + + shellHook = '' + echo "Bun build environment tools installed" + echo "To build AMIs, run: packer build -var 'arch=x64' packer.json" + echo " or: packer build -var 'arch=arm64' packer.json" + ''; + }; + + # CI shells for different architectures + ci-x64 = pkgs.mkShell { + buildInputs = with pkgs; [ + buildkite-agent + # Include the x64 build environment tools + buildEnvX64 + ]; + + shellHook = '' + echo "BuildKite CI environment initialized (x64)" + + # Set up BuildKite agent configuration if needed + if [ -z "$BUILDKITE_AGENT_TOKEN" ]; then + echo "Warning: BUILDKITE_AGENT_TOKEN is not set" + fi + + # Set BuildKite meta-data for architecture + export BUILDKITE_AGENT_META_DATA="architecture=x64,${BUILDKITE_AGENT_META_DATA:-}" + ''; + }; + + ci-arm64 = pkgs.mkShell { + buildInputs = with pkgs; [ + buildkite-agent + # Include the arm64 build environment tools + buildEnvArm64 + ]; + + shellHook = '' + echo "BuildKite CI environment initialized (arm64)" + + # Set up BuildKite agent configuration if needed + if [ -z "$BUILDKITE_AGENT_TOKEN" ]; then + echo "Warning: BUILDKITE_AGENT_TOKEN is not set" + fi + + # Set BuildKite meta-data for architecture + export BUILDKITE_AGENT_META_DATA="architecture=arm64,${BUILDKITE_AGENT_META_DATA:-}" + ''; + }; + + # Generic CI shell that defaults to x64 + ci = pkgs.mkShell { + buildInputs = with pkgs; [ + buildkite-agent + # Include the x64 build environment tools by default + buildEnvX64 + ]; + + shellHook = '' + echo "BuildKite CI environment initialized (default: x64)" + + # Set up BuildKite agent configuration if needed + if [ -z "$BUILDKITE_AGENT_TOKEN" ]; then + echo "Warning: BUILDKITE_AGENT_TOKEN is not set" + fi + ''; + }; + }; + }); +} \ No newline at end of file diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs new file mode 100755 index 00000000000000..9c0b48541eb8fd --- /dev/null +++ b/scripts/create-nix-amis.mjs @@ -0,0 +1,130 @@ +#!/usr/bin/env node + +import { parseArgs } from "node:util"; +import { getBuildNumber, getSecret, isCI, parseArch, spawnSafe, startGroup, readFile } from "./utils.mjs"; +import { join } from "node:path"; + +async function main() { + const { + values: { arch, ci }, + } = parseArgs({ + options: { + arch: { type: "string" }, + ci: { type: "boolean" }, + }, + }); + + if (!arch) { + throw new Error("--arch is required"); + } + + const architecture = parseArch(arch); + const flakeTarget = architecture === "arm64" ? "arm64" : "x64"; + + // Read the flake.nix content + const flakeContent = await readFile("flake.nix"); + + // Create user data script that will set up our environment + const userData = `#!/bin/bash +set -euxo pipefail + +# Install required packages +apt-get update +apt-get install -y curl xz-utils git sudo + +# Install Nix +curl -L https://nixos.org/nix/install | sh -s -- --daemon + +# Source Nix +. 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + +# Enable flakes +mkdir -p /etc/nix +cat > /etc/nix/nix.conf << 'EOF' +experimental-features = nix-command flakes +trusted-users = root buildkite-agent +auto-optimise-store = true +EOF + +# Create buildkite-agent user and group +useradd -m -s /bin/bash buildkite-agent +usermod -aG sudo buildkite-agent +echo "buildkite-agent ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/buildkite-agent + +# Copy flake.nix to the instance +mkdir -p /home/buildkite-agent/bun +cat > /home/buildkite-agent/bun/flake.nix << 'EOF' +${flakeContent} +EOF + +# Set ownership +chown -R buildkite-agent:buildkite-agent /home/buildkite-agent/bun + +# Install BuildKite agent +sh -c 'echo deb https://apt.buildkite.com/buildkite-agent stable main > /etc/apt/sources.list.d/buildkite-agent.list' +apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 32A37959C2FA5C3C99EFBC32A79206696452D198 +apt-get update +apt-get install -y buildkite-agent + +# Configure BuildKite agent +cat > /etc/buildkite-agent/buildkite-agent.cfg << 'EOF' +token="xxx" +name="%hostname-%n" +tags="queue=linux-nix,arch=${architecture}" +build-path="/var/lib/buildkite-agent/builds" +hooks-path="/etc/buildkite-agent/hooks" +plugins-path="/etc/buildkite-agent/plugins" +EOF + +# Create BuildKite hook to set up Nix environment +mkdir -p /etc/buildkite-agent/hooks +cat > /etc/buildkite-agent/hooks/environment << 'EOF' +#!/bin/bash +set -euo pipefail + +# Source Nix +. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + +# Set up build environment using flake +cd /home/buildkite-agent/bun +nix develop .#ci-${flakeTarget} -c true + +# Add Nix to PATH +export PATH="/nix/var/nix/profiles/default/bin:$PATH" +EOF + +chmod +x /etc/buildkite-agent/hooks/environment + +# Set proper ownership for BuildKite directories +chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent + +# Start BuildKite agent service +systemctl enable buildkite-agent +systemctl start buildkite-agent + +# Set system limits for buildkite-agent +cat > /etc/security/limits.d/buildkite-agent.conf << 'EOF' +buildkite-agent soft nofile 1048576 +buildkite-agent hard nofile 1048576 +buildkite-agent soft nproc 1048576 +buildkite-agent hard nproc 1048576 +EOF +`; + + // Use machine.mjs to create the AMI, but with Ubuntu as base + await spawnSafe([ + "node", + "./scripts/machine.mjs", + "publish-image", + `--os=linux`, + `--arch=${architecture}`, + `--distro=ubuntu`, + `--release=18.04`, // Ubuntu 18.04 has glibc 2.26 + `--user-data=${userData}`, + "--cloud=aws", + "--ci", + "--authorized-org=oven-sh", + ]); +} + +await main(); From 790ff4593982cfcf1f50404665ba715a0bd08420 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 13:56:32 +0100 Subject: [PATCH 071/176] [build images] --- flake.lock | 82 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ flake.nix | 13 ++++----- 2 files changed, 87 insertions(+), 8 deletions(-) create mode 100644 flake.lock diff --git a/flake.lock b/flake.lock new file mode 100644 index 00000000000000..a0af77b8304c3a --- /dev/null +++ b/flake.lock @@ -0,0 +1,82 @@ +{ + "nodes": { + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + 
"nixpkgs": { + "locked": { + "lastModified": 1733392399, + "narHash": "sha256-kEsTJTUQfQFIJOcLYFt/RvNxIK653ZkTBIs4DG+cBns=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "d0797a04b81caeae77bcff10a9dde78bc17f5661", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs", + "rust-overlay": "rust-overlay" + } + }, + "rust-overlay": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1733538766, + "narHash": "sha256-FEDfBpM82XGdHDbLDJC4lV+QXSVN1rERt1MqtBGJZds=", + "owner": "oxalica", + "repo": "rust-overlay", + "rev": "66526479b295ad238843a8a7367d2da7ec102757", + "type": "github" + }, + "original": { + "owner": "oxalica", + "repo": "rust-overlay", + "type": "github" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix index 76c1e1ad1c2ed2..29d43d38b29bb7 100644 --- a/flake.nix +++ b/flake.nix @@ -76,8 +76,8 @@ libffi ]; - - # Download arm64 binary for linux arm64, x64 binary for linux x64 + # Bun depends on itself to compile due to codegen scripts. + # Download a recent binary. preFixup = '' ${pkgs.curl}/bin/curl -L "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-${arch}.zip" unzip $out/bun-linux-${arch}.zip @@ -106,14 +106,11 @@ devShells = { default = pkgs.mkShell { buildInputs = with pkgs; [ - packer awscli2 ]; shellHook = '' - echo "Bun build environment tools installed" - echo "To build AMIs, run: packer build -var 'arch=x64' packer.json" - echo " or: packer build -var 'arch=arm64' packer.json" + echo "To compile a release build of Bun, run: bun build:release" ''; }; @@ -134,7 +131,7 @@ fi # Set BuildKite meta-data for architecture - export BUILDKITE_AGENT_META_DATA="architecture=x64,${BUILDKITE_AGENT_META_DATA:-}" + export BUILDKITE_AGENT_META_DATA="architecture=x64,''${BUILDKITE_AGENT_META_DATA:-}" ''; }; @@ -154,7 +151,7 @@ fi # Set BuildKite meta-data for architecture - export BUILDKITE_AGENT_META_DATA="architecture=arm64,${BUILDKITE_AGENT_META_DATA:-}" + export BUILDKITE_AGENT_META_DATA="architecture=arm64,''${BUILDKITE_AGENT_META_DATA:-}" ''; }; From 7011d79426fd01dec7e4ad13154f12d3cb64ce0d Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 13:59:50 +0100 Subject: [PATCH 072/176] [build images] --- .buildkite/ci.mjs | 62 ++++++++++++++++++++++------------------------- 1 file changed, 29 insertions(+), 33 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index de0b421e5899dd..bc1a2fbe2de781 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -1002,40 +1002,36 @@ async function getPipelineOptions() { * @returns {Step} */ function getCreateNixAmisStep(options = {}) { - return { - key: "create-nix-amis", - group: getBuildkiteEmoji("nix"), - steps: [ - { - key: "create-nix-ami-x64", - label: `${getBuildkiteEmoji("nix")} Create Nix AMI (x64)`, - command: ["node", "./scripts/create-nix-amis.mjs", "--arch=x64", "--ci"].join(" "), - agents: { - queue: "build-image", - arch: "x64", - }, - env: { - DEBUG: "1", - }, - retry: getRetry(), - timeout_in_minutes: 3 * 
60, + return [ + { + key: "create-nix-ami-x64", + label: `${getBuildkiteEmoji("nix")} Create Nix AMI (x64)`, + command: ["node", "./scripts/create-nix-amis.mjs", "--arch=x64", "--ci"].join(" "), + agents: { + queue: "build-image", + arch: "x64", }, - { - key: "create-nix-ami-arm64", - label: `${getBuildkiteEmoji("nix")} Create Nix AMI (arm64)`, - command: ["node", "./scripts/create-nix-amis.mjs", "--arch=arm64", "--ci"].join(" "), - agents: { - queue: "build-image", - arch: "arm64", - }, - env: { - DEBUG: "1", - }, - retry: getRetry(), - timeout_in_minutes: 3 * 60, + env: { + DEBUG: "1", }, - ], - }; + retry: getRetry(), + timeout_in_minutes: 3 * 60, + }, + { + key: "create-nix-ami-arm64", + label: `${getBuildkiteEmoji("nix")} Create Nix AMI (arm64)`, + command: ["node", "./scripts/create-nix-amis.mjs", "--arch=arm64", "--ci"].join(" "), + agents: { + queue: "build-image", + arch: "arm64", + }, + env: { + DEBUG: "1", + }, + retry: getRetry(), + timeout_in_minutes: 3 * 60, + }, + ]; } /** @@ -1074,7 +1070,7 @@ async function getPipeline(options = {}) { key: "build-images", group: getBuildkiteEmoji("aws"), steps: [ - getCreateNixAmisStep(), + ...getCreateNixAmisStep(), ...[...imagePlatforms.values()] .filter(platform => platform.distro !== "nix") .map(platform => getBuildImageStep(platform, !publishImages)), From a653bfb2d761954a559954a5764702f8f666b50c Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 14:04:06 +0100 Subject: [PATCH 073/176] [build images] --- .buildkite/ci.mjs | 43 +++++++++++++------------------------------ 1 file changed, 13 insertions(+), 30 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index bc1a2fbe2de781..a8d80add83470c 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -1001,37 +1001,20 @@ async function getPipelineOptions() { * @param {Record} [options] * @returns {Step} */ -function getCreateNixAmisStep(options = {}) { - return [ - { - key: "create-nix-ami-x64", - label: `${getBuildkiteEmoji("nix")} Create Nix AMI (x64)`, - command: ["node", "./scripts/create-nix-amis.mjs", "--arch=x64", "--ci"].join(" "), - agents: { - queue: "build-image", - arch: "x64", - }, - env: { - DEBUG: "1", - }, - retry: getRetry(), - timeout_in_minutes: 3 * 60, +function getCreateNixAmisStep(platform) { + return { + key: `${getImageKey(platform)}-build-image`, + label: `${getBuildkiteEmoji("nix")} Create Nix AMI (${platform.arch})`, + command: ["node", "./scripts/create-nix-amis.mjs", "--arch=" + platform.arch, "--ci"].join(" "), + agents: { + queue: "build-image", }, - { - key: "create-nix-ami-arm64", - label: `${getBuildkiteEmoji("nix")} Create Nix AMI (arm64)`, - command: ["node", "./scripts/create-nix-amis.mjs", "--arch=arm64", "--ci"].join(" "), - agents: { - queue: "build-image", - arch: "arm64", - }, - env: { - DEBUG: "1", - }, - retry: getRetry(), - timeout_in_minutes: 3 * 60, + env: { + DEBUG: "1", }, - ]; + retry: getRetry(), + timeout_in_minutes: 3 * 60, + }; } /** @@ -1070,7 +1053,7 @@ async function getPipeline(options = {}) { key: "build-images", group: getBuildkiteEmoji("aws"), steps: [ - ...getCreateNixAmisStep(), + ...getCreateNixAmisStep(Array.from(imagePlatforms.values()).filter(platform => platform.distro === "nix")), ...[...imagePlatforms.values()] .filter(platform => platform.distro !== "nix") .map(platform => getBuildImageStep(platform, !publishImages)), From 2876409195118be9386e4e4c4b74214a6df91e98 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 14:05:43 +0100 Subject: [PATCH 074/176] [build images] --- 
.buildkite/ci.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index a8d80add83470c..80a153ef32079f 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -1003,7 +1003,7 @@ async function getPipelineOptions() { */ function getCreateNixAmisStep(platform) { return { - key: `${getImageKey(platform)}-build-image`, + key: `nix-${platform.arch}-build-image`, label: `${getBuildkiteEmoji("nix")} Create Nix AMI (${platform.arch})`, command: ["node", "./scripts/create-nix-amis.mjs", "--arch=" + platform.arch, "--ci"].join(" "), agents: { From 02a4f1635da71e889b6e49ed3292eefac58025d4 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 14:06:44 +0100 Subject: [PATCH 075/176] [build images] --- .buildkite/ci.mjs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 80a153ef32079f..b4dcee0113361f 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -1053,7 +1053,9 @@ async function getPipeline(options = {}) { key: "build-images", group: getBuildkiteEmoji("aws"), steps: [ - ...getCreateNixAmisStep(Array.from(imagePlatforms.values()).filter(platform => platform.distro === "nix")), + ...Array.from(imagePlatforms.values()) + .filter(platform => platform.distro === "nix") + .map(getCreateNixAmisStep), ...[...imagePlatforms.values()] .filter(platform => platform.distro !== "nix") .map(platform => getBuildImageStep(platform, !publishImages)), From 32de236f59136f55fa467bcda9f68a32c2c32348 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 14:12:47 +0100 Subject: [PATCH 076/176] [build images] --- .buildkite/ci.mjs | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index b4dcee0113361f..389110b02ea91b 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -104,9 +104,9 @@ function getTargetLabel(target) { const buildPlatforms = [ { os: "darwin", arch: "aarch64", release: "14" }, { os: "darwin", arch: "x64", release: "14" }, - { os: "linux", arch: "aarch64", distro: "nix", release: "latest" }, - { os: "linux", arch: "x64", distro: "nix", release: "latest" }, - { os: "linux", arch: "x64", baseline: true, distro: "nix", release: "latest" }, + { os: "linux", arch: "aarch64", distro: "ubuntu", release: "18.04", nix: true }, + { os: "linux", arch: "x64", distro: "ubuntu", release: "18.04", nix: true }, + { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", nix: true, release: "18.04" }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" }, @@ -303,15 +303,6 @@ function getCppAgent(platform, dryRun) { }; } - if (distro === "nix") { - return { - queue: "linux-nix", - os: "linux", - arch, - nix: "true", - }; - } - return getEc2Agent(platform, { instanceType: arch === "aarch64" ? 
"c8g.16xlarge" : "c7i.16xlarge", cpuCount: 32, @@ -1054,10 +1045,10 @@ async function getPipeline(options = {}) { group: getBuildkiteEmoji("aws"), steps: [ ...Array.from(imagePlatforms.values()) - .filter(platform => platform.distro === "nix") + .filter(platform => platform.nix) .map(getCreateNixAmisStep), ...[...imagePlatforms.values()] - .filter(platform => platform.distro !== "nix") + .filter(platform => !platform.nix) .map(platform => getBuildImageStep(platform, !publishImages)), ], }); From b87fba7391fc8f76f6ef45f9176fd84aeb53d3c8 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 14:13:34 +0100 Subject: [PATCH 077/176] [build images] --- .buildkite/ci.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 389110b02ea91b..70f7b7ac6b81ef 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -994,7 +994,7 @@ async function getPipelineOptions() { */ function getCreateNixAmisStep(platform) { return { - key: `nix-${platform.arch}-build-image`, + key: `${getImageKey(platform)}-build-image`, label: `${getBuildkiteEmoji("nix")} Create Nix AMI (${platform.arch})`, command: ["node", "./scripts/create-nix-amis.mjs", "--arch=" + platform.arch, "--ci"].join(" "), agents: { From e8843f1ddccf9c5e72021dd4ba52f7c3b01c1c64 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 14:32:24 +0100 Subject: [PATCH 078/176] more files --- scripts/docker.mjs | 300 +++++++++++++++++++++++++ scripts/google.mjs | 510 +++++++++++++++++++++++++++++++++++++++++++ scripts/orbstack.mjs | 190 ++++++++++++++++ scripts/tart.mjs | 287 ++++++++++++++++++++++++ 4 files changed, 1287 insertions(+) create mode 100644 scripts/docker.mjs create mode 100644 scripts/google.mjs create mode 100644 scripts/orbstack.mjs create mode 100644 scripts/tart.mjs diff --git a/scripts/docker.mjs b/scripts/docker.mjs new file mode 100644 index 00000000000000..60c9aa2ea54030 --- /dev/null +++ b/scripts/docker.mjs @@ -0,0 +1,300 @@ +import { inspect } from "node:util"; +import { $, isCI, spawn, spawnSafe, which } from "./utils.mjs"; + +export const docker = { + get name() { + return "docker"; + }, + + /** + * @typedef {"linux" | "darwin" | "windows"} DockerOs + * @typedef {"amd64" | "arm64"} DockerArch + * @typedef {`${DockerOs}/${DockerArch}`} DockerPlatform + */ + + /** + * @param {Platform} platform + * @returns {DockerPlatform} + */ + getPlatform(platform) { + const { os, arch } = platform; + if (arch === "aarch64") { + return `${os}/arm64`; + } else if (arch === "x64") { + return `${os}/amd64`; + } + throw new Error(`Unsupported platform: ${inspect(platform)}`); + }, + + /** + * @typedef DockerSpawnOptions + * @property {DockerPlatform} [platform] + * @property {boolean} [json] + */ + + /** + * @param {string[]} args + * @param {DockerSpawnOptions & import("./utils.mjs").SpawnOptions} [options] + * @returns {Promise} + */ + async spawn(args, options = {}) { + const docker = which("docker", { required: true }); + + let env = { ...process.env }; + if (isCI) { + env["BUILDKIT_PROGRESS"] = "plain"; + } + + const { json, platform } = options; + if (json) { + args.push("--format=json"); + } + if (platform) { + args.push(`--platform=${platform}`); + } + + const { error, stdout } = await spawnSafe($`${docker} ${args}`, { env, ...options }); + if (error) { + return; + } + if (!json) { + return stdout; + } + + try { + return JSON.parse(stdout); + } catch { + return; + } + }, + + /** + * @typedef {Object} DockerImage + * @property {string} Id + * @property 
{string[]} RepoTags + * @property {string[]} RepoDigests + * @property {string} Created + * @property {DockerOs} Os + * @property {DockerArch} Architecture + * @property {number} Size + */ + + /** + * @param {string} url + * @param {DockerPlatform} [platform] + * @returns {Promise} + */ + async pullImage(url, platform) { + const done = await this.spawn($`pull ${url}`, { + platform, + throwOnError: error => !/No such image|manifest unknown/i.test(inspect(error)), + }); + return !!done; + }, + + /** + * @param {string} url + * @param {DockerPlatform} [platform] + * @returns {Promise} + */ + async inspectImage(url, platform) { + /** @type {DockerImage[]} */ + const images = await this.spawn($`image inspect ${url}`, { + json: true, + throwOnError: error => !/No such image/i.test(inspect(error)), + }); + + if (!images) { + const pulled = await this.pullImage(url, platform); + if (pulled) { + return this.inspectImage(url, platform); + } + } + + const { os, arch } = platform || {}; + return images + ?.filter(({ Os, Architecture }) => !os || !arch || (Os === os && Architecture === arch)) + ?.find((a, b) => (a.Created < b.Created ? 1 : -1)); + }, + + /** + * @typedef {Object} DockerContainer + * @property {string} Id + * @property {string} Name + * @property {string} Image + * @property {string} Created + * @property {DockerContainerState} State + * @property {DockerContainerNetworkSettings} NetworkSettings + */ + + /** + * @typedef {Object} DockerContainerState + * @property {"exited" | "running"} Status + * @property {number} [Pid] + * @property {number} ExitCode + * @property {string} [Error] + * @property {string} StartedAt + * @property {string} FinishedAt + */ + + /** + * @typedef {Object} DockerContainerNetworkSettings + * @property {string} [IPAddress] + */ + + /** + * @param {string} containerId + * @returns {Promise} + */ + async inspectContainer(containerId) { + const containers = await this.spawn($`container inspect ${containerId}`, { json: true }); + return containers?.find(a => a.Id === containerId); + }, + + /** + * @returns {Promise} + */ + async listContainers() { + const containers = await this.spawn($`container ls --all`, { json: true }); + return containers || []; + }, + + /** + * @typedef {Object} DockerRunOptions + * @property {string[]} [command] + * @property {DockerPlatform} [platform] + * @property {string} [name] + * @property {boolean} [detach] + * @property {"always" | "never"} [pull] + * @property {boolean} [rm] + * @property {"no" | "on-failure" | "always"} [restart] + */ + + /** + * @param {string} url + * @param {DockerRunOptions} [options] + * @returns {Promise} + */ + async runContainer(url, options = {}) { + const { detach, command = [], ...containerOptions } = options; + const args = Object.entries(containerOptions) + .filter(([_, value]) => typeof value !== "undefined") + .map(([key, value]) => (typeof value === "boolean" ? `--${key}` : `--${key}=${value}`)); + if (detach) { + args.push("--detach"); + } else { + args.push("--tty", "--interactive"); + } + + const stdio = detach ? 
"pipe" : "inherit"; + const result = await this.spawn($`run ${args} ${url} ${command}`, { stdio }); + if (!detach) { + return; + } + + const containerId = result.trim(); + const container = await this.inspectContainer(containerId); + if (!container) { + throw new Error(`Failed to run container: ${inspect(result)}`); + } + return container; + }, + + /** + * @param {Platform} platform + * @returns {Promise} + */ + async getBaseImage(platform) { + const { os, distro, release } = platform; + const dockerPlatform = this.getPlatform(platform); + + let url; + if (os === "linux") { + if (distro === "debian" || distro === "ubuntu" || distro === "alpine") { + url = `docker.io/library/${distro}:${release}`; + } else if (distro === "amazonlinux") { + url = `public.ecr.aws/amazonlinux/amazonlinux:${release}`; + } + } + + if (url) { + const image = await this.inspectImage(url, dockerPlatform); + if (image) { + return image; + } + } + + throw new Error(`Unsupported platform: ${inspect(platform)}`); + }, + + /** + * @param {DockerContainer} container + * @param {MachineOptions} [options] + * @returns {Machine} + */ + toMachine(container, options = {}) { + const { Id: containerId } = container; + + const exec = (command, options) => { + return spawn(["docker", "exec", containerId, ...command], options); + }; + + const execSafe = (command, options) => { + return spawnSafe(["docker", "exec", containerId, ...command], options); + }; + + const upload = async (source, destination) => { + await spawn(["docker", "cp", source, `${containerId}:${destination}`]); + }; + + const attach = async () => { + const { exitCode, error } = await spawn(["docker", "exec", "-it", containerId, "sh"], { + stdio: "inherit", + }); + + if (exitCode === 0 || exitCode === 130) { + return; + } + + throw error; + }; + + const snapshot = async name => { + await spawn(["docker", "commit", containerId]); + }; + + const kill = async () => { + await spawn(["docker", "kill", containerId]); + }; + + return { + cloud: "docker", + id: containerId, + spawn: exec, + spawnSafe: execSafe, + upload, + attach, + snapshot, + close: kill, + [Symbol.asyncDispose]: kill, + }; + }, + + /** + * @param {MachineOptions} options + * @returns {Promise} + */ + async createMachine(options) { + const { Id: imageId, Os, Architecture } = await docker.getBaseImage(options); + + const container = await docker.runContainer(imageId, { + platform: `${Os}/${Architecture}`, + command: ["sleep", "1d"], + detach: true, + rm: true, + restart: "no", + }); + + return this.toMachine(container, options); + }, +}; diff --git a/scripts/google.mjs b/scripts/google.mjs new file mode 100644 index 00000000000000..f5fb1daf552712 --- /dev/null +++ b/scripts/google.mjs @@ -0,0 +1,510 @@ +import { $, spawnSafe, which, getUsernameForDistro } from "./utils.mjs"; + +export const google = { + get cloud() { + return "google"; + }, + + /** + * @param {string[]} args + * @param {import("./utils.mjs").SpawnOptions} [options] + * @returns {Promise} + */ + async spawn(args, options = {}) { + const gcloud = which("gcloud", { required: true }); + + let env = { ...process.env }; + // if (isCI) { + // env; // TODO: Add Google Cloud credentials + // } else { + // env["TERM"] = "dumb"; + // } + + const { stdout } = await spawnSafe($`${gcloud} ${args} --format json`, { + env, + ...options, + }); + try { + return JSON.parse(stdout); + } catch { + return; + } + }, + + /** + * @param {Record} [options] + * @returns {string[]} + */ + getFilters(options = {}) { + const filter = Object.entries(options) + 
.filter(([, value]) => value !== undefined) + .map(([key, value]) => [value.includes("*") ? `${key}~${value}` : `${key}=${value}`]) + .join(" AND "); + return filter ? ["--filter", filter] : []; + }, + + /** + * @param {Record} options + * @returns {string[]} + */ + getFlags(options) { + return Object.entries(options) + .filter(([, value]) => value !== undefined) + .flatMap(([key, value]) => { + if (typeof value === "boolean") { + return value ? [`--${key}`] : []; + } + return [`--${key}=${value}`]; + }); + }, + + /** + * @param {Record} options + * @returns {string} + * @link https://cloud.google.com/sdk/gcloud/reference/topic/escaping + */ + getMetadata(options) { + const delimiter = Math.random().toString(36).substring(2, 15); + const entries = Object.entries(options) + .map(([key, value]) => `${key}=${value}`) + .join(delimiter); + return `^${delimiter}^${entries}`; + }, + + /** + * @param {string} name + * @returns {string} + */ + getLabel(name) { + return name.replace(/[^a-z0-9_-]/g, "-").toLowerCase(); + }, + + /** + * @typedef {Object} GoogleImage + * @property {string} id + * @property {string} name + * @property {string} family + * @property {"X86_64" | "ARM64"} architecture + * @property {string} diskSizeGb + * @property {string} selfLink + * @property {"READY"} status + * @property {string} creationTimestamp + */ + + /** + * @param {Partial} [options] + * @returns {Promise} + * @link https://cloud.google.com/sdk/gcloud/reference/compute/images/list + */ + async listImages(options) { + const filters = google.getFilters(options); + const images = await google.spawn($`compute images list ${filters} --preview-images --show-deprecated`); + return images.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 1 : -1)); + }, + + /** + * @param {Record} options + * @returns {Promise} + * @link https://cloud.google.com/sdk/gcloud/reference/compute/images/create + */ + async createImage(options) { + const { name, ...otherOptions } = options; + const flags = this.getFlags(otherOptions); + const imageId = name || "i-" + Math.random().toString(36).substring(2, 15); + return this.spawn($`compute images create ${imageId} ${flags}`); + }, + + /** + * @typedef {Object} GoogleInstance + * @property {string} id + * @property {string} name + * @property {"RUNNING"} status + * @property {string} machineType + * @property {string} zone + * @property {GoogleDisk[]} disks + * @property {GoogleNetworkInterface[]} networkInterfaces + * @property {object} [scheduling] + * @property {"STANDARD" | "SPOT"} [scheduling.provisioningModel] + * @property {boolean} [scheduling.preemptible] + * @property {Record} [labels] + * @property {string} selfLink + * @property {string} creationTimestamp + */ + + /** + * @typedef {Object} GoogleDisk + * @property {string} deviceName + * @property {boolean} boot + * @property {"X86_64" | "ARM64"} architecture + * @property {string[]} [licenses] + * @property {number} diskSizeGb + */ + + /** + * @typedef {Object} GoogleNetworkInterface + * @property {"IPV4_ONLY" | "IPV4_IPV6" | "IPV6_ONLY"} stackType + * @property {string} name + * @property {string} network + * @property {string} networkIP + * @property {string} subnetwork + * @property {GoogleAccessConfig[]} accessConfigs + */ + + /** + * @typedef {Object} GoogleAccessConfig + * @property {string} name + * @property {"ONE_TO_ONE_NAT" | "INTERNAL_NAT"} type + * @property {string} [natIP] + */ + + /** + * @param {Record} options + * @returns {Promise} + * @link 
https://cloud.google.com/sdk/gcloud/reference/compute/instances/create + */ + async createInstance(options) { + const { name, ...otherOptions } = options || {}; + const flags = this.getFlags(otherOptions); + const instanceId = name || "i-" + Math.random().toString(36).substring(2, 15); + const [instance] = await this.spawn($`compute instances create ${instanceId} ${flags}`); + return instance; + }, + + /** + * @param {string} instanceId + * @param {string} zoneId + * @returns {Promise} + * @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/stop + */ + async stopInstance(instanceId, zoneId) { + await this.spawn($`compute instances stop ${instanceId} --zone=${zoneId}`); + }, + + /** + * @param {string} instanceId + * @param {string} zoneId + * @returns {Promise} + * @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/delete + */ + async deleteInstance(instanceId, zoneId) { + await this.spawn($`compute instances delete ${instanceId} --delete-disks=all --zone=${zoneId}`, { + throwOnError: error => !/not found/i.test(inspect(error)), + }); + }, + + /** + * @param {string} instanceId + * @param {string} username + * @param {string} zoneId + * @param {object} [options] + * @param {boolean} [options.wait] + * @returns {Promise} + * @link https://cloud.google.com/sdk/gcloud/reference/compute/reset-windows-password + */ + async resetWindowsPassword(instanceId, username, zoneId, options = {}) { + const attempts = options.wait ? 15 : 1; + for (let i = 0; i < attempts; i++) { + const result = await this.spawn( + $`compute reset-windows-password ${instanceId} --user=${username} --zone=${zoneId}`, + { + throwOnError: error => !/instance may not be ready for use/i.test(inspect(error)), + }, + ); + if (result) { + const { password } = result; + if (password) { + return password; + } + } + await new Promise(resolve => setTimeout(resolve, 60000 * i)); + } + }, + + /** + * @param {Partial} options + * @returns {Promise} + */ + async listInstances(options) { + const filters = this.getFilters(options); + const instances = await this.spawn($`compute instances list ${filters}`); + return instances.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 1 : -1)); + }, + + /** + * @param {MachineOptions} options + * @returns {Promise} + */ + async getMachineImage(options) { + const { os, arch, distro, release } = options; + const architecture = arch === "aarch64" ? 
"ARM64" : "X86_64"; + + /** @type {string | undefined} */ + let family; + if (os === "linux") { + if (!distro || distro === "debian") { + family = `debian-${release || "*"}`; + } else if (distro === "ubuntu") { + family = `ubuntu-${release?.replace(/\./g, "") || "*"}`; + } else if (distro === "fedora") { + family = `fedora-coreos-${release || "*"}`; + } else if (distro === "rhel") { + family = `rhel-${release || "*"}`; + } + } else if (os === "windows" && arch === "x64") { + if (!distro || distro === "server") { + family = `windows-${release || "*"}`; + } + } + + if (family) { + const images = await this.listImages({ family, architecture }); + if (images.length) { + const [image] = images; + return image; + } + } + + throw new Error(`Unsupported platform: ${inspect(options)}`); + }, + + /** + * @param {MachineOptions} options + * @returns {Promise} + */ + async createMachine(options) { + const { name, os, arch, distro, instanceType, tags, preemptible, detached } = options; + const image = await google.getMachineImage(options); + const { selfLink: imageUrl } = image; + + const username = getUsername(distro || os); + const userData = getUserData({ ...options, username }); + + /** @type {Record} */ + let metadata; + if (os === "windows") { + metadata = { + "enable-windows-ssh": "TRUE", + "sysprep-specialize-script-ps1": userData, + }; + } else { + metadata = { + "user-data": userData, + }; + } + + const instance = await google.createInstance({ + "name": name, + "zone": "us-central1-a", + "image": imageUrl, + "machine-type": instanceType || (arch === "aarch64" ? "t2a-standard-2" : "t2d-standard-2"), + "boot-disk-auto-delete": true, + "boot-disk-size": `${getDiskSize(options)}GB`, + "metadata": this.getMetadata(metadata), + "labels": Object.entries(tags || {}) + .filter(([, value]) => value !== undefined) + .map(([key, value]) => `${this.getLabel(key)}=${value}`) + .join(","), + "provisioning-model": preemptible ? "SPOT" : "STANDARD", + "instance-termination-action": preemptible || !detached ? "DELETE" : undefined, + "no-restart-on-failure": true, + "threads-per-core": 1, + "max-run-duration": detached ? 
undefined : "6h", + }); + + return this.toMachine(instance, options); + }, + + /** + * @param {GoogleInstance} instance + * @param {MachineOptions} [options] + * @returns {Machine} + */ + toMachine(instance, options = {}) { + const { id: instanceId, name, zone: zoneUrl, machineType: machineTypeUrl, labels } = instance; + const machineType = machineTypeUrl.split("/").pop(); + const zoneId = zoneUrl.split("/").pop(); + + let os, arch, distro, release; + const { disks = [] } = instance; + for (const { boot, architecture, licenses = [] } of disks) { + if (!boot) { + continue; + } + + if (architecture === "X86_64") { + arch = "x64"; + } else if (architecture === "ARM64") { + arch = "aarch64"; + } + + for (const license of licenses) { + const linuxMatch = /(debian|ubuntu|fedora|rhel)-(\d+)/i.exec(license); + if (linuxMatch) { + os = "linux"; + [, distro, release] = linuxMatch; + } else { + const windowsMatch = /windows-server-(\d+)-dc-core/i.exec(license); + if (windowsMatch) { + os = "windows"; + distro = "windowsserver"; + [, release] = windowsMatch; + } + } + } + } + + let publicIp; + const { networkInterfaces = [] } = instance; + for (const { accessConfigs = [] } of networkInterfaces) { + for (const { type, natIP } of accessConfigs) { + if (type === "ONE_TO_ONE_NAT" && natIP) { + publicIp = natIP; + } + } + } + + let preemptible; + const { scheduling } = instance; + if (scheduling) { + const { provisioningModel, preemptible: isPreemptible } = scheduling; + preemptible = provisioningModel === "SPOT" || isPreemptible; + } + + /** + * @returns {SshOptions} + */ + const connect = () => { + if (!publicIp) { + throw new Error(`Failed to find public IP for instance: ${name}`); + } + + /** @type {string | undefined} */ + let username; + + const { os, distro } = options; + if (os || distro) { + username = getUsernameForDistro(distro || os); + } + + return { hostname: publicIp, username }; + }; + + const spawn = async (command, options) => { + const connectOptions = connect(); + return spawnSsh({ ...connectOptions, command }, options); + }; + + const spawnSafe = async (command, options) => { + const connectOptions = connect(); + return spawnSshSafe({ ...connectOptions, command }, options); + }; + + const rdp = async () => { + const { hostname, username } = connect(); + const rdpUsername = `${username}-rdp`; + const password = await google.resetWindowsPassword(instanceId, rdpUsername, zoneId, { wait: true }); + return { hostname, username: rdpUsername, password }; + }; + + const attach = async () => { + const connectOptions = connect(); + await spawnSshSafe({ ...connectOptions }); + }; + + const upload = async (source, destination) => { + const connectOptions = connect(); + await spawnScp({ ...connectOptions, source, destination }); + }; + + const snapshot = async name => { + const stopResult = await this.stopInstance(instanceId, zoneId); + console.log(stopResult); + const image = await this.createImage({ + ["source-disk"]: instanceId, + ["zone"]: zoneId, + ["name"]: name || `${instanceId}-snapshot-${Date.now()}`, + }); + console.log(image); + return; + }; + + const terminate = async () => { + await google.deleteInstance(instanceId, zoneId); + }; + + return { + cloud: "google", + os, + arch, + distro, + release, + id: instanceId, + imageId: undefined, + name, + instanceType: machineType, + region: zoneId, + publicIp, + preemptible, + labels, + spawn, + spawnSafe, + rdp, + attach, + upload, + snapshot, + close: terminate, + [Symbol.asyncDispose]: terminate, + }; + }, + + /** + * @param {Record} [labels] 
+ * @returns {Promise} + */ + async getMachines(labels) { + const filters = labels ? this.getFilters({ labels }) : {}; + const instances = await google.listInstances(filters); + return instances.map(instance => this.toMachine(instance)); + }, + + /** + * @param {MachineOptions} options + * @returns {Promise} + */ + async getImage(options) { + const { os, arch, distro, release } = options; + const architecture = arch === "aarch64" ? "ARM64" : "X86_64"; + + let name; + let username; + if (os === "linux") { + if (distro === "debian") { + name = `debian-${release}-*`; + username = "admin"; + } else if (distro === "ubuntu") { + name = `ubuntu-${release.replace(/\./g, "")}-*`; + username = "ubuntu"; + } + } else if (os === "windows" && arch === "x64") { + if (distro === "server") { + name = `windows-server-${release}-dc-core-*`; + username = "administrator"; + } + } + + if (name && username) { + const images = await google.listImages({ name, architecture }); + if (images.length) { + const [image] = images; + const { name, selfLink } = image; + return { + id: selfLink, + name, + username, + }; + } + } + + throw new Error(`Unsupported platform: ${inspect(platform)}`); + }, +}; diff --git a/scripts/orbstack.mjs b/scripts/orbstack.mjs new file mode 100644 index 00000000000000..ca60e2e0396582 --- /dev/null +++ b/scripts/orbstack.mjs @@ -0,0 +1,190 @@ +import { inspect } from "node:util"; +import { $, mkdtemp, rm, spawnSafe, writeFile, getUsernameForDistro } from "./utils.mjs"; +import { getUserData } from "./machine.mjs"; + +/** + * @link https://docs.orbstack.dev/ + */ +export const orbstack = { + get name() { + return "orbstack"; + }, + + /** + * @typedef {Object} OrbstackImage + * @property {string} distro + * @property {string} version + * @property {string} arch + */ + + /** + * @param {Platform} platform + * @returns {OrbstackImage} + */ + getImage(platform) { + const { os, arch, distro, release } = platform; + if (os !== "linux" || !/^debian|ubuntu|alpine|fedora|centos$/.test(distro)) { + throw new Error(`Unsupported platform: ${inspect(platform)}`); + } + + return { + distro, + version: release, + arch: arch === "aarch64" ? 
"arm64" : "amd64", + }; + }, + + /** + * @typedef {Object} OrbstackVm + * @property {string} id + * @property {string} name + * @property {"running"} state + * @property {OrbstackImage} image + * @property {OrbstackConfig} config + */ + + /** + * @typedef {Object} OrbstackConfig + * @property {string} default_username + * @property {boolean} isolated + */ + + /** + * @typedef {Object} OrbstackVmOptions + * @property {string} [name] + * @property {OrbstackImage} image + * @property {string} [username] + * @property {string} [password] + * @property {string} [userData] + */ + + /** + * @param {OrbstackVmOptions} options + * @returns {Promise} + */ + async createVm(options) { + const { name, image, username, password, userData } = options; + const { distro, version, arch } = image; + const uniqueId = name || `linux-${distro}-${version}-${arch}-${Math.random().toString(36).slice(2, 11)}`; + + const args = [`--arch=${arch}`, `${distro}:${version}`, uniqueId]; + if (username) { + args.push(`--user=${username}`); + } + if (password) { + args.push(`--set-password=${password}`); + } + + let userDataPath; + if (userData) { + userDataPath = mkdtemp("orbstack-user-data-", "user-data.txt"); + writeFile(userDataPath, userData); + args.push(`--user-data=${userDataPath}`); + } + + try { + await spawnSafe($`orbctl create ${args}`); + } finally { + if (userDataPath) { + rm(userDataPath); + } + } + + return this.inspectVm(uniqueId); + }, + + /** + * @param {string} name + */ + async deleteVm(name) { + await spawnSafe($`orbctl delete ${name}`, { + throwOnError: error => !/machine not found/i.test(inspect(error)), + }); + }, + + /** + * @param {string} name + * @returns {Promise} + */ + async inspectVm(name) { + const { exitCode, stdout } = await spawnSafe($`orbctl info ${name} --format=json`, { + throwOnError: error => !/machine not found/i.test(inspect(error)), + }); + if (exitCode === 0) { + return JSON.parse(stdout); + } + }, + + /** + * @returns {Promise} + */ + async listVms() { + const { stdout } = await spawnSafe($`orbctl list --format=json`); + return JSON.parse(stdout); + }, + + /** + * @param {MachineOptions} options + * @returns {Promise} + */ + async createMachine(options) { + const { distro } = options; + const username = getUsernameForDistro(distro); + const userData = getUserData({ ...options, username }); + + const image = this.getImage(options); + const vm = await this.createVm({ + image, + username, + userData, + }); + + return this.toMachine(vm, options); + }, + + /** + * @param {OrbstackVm} vm + * @returns {Machine} + */ + toMachine(vm) { + const { id, name, config } = vm; + + const { default_username: username } = config; + const connectOptions = { + username, + hostname: `${name}@orb`, + }; + + const exec = async (command, options) => { + return spawnSsh({ ...connectOptions, command }, options); + }; + + const execSafe = async (command, options) => { + return spawnSshSafe({ ...connectOptions, command }, options); + }; + + const attach = async () => { + await spawnSshSafe({ ...connectOptions }); + }; + + const upload = async (source, destination) => { + await spawnSafe(["orbctl", "push", `--machine=${name}`, source, destination]); + }; + + const close = async () => { + await this.deleteVm(name); + }; + + return { + cloud: "orbstack", + id, + name, + spawn: exec, + spawnSafe: execSafe, + upload, + attach, + close, + [Symbol.asyncDispose]: close, + }; + }, +}; diff --git a/scripts/tart.mjs b/scripts/tart.mjs new file mode 100644 index 00000000000000..123a463236b17e --- /dev/null +++ 
b/scripts/tart.mjs @@ -0,0 +1,287 @@ +import { inspect } from "node:util"; +import { + isPrivileged, + spawnSafe, + which +} from "./utils.mjs"; + +/** + * @link https://tart.run/ + * @link https://github.com/cirruslabs/tart + */ +export const tart = { + get name() { + return "tart"; + }, + + /** + * @param {string[]} args + * @param {import("./utils.mjs").SpawnOptions} options + * @returns {Promise} + */ + async spawn(args, options) { + const tart = which("tart", { required: true }); + const { json } = options || {}; + const command = json ? [tart, ...args, "--format=json"] : [tart, ...args]; + + const { stdout } = await spawnSafe(command, options); + if (!json) { + return stdout; + } + + try { + return JSON.parse(stdout); + } catch { + return; + } + }, + + /** + * @typedef {"sequoia" | "sonoma" | "ventura" | "monterey"} TartDistro + * @typedef {`ghcr.io/cirruslabs/macos-${TartDistro}-xcode`} TartImage + * @link https://github.com/orgs/cirruslabs/packages?repo_name=macos-image-templates + */ + + /** + * @param {Platform} platform + * @returns {TartImage} + */ + getImage(platform) { + const { os, arch, release } = platform; + if (os !== "darwin" || arch !== "aarch64") { + throw new Error(`Unsupported platform: ${inspect(platform)}`); + } + const distros = { + "15": "sequoia", + "14": "sonoma", + "13": "ventura", + "12": "monterey", + }; + const distro = distros[release]; + if (!distro) { + throw new Error(`Unsupported macOS release: ${distro}`); + } + return `ghcr.io/cirruslabs/macos-${distro}-xcode`; + }, + + /** + * @typedef {Object} TartVm + * @property {string} Name + * @property {"running" | "stopped"} State + * @property {"local"} Source + * @property {number} Size + * @property {number} Disk + * @property {number} [CPU] + * @property {number} [Memory] + */ + + /** + * @returns {Promise} + */ + async listVms() { + return this.spawn(["list"], { json: true }); + }, + + /** + * @param {string} name + * @returns {Promise} + */ + async getVm(name) { + const result = await this.spawn(["get", name], { + json: true, + throwOnError: error => !/does not exist/i.test(inspect(error)), + }); + return { + Name: name, + ...result, + }; + }, + + /** + * @param {string} name + * @returns {Promise} + */ + async stopVm(name) { + await this.spawn(["stop", name, "--timeout=0"], { + throwOnError: error => !/does not exist|is not running/i.test(inspect(error)), + }); + }, + + /** + * @param {string} name + * @returns {Promise} + */ + async deleteVm(name) { + await this.stopVm(name); + await this.spawn(["delete", name], { + throwOnError: error => !/does not exist/i.test(inspect(error)), + }); + }, + + /** + * @param {string} name + * @param {TartImage} image + * @returns {Promise} + */ + async cloneVm(name, image) { + const localName = image.split("/").pop(); + const localVm = await this.getVm(localName); + if (localVm) { + const { Name } = localVm; + await this.spawn(["clone", Name, name]); + return; + } + + console.log(`Cloning macOS image: ${image} (this will take a long time)`); + await this.spawn(["clone", image, localName]); + await this.spawn(["clone", localName, name]); + }, + + /** + * @typedef {Object} TartMount + * @property {boolean} [readOnly] + * @property {string} source + * @property {string} destination + */ + + /** + * @typedef {Object} TartVmOptions + * @property {number} [cpuCount] + * @property {number} [memoryGb] + * @property {number} [diskSizeGb] + * @property {boolean} [no-graphics] + * @property {boolean} [no-audio] + * @property {boolean} [no-clipboard] + * @property {boolean} 
[recovery] + * @property {boolean} [vnc] + * @property {boolean} [vnc-experimental] + * @property {boolean} [net-softnet] + * @property {TartMount[]} [dir] + */ + + /** + * @param {string} name + * @param {TartVmOptions} options + * @returns {Promise} + */ + async runVm(name, options = {}) { + const { cpuCount, memoryGb, diskSizeGb, dir, ...vmOptions } = options; + + const setArgs = ["--random-mac", "--random-serial"]; + if (cpuCount) { + setArgs.push(`--cpu=${cpuCount}`); + } + if (memoryGb) { + setArgs.push(`--memory=${memoryGb}`); + } + if (diskSizeGb) { + setArgs.push(`--disk-size=${diskSizeGb}`); + } + await this.spawn(["set", name, ...setArgs]); + + const args = Object.entries(vmOptions) + .filter(([, value]) => value !== undefined) + .flatMap(([key, value]) => (typeof value === "boolean" ? (value ? [`--${key}`] : []) : [`--${key}=${value}`])); + if (dir?.length) { + args.push( + ...dir.map(({ source, destination, readOnly }) => `--dir=${source}:${destination}${readOnly ? ":ro" : ""}`), + ); + } + + // This command is blocking, so it needs to be detached and not awaited + this.spawn(["run", name, ...args], { detached: true }); + }, + + /** + * @param {string} name + * @returns {Promise} + */ + async getVmIp(name) { + const stdout = await this.spawn(["ip", name], { + retryOnError: error => /no IP address found/i.test(inspect(error)), + throwOnError: error => !/does not exist/i.test(inspect(error)), + }); + return stdout?.trim(); + }, + + /** + * @param {MachineOptions} options + * @returns {Promise} + */ + async createMachine(options) { + const { name, imageName, cpuCount, memoryGb, diskSizeGb, rdp } = options; + + const image = imageName || this.getImage(options); + const machineId = name || `i-${Math.random().toString(36).slice(2, 11)}`; + await this.cloneVm(machineId, image); + + await this.runVm(machineId, { + cpuCount, + memoryGb, + diskSizeGb, + "net-softnet": isPrivileged(), + "no-audio": true, + "no-clipboard": true, + "no-graphics": true, + "vnc-experimental": rdp, + }); + + return this.toMachine(machineId); + }, + + /** + * @param {string} name + * @returns {Machine} + */ + toMachine(name) { + const connect = async () => { + const hostname = await this.getVmIp(name); + return { + hostname, + // hardcoded by base images + username: "admin", + password: "admin", + }; + }; + + const exec = async (command, options) => { + const connectOptions = await connect(); + return spawnSsh({ ...connectOptions, command }, options); + }; + + const execSafe = async (command, options) => { + const connectOptions = await connect(); + return spawnSshSafe({ ...connectOptions, command }, options); + }; + + const attach = async () => { + const connectOptions = await connect(); + await spawnSshSafe({ ...connectOptions }); + }; + + const upload = async (source, destination) => { + const connectOptions = await connect(); + await spawnScp({ ...connectOptions, source, destination }); + }; + + const rdp = async () => { + const connectOptions = await connect(); + await spawnRdp({ ...connectOptions }); + }; + + const close = async () => { + await this.deleteVm(name); + }; + + return { + cloud: "tart", + id: name, + spawn: exec, + spawnSafe: execSafe, + attach, + upload, + close, + [Symbol.asyncDispose]: close, + }; + }, +}; From 223c14792b6c984286d221b9d882519ee8d837bc Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 14:32:36 +0100 Subject: [PATCH 079/176] files --- scripts/machine.mjs | 1337 +------------------------------------------ scripts/utils.mjs | 28 + 2 files changed, 56 
insertions(+), 1309 deletions(-) diff --git a/scripts/machine.mjs b/scripts/machine.mjs index ff592bcaefba21..58d5951a477186 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -31,776 +31,15 @@ import { isWindows, sha256, isPrivileged, + getUsernameForDistro, } from "./utils.mjs"; import { basename, extname, join, relative, resolve } from "node:path"; import { existsSync, mkdtempSync, readdirSync } from "node:fs"; import { fileURLToPath } from "node:url"; - -/** - * @link https://tart.run/ - * @link https://github.com/cirruslabs/tart - */ -const tart = { - get name() { - return "tart"; - }, - - /** - * @param {string[]} args - * @param {import("./utils.mjs").SpawnOptions} options - * @returns {Promise} - */ - async spawn(args, options) { - const tart = which("tart", { required: true }); - const { json } = options || {}; - const command = json ? [tart, ...args, "--format=json"] : [tart, ...args]; - - const { stdout } = await spawnSafe(command, options); - if (!json) { - return stdout; - } - - try { - return JSON.parse(stdout); - } catch { - return; - } - }, - - /** - * @typedef {"sequoia" | "sonoma" | "ventura" | "monterey"} TartDistro - * @typedef {`ghcr.io/cirruslabs/macos-${TartDistro}-xcode`} TartImage - * @link https://github.com/orgs/cirruslabs/packages?repo_name=macos-image-templates - */ - - /** - * @param {Platform} platform - * @returns {TartImage} - */ - getImage(platform) { - const { os, arch, release } = platform; - if (os !== "darwin" || arch !== "aarch64") { - throw new Error(`Unsupported platform: ${inspect(platform)}`); - } - const distros = { - "15": "sequoia", - "14": "sonoma", - "13": "ventura", - "12": "monterey", - }; - const distro = distros[release]; - if (!distro) { - throw new Error(`Unsupported macOS release: ${distro}`); - } - return `ghcr.io/cirruslabs/macos-${distro}-xcode`; - }, - - /** - * @typedef {Object} TartVm - * @property {string} Name - * @property {"running" | "stopped"} State - * @property {"local"} Source - * @property {number} Size - * @property {number} Disk - * @property {number} [CPU] - * @property {number} [Memory] - */ - - /** - * @returns {Promise} - */ - async listVms() { - return this.spawn(["list"], { json: true }); - }, - - /** - * @param {string} name - * @returns {Promise} - */ - async getVm(name) { - const result = await this.spawn(["get", name], { - json: true, - throwOnError: error => !/does not exist/i.test(inspect(error)), - }); - return { - Name: name, - ...result, - }; - }, - - /** - * @param {string} name - * @returns {Promise} - */ - async stopVm(name) { - await this.spawn(["stop", name, "--timeout=0"], { - throwOnError: error => !/does not exist|is not running/i.test(inspect(error)), - }); - }, - - /** - * @param {string} name - * @returns {Promise} - */ - async deleteVm(name) { - await this.stopVm(name); - await this.spawn(["delete", name], { - throwOnError: error => !/does not exist/i.test(inspect(error)), - }); - }, - - /** - * @param {string} name - * @param {TartImage} image - * @returns {Promise} - */ - async cloneVm(name, image) { - const localName = image.split("/").pop(); - const localVm = await this.getVm(localName); - if (localVm) { - const { Name } = localVm; - await this.spawn(["clone", Name, name]); - return; - } - - console.log(`Cloning macOS image: ${image} (this will take a long time)`); - await this.spawn(["clone", image, localName]); - await this.spawn(["clone", localName, name]); - }, - - /** - * @typedef {Object} TartMount - * @property {boolean} [readOnly] - * @property {string} source - * 
@property {string} destination - */ - - /** - * @typedef {Object} TartVmOptions - * @property {number} [cpuCount] - * @property {number} [memoryGb] - * @property {number} [diskSizeGb] - * @property {boolean} [no-graphics] - * @property {boolean} [no-audio] - * @property {boolean} [no-clipboard] - * @property {boolean} [recovery] - * @property {boolean} [vnc] - * @property {boolean} [vnc-experimental] - * @property {boolean} [net-softnet] - * @property {TartMount[]} [dir] - */ - - /** - * @param {string} name - * @param {TartVmOptions} options - * @returns {Promise} - */ - async runVm(name, options = {}) { - const { cpuCount, memoryGb, diskSizeGb, dir, ...vmOptions } = options; - - const setArgs = ["--random-mac", "--random-serial"]; - if (cpuCount) { - setArgs.push(`--cpu=${cpuCount}`); - } - if (memoryGb) { - setArgs.push(`--memory=${memoryGb}`); - } - if (diskSizeGb) { - setArgs.push(`--disk-size=${diskSizeGb}`); - } - await this.spawn(["set", name, ...setArgs]); - - const args = Object.entries(vmOptions) - .filter(([, value]) => value !== undefined) - .flatMap(([key, value]) => (typeof value === "boolean" ? (value ? [`--${key}`] : []) : [`--${key}=${value}`])); - if (dir?.length) { - args.push( - ...dir.map(({ source, destination, readOnly }) => `--dir=${source}:${destination}${readOnly ? ":ro" : ""}`), - ); - } - - // This command is blocking, so it needs to be detached and not awaited - this.spawn(["run", name, ...args], { detached: true }); - }, - - /** - * @param {string} name - * @returns {Promise} - */ - async getVmIp(name) { - const stdout = await this.spawn(["ip", name], { - retryOnError: error => /no IP address found/i.test(inspect(error)), - throwOnError: error => !/does not exist/i.test(inspect(error)), - }); - return stdout?.trim(); - }, - - /** - * @param {MachineOptions} options - * @returns {Promise} - */ - async createMachine(options) { - const { name, imageName, cpuCount, memoryGb, diskSizeGb, rdp } = options; - - const image = imageName || this.getImage(options); - const machineId = name || `i-${Math.random().toString(36).slice(2, 11)}`; - await this.cloneVm(machineId, image); - - await this.runVm(machineId, { - cpuCount, - memoryGb, - diskSizeGb, - "net-softnet": isPrivileged(), - "no-audio": true, - "no-clipboard": true, - "no-graphics": true, - "vnc-experimental": rdp, - }); - - return this.toMachine(machineId); - }, - - /** - * @param {string} name - * @returns {Machine} - */ - toMachine(name) { - const connect = async () => { - const hostname = await this.getVmIp(name); - return { - hostname, - // hardcoded by base images - username: "admin", - password: "admin", - }; - }; - - const exec = async (command, options) => { - const connectOptions = await connect(); - return spawnSsh({ ...connectOptions, command }, options); - }; - - const execSafe = async (command, options) => { - const connectOptions = await connect(); - return spawnSshSafe({ ...connectOptions, command }, options); - }; - - const attach = async () => { - const connectOptions = await connect(); - await spawnSshSafe({ ...connectOptions }); - }; - - const upload = async (source, destination) => { - const connectOptions = await connect(); - await spawnScp({ ...connectOptions, source, destination }); - }; - - const rdp = async () => { - const connectOptions = await connect(); - await spawnRdp({ ...connectOptions }); - }; - - const close = async () => { - await this.deleteVm(name); - }; - - return { - cloud: "tart", - id: name, - spawn: exec, - spawnSafe: execSafe, - attach, - upload, - close, - 
[Symbol.asyncDispose]: close, - }; - }, -}; - -/** - * @link https://docs.orbstack.dev/ - */ -const orbstack = { - get name() { - return "orbstack"; - }, - - /** - * @typedef {Object} OrbstackImage - * @property {string} distro - * @property {string} version - * @property {string} arch - */ - - /** - * @param {Platform} platform - * @returns {OrbstackImage} - */ - getImage(platform) { - const { os, arch, distro, release } = platform; - if (os !== "linux" || !/^debian|ubuntu|alpine|fedora|centos$/.test(distro)) { - throw new Error(`Unsupported platform: ${inspect(platform)}`); - } - - return { - distro, - version: release, - arch: arch === "aarch64" ? "arm64" : "amd64", - }; - }, - - /** - * @typedef {Object} OrbstackVm - * @property {string} id - * @property {string} name - * @property {"running"} state - * @property {OrbstackImage} image - * @property {OrbstackConfig} config - */ - - /** - * @typedef {Object} OrbstackConfig - * @property {string} default_username - * @property {boolean} isolated - */ - - /** - * @typedef {Object} OrbstackVmOptions - * @property {string} [name] - * @property {OrbstackImage} image - * @property {string} [username] - * @property {string} [password] - * @property {string} [userData] - */ - - /** - * @param {OrbstackVmOptions} options - * @returns {Promise} - */ - async createVm(options) { - const { name, image, username, password, userData } = options; - const { distro, version, arch } = image; - const uniqueId = name || `linux-${distro}-${version}-${arch}-${Math.random().toString(36).slice(2, 11)}`; - - const args = [`--arch=${arch}`, `${distro}:${version}`, uniqueId]; - if (username) { - args.push(`--user=${username}`); - } - if (password) { - args.push(`--set-password=${password}`); - } - - let userDataPath; - if (userData) { - userDataPath = mkdtemp("orbstack-user-data-", "user-data.txt"); - writeFile(userDataPath, userData); - args.push(`--user-data=${userDataPath}`); - } - - try { - await spawnSafe($`orbctl create ${args}`); - } finally { - if (userDataPath) { - rm(userDataPath); - } - } - - return this.inspectVm(uniqueId); - }, - - /** - * @param {string} name - */ - async deleteVm(name) { - await spawnSafe($`orbctl delete ${name}`, { - throwOnError: error => !/machine not found/i.test(inspect(error)), - }); - }, - - /** - * @param {string} name - * @returns {Promise} - */ - async inspectVm(name) { - const { exitCode, stdout } = await spawnSafe($`orbctl info ${name} --format=json`, { - throwOnError: error => !/machine not found/i.test(inspect(error)), - }); - if (exitCode === 0) { - return JSON.parse(stdout); - } - }, - - /** - * @returns {Promise} - */ - async listVms() { - const { stdout } = await spawnSafe($`orbctl list --format=json`); - return JSON.parse(stdout); - }, - - /** - * @param {MachineOptions} options - * @returns {Promise} - */ - async createMachine(options) { - const { distro } = options; - const username = getUsername(distro); - const userData = getUserData({ ...options, username }); - - const image = this.getImage(options); - const vm = await this.createVm({ - image, - username, - userData, - }); - - return this.toMachine(vm, options); - }, - - /** - * @param {OrbstackVm} vm - * @returns {Machine} - */ - toMachine(vm) { - const { id, name, config } = vm; - - const { default_username: username } = config; - const connectOptions = { - username, - hostname: `${name}@orb`, - }; - - const exec = async (command, options) => { - return spawnSsh({ ...connectOptions, command }, options); - }; - - const execSafe = async (command, options) => { 
- return spawnSshSafe({ ...connectOptions, command }, options); - }; - - const attach = async () => { - await spawnSshSafe({ ...connectOptions }); - }; - - const upload = async (source, destination) => { - await spawnSafe(["orbctl", "push", `--machine=${name}`, source, destination]); - }; - - const close = async () => { - await this.deleteVm(name); - }; - - return { - cloud: "orbstack", - id, - name, - spawn: exec, - spawnSafe: execSafe, - upload, - attach, - close, - [Symbol.asyncDispose]: close, - }; - }, -}; - -const docker = { - get name() { - return "docker"; - }, - - /** - * @typedef {"linux" | "darwin" | "windows"} DockerOs - * @typedef {"amd64" | "arm64"} DockerArch - * @typedef {`${DockerOs}/${DockerArch}`} DockerPlatform - */ - - /** - * @param {Platform} platform - * @returns {DockerPlatform} - */ - getPlatform(platform) { - const { os, arch } = platform; - if (arch === "aarch64") { - return `${os}/arm64`; - } else if (arch === "x64") { - return `${os}/amd64`; - } - throw new Error(`Unsupported platform: ${inspect(platform)}`); - }, - - /** - * @typedef DockerSpawnOptions - * @property {DockerPlatform} [platform] - * @property {boolean} [json] - */ - - /** - * @param {string[]} args - * @param {DockerSpawnOptions & import("./utils.mjs").SpawnOptions} [options] - * @returns {Promise} - */ - async spawn(args, options = {}) { - const docker = which("docker", { required: true }); - - let env = { ...process.env }; - if (isCI) { - env["BUILDKIT_PROGRESS"] = "plain"; - } - - const { json, platform } = options; - if (json) { - args.push("--format=json"); - } - if (platform) { - args.push(`--platform=${platform}`); - } - - const { error, stdout } = await spawnSafe($`${docker} ${args}`, { env, ...options }); - if (error) { - return; - } - if (!json) { - return stdout; - } - - try { - return JSON.parse(stdout); - } catch { - return; - } - }, - - /** - * @typedef {Object} DockerImage - * @property {string} Id - * @property {string[]} RepoTags - * @property {string[]} RepoDigests - * @property {string} Created - * @property {DockerOs} Os - * @property {DockerArch} Architecture - * @property {number} Size - */ - - /** - * @param {string} url - * @param {DockerPlatform} [platform] - * @returns {Promise} - */ - async pullImage(url, platform) { - const done = await this.spawn($`pull ${url}`, { - platform, - throwOnError: error => !/No such image|manifest unknown/i.test(inspect(error)), - }); - return !!done; - }, - - /** - * @param {string} url - * @param {DockerPlatform} [platform] - * @returns {Promise} - */ - async inspectImage(url, platform) { - /** @type {DockerImage[]} */ - const images = await this.spawn($`image inspect ${url}`, { - json: true, - throwOnError: error => !/No such image/i.test(inspect(error)), - }); - - if (!images) { - const pulled = await this.pullImage(url, platform); - if (pulled) { - return this.inspectImage(url, platform); - } - } - - const { os, arch } = platform || {}; - return images - ?.filter(({ Os, Architecture }) => !os || !arch || (Os === os && Architecture === arch)) - ?.find((a, b) => (a.Created < b.Created ? 
1 : -1)); - }, - - /** - * @typedef {Object} DockerContainer - * @property {string} Id - * @property {string} Name - * @property {string} Image - * @property {string} Created - * @property {DockerContainerState} State - * @property {DockerContainerNetworkSettings} NetworkSettings - */ - - /** - * @typedef {Object} DockerContainerState - * @property {"exited" | "running"} Status - * @property {number} [Pid] - * @property {number} ExitCode - * @property {string} [Error] - * @property {string} StartedAt - * @property {string} FinishedAt - */ - - /** - * @typedef {Object} DockerContainerNetworkSettings - * @property {string} [IPAddress] - */ - - /** - * @param {string} containerId - * @returns {Promise} - */ - async inspectContainer(containerId) { - const containers = await this.spawn($`container inspect ${containerId}`, { json: true }); - return containers?.find(a => a.Id === containerId); - }, - - /** - * @returns {Promise} - */ - async listContainers() { - const containers = await this.spawn($`container ls --all`, { json: true }); - return containers || []; - }, - - /** - * @typedef {Object} DockerRunOptions - * @property {string[]} [command] - * @property {DockerPlatform} [platform] - * @property {string} [name] - * @property {boolean} [detach] - * @property {"always" | "never"} [pull] - * @property {boolean} [rm] - * @property {"no" | "on-failure" | "always"} [restart] - */ - - /** - * @param {string} url - * @param {DockerRunOptions} [options] - * @returns {Promise} - */ - async runContainer(url, options = {}) { - const { detach, command = [], ...containerOptions } = options; - const args = Object.entries(containerOptions) - .filter(([_, value]) => typeof value !== "undefined") - .map(([key, value]) => (typeof value === "boolean" ? `--${key}` : `--${key}=${value}`)); - if (detach) { - args.push("--detach"); - } else { - args.push("--tty", "--interactive"); - } - - const stdio = detach ? 
"pipe" : "inherit"; - const result = await this.spawn($`run ${args} ${url} ${command}`, { stdio }); - if (!detach) { - return; - } - - const containerId = result.trim(); - const container = await this.inspectContainer(containerId); - if (!container) { - throw new Error(`Failed to run container: ${inspect(result)}`); - } - return container; - }, - - /** - * @param {Platform} platform - * @returns {Promise} - */ - async getBaseImage(platform) { - const { os, distro, release } = platform; - const dockerPlatform = this.getPlatform(platform); - - let url; - if (os === "linux") { - if (distro === "debian" || distro === "ubuntu" || distro === "alpine") { - url = `docker.io/library/${distro}:${release}`; - } else if (distro === "amazonlinux") { - url = `public.ecr.aws/amazonlinux/amazonlinux:${release}`; - } - } - - if (url) { - const image = await this.inspectImage(url, dockerPlatform); - if (image) { - return image; - } - } - - throw new Error(`Unsupported platform: ${inspect(platform)}`); - }, - - /** - * @param {DockerContainer} container - * @param {MachineOptions} [options] - * @returns {Machine} - */ - toMachine(container, options = {}) { - const { Id: containerId } = container; - - const exec = (command, options) => { - return spawn(["docker", "exec", containerId, ...command], options); - }; - - const execSafe = (command, options) => { - return spawnSafe(["docker", "exec", containerId, ...command], options); - }; - - const upload = async (source, destination) => { - await spawn(["docker", "cp", source, `${containerId}:${destination}`]); - }; - - const attach = async () => { - const { exitCode, error } = await spawn(["docker", "exec", "-it", containerId, "sh"], { - stdio: "inherit", - }); - - if (exitCode === 0 || exitCode === 130) { - return; - } - - throw error; - }; - - const snapshot = async name => { - await spawn(["docker", "commit", containerId]); - }; - - const kill = async () => { - await spawn(["docker", "kill", containerId]); - }; - - return { - cloud: "docker", - id: containerId, - spawn: exec, - spawnSafe: execSafe, - upload, - attach, - snapshot, - close: kill, - [Symbol.asyncDispose]: kill, - }; - }, - - /** - * @param {MachineOptions} options - * @returns {Promise} - */ - async createMachine(options) { - const { Id: imageId, Os, Architecture } = await docker.getBaseImage(options); - - const container = await docker.runContainer(imageId, { - platform: `${Os}/${Architecture}`, - command: ["sleep", "1d"], - detach: true, - rm: true, - restart: "no", - }); - - return this.toMachine(container, options); - }, -}; +import { orbstack } from "./orbstack.mjs"; +import { docker } from "./docker.mjs"; +import { google } from "./google.mjs"; +import { tart } from "./tart.mjs"; const aws = { get name() { @@ -1197,7 +436,7 @@ const aws = { return device; }); - const username = getUsername(Name); + const username = getUsernameForDistro(Name); let userData = getUserData({ ...options, username }); if (os === "windows") { @@ -1344,515 +583,6 @@ const aws = { }, }; -const google = { - get cloud() { - return "google"; - }, - - /** - * @param {string[]} args - * @param {import("./utils.mjs").SpawnOptions} [options] - * @returns {Promise} - */ - async spawn(args, options = {}) { - const gcloud = which("gcloud", { required: true }); - - let env = { ...process.env }; - // if (isCI) { - // env; // TODO: Add Google Cloud credentials - // } else { - // env["TERM"] = "dumb"; - // } - - const { stdout } = await spawnSafe($`${gcloud} ${args} --format json`, { - env, - ...options, - }); - try { - return 
JSON.parse(stdout); - } catch { - return; - } - }, - - /** - * @param {Record} [options] - * @returns {string[]} - */ - getFilters(options = {}) { - const filter = Object.entries(options) - .filter(([, value]) => value !== undefined) - .map(([key, value]) => [value.includes("*") ? `${key}~${value}` : `${key}=${value}`]) - .join(" AND "); - return filter ? ["--filter", filter] : []; - }, - - /** - * @param {Record} options - * @returns {string[]} - */ - getFlags(options) { - return Object.entries(options) - .filter(([, value]) => value !== undefined) - .flatMap(([key, value]) => { - if (typeof value === "boolean") { - return value ? [`--${key}`] : []; - } - return [`--${key}=${value}`]; - }); - }, - - /** - * @param {Record} options - * @returns {string} - * @link https://cloud.google.com/sdk/gcloud/reference/topic/escaping - */ - getMetadata(options) { - const delimiter = Math.random().toString(36).substring(2, 15); - const entries = Object.entries(options) - .map(([key, value]) => `${key}=${value}`) - .join(delimiter); - return `^${delimiter}^${entries}`; - }, - - /** - * @param {string} name - * @returns {string} - */ - getLabel(name) { - return name.replace(/[^a-z0-9_-]/g, "-").toLowerCase(); - }, - - /** - * @typedef {Object} GoogleImage - * @property {string} id - * @property {string} name - * @property {string} family - * @property {"X86_64" | "ARM64"} architecture - * @property {string} diskSizeGb - * @property {string} selfLink - * @property {"READY"} status - * @property {string} creationTimestamp - */ - - /** - * @param {Partial} [options] - * @returns {Promise} - * @link https://cloud.google.com/sdk/gcloud/reference/compute/images/list - */ - async listImages(options) { - const filters = google.getFilters(options); - const images = await google.spawn($`compute images list ${filters} --preview-images --show-deprecated`); - return images.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 
1 : -1)); - }, - - /** - * @param {Record} options - * @returns {Promise} - * @link https://cloud.google.com/sdk/gcloud/reference/compute/images/create - */ - async createImage(options) { - const { name, ...otherOptions } = options; - const flags = this.getFlags(otherOptions); - const imageId = name || "i-" + Math.random().toString(36).substring(2, 15); - return this.spawn($`compute images create ${imageId} ${flags}`); - }, - - /** - * @typedef {Object} GoogleInstance - * @property {string} id - * @property {string} name - * @property {"RUNNING"} status - * @property {string} machineType - * @property {string} zone - * @property {GoogleDisk[]} disks - * @property {GoogleNetworkInterface[]} networkInterfaces - * @property {object} [scheduling] - * @property {"STANDARD" | "SPOT"} [scheduling.provisioningModel] - * @property {boolean} [scheduling.preemptible] - * @property {Record} [labels] - * @property {string} selfLink - * @property {string} creationTimestamp - */ - - /** - * @typedef {Object} GoogleDisk - * @property {string} deviceName - * @property {boolean} boot - * @property {"X86_64" | "ARM64"} architecture - * @property {string[]} [licenses] - * @property {number} diskSizeGb - */ - - /** - * @typedef {Object} GoogleNetworkInterface - * @property {"IPV4_ONLY" | "IPV4_IPV6" | "IPV6_ONLY"} stackType - * @property {string} name - * @property {string} network - * @property {string} networkIP - * @property {string} subnetwork - * @property {GoogleAccessConfig[]} accessConfigs - */ - - /** - * @typedef {Object} GoogleAccessConfig - * @property {string} name - * @property {"ONE_TO_ONE_NAT" | "INTERNAL_NAT"} type - * @property {string} [natIP] - */ - - /** - * @param {Record} options - * @returns {Promise} - * @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/create - */ - async createInstance(options) { - const { name, ...otherOptions } = options || {}; - const flags = this.getFlags(otherOptions); - const instanceId = name || "i-" + Math.random().toString(36).substring(2, 15); - const [instance] = await this.spawn($`compute instances create ${instanceId} ${flags}`); - return instance; - }, - - /** - * @param {string} instanceId - * @param {string} zoneId - * @returns {Promise} - * @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/stop - */ - async stopInstance(instanceId, zoneId) { - await this.spawn($`compute instances stop ${instanceId} --zone=${zoneId}`); - }, - - /** - * @param {string} instanceId - * @param {string} zoneId - * @returns {Promise} - * @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/delete - */ - async deleteInstance(instanceId, zoneId) { - await this.spawn($`compute instances delete ${instanceId} --delete-disks=all --zone=${zoneId}`, { - throwOnError: error => !/not found/i.test(inspect(error)), - }); - }, - - /** - * @param {string} instanceId - * @param {string} username - * @param {string} zoneId - * @param {object} [options] - * @param {boolean} [options.wait] - * @returns {Promise} - * @link https://cloud.google.com/sdk/gcloud/reference/compute/reset-windows-password - */ - async resetWindowsPassword(instanceId, username, zoneId, options = {}) { - const attempts = options.wait ? 
15 : 1; - for (let i = 0; i < attempts; i++) { - const result = await this.spawn( - $`compute reset-windows-password ${instanceId} --user=${username} --zone=${zoneId}`, - { - throwOnError: error => !/instance may not be ready for use/i.test(inspect(error)), - }, - ); - if (result) { - const { password } = result; - if (password) { - return password; - } - } - await new Promise(resolve => setTimeout(resolve, 60000 * i)); - } - }, - - /** - * @param {Partial} options - * @returns {Promise} - */ - async listInstances(options) { - const filters = this.getFilters(options); - const instances = await this.spawn($`compute instances list ${filters}`); - return instances.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 1 : -1)); - }, - - /** - * @param {MachineOptions} options - * @returns {Promise} - */ - async getMachineImage(options) { - const { os, arch, distro, release } = options; - const architecture = arch === "aarch64" ? "ARM64" : "X86_64"; - - /** @type {string | undefined} */ - let family; - if (os === "linux") { - if (!distro || distro === "debian") { - family = `debian-${release || "*"}`; - } else if (distro === "ubuntu") { - family = `ubuntu-${release?.replace(/\./g, "") || "*"}`; - } else if (distro === "fedora") { - family = `fedora-coreos-${release || "*"}`; - } else if (distro === "rhel") { - family = `rhel-${release || "*"}`; - } - } else if (os === "windows" && arch === "x64") { - if (!distro || distro === "server") { - family = `windows-${release || "*"}`; - } - } - - if (family) { - const images = await this.listImages({ family, architecture }); - if (images.length) { - const [image] = images; - return image; - } - } - - throw new Error(`Unsupported platform: ${inspect(options)}`); - }, - - /** - * @param {MachineOptions} options - * @returns {Promise} - */ - async createMachine(options) { - const { name, os, arch, distro, instanceType, tags, preemptible, detached } = options; - const image = await google.getMachineImage(options); - const { selfLink: imageUrl } = image; - - const username = getUsername(distro || os); - const userData = getUserData({ ...options, username }); - - /** @type {Record} */ - let metadata; - if (os === "windows") { - metadata = { - "enable-windows-ssh": "TRUE", - "sysprep-specialize-script-ps1": userData, - }; - } else { - metadata = { - "user-data": userData, - }; - } - - const instance = await google.createInstance({ - "name": name, - "zone": "us-central1-a", - "image": imageUrl, - "machine-type": instanceType || (arch === "aarch64" ? "t2a-standard-2" : "t2d-standard-2"), - "boot-disk-auto-delete": true, - "boot-disk-size": `${getDiskSize(options)}GB`, - "metadata": this.getMetadata(metadata), - "labels": Object.entries(tags || {}) - .filter(([, value]) => value !== undefined) - .map(([key, value]) => `${this.getLabel(key)}=${value}`) - .join(","), - "provisioning-model": preemptible ? "SPOT" : "STANDARD", - "instance-termination-action": preemptible || !detached ? "DELETE" : undefined, - "no-restart-on-failure": true, - "threads-per-core": 1, - "max-run-duration": detached ? 
undefined : "6h", - }); - - return this.toMachine(instance, options); - }, - - /** - * @param {GoogleInstance} instance - * @param {MachineOptions} [options] - * @returns {Machine} - */ - toMachine(instance, options = {}) { - const { id: instanceId, name, zone: zoneUrl, machineType: machineTypeUrl, labels } = instance; - const machineType = machineTypeUrl.split("/").pop(); - const zoneId = zoneUrl.split("/").pop(); - - let os, arch, distro, release; - const { disks = [] } = instance; - for (const { boot, architecture, licenses = [] } of disks) { - if (!boot) { - continue; - } - - if (architecture === "X86_64") { - arch = "x64"; - } else if (architecture === "ARM64") { - arch = "aarch64"; - } - - for (const license of licenses) { - const linuxMatch = /(debian|ubuntu|fedora|rhel)-(\d+)/i.exec(license); - if (linuxMatch) { - os = "linux"; - [, distro, release] = linuxMatch; - } else { - const windowsMatch = /windows-server-(\d+)-dc-core/i.exec(license); - if (windowsMatch) { - os = "windows"; - distro = "windowsserver"; - [, release] = windowsMatch; - } - } - } - } - - let publicIp; - const { networkInterfaces = [] } = instance; - for (const { accessConfigs = [] } of networkInterfaces) { - for (const { type, natIP } of accessConfigs) { - if (type === "ONE_TO_ONE_NAT" && natIP) { - publicIp = natIP; - } - } - } - - let preemptible; - const { scheduling } = instance; - if (scheduling) { - const { provisioningModel, preemptible: isPreemptible } = scheduling; - preemptible = provisioningModel === "SPOT" || isPreemptible; - } - - /** - * @returns {SshOptions} - */ - const connect = () => { - if (!publicIp) { - throw new Error(`Failed to find public IP for instance: ${name}`); - } - - /** @type {string | undefined} */ - let username; - - const { os, distro } = options; - if (os || distro) { - username = getUsername(distro || os); - } - - return { hostname: publicIp, username }; - }; - - const spawn = async (command, options) => { - const connectOptions = connect(); - return spawnSsh({ ...connectOptions, command }, options); - }; - - const spawnSafe = async (command, options) => { - const connectOptions = connect(); - return spawnSshSafe({ ...connectOptions, command }, options); - }; - - const rdp = async () => { - const { hostname, username } = connect(); - const rdpUsername = `${username}-rdp`; - const password = await google.resetWindowsPassword(instanceId, rdpUsername, zoneId, { wait: true }); - return { hostname, username: rdpUsername, password }; - }; - - const attach = async () => { - const connectOptions = connect(); - await spawnSshSafe({ ...connectOptions }); - }; - - const upload = async (source, destination) => { - const connectOptions = connect(); - await spawnScp({ ...connectOptions, source, destination }); - }; - - const snapshot = async name => { - const stopResult = await this.stopInstance(instanceId, zoneId); - console.log(stopResult); - const image = await this.createImage({ - ["source-disk"]: instanceId, - ["zone"]: zoneId, - ["name"]: name || `${instanceId}-snapshot-${Date.now()}`, - }); - console.log(image); - return; - }; - - const terminate = async () => { - await google.deleteInstance(instanceId, zoneId); - }; - - return { - cloud: "google", - os, - arch, - distro, - release, - id: instanceId, - imageId: undefined, - name, - instanceType: machineType, - region: zoneId, - publicIp, - preemptible, - labels, - spawn, - spawnSafe, - rdp, - attach, - upload, - snapshot, - close: terminate, - [Symbol.asyncDispose]: terminate, - }; - }, - - /** - * @param {Record} [labels] - * 
@returns {Promise} - */ - async getMachines(labels) { - const filters = labels ? this.getFilters({ labels }) : {}; - const instances = await google.listInstances(filters); - return instances.map(instance => this.toMachine(instance)); - }, - - /** - * @param {MachineOptions} options - * @returns {Promise} - */ - async getImage(options) { - const { os, arch, distro, release } = options; - const architecture = arch === "aarch64" ? "ARM64" : "X86_64"; - - let name; - let username; - if (os === "linux") { - if (distro === "debian") { - name = `debian-${release}-*`; - username = "admin"; - } else if (distro === "ubuntu") { - name = `ubuntu-${release.replace(/\./g, "")}-*`; - username = "ubuntu"; - } - } else if (os === "windows" && arch === "x64") { - if (distro === "server") { - name = `windows-server-${release}-dc-core-*`; - username = "administrator"; - } - } - - if (name && username) { - const images = await google.listImages({ name, architecture }); - if (images.length) { - const [image] = images; - const { name, selfLink } = image; - return { - id: selfLink, - name, - username, - }; - } - } - - throw new Error(`Unsupported platform: ${inspect(platform)}`); - }, -}; - /** * @typedef CloudInit * @property {string} [distro] @@ -1865,12 +595,25 @@ const google = { * @param {CloudInit} cloudInit * @returns {string} */ -function getUserData(cloudInit) { - const { os } = cloudInit; +export function getUserData(cloudInit) { + const { os, userData } = cloudInit; + + let defaultConfig; if (os === "windows") { - return getWindowsStartupScript(cloudInit); + defaultConfig = getWindowsStartupScript(cloudInit); + } else { + defaultConfig = getCloudInit(cloudInit); + } + + // If no custom user data, return default config + if (!userData) { + return defaultConfig; } - return getCloudInit(cloudInit); + + // Append custom user data after default config + return `${defaultConfig} + +${userData}`; } /** @@ -2003,39 +746,11 @@ function getWindowsStartupScript(cloudInit) { `; } -/** - * @param {string} distro - * @returns {string} - */ -function getUsername(distro) { - if (/windows/i.test(distro)) { - return "administrator"; - } - - if (/alpine|centos/i.test(distro)) { - return "root"; - } - - if (/debian/i.test(distro)) { - return "admin"; - } - - if (/ubuntu/i.test(distro)) { - return "ubuntu"; - } - - if (/amazon|amzn|al\d+|rhel/i.test(distro)) { - return "ec2-user"; - } - - throw new Error(`Unsupported distro: ${distro}`); -} - /** * @param {MachineOptions} options * @returns {number} */ -function getDiskSize(options) { +export function getDiskSize(options) { const { os, diskSizeGb } = options; if (diskSizeGb) { @@ -2401,6 +1116,7 @@ function getCloud(name) { * @property {string} [publicIp] * @property {boolean} [preemptible] * @property {Record} tags + * @property {string} [userData] * @property {(command: string[], options?: import("./utils.mjs").SpawnOptions) => Promise} spawn * @property {(command: string[], options?: import("./utils.mjs").SpawnOptions) => Promise} spawnSafe * @property {(source: string, destination: string) => Promise} upload @@ -2430,6 +1146,7 @@ function getCloud(name) { * @property {boolean} [bootstrap] * @property {boolean} [ci] * @property {boolean} [rdp] + * @property {string} [userData] * @property {SshKey[]} sshKeys */ @@ -2467,6 +1184,7 @@ async function main() { "ci": { type: "boolean" }, "rdp": { type: "boolean" }, "vnc": { type: "boolean" }, + "user-data": { type: "string" }, "authorized-user": { type: "string", multiple: true }, "authorized-org": { type: "string", multiple: 
true }, "no-bootstrap": { type: "boolean" }, @@ -2516,6 +1234,7 @@ async function main() { ci: !!args["ci"], rdp: !!args["rdp"] || !!args["vnc"], sshKeys, + userData: args["user-data"] ? readFile(args["user-data"]) : undefined, }; const { detached, bootstrap, ci, os, arch, distro, release } = options; diff --git a/scripts/utils.mjs b/scripts/utils.mjs index 198712a34c7977..72271c80710f8b 100755 --- a/scripts/utils.mjs +++ b/scripts/utils.mjs @@ -1860,6 +1860,34 @@ export function getUsername() { return username; } +/** + * @param {string} distro + * @returns {string} + */ +export function getUsernameForDistro(distro) { + if (/windows/i.test(distro)) { + return "administrator"; + } + + if (/alpine|centos/i.test(distro)) { + return "root"; + } + + if (/debian/i.test(distro)) { + return "admin"; + } + + if (/ubuntu/i.test(distro)) { + return "ubuntu"; + } + + if (/amazon|amzn|al\d+|rhel/i.test(distro)) { + return "ec2-user"; + } + + throw new Error(`Unsupported distro: ${distro}`); +} + /** * @typedef {object} User * @property {string} username From de8a9f60f48fd6f248f7fedeba3cbe02e71d10e0 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 14:34:25 +0100 Subject: [PATCH 080/176] [build images] --- scripts/create-nix-amis.mjs | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 9c0b48541eb8fd..ae363cb41d8623 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -24,7 +24,7 @@ async function main() { // Read the flake.nix content const flakeContent = await readFile("flake.nix"); - // Create user data script that will set up our environment + // Create user data script const userData = `#!/bin/bash set -euxo pipefail @@ -108,10 +108,9 @@ buildkite-agent soft nofile 1048576 buildkite-agent hard nofile 1048576 buildkite-agent soft nproc 1048576 buildkite-agent hard nproc 1048576 -EOF -`; +EOF`; - // Use machine.mjs to create the AMI, but with Ubuntu as base + // Use machine.mjs to create the AMI with the user data await spawnSafe([ "node", "./scripts/machine.mjs", @@ -119,11 +118,11 @@ EOF `--os=linux`, `--arch=${architecture}`, `--distro=ubuntu`, - `--release=18.04`, // Ubuntu 18.04 has glibc 2.26 + `--release=18.04`, + `--cloud=aws`, + `--ci`, + `--authorized-org=oven-sh`, `--user-data=${userData}`, - "--cloud=aws", - "--ci", - "--authorized-org=oven-sh", ]); } From ebb3dcd75bdb0adc283bbae70188956daad6bf83 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 14:37:23 +0100 Subject: [PATCH 081/176] [build images] --- scripts/create-nix-amis.mjs | 40 +++++++++++++++++++++++-------------- 1 file changed, 25 insertions(+), 15 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index ae363cb41d8623..26e9fa0cb837c8 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -1,8 +1,9 @@ #!/usr/bin/env node import { parseArgs } from "node:util"; -import { getBuildNumber, getSecret, isCI, parseArch, spawnSafe, startGroup, readFile } from "./utils.mjs"; +import { getBuildNumber, getSecret, isCI, parseArch, spawnSafe, startGroup, readFile, mkdtemp, rm } from "./utils.mjs"; import { join } from "node:path"; +import { writeFile } from "node:fs/promises"; async function main() { const { @@ -110,20 +111,29 @@ buildkite-agent soft nproc 1048576 buildkite-agent hard nproc 1048576 EOF`; - // Use machine.mjs to create the AMI with the user data - await spawnSafe([ - "node", - "./scripts/machine.mjs", - "publish-image", 
- `--os=linux`, - `--arch=${architecture}`, - `--distro=ubuntu`, - `--release=18.04`, - `--cloud=aws`, - `--ci`, - `--authorized-org=oven-sh`, - `--user-data=${userData}`, - ]); + // Write user data to a temporary file + const userDataFile = mkdtemp("user-data-", "user-data.sh"); + await writeFile(userDataFile, userData); + + try { + // Use machine.mjs to create the AMI with the user data + await spawnSafe([ + "node", + "./scripts/machine.mjs", + "publish-image", + `--os=linux`, + `--arch=${architecture}`, + `--distro=ubuntu`, + `--release=18.04`, + `--cloud=aws`, + `--ci`, + `--authorized-org=oven-sh`, + `--user-data=${userDataFile}`, + ]); + } finally { + // Clean up the temporary file + await rm(userDataFile); + } } await main(); From 537fb6c6b1fdc367a807c2adac6d23551457f0c5 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 14:56:49 +0100 Subject: [PATCH 082/176] [build images] --- flake.nix | 24 ---------- scripts/create-nix-amis.mjs | 91 +++++++++++++++++++++++-------------- scripts/utils.mjs | 8 ++-- 3 files changed, 61 insertions(+), 62 deletions(-) diff --git a/flake.nix b/flake.nix index 29d43d38b29bb7..023dc66f290bf4 100644 --- a/flake.nix +++ b/flake.nix @@ -117,59 +117,35 @@ # CI shells for different architectures ci-x64 = pkgs.mkShell { buildInputs = with pkgs; [ - buildkite-agent # Include the x64 build environment tools buildEnvX64 ]; shellHook = '' echo "BuildKite CI environment initialized (x64)" - - # Set up BuildKite agent configuration if needed - if [ -z "$BUILDKITE_AGENT_TOKEN" ]; then - echo "Warning: BUILDKITE_AGENT_TOKEN is not set" - fi - - # Set BuildKite meta-data for architecture - export BUILDKITE_AGENT_META_DATA="architecture=x64,''${BUILDKITE_AGENT_META_DATA:-}" ''; }; ci-arm64 = pkgs.mkShell { buildInputs = with pkgs; [ - buildkite-agent # Include the arm64 build environment tools buildEnvArm64 ]; shellHook = '' echo "BuildKite CI environment initialized (arm64)" - - # Set up BuildKite agent configuration if needed - if [ -z "$BUILDKITE_AGENT_TOKEN" ]; then - echo "Warning: BUILDKITE_AGENT_TOKEN is not set" - fi - - # Set BuildKite meta-data for architecture - export BUILDKITE_AGENT_META_DATA="architecture=arm64,''${BUILDKITE_AGENT_META_DATA:-}" ''; }; # Generic CI shell that defaults to x64 ci = pkgs.mkShell { buildInputs = with pkgs; [ - buildkite-agent # Include the x64 build environment tools by default buildEnvX64 ]; shellHook = '' echo "BuildKite CI environment initialized (default: x64)" - - # Set up BuildKite agent configuration if needed - if [ -z "$BUILDKITE_AGENT_TOKEN" ]; then - echo "Warning: BUILDKITE_AGENT_TOKEN is not set" - fi ''; }; }; diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 26e9fa0cb837c8..eeaf59dfe20759 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -7,11 +7,11 @@ import { writeFile } from "node:fs/promises"; async function main() { const { - values: { arch, ci }, + values: { arch, cloud }, } = parseArgs({ options: { arch: { type: "string" }, - ci: { type: "boolean" }, + cloud: { type: "string" }, }, }); @@ -19,6 +19,10 @@ async function main() { throw new Error("--arch is required"); } + if (!cloud) { + throw new Error("--cloud is required"); + } + const architecture = parseArch(arch); const flakeTarget = architecture === "arm64" ? "arm64" : "x64"; @@ -27,11 +31,19 @@ async function main() { // Create user data script const userData = `#!/bin/bash -set -euxo pipefail +set -euo pipefail + +echo "Setting up environment..." 
+export DEBIAN_FRONTEND=noninteractive +echo "export DEBIAN_FRONTEND=noninteractive" >> ~/.bashrc + +echo "Installing required packages..." # Install required packages -apt-get update -apt-get install -y curl xz-utils git sudo +apt-get update -qq +apt-get install -y curl xz-utils git sudo --no-install-recommends + +echo "Installing Nix..." # Install Nix curl -L https://nixos.org/nix/install | sh -s -- --daemon @@ -39,6 +51,8 @@ curl -L https://nixos.org/nix/install | sh -s -- --daemon # Source Nix . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh +echo "Configuring Nix..." + # Enable flakes mkdir -p /etc/nix cat > /etc/nix/nix.conf << 'EOF' @@ -52,14 +66,7 @@ useradd -m -s /bin/bash buildkite-agent usermod -aG sudo buildkite-agent echo "buildkite-agent ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/buildkite-agent -# Copy flake.nix to the instance -mkdir -p /home/buildkite-agent/bun -cat > /home/buildkite-agent/bun/flake.nix << 'EOF' -${flakeContent} -EOF - -# Set ownership -chown -R buildkite-agent:buildkite-agent /home/buildkite-agent/bun +echo "Installing BuildKite agent..." # Install BuildKite agent sh -c 'echo deb https://apt.buildkite.com/buildkite-agent stable main > /etc/apt/sources.list.d/buildkite-agent.list' @@ -67,6 +74,8 @@ apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 32A37959C2FA5C apt-get update apt-get install -y buildkite-agent +echo "Configuring BuildKite agent..." + # Configure BuildKite agent cat > /etc/buildkite-agent/buildkite-agent.cfg << 'EOF' token="xxx" @@ -77,7 +86,16 @@ hooks-path="/etc/buildkite-agent/hooks" plugins-path="/etc/buildkite-agent/plugins" EOF -# Create BuildKite hook to set up Nix environment +echo "Copying flake.nix to the instance..." +mkdir -p /home/buildkite-agent/bun +cat > /home/buildkite-agent/bun/flake.nix << 'EOF' +${flakeContent} +EOF + +echo "Setting ownership..." +chown -R buildkite-agent:buildkite-agent /home/buildkite-agent/bun + +echo "Creating BuildKite hook to set up Nix environment..." mkdir -p /etc/buildkite-agent/hooks cat > /etc/buildkite-agent/hooks/environment << 'EOF' #!/bin/bash @@ -96,20 +114,20 @@ EOF chmod +x /etc/buildkite-agent/hooks/environment -# Set proper ownership for BuildKite directories +echo "Setting proper ownership for BuildKite directories..." chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent -# Start BuildKite agent service -systemctl enable buildkite-agent -systemctl start buildkite-agent - -# Set system limits for buildkite-agent +echo "Setting system limits for buildkite-agent..." cat > /etc/security/limits.d/buildkite-agent.conf << 'EOF' buildkite-agent soft nofile 1048576 buildkite-agent hard nofile 1048576 buildkite-agent soft nproc 1048576 buildkite-agent hard nproc 1048576 -EOF`; +EOF + +echo "Enabling and starting BuildKite agent service..." 
+systemctl enable buildkite-agent +systemctl start buildkite-agent`; // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); @@ -117,19 +135,24 @@ EOF`; try { // Use machine.mjs to create the AMI with the user data - await spawnSafe([ - "node", - "./scripts/machine.mjs", - "publish-image", - `--os=linux`, - `--arch=${architecture}`, - `--distro=ubuntu`, - `--release=18.04`, - `--cloud=aws`, - `--ci`, - `--authorized-org=oven-sh`, - `--user-data=${userDataFile}`, - ]); + await spawnSafe( + [ + "node", + "./scripts/machine.mjs", + "publish-image", + `--os=linux`, + `--arch=${architecture}`, + `--distro=ubuntu`, + `--release=18.04`, + `--cloud=${cloud}`, + `--ci`, + `--authorized-org=oven-sh`, + `--user-data=${userDataFile}`, + ], + { + stdio: "inherit", + }, + ); } finally { // Clean up the temporary file await rm(userDataFile); diff --git a/scripts/utils.mjs b/scripts/utils.mjs index 72271c80710f8b..93c874fb9929b8 100755 --- a/scripts/utils.mjs +++ b/scripts/utils.mjs @@ -242,7 +242,7 @@ export async function spawn(command, options = {}) { cwd: options["cwd"] ?? process.cwd(), timeout: options["timeout"] ?? undefined, env: options["env"] ?? undefined, - stdio: [stdin ? "pipe" : "ignore", "pipe", "pipe"], + stdio: stdin === "inherit" ? "inherit" : [stdin ? "pipe" : "ignore", "pipe", "pipe"], ...options, }; @@ -354,7 +354,7 @@ export function spawnSync(command, options = {}) { cwd: options["cwd"] ?? process.cwd(), timeout: options["timeout"] ?? undefined, env: options["env"] ?? undefined, - stdio: [typeof stdin === "undefined" ? "ignore" : "pipe", "pipe", "pipe"], + stdio: stdin === "inherit" ? "inherit" : [typeof stdin === "undefined" ? "ignore" : "pipe", "pipe", "pipe"], input: stdin, ...options, }; @@ -378,8 +378,8 @@ export function spawnSync(command, options = {}) { } else { exitCode = status ?? 1; signalCode = signal || undefined; - stdout = stdoutBuffer?.toString(); - stderr = stderrBuffer?.toString(); + stdout = stdoutBuffer?.toString?.() ?? ""; + stderr = stderrBuffer?.toString?.() ?? 
""; } if (exitCode !== 0 && isWindows) { From 58af2ee825074b109083d8d59e9e71754982187d Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 14:58:38 +0100 Subject: [PATCH 083/176] [build images] --- .buildkite/ci.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 70f7b7ac6b81ef..451f8c7196e1b5 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -996,7 +996,7 @@ function getCreateNixAmisStep(platform) { return { key: `${getImageKey(platform)}-build-image`, label: `${getBuildkiteEmoji("nix")} Create Nix AMI (${platform.arch})`, - command: ["node", "./scripts/create-nix-amis.mjs", "--arch=" + platform.arch, "--ci"].join(" "), + command: ["node", "./scripts/create-nix-amis.mjs", "--arch=" + platform.arch, "--cloud=aws"].join(" "), agents: { queue: "build-image", }, From 1950adb3a540d095a83c69ac69406894667b8d4c Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 15:08:33 +0100 Subject: [PATCH 084/176] [build images] --- scripts/create-nix-amis.mjs | 1 + scripts/machine.mjs | 40 ++++++++++++++++++++++++++++--------- 2 files changed, 32 insertions(+), 9 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index eeaf59dfe20759..16284deaa5b774 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -148,6 +148,7 @@ systemctl start buildkite-agent`; `--ci`, `--authorized-org=oven-sh`, `--user-data=${userDataFile}`, + "--no-bootstrap", ], { stdio: "inherit", diff --git a/scripts/machine.mjs b/scripts/machine.mjs index 58d5951a477186..d0c6a599a2c7bd 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -598,22 +598,44 @@ const aws = { export function getUserData(cloudInit) { const { os, userData } = cloudInit; - let defaultConfig; + // For Windows, use PowerShell script if (os === "windows") { - defaultConfig = getWindowsStartupScript(cloudInit); - } else { - defaultConfig = getCloudInit(cloudInit); + const defaultConfig = getWindowsStartupScript(cloudInit); + if (!userData) { + return defaultConfig; + } + // For Windows, append PowerShell scripts + return `${defaultConfig} + +${userData}`; } - // If no custom user data, return default config + // For Linux, handle cloud-init and shell scripts if (!userData) { - return defaultConfig; + return getCloudInit(cloudInit); } - // Append custom user data after default config - return `${defaultConfig} + // If user data is a shell script (doesn't start with #cloud-config), + // wrap it in a cloud-init script that runs after the default config + if (!userData.trim().startsWith("#cloud-config")) { + return `#cloud-config +${getCloudInit(cloudInit).replace("#cloud-config\n", "")} +runcmd: + - | + cat > /tmp/user-data.sh << 'EOFUSERDATA' +${userData} +EOFUSERDATA + - chmod +x /tmp/user-data.sh + - /tmp/user-data.sh +`; + } -${userData}`; + // If user data is cloud-init, merge it with default config + const defaultConfig = getCloudInit(cloudInit).replace("#cloud-config\n", ""); + const customConfig = userData.replace("#cloud-config\n", ""); + return `#cloud-config +${defaultConfig} +${customConfig}`; } /** From 8f8d7b82cfb1fd9bd0a384fbe1f4166d6b4d0216 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 15:16:20 +0100 Subject: [PATCH 085/176] [build images] --- scripts/machine.mjs | 78 ++++++++++++++++++++++++++------------------- 1 file changed, 45 insertions(+), 33 deletions(-) diff --git a/scripts/machine.mjs b/scripts/machine.mjs index d0c6a599a2c7bd..2c314ee436894a 100755 --- 
a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -616,26 +616,24 @@ ${userData}`; } // If user data is a shell script (doesn't start with #cloud-config), - // wrap it in a cloud-init script that runs after the default config + // use cloud-init's write_files and runcmd to run it if (!userData.trim().startsWith("#cloud-config")) { - return `#cloud-config -${getCloudInit(cloudInit).replace("#cloud-config\n", "")} + const defaultConfig = getCloudInit(cloudInit); + return `${defaultConfig} +write_files: + - path: /tmp/user-script.sh + permissions: '0755' + content: | +${userData + .split("\n") + .map(line => ` ${line}`) + .join("\n")} runcmd: - - | - cat > /tmp/user-data.sh << 'EOFUSERDATA' -${userData} -EOFUSERDATA - - chmod +x /tmp/user-data.sh - - /tmp/user-data.sh -`; + - /tmp/user-script.sh`; } - // If user data is cloud-init, merge it with default config - const defaultConfig = getCloudInit(cloudInit).replace("#cloud-config\n", ""); - const customConfig = userData.replace("#cloud-config\n", ""); - return `#cloud-config -${defaultConfig} -${customConfig}`; + // If user data is cloud-init, use it directly + return userData; } /** @@ -645,7 +643,7 @@ ${customConfig}`; function getCloudInit(cloudInit) { const username = cloudInit["username"] || "root"; const password = cloudInit["password"] || crypto.randomUUID(); - const authorizedKeys = JSON.stringify(cloudInit["sshKeys"]?.map(({ publicKey }) => publicKey) || []); + const authorizedKeys = cloudInit["sshKeys"]?.map(({ publicKey }) => publicKey) || []; let sftpPath = "/usr/lib/openssh/sftp-server"; switch (cloudInit["distro"]) { @@ -668,22 +666,36 @@ function getCloudInit(cloudInit) { // https://cloudinit.readthedocs.io/en/stable/ return `#cloud-config - write_files: - - path: /etc/ssh/sshd_config - content: | - PermitRootLogin yes - PasswordAuthentication no - PubkeyAuthentication yes - UsePAM yes - UseLogin yes - Subsystem sftp ${sftpPath} - chpasswd: - expire: false - list: ${JSON.stringify(users)} - disable_root: false - ssh_pwauth: true - ssh_authorized_keys: ${authorizedKeys} - `; +users: + - name: ${username} + sudo: ALL=(ALL) NOPASSWD:ALL + shell: /bin/bash + ssh_authorized_keys: +${authorizedKeys.map(key => ` - ${key}`).join("\n")} + +write_files: + - path: /etc/ssh/sshd_config + permissions: '0644' + owner: root:root + content: | + Port 22 + Protocol 2 + HostKey /etc/ssh/ssh_host_rsa_key + HostKey /etc/ssh/ssh_host_ecdsa_key + HostKey /etc/ssh/ssh_host_ed25519_key + SyslogFacility AUTHPRIV + PermitRootLogin yes + AuthorizedKeysFile .ssh/authorized_keys + PasswordAuthentication no + ChallengeResponseAuthentication no + GSSAPIAuthentication yes + GSSAPICleanupCredentials no + UsePAM yes + X11Forwarding yes + PrintMotd no + AcceptEnv LANG LC_* + Subsystem sftp ${sftpPath} +`; } /** From acdb0852495d0d1b16bbebe549cef6cf3c61805e Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 15:26:15 +0100 Subject: [PATCH 086/176] [build images] --- scripts/machine.mjs | 89 ++++++++++++++++++++++++++++++--------------- 1 file changed, 60 insertions(+), 29 deletions(-) diff --git a/scripts/machine.mjs b/scripts/machine.mjs index 2c314ee436894a..7a4db05cd1d0d8 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -438,6 +438,7 @@ const aws = { const username = getUsernameForDistro(Name); + // Only include minimal cloud-init for SSH access let userData = getUserData({ ...options, username }); if (os === "windows") { userData = `${userData}-ExecutionPolicy Unrestricted -NoProfile -NonInteractivetrue`; @@ -492,7 +493,11 @@ const 
aws = { ["instance-market-options"]: marketOptions, }); - return aws.toMachine(instance, { ...options, username, keyPath }); + const machine = aws.toMachine(instance, { ...options, username, keyPath }); + + await setupUserData(machine, options); + + return machine; }, /** @@ -520,6 +525,29 @@ const aws = { return { hostname: PublicIpAddress, username, identityPaths }; }; + const waitForSsh = async () => { + const connectOptions = await connect(); + const { hostname, username, identityPaths } = connectOptions; + + // Try to connect until it succeeds + for (let i = 0; i < 30; i++) { + try { + await spawnSshSafe({ + hostname, + username, + identityPaths, + command: ["true"], + }); + return; + } catch (error) { + if (i === 29) { + throw error; + } + await new Promise(resolve => setTimeout(resolve, 5000)); + } + } + }; + const spawn = async (command, options) => { const connectOptions = await connect(); return spawnSsh({ ...connectOptions, command }, options); @@ -577,6 +605,7 @@ const aws = { attach, rdp, snapshot, + waitForSsh, close: terminate, [Symbol.asyncDispose]: terminate, }; @@ -600,40 +629,42 @@ export function getUserData(cloudInit) { // For Windows, use PowerShell script if (os === "windows") { - const defaultConfig = getWindowsStartupScript(cloudInit); - if (!userData) { - return defaultConfig; - } - // For Windows, append PowerShell scripts - return `${defaultConfig} - -${userData}`; + return getWindowsStartupScript(cloudInit); } - // For Linux, handle cloud-init and shell scripts + // For Linux, just set up SSH access + return getCloudInit(cloudInit); +} + +/** + * @param {MachineOptions} options + * @returns {Promise} + */ +async function setupUserData(machine, options) { + const { os, userData } = options; if (!userData) { - return getCloudInit(cloudInit); + return; } - // If user data is a shell script (doesn't start with #cloud-config), - // use cloud-init's write_files and runcmd to run it - if (!userData.trim().startsWith("#cloud-config")) { - const defaultConfig = getCloudInit(cloudInit); - return `${defaultConfig} -write_files: - - path: /tmp/user-script.sh - permissions: '0755' - content: | -${userData - .split("\n") - .map(line => ` ${line}`) - .join("\n")} -runcmd: - - /tmp/user-script.sh`; - } + // Write user data to a temporary file + const tmpFile = mkdtemp("user-data-", os === "windows" ? "setup.ps1" : "setup.sh"); + await writeFile(tmpFile, userData); - // If user data is cloud-init, use it directly - return userData; + try { + // Upload the script + const remotePath = os === "windows" ? "C:\\Windows\\Temp\\setup.ps1" : "/tmp/setup.sh"; + await machine.upload(tmpFile, remotePath); + + // Execute the script + if (os === "windows") { + await machine.spawnSafe(["powershell", remotePath], { stdio: "inherit" }); + } else { + await machine.spawnSafe(["bash", remotePath], { stdio: "inherit" }); + } + } finally { + // Clean up the temporary file + rm(tmpFile); + } } /** From 9e19ede648981ed33d31090fbc77149e2fd50104 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 15:29:35 +0100 Subject: [PATCH 087/176] sudo [build images] --- scripts/create-nix-amis.mjs | 38 ++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 16284deaa5b774..ea3328868e17fd 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -40,13 +40,13 @@ echo "export DEBIAN_FRONTEND=noninteractive" >> ~/.bashrc echo "Installing required packages..." 
# Install required packages -apt-get update -qq -apt-get install -y curl xz-utils git sudo --no-install-recommends +sudo apt-get update -qq +sudo apt-get install -y curl xz-utils git sudo --no-install-recommends echo "Installing Nix..." # Install Nix -curl -L https://nixos.org/nix/install | sh -s -- --daemon +curl -L https://nixos.org/nix/install | sudo sh -s -- --daemon # Source Nix . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh @@ -54,7 +54,7 @@ curl -L https://nixos.org/nix/install | sh -s -- --daemon echo "Configuring Nix..." # Enable flakes -mkdir -p /etc/nix +sudo mkdir -p /etc/nix cat > /etc/nix/nix.conf << 'EOF' experimental-features = nix-command flakes trusted-users = root buildkite-agent @@ -62,22 +62,22 @@ auto-optimise-store = true EOF # Create buildkite-agent user and group -useradd -m -s /bin/bash buildkite-agent -usermod -aG sudo buildkite-agent +sudo useradd -m -s /bin/bash buildkite-agent +sudo usermod -aG sudo buildkite-agent echo "buildkite-agent ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/buildkite-agent echo "Installing BuildKite agent..." # Install BuildKite agent sh -c 'echo deb https://apt.buildkite.com/buildkite-agent stable main > /etc/apt/sources.list.d/buildkite-agent.list' -apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 32A37959C2FA5C3C99EFBC32A79206696452D198 -apt-get update -apt-get install -y buildkite-agent +sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 32A37959C2FA5C3C99EFBC32A79206696452D198 +sudo apt-get update +sudo apt-get install -y buildkite-agent echo "Configuring BuildKite agent..." # Configure BuildKite agent -cat > /etc/buildkite-agent/buildkite-agent.cfg << 'EOF' +sudo cat > /etc/buildkite-agent/buildkite-agent.cfg << 'EOF' token="xxx" name="%hostname-%n" tags="queue=linux-nix,arch=${architecture}" @@ -87,17 +87,17 @@ plugins-path="/etc/buildkite-agent/plugins" EOF echo "Copying flake.nix to the instance..." -mkdir -p /home/buildkite-agent/bun -cat > /home/buildkite-agent/bun/flake.nix << 'EOF' +sudo mkdir -p /home/buildkite-agent/bun +sudo cat > /home/buildkite-agent/bun/flake.nix << 'EOF' ${flakeContent} EOF echo "Setting ownership..." -chown -R buildkite-agent:buildkite-agent /home/buildkite-agent/bun +sudo chown -R buildkite-agent:buildkite-agent /home/buildkite-agent/bun echo "Creating BuildKite hook to set up Nix environment..." -mkdir -p /etc/buildkite-agent/hooks -cat > /etc/buildkite-agent/hooks/environment << 'EOF' +sudo mkdir -p /etc/buildkite-agent/hooks +sudo cat > /etc/buildkite-agent/hooks/environment << 'EOF' #!/bin/bash set -euo pipefail @@ -112,13 +112,13 @@ nix develop .#ci-${flakeTarget} -c true export PATH="/nix/var/nix/profiles/default/bin:$PATH" EOF -chmod +x /etc/buildkite-agent/hooks/environment +sudo chmod +x /etc/buildkite-agent/hooks/environment echo "Setting proper ownership for BuildKite directories..." chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent echo "Setting system limits for buildkite-agent..." -cat > /etc/security/limits.d/buildkite-agent.conf << 'EOF' +sudo cat > /etc/security/limits.d/buildkite-agent.conf << 'EOF' buildkite-agent soft nofile 1048576 buildkite-agent hard nofile 1048576 buildkite-agent soft nproc 1048576 @@ -126,8 +126,8 @@ buildkite-agent hard nproc 1048576 EOF echo "Enabling and starting BuildKite agent service..." 
-systemctl enable buildkite-agent -systemctl start buildkite-agent`; +sudo systemctl enable buildkite-agent +sudo systemctl start buildkite-agent`; // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); From 04f696449660932af0b49f9a3d4361bfa0390971 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 15:30:08 +0100 Subject: [PATCH 088/176] [sudo] build images --- scripts/create-nix-amis.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index ea3328868e17fd..aca05310603a58 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -69,7 +69,7 @@ echo "buildkite-agent ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/buildkite-agent echo "Installing BuildKite agent..." # Install BuildKite agent -sh -c 'echo deb https://apt.buildkite.com/buildkite-agent stable main > /etc/apt/sources.list.d/buildkite-agent.list' +sudo sh -c 'echo deb https://apt.buildkite.com/buildkite-agent stable main > /etc/apt/sources.list.d/buildkite-agent.list' sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 32A37959C2FA5C3C99EFBC32A79206696452D198 sudo apt-get update sudo apt-get install -y buildkite-agent From 887573dad21d6b87ab579dc17318958208ca732e Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 15:30:21 +0100 Subject: [PATCH 089/176] [build images] --- scripts/create-nix-amis.mjs | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index aca05310603a58..ea0e7a16c469ac 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -68,7 +68,6 @@ echo "buildkite-agent ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/buildkite-agent echo "Installing BuildKite agent..." -# Install BuildKite agent sudo sh -c 'echo deb https://apt.buildkite.com/buildkite-agent stable main > /etc/apt/sources.list.d/buildkite-agent.list' sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 32A37959C2FA5C3C99EFBC32A79206696452D198 sudo apt-get update From 66a6cfcde92fa80450287821328cad42b14e74ff Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 15:33:43 +0100 Subject: [PATCH 090/176] [build images] --- scripts/create-nix-amis.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index ea0e7a16c469ac..bb31394c497f8f 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -55,7 +55,7 @@ echo "Configuring Nix..." # Enable flakes sudo mkdir -p /etc/nix -cat > /etc/nix/nix.conf << 'EOF' +sudo cat > /etc/nix/nix.conf << 'EOF' experimental-features = nix-command flakes trusted-users = root buildkite-agent auto-optimise-store = true From 13c0340ad2e66e13b1b7cf4af9fc3284b1a72482 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 15:39:30 +0100 Subject: [PATCH 091/176] [build images] --- scripts/create-nix-amis.mjs | 34 +++++++++++----------------------- 1 file changed, 11 insertions(+), 23 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index bb31394c497f8f..baed7d38fb1d28 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -31,52 +31,40 @@ async function main() { // Create user data script const userData = `#!/bin/bash -set -euo pipefail +set -euxo pipefail echo "Setting up environment..." 
export DEBIAN_FRONTEND=noninteractive -echo "export DEBIAN_FRONTEND=noninteractive" >> ~/.bashrc echo "Installing required packages..." - -# Install required packages sudo apt-get update -qq sudo apt-get install -y curl xz-utils git sudo --no-install-recommends echo "Installing Nix..." - -# Install Nix -curl -L https://nixos.org/nix/install | sudo sh -s -- --daemon - -# Source Nix -. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh +sh <(curl -L https://nixos.org/nix/install) --daemon echo "Configuring Nix..." +# Source Nix in this shell +. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh # Enable flakes sudo mkdir -p /etc/nix -sudo cat > /etc/nix/nix.conf << 'EOF' +sudo tee /etc/nix/nix.conf > /dev/null << 'EOF' experimental-features = nix-command flakes trusted-users = root buildkite-agent auto-optimise-store = true EOF -# Create buildkite-agent user and group -sudo useradd -m -s /bin/bash buildkite-agent -sudo usermod -aG sudo buildkite-agent -echo "buildkite-agent ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/buildkite-agent - echo "Installing BuildKite agent..." - +# Install BuildKite agent sudo sh -c 'echo deb https://apt.buildkite.com/buildkite-agent stable main > /etc/apt/sources.list.d/buildkite-agent.list' sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 32A37959C2FA5C3C99EFBC32A79206696452D198 sudo apt-get update sudo apt-get install -y buildkite-agent echo "Configuring BuildKite agent..." - # Configure BuildKite agent -sudo cat > /etc/buildkite-agent/buildkite-agent.cfg << 'EOF' +sudo tee /etc/buildkite-agent/buildkite-agent.cfg > /dev/null << 'EOF' token="xxx" name="%hostname-%n" tags="queue=linux-nix,arch=${architecture}" @@ -87,7 +75,7 @@ EOF echo "Copying flake.nix to the instance..." sudo mkdir -p /home/buildkite-agent/bun -sudo cat > /home/buildkite-agent/bun/flake.nix << 'EOF' +sudo tee /home/buildkite-agent/bun/flake.nix > /dev/null << 'EOF' ${flakeContent} EOF @@ -96,7 +84,7 @@ sudo chown -R buildkite-agent:buildkite-agent /home/buildkite-agent/bun echo "Creating BuildKite hook to set up Nix environment..." sudo mkdir -p /etc/buildkite-agent/hooks -sudo cat > /etc/buildkite-agent/hooks/environment << 'EOF' +sudo tee /etc/buildkite-agent/hooks/environment > /dev/null << 'EOF' #!/bin/bash set -euo pipefail @@ -114,10 +102,10 @@ EOF sudo chmod +x /etc/buildkite-agent/hooks/environment echo "Setting proper ownership for BuildKite directories..." -chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent +sudo chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent echo "Setting system limits for buildkite-agent..." 
-sudo cat > /etc/security/limits.d/buildkite-agent.conf << 'EOF' +sudo tee /etc/security/limits.d/buildkite-agent.conf > /dev/null << 'EOF' buildkite-agent soft nofile 1048576 buildkite-agent hard nofile 1048576 buildkite-agent soft nproc 1048576 From ae7e5666c24d69583889ed3a4344dfb5bff648b3 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 15:47:56 +0100 Subject: [PATCH 092/176] [build images] --- scripts/create-nix-amis.mjs | 63 +++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index baed7d38fb1d28..b6d10a5a3eb1e5 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -71,8 +71,71 @@ tags="queue=linux-nix,arch=${architecture}" build-path="/var/lib/buildkite-agent/builds" hooks-path="/etc/buildkite-agent/hooks" plugins-path="/etc/buildkite-agent/plugins" +debug=true +disconnect-after-job=false +health-check-addr=0.0.0.0:8080 +git-clean-flags="-ffdq" +git-clone-flags="-v --recursive" +experiment="git-mirrors" +meta-data-ec2-tags=true +no-command-eval=true EOF +# Create SSH directory for buildkite-agent +sudo mkdir -p /var/lib/buildkite-agent/.ssh +sudo chmod 700 /var/lib/buildkite-agent/.ssh + +# Set up known hosts +sudo tee /var/lib/buildkite-agent/.ssh/known_hosts > /dev/null << 'EOF' +github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ== +EOF + +# Set proper ownership +sudo chown -R buildkite-agent:buildkite-agent /var/lib/buildkite-agent/.ssh +sudo chmod 600 /var/lib/buildkite-agent/.ssh/known_hosts + +echo "Setting up build environment..." +# Create build directory with proper permissions +sudo mkdir -p /var/lib/buildkite-agent/builds +sudo chown -R buildkite-agent:buildkite-agent /var/lib/buildkite-agent/builds +sudo chmod 755 /var/lib/buildkite-agent/builds + +# Set up the command hook to use Nix for command evaluation +sudo tee /etc/buildkite-agent/hooks/command > /dev/null << 'EOF' +#!/bin/bash +set -euo pipefail + +# Source Nix +. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + +# Change to the build directory +cd "$BUILDKITE_BUILD_DIR" + +# Use Nix to evaluate and run the command in the proper environment +nix develop .#ci-${flakeTarget} -c eval "$BUILDKITE_COMMAND" +EOF + +sudo chmod +x /etc/buildkite-agent/hooks/command + +# Set up the environment hook +sudo tee /etc/buildkite-agent/hooks/environment > /dev/null << 'EOF' +#!/bin/bash +set -euo pipefail + +# Source Nix +. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + +# Add Nix to PATH +export PATH="/nix/var/nix/profiles/default/bin:$PATH" + +# Set build environment variables +export BUILDKITE_BUILD_PATH="/var/lib/buildkite-agent/builds" +export BUILDKITE_HOOKS_PATH="/etc/buildkite-agent/hooks" +export BUILDKITE_PLUGINS_PATH="/etc/buildkite-agent/plugins" +EOF + +sudo chmod +x /etc/buildkite-agent/hooks/environment + echo "Copying flake.nix to the instance..." 
sudo mkdir -p /home/buildkite-agent/bun sudo tee /home/buildkite-agent/bun/flake.nix > /dev/null << 'EOF' From 3bfe5d785e17acb692745b3f6e2cc5147d8fab8d Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 15:54:12 +0100 Subject: [PATCH 093/176] [build images] --- scripts/create-nix-amis.mjs | 115 +++++------------------------------- 1 file changed, 15 insertions(+), 100 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index b6d10a5a3eb1e5..8f5516bc896209 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -26,6 +26,9 @@ async function main() { const architecture = parseArch(arch); const flakeTarget = architecture === "arm64" ? "arm64" : "x64"; + // Read the agent.mjs content + const agentScript = await readFile("scripts/agent.mjs"); + // Read the flake.nix content const flakeContent = await readFile("flake.nix"); @@ -62,110 +65,22 @@ sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 32A37959C sudo apt-get update sudo apt-get install -y buildkite-agent -echo "Configuring BuildKite agent..." -# Configure BuildKite agent -sudo tee /etc/buildkite-agent/buildkite-agent.cfg > /dev/null << 'EOF' -token="xxx" -name="%hostname-%n" -tags="queue=linux-nix,arch=${architecture}" -build-path="/var/lib/buildkite-agent/builds" -hooks-path="/etc/buildkite-agent/hooks" -plugins-path="/etc/buildkite-agent/plugins" -debug=true -disconnect-after-job=false -health-check-addr=0.0.0.0:8080 -git-clean-flags="-ffdq" -git-clone-flags="-v --recursive" -experiment="git-mirrors" -meta-data-ec2-tags=true -no-command-eval=true -EOF - -# Create SSH directory for buildkite-agent -sudo mkdir -p /var/lib/buildkite-agent/.ssh -sudo chmod 700 /var/lib/buildkite-agent/.ssh - -# Set up known hosts -sudo tee /var/lib/buildkite-agent/.ssh/known_hosts > /dev/null << 'EOF' -github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ== -EOF - -# Set proper ownership -sudo chown -R buildkite-agent:buildkite-agent /var/lib/buildkite-agent/.ssh -sudo chmod 600 /var/lib/buildkite-agent/.ssh/known_hosts - -echo "Setting up build environment..." -# Create build directory with proper permissions -sudo mkdir -p /var/lib/buildkite-agent/builds -sudo chown -R buildkite-agent:buildkite-agent /var/lib/buildkite-agent/builds -sudo chmod 755 /var/lib/buildkite-agent/builds - -# Set up the command hook to use Nix for command evaluation -sudo tee /etc/buildkite-agent/hooks/command > /dev/null << 'EOF' -#!/bin/bash -set -euo pipefail - -# Source Nix -. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh - -# Change to the build directory -cd "$BUILDKITE_BUILD_DIR" - -# Use Nix to evaluate and run the command in the proper environment -nix develop .#ci-${flakeTarget} -c eval "$BUILDKITE_COMMAND" -EOF - -sudo chmod +x /etc/buildkite-agent/hooks/command - -# Set up the environment hook -sudo tee /etc/buildkite-agent/hooks/environment > /dev/null << 'EOF' -#!/bin/bash -set -euo pipefail - -# Source Nix -. 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh - -# Add Nix to PATH -export PATH="/nix/var/nix/profiles/default/bin:$PATH" - -# Set build environment variables -export BUILDKITE_BUILD_PATH="/var/lib/buildkite-agent/builds" -export BUILDKITE_HOOKS_PATH="/etc/buildkite-agent/hooks" -export BUILDKITE_PLUGINS_PATH="/etc/buildkite-agent/plugins" +echo "Setting up agent.mjs..." +# Copy agent.mjs to the instance +sudo mkdir -p /usr/local/share/bun +sudo tee /usr/local/share/bun/agent.mjs > /dev/null << 'EOF' +${agentScript} EOF - -sudo chmod +x /etc/buildkite-agent/hooks/environment +sudo chmod +x /usr/local/share/bun/agent.mjs echo "Copying flake.nix to the instance..." -sudo mkdir -p /home/buildkite-agent/bun -sudo tee /home/buildkite-agent/bun/flake.nix > /dev/null << 'EOF' +sudo mkdir -p /var/lib/buildkite-agent/bun +sudo tee /var/lib/buildkite-agent/bun/flake.nix > /dev/null << 'EOF' ${flakeContent} EOF echo "Setting ownership..." -sudo chown -R buildkite-agent:buildkite-agent /home/buildkite-agent/bun - -echo "Creating BuildKite hook to set up Nix environment..." -sudo mkdir -p /etc/buildkite-agent/hooks -sudo tee /etc/buildkite-agent/hooks/environment > /dev/null << 'EOF' -#!/bin/bash -set -euo pipefail - -# Source Nix -. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh - -# Set up build environment using flake -cd /home/buildkite-agent/bun -nix develop .#ci-${flakeTarget} -c true - -# Add Nix to PATH -export PATH="/nix/var/nix/profiles/default/bin:$PATH" -EOF - -sudo chmod +x /etc/buildkite-agent/hooks/environment - -echo "Setting proper ownership for BuildKite directories..." -sudo chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent +sudo chown -R buildkite-agent:buildkite-agent /var/lib/buildkite-agent/bun echo "Setting system limits for buildkite-agent..." sudo tee /etc/security/limits.d/buildkite-agent.conf > /dev/null << 'EOF' @@ -175,9 +90,9 @@ buildkite-agent soft nproc 1048576 buildkite-agent hard nproc 1048576 EOF -echo "Enabling and starting BuildKite agent service..." -sudo systemctl enable buildkite-agent -sudo systemctl start buildkite-agent`; +echo "Setting up Nix environment and installing BuildKite agent..." +cd /var/lib/buildkite-agent/bun +nix develop .#ci-${flakeTarget} -c /usr/local/share/bun/agent.mjs install start`; // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); From 19019698f4fd0be41334f1aaf7b8d0d3d9151fba Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 16:00:32 +0100 Subject: [PATCH 094/176] [build images] --- scripts/create-nix-amis.mjs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 8f5516bc896209..077afaaf54004e 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -92,7 +92,10 @@ EOF echo "Setting up Nix environment and installing BuildKite agent..." 
cd /var/lib/buildkite-agent/bun -nix develop .#ci-${flakeTarget} -c /usr/local/share/bun/agent.mjs install start`; +# Initialize flake.lock with proper permissions +sudo -u buildkite-agent -i nix flake update +# Now run the agent in the Nix environment +sudo -u buildkite-agent -i nix develop .#ci-${flakeTarget} -c /usr/local/share/bun/agent.mjs install start`; // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); From 2918a4eb75404f95c4371f7aee50145cdf7d586d Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 16:04:38 +0100 Subject: [PATCH 095/176] [build images] --- scripts/create-nix-amis.mjs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 077afaaf54004e..0db179ad92720b 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -93,9 +93,9 @@ EOF echo "Setting up Nix environment and installing BuildKite agent..." cd /var/lib/buildkite-agent/bun # Initialize flake.lock with proper permissions -sudo -u buildkite-agent -i nix flake update +sudo -u buildkite-agent sh -c 'cd "$1" && nix flake update' -- /var/lib/buildkite-agent/bun # Now run the agent in the Nix environment -sudo -u buildkite-agent -i nix develop .#ci-${flakeTarget} -c /usr/local/share/bun/agent.mjs install start`; +sudo -u buildkite-agent sh -c 'cd "$1" && nix develop .#ci-${flakeTarget} -c /usr/local/share/bun/agent.mjs install start' -- /var/lib/buildkite-agent/bun`; // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); From 551004fb906a30a15f411408c9ea43d132bac021 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 16:09:40 +0100 Subject: [PATCH 096/176] [build images] --- scripts/create-nix-amis.mjs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 0db179ad92720b..42d3b4144d1597 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -93,9 +93,9 @@ EOF echo "Setting up Nix environment and installing BuildKite agent..." cd /var/lib/buildkite-agent/bun # Initialize flake.lock with proper permissions -sudo -u buildkite-agent sh -c 'cd "$1" && nix flake update' -- /var/lib/buildkite-agent/bun +sudo -u buildkite-agent sh -c '. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh && cd "$1" && nix flake update' -- /var/lib/buildkite-agent/bun # Now run the agent in the Nix environment -sudo -u buildkite-agent sh -c 'cd "$1" && nix develop .#ci-${flakeTarget} -c /usr/local/share/bun/agent.mjs install start' -- /var/lib/buildkite-agent/bun`; +sudo -u buildkite-agent sh -c '. 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh && cd "$1" && nix develop .#ci-${flakeTarget} -c /usr/local/share/bun/agent.mjs install start' -- /var/lib/buildkite-agent/bun`; // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); From 24460b1095a532abfc7c9179dba832c9ca330415 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 16:15:28 +0100 Subject: [PATCH 097/176] [build images] --- scripts/create-nix-amis.mjs | 36 ++++++++++++++++++++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 42d3b4144d1597..34390f1c4bf7aa 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -92,10 +92,42 @@ EOF echo "Setting up Nix environment and installing BuildKite agent..." cd /var/lib/buildkite-agent/bun + +# Set up the command hook to use Nix for command evaluation +sudo mkdir -p /etc/buildkite-agent/hooks +sudo tee /etc/buildkite-agent/hooks/command > /dev/null << 'EOF' +#!/bin/bash +set -euo pipefail + +# Source Nix +. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + +# Change to the build directory +cd "$BUILDKITE_BUILD_DIR" + +# Use Nix to evaluate and run the command in the proper environment +nix develop .#ci-${flakeTarget} -c eval "$BUILDKITE_COMMAND" +EOF +sudo chmod +x /etc/buildkite-agent/hooks/command + +# Set up the environment hook +sudo tee /etc/buildkite-agent/hooks/environment > /dev/null << 'EOF' +#!/bin/bash +set -euo pipefail + +# Source Nix +. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + +# Add Nix to PATH +export PATH="/nix/var/nix/profiles/default/bin:$PATH" +EOF +sudo chmod +x /etc/buildkite-agent/hooks/environment + # Initialize flake.lock with proper permissions sudo -u buildkite-agent sh -c '. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh && cd "$1" && nix flake update' -- /var/lib/buildkite-agent/bun -# Now run the agent in the Nix environment -sudo -u buildkite-agent sh -c '. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh && cd "$1" && nix develop .#ci-${flakeTarget} -c /usr/local/share/bun/agent.mjs install start' -- /var/lib/buildkite-agent/bun`; + +# Now start the agent +sudo -u buildkite-agent sh -c '. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh && cd "$1" && /usr/local/share/bun/agent.mjs install start' -- /var/lib/buildkite-agent/bun`; // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); From 1fb5aa916bb483a21cef7131e0aa1ad160a705a4 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 16:20:20 +0100 Subject: [PATCH 098/176] [build images] --- scripts/create-nix-amis.mjs | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 34390f1c4bf7aa..d8902ecf3e2433 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -90,10 +90,24 @@ buildkite-agent soft nproc 1048576 buildkite-agent hard nproc 1048576 EOF -echo "Setting up Nix environment and installing BuildKite agent..." +echo "Evaluating Nix environment..." +# Switch to buildkite-agent user and evaluate the Nix environment +sudo -i -u buildkite-agent bash << EOF +set -euxo pipefail cd /var/lib/buildkite-agent/bun -# Set up the command hook to use Nix for command evaluation +# Source Nix +. 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + +# Update flake lock and evaluate the environment +nix flake update +nix develop .#ci-${flakeTarget} -c true + +# Create a marker to indicate environment is ready +touch .nix-env-ready +EOF + +echo "Setting up hooks..." sudo mkdir -p /etc/buildkite-agent/hooks sudo tee /etc/buildkite-agent/hooks/command > /dev/null << 'EOF' #!/bin/bash @@ -110,7 +124,6 @@ nix develop .#ci-${flakeTarget} -c eval "$BUILDKITE_COMMAND" EOF sudo chmod +x /etc/buildkite-agent/hooks/command -# Set up the environment hook sudo tee /etc/buildkite-agent/hooks/environment > /dev/null << 'EOF' #!/bin/bash set -euo pipefail @@ -123,11 +136,8 @@ export PATH="/nix/var/nix/profiles/default/bin:$PATH" EOF sudo chmod +x /etc/buildkite-agent/hooks/environment -# Initialize flake.lock with proper permissions -sudo -u buildkite-agent sh -c '. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh && cd "$1" && nix flake update' -- /var/lib/buildkite-agent/bun - -# Now start the agent -sudo -u buildkite-agent sh -c '. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh && cd "$1" && /usr/local/share/bun/agent.mjs install start' -- /var/lib/buildkite-agent/bun`; +echo "Installing BuildKite agent service..." +sudo -u buildkite-agent /usr/local/share/bun/agent.mjs install start`; // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); From 6977bb361fc6dd45c91b6511d382b18909b1b5a0 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 16:33:21 +0100 Subject: [PATCH 099/176] [build images] --- flake.nix | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/flake.nix b/flake.nix index 023dc66f290bf4..b879f361acaff5 100644 --- a/flake.nix +++ b/flake.nix @@ -24,6 +24,18 @@ }; }; + # Function to create a derivation for downloading Bun binary + getBunBinary = arch: pkgs.fetchzip { + name = "bun-binary-${arch}"; + url = if arch == "x64" + then "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-${arch}.zip" + else "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-aarch64.zip"; + stripRoot = false; + sha256 = if arch == "x64" + then "sha256-e5OtTccoPG7xKQVvZiuvo3VSBC8mRteOj1d0GF+nEtk=" + else "sha256-ph2lNX4o1Jd/zNSFH+1i/02j6jOFMAXH3ZPayAvFOTI="; # We'll need to replace this with the actual arm64 hash + }; + # Function to create build environment for a specific architecture makeBuildEnv = arch: pkgs.buildEnv { name = "bun-build-tools-${arch}"; @@ -74,17 +86,10 @@ zlib openssl libffi - ]; - # Bun depends on itself to compile due to codegen scripts. - # Download a recent binary. 
- preFixup = '' - ${pkgs.curl}/bin/curl -L "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-${arch}.zip" - unzip $out/bun-linux-${arch}.zip - cp $out/bun-linux-${arch}/bun $out/bin/bun - chmod +x $out/bin/bun - rm -rf $out/bun-linux-${arch} $out/bun-linux-${arch}.zip - ''; + # Include the Bun binary + (getBunBinary arch) + ]; pathsToLink = [ "/bin" "/lib" "/lib64" "/include" "/share" "/etc/ssl" ]; extraOutputsToInstall = [ "dev" "out" "bin" ]; From 4d468ffab0e4adabbd12310256ceae5dbf0acbd8 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 16:41:26 +0100 Subject: [PATCH 100/176] [build images] --- scripts/create-nix-amis.mjs | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index d8902ecf3e2433..bd55538fe7d890 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -137,7 +137,18 @@ EOF sudo chmod +x /etc/buildkite-agent/hooks/environment echo "Installing BuildKite agent service..." -sudo -u buildkite-agent /usr/local/share/bun/agent.mjs install start`; +# Must run this inside of nix, from the buildkite-agent user because it has node installed. +sudo -i -u buildkite-agent bash << EOF + set -euxo pipefail + . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + + # Update flake lock and evaluate the environment + nix flake update + nix develop .#ci-${flakeTarget} -c true + + # Install and start the agent + /usr/local/share/bun/agent.mjs install start +EOF // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); From 595279766ce46af77e2986ba45f890f84d749dfd Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 16:42:01 +0100 Subject: [PATCH 101/176] [build images] --- scripts/create-nix-amis.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index bd55538fe7d890..03cfc05398ac5b 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -148,7 +148,7 @@ sudo -i -u buildkite-agent bash << EOF # Install and start the agent /usr/local/share/bun/agent.mjs install start -EOF +EOF`; // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); From 6a8aaa7896aa6620d2fff8ace36422ced52ec2e9 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 16:49:32 +0100 Subject: [PATCH 102/176] [build images] --- scripts/create-nix-amis.mjs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 03cfc05398ac5b..52907c6e5a647c 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -142,6 +142,8 @@ sudo -i -u buildkite-agent bash << EOF set -euxo pipefail . 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + cd /var/lib/buildkite-agent/bun + # Update flake lock and evaluate the environment nix flake update nix develop .#ci-${flakeTarget} -c true From e4f2044249d2cc7f6dfc37461e6b3ef600e55e74 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 08:09:28 -0800 Subject: [PATCH 103/176] [build images] --- scripts/create-nix-amis.mjs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 52907c6e5a647c..0c5ac1989124a3 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -146,10 +146,9 @@ sudo -i -u buildkite-agent bash << EOF # Update flake lock and evaluate the environment nix flake update - nix develop .#ci-${flakeTarget} -c true # Install and start the agent - /usr/local/share/bun/agent.mjs install start + nix develop .#ci-${flakeTarget} -c "/usr/local/share/bun/agent.mjs install start" EOF`; // Write user data to a temporary file From 2454fec34e6793750203435fab5792090ded7f90 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 08:20:15 -0800 Subject: [PATCH 104/176] [build images] --- scripts/create-nix-amis.mjs | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 0c5ac1989124a3..4982c0ff9ffd10 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -66,13 +66,21 @@ sudo apt-get update sudo apt-get install -y buildkite-agent echo "Setting up agent.mjs..." -# Copy agent.mjs to the instance +echo "Creating directory /usr/local/share/bun" sudo mkdir -p /usr/local/share/bun +ls -la /usr/local/share/bun + +echo "Writing agent.mjs content:" sudo tee /usr/local/share/bun/agent.mjs > /dev/null << 'EOF' ${agentScript} EOF sudo chmod +x /usr/local/share/bun/agent.mjs +echo "Verifying agent.mjs was written:" +ls -la /usr/local/share/bun/agent.mjs +echo "Content of agent.mjs:" +cat /usr/local/share/bun/agent.mjs + echo "Copying flake.nix to the instance..." sudo mkdir -p /var/lib/buildkite-agent/bun sudo tee /var/lib/buildkite-agent/bun/flake.nix > /dev/null << 'EOF' @@ -148,7 +156,14 @@ sudo -i -u buildkite-agent bash << EOF nix flake update # Install and start the agent - nix develop .#ci-${flakeTarget} -c "/usr/local/share/bun/agent.mjs install start" + if [ -f "/usr/local/share/bun/agent.mjs" ]; then + echo "Found agent.mjs, executing..." 
+ nix develop .#ci-${flakeTarget} -c bash -c "node /usr/local/share/bun/agent.mjs install start" + else + echo "ERROR: agent.mjs not found at /usr/local/share/bun/agent.mjs" + ls -la /usr/local/share/bun/ + exit 1 + fi EOF`; // Write user data to a temporary file @@ -183,3 +198,4 @@ EOF`; } await main(); + ` \ No newline at end of file From a0c9942e07521512e1522860cdf80e7b3ba628ae Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 08:21:46 -0800 Subject: [PATCH 105/176] [build images] --- scripts/create-nix-amis.mjs | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 4982c0ff9ffd10..db92dbb76dd66d 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -198,4 +198,3 @@ EOF`; } await main(); - ` \ No newline at end of file From 6764f4d1b8895467441d4e2ee079bfd1ab0cbfad Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 08:28:53 -0800 Subject: [PATCH 106/176] [build images] --- scripts/create-nix-amis.mjs | 31 +++++++++++++++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index db92dbb76dd66d..3c22b4c26c1315 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -27,10 +27,32 @@ async function main() { const flakeTarget = architecture === "arm64" ? "arm64" : "x64"; // Read the agent.mjs content - const agentScript = await readFile("scripts/agent.mjs"); + let agentScript, flakeContent, utilsContent; + try { + agentScript = await readFile(join(process.cwd(), "scripts", "agent.mjs"), "utf8"); + console.log("Successfully read agent.mjs"); + } catch (error) { + console.error("Failed to read agent.mjs:", error); + throw error; + } // Read the flake.nix content - const flakeContent = await readFile("flake.nix"); + try { + flakeContent = await readFile(join(process.cwd(), "flake.nix"), "utf8"); + console.log("Successfully read flake.nix"); + } catch (error) { + console.error("Failed to read flake.nix:", error); + throw error; + } + + // Read the utils.mjs content + try { + utilsContent = await readFile(join(process.cwd(), "scripts", "utils.mjs"), "utf8"); + console.log("Successfully read utils.mjs"); + } catch (error) { + console.error("Failed to read utils.mjs:", error); + throw error; + } // Create user data script const userData = `#!/bin/bash @@ -76,6 +98,11 @@ ${agentScript} EOF sudo chmod +x /usr/local/share/bun/agent.mjs +echo "Writing utils.mjs content:" +sudo tee /usr/local/share/bun/utils.mjs > /dev/null << 'EOF' +${utilsContent} +EOF + echo "Verifying agent.mjs was written:" ls -la /usr/local/share/bun/agent.mjs echo "Content of agent.mjs:" From 4694118a2d73c25448706ec31857ee66ab33e572 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 08:40:05 -0800 Subject: [PATCH 107/176] [build images] --- scripts/create-nix-amis.mjs | 37 +++++++++++++++++-------------------- 1 file changed, 17 insertions(+), 20 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 3c22b4c26c1315..7257cc5e5ac18c 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -172,26 +172,23 @@ EOF sudo chmod +x /etc/buildkite-agent/hooks/environment echo "Installing BuildKite agent service..." -# Must run this inside of nix, from the buildkite-agent user because it has node installed. -sudo -i -u buildkite-agent bash << EOF - set -euxo pipefail - . 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh - - cd /var/lib/buildkite-agent/bun - - # Update flake lock and evaluate the environment - nix flake update - - # Install and start the agent - if [ -f "/usr/local/share/bun/agent.mjs" ]; then - echo "Found agent.mjs, executing..." - nix develop .#ci-${flakeTarget} -c bash -c "node /usr/local/share/bun/agent.mjs install start" - else - echo "ERROR: agent.mjs not found at /usr/local/share/bun/agent.mjs" - ls -la /usr/local/share/bun/ - exit 1 - fi -EOF`; +. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + +cd /var/lib/buildkite-agent/bun + +# Update flake lock and evaluate the environment +nix flake update + +# Install and start the agent +if [ -f "/usr/local/share/bun/agent.mjs" ]; then + echo "Found agent.mjs, executing..." + sudo nix develop .#ci-${flakeTarget} -c bash -c "node /usr/local/share/bun/agent.mjs install" +else + echo "ERROR: agent.mjs not found at /usr/local/share/bun/agent.mjs" + ls -la /usr/local/share/bun/ + exit 1 +fi +`; // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); From e74b326526ab14b96780a2561db45e159d5d6cf7 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 19:28:24 -0800 Subject: [PATCH 108/176] [build images] --- scripts/create-nix-amis.mjs | 89 +++++++++++++++++++++---------------- 1 file changed, 50 insertions(+), 39 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 7257cc5e5ac18c..f5ccc84443fbb7 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -87,37 +87,37 @@ sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 32A37959C sudo apt-get update sudo apt-get install -y buildkite-agent -echo "Setting up agent.mjs..." -echo "Creating directory /usr/local/share/bun" +# Create required directories with correct permissions +echo "Setting up directories..." sudo mkdir -p /usr/local/share/bun -ls -la /usr/local/share/bun +sudo mkdir -p /var/lib/buildkite-agent/bun +sudo mkdir -p /var/cache/buildkite-agent +sudo mkdir -p /var/log/buildkite-agent +sudo mkdir -p /etc/buildkite-agent/hooks + +# Set correct ownership +sudo chown -R buildkite-agent:buildkite-agent /var/lib/buildkite-agent +sudo chown -R buildkite-agent:buildkite-agent /var/cache/buildkite-agent +sudo chown -R buildkite-agent:buildkite-agent /var/log/buildkite-agent +sudo chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent +sudo chown -R buildkite-agent:buildkite-agent /usr/local/share/bun -echo "Writing agent.mjs content:" -sudo tee /usr/local/share/bun/agent.mjs > /dev/null << 'EOF' +echo "Writing agent.mjs and utils.mjs..." +sudo -u buildkite-agent tee /usr/local/share/bun/agent.mjs > /dev/null << 'EOF' ${agentScript} EOF -sudo chmod +x /usr/local/share/bun/agent.mjs - -echo "Writing utils.mjs content:" -sudo tee /usr/local/share/bun/utils.mjs > /dev/null << 'EOF' +sudo -u buildkite-agent tee /usr/local/share/bun/utils.mjs > /dev/null << 'EOF' ${utilsContent} EOF -echo "Verifying agent.mjs was written:" -ls -la /usr/local/share/bun/agent.mjs -echo "Content of agent.mjs:" -cat /usr/local/share/bun/agent.mjs +sudo chmod +x /usr/local/share/bun/agent.mjs -echo "Copying flake.nix to the instance..." -sudo mkdir -p /var/lib/buildkite-agent/bun -sudo tee /var/lib/buildkite-agent/bun/flake.nix > /dev/null << 'EOF' +echo "Copying flake.nix..." +sudo -u buildkite-agent tee /var/lib/buildkite-agent/bun/flake.nix > /dev/null << 'EOF' ${flakeContent} EOF -echo "Setting ownership..." 
-sudo chown -R buildkite-agent:buildkite-agent /var/lib/buildkite-agent/bun - -echo "Setting system limits for buildkite-agent..." +echo "Setting system limits..." sudo tee /etc/security/limits.d/buildkite-agent.conf > /dev/null << 'EOF' buildkite-agent soft nofile 1048576 buildkite-agent hard nofile 1048576 @@ -125,8 +125,7 @@ buildkite-agent soft nproc 1048576 buildkite-agent hard nproc 1048576 EOF -echo "Evaluating Nix environment..." -# Switch to buildkite-agent user and evaluate the Nix environment +echo "Setting up Nix environment..." sudo -i -u buildkite-agent bash << EOF set -euxo pipefail cd /var/lib/buildkite-agent/bun @@ -143,8 +142,7 @@ touch .nix-env-ready EOF echo "Setting up hooks..." -sudo mkdir -p /etc/buildkite-agent/hooks -sudo tee /etc/buildkite-agent/hooks/command > /dev/null << 'EOF' +sudo -u buildkite-agent tee /etc/buildkite-agent/hooks/command > /dev/null << 'EOF' #!/bin/bash set -euo pipefail @@ -152,14 +150,14 @@ set -euo pipefail . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh # Change to the build directory -cd "$BUILDKITE_BUILD_DIR" +cd "\$BUILDKITE_BUILD_DIR" # Use Nix to evaluate and run the command in the proper environment -nix develop .#ci-${flakeTarget} -c eval "$BUILDKITE_COMMAND" +nix develop .#ci-${flakeTarget} -c eval "\$BUILDKITE_COMMAND" EOF sudo chmod +x /etc/buildkite-agent/hooks/command -sudo tee /etc/buildkite-agent/hooks/environment > /dev/null << 'EOF' +sudo -u buildkite-agent tee /etc/buildkite-agent/hooks/environment > /dev/null << 'EOF' #!/bin/bash set -euo pipefail @@ -167,28 +165,41 @@ set -euo pipefail . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh # Add Nix to PATH -export PATH="/nix/var/nix/profiles/default/bin:$PATH" +export PATH="/nix/var/nix/profiles/default/bin:\$PATH" EOF sudo chmod +x /etc/buildkite-agent/hooks/environment echo "Installing BuildKite agent service..." -. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh - -cd /var/lib/buildkite-agent/bun - -# Update flake lock and evaluate the environment -nix flake update - -# Install and start the agent if [ -f "/usr/local/share/bun/agent.mjs" ]; then echo "Found agent.mjs, executing..." - sudo nix develop .#ci-${flakeTarget} -c bash -c "node /usr/local/share/bun/agent.mjs install" + # First run nix-shell as buildkite-agent to get the environment + sudo -i -u buildkite-agent bash << 'ENVSETUP' + set -euxo pipefail + . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + cd /var/lib/buildkite-agent/bun + # Instead of running install directly, create a wrapped command that we'll run as root + nix develop .#ci-${flakeTarget} -c bash -c 'echo "#!/bin/bash\nset -euxo pipefail\n\nexport PATH=\"\$PATH\"" > /tmp/agent-install.sh' + nix develop .#ci-${flakeTarget} -c bash -c 'echo "export NODE_PATH=\"\$NODE_PATH\"" >> /tmp/agent-install.sh' + nix develop .#ci-${flakeTarget} -c bash -c 'echo "node /usr/local/share/bun/agent.mjs install" >> /tmp/agent-install.sh' + chmod +x /tmp/agent-install.sh +ENVSETUP + + # Now run the wrapped command as root to handle systemd installation + sudo bash /tmp/agent-install.sh + rm /tmp/agent-install.sh + + # Start the agent as the buildkite-agent user + sudo -i -u buildkite-agent bash << EOF + set -euxo pipefail + . 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh + cd /var/lib/buildkite-agent/bun + nix develop .#ci-${flakeTarget} -c node /usr/local/share/bun/agent.mjs start +EOF else echo "ERROR: agent.mjs not found at /usr/local/share/bun/agent.mjs" ls -la /usr/local/share/bun/ exit 1 -fi -`; +fi`; // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); From cb064bfd2763238de55b7900a389c5c1e1bfe605 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 20:06:36 -0800 Subject: [PATCH 109/176] [build images] --- scripts/create-nix-amis.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index f5ccc84443fbb7..59d7f15d308954 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -186,7 +186,7 @@ ENVSETUP # Now run the wrapped command as root to handle systemd installation sudo bash /tmp/agent-install.sh - rm /tmp/agent-install.sh + sudo rm /tmp/agent-install.sh # Start the agent as the buildkite-agent user sudo -i -u buildkite-agent bash << EOF From ba31a327d9db0b95a8eccc6b15005f21badbdfc4 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 20:44:30 -0800 Subject: [PATCH 110/176] [build images] --- scripts/create-nix-amis.mjs | 7 ------- 1 file changed, 7 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 59d7f15d308954..9c9100669947ad 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -187,13 +187,6 @@ ENVSETUP # Now run the wrapped command as root to handle systemd installation sudo bash /tmp/agent-install.sh sudo rm /tmp/agent-install.sh - - # Start the agent as the buildkite-agent user - sudo -i -u buildkite-agent bash << EOF - set -euxo pipefail - . 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh - cd /var/lib/buildkite-agent/bun - nix develop .#ci-${flakeTarget} -c node /usr/local/share/bun/agent.mjs start EOF else echo "ERROR: agent.mjs not found at /usr/local/share/bun/agent.mjs" From 54ede97f3a2d270aab3c32ea3a6c8f6fe61ad282 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 20:50:43 -0800 Subject: [PATCH 111/176] [build images] --- scripts/create-nix-amis.mjs | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 9c9100669947ad..bc007048d6a32d 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -187,7 +187,6 @@ ENVSETUP # Now run the wrapped command as root to handle systemd installation sudo bash /tmp/agent-install.sh sudo rm /tmp/agent-install.sh -EOF else echo "ERROR: agent.mjs not found at /usr/local/share/bun/agent.mjs" ls -la /usr/local/share/bun/ From 68c7b5cccd8a37fdaeb9f9f4302e1d9f01fd6255 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 21:16:58 -0800 Subject: [PATCH 112/176] [build images] --- scripts/create-nix-amis.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index bc007048d6a32d..a5970b4cd8c5ca 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -203,7 +203,7 @@ fi`; [ "node", "./scripts/machine.mjs", - "publish-image", + "create-image", `--os=linux`, `--arch=${architecture}`, `--distro=ubuntu`, From 8635218bdd554be4695c47cfe92b3415e5004215 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 22:17:42 -0800 Subject: [PATCH 113/176] [build images] --- flake.nix | 89 ++++++++++++++++++++++++------------- scripts/agent.mjs | 49 +++++++++++++++++--- scripts/create-nix-amis.mjs | 18 ++++++-- 3 files changed, 117 insertions(+), 39 deletions(-) diff --git a/flake.nix b/flake.nix index b879f361acaff5..2fd3b6c52b5b28 100644 --- a/flake.nix +++ b/flake.nix @@ -1,5 +1,5 @@ { - description = "Bun build environment"; + description = "Bun flake and build environment"; inputs = { nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; @@ -8,6 +8,18 @@ url = "github:oxalica/rust-overlay"; inputs.nixpkgs.follows = "nixpkgs"; }; + olderBunVersion = { + "1.1.38" = { + x64 = { + url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-x64.zip?nix=true"; + sha256 = "sha256-e5OtTccoPG7xKQVvZiuvo3VSBC8mRteOj1d0GF+nEtk="; + }; + arm64 = { + url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-aarch64.zip?nix=true"; + sha256 = "sha256-ph2lNX4o1Jd/zNSFH+1i/02j6jOFMAXH3ZPayAvFOTI="; + }; + }; + }; }; outputs = { self, nixpkgs, flake-utils, rust-overlay }: @@ -18,23 +30,20 @@ inherit system overlays; config = { allowUnfree = true; - permittedInsecurePackages = [ - "nodejs-16.20.2" - ]; }; }; # Function to create a derivation for downloading Bun binary - getBunBinary = arch: pkgs.fetchzip { - name = "bun-binary-${arch}"; - url = if arch == "x64" - then "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-${arch}.zip" - else "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-aarch64.zip"; - stripRoot = false; - sha256 = if arch == "x64" - then "sha256-e5OtTccoPG7xKQVvZiuvo3VSBC8mRteOj1d0GF+nEtk=" - else "sha256-ph2lNX4o1Jd/zNSFH+1i/02j6jOFMAXH3ZPayAvFOTI="; # We'll need to replace this with the actual arm64 hash - }; + getBunBinary = arch: pkgs.runCommand 
"bun-binary-${arch}" {} '' + mkdir -p $out/bin + cp ${pkgs.fetchzip { + name = "bun-binary-${arch}"; + url = olderBunVersion.${bunBuildVersion}.${arch}.url; + stripRoot = false; + sha256 = olderBunVersion.${bunBuildVersion}.${arch}.sha256; + }}/bun $out/bin/ + chmod +x $out/bin/bun + ''; # Function to create build environment for a specific architecture makeBuildEnv = arch: pkgs.buildEnv { @@ -100,22 +109,54 @@ buildEnvX64 = makeBuildEnv "x64"; buildEnvArm64 = makeBuildEnv "arm64"; + # Function to build Bun for release + buildBun = arch: pkgs.stdenv.mkDerivation { + pname = "bun"; + version = "latest"; + + src = ./.; + + nativeBuildInputs = [ + (if arch == "x64" then buildEnvX64 else buildEnvArm64) + ]; + + buildPhase = '' + export HOME=$TMPDIR + bun build:release + ''; + + installPhase = '' + mkdir -p $out/bin + cp build/release/bun $out/bin/ + chmod +x $out/bin/bun + ''; + + meta = with pkgs.lib; { + description = "Incredibly fast JavaScript runtime, bundler, transpiler and package manager"; + homepage = "https://bun.sh"; + license = licenses.mit; + platforms = platforms.linux; + }; + }; + in { packages = { default = buildEnvX64; - x64 = buildEnvX64; - arm64 = buildEnvArm64; + build-x64 = buildEnvX64; + build-arm64 = buildEnvArm64; + x64 = buildBun "x64"; + arm64 = buildBun "arm64"; }; devShells = { default = pkgs.mkShell { buildInputs = with pkgs; [ - awscli2 ]; shellHook = '' - echo "To compile a release build of Bun, run: bun build:release" + echo "To compile a release build of Bun:\n bun build:release" + echo "To compile a debug build of Bun:\n bun build:debug" ''; }; @@ -125,10 +166,6 @@ # Include the x64 build environment tools buildEnvX64 ]; - - shellHook = '' - echo "BuildKite CI environment initialized (x64)" - ''; }; ci-arm64 = pkgs.mkShell { @@ -136,10 +173,6 @@ # Include the arm64 build environment tools buildEnvArm64 ]; - - shellHook = '' - echo "BuildKite CI environment initialized (arm64)" - ''; }; # Generic CI shell that defaults to x64 @@ -148,10 +181,6 @@ # Include the x64 build environment tools by default buildEnvX64 ]; - - shellHook = '' - echo "BuildKite CI environment initialized (default: x64)" - ''; }; }; }); diff --git a/scripts/agent.mjs b/scripts/agent.mjs index e94f0658d0e071..ffe104c3aa5e9d 100755 --- a/scripts/agent.mjs +++ b/scripts/agent.mjs @@ -3,7 +3,7 @@ // An agent that starts buildkite-agent and runs others services. import { join } from "node:path"; -import { realpathSync } from "node:fs"; +import { existsSync, realpathSync } from "node:fs"; import { isWindows, getOs, @@ -22,9 +22,17 @@ import { spawnSafe, spawn, mkdir, + isLinux, } from "./utils.mjs"; import { parseArgs } from "node:util"; +/** + * @returns {boolean} + */ +function isNixInstalled() { + return existsSync("/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"); +} + /** * @param {"install" | "start"} action */ @@ -68,6 +76,12 @@ async function doBuildkiteAgent(action) { if (isOpenRc()) { const servicePath = "/etc/init.d/buildkite-agent"; + let nixEnv = ""; + + if (isNixInstalled()) { + nixEnv = `. 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh;`; + } + const service = `#!/sbin/openrc-run name="buildkite-agent" description="Buildkite Agent" @@ -76,15 +90,21 @@ async function doBuildkiteAgent(action) { command_user=${escape(username)} pidfile=${escape(pidPath)} - start_stop_daemon_args=" \ - --background \ - --make-pidfile \ - --stdout ${escape(agentLogPath)} \ + start_stop_daemon_args=" \\ + --background \\ + --make-pidfile \\ + --stdout ${escape(agentLogPath)} \\ --stderr ${escape(agentLogPath)}" + start_pre() { + # Source Nix environment if it exists + ${nixEnv} + } + depend() { need net use dns logger + ${nixEnv ? "use nix-daemon" : ""} } `; writeFile(servicePath, service, { mode: 0o755 }); @@ -94,11 +114,23 @@ async function doBuildkiteAgent(action) { if (isSystemd()) { const servicePath = "/etc/systemd/system/buildkite-agent.service"; + let nix = ""; + + if (isNixInstalled()) { + nix = ` +# Source Nix environment if it exists +ExecStartPre=/bin/sh -c '. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh' +Environment=PATH=/nix/var/nix/profiles/default/bin:${process.env.PATH.replaceAll(" ", "\\ ")} +Environment=NIX_PATH=/nix/var/nix/profiles/per-user/root/channels + `; + } + const service = ` [Unit] Description=Buildkite Agent After=syslog.target After=network-online.target + ${nix ? "Wants=nix-daemon.service" : ""} [Service] Type=simple @@ -108,6 +140,8 @@ async function doBuildkiteAgent(action) { Restart=on-failure KillMode=process + ${nix} + [Journal] Storage=persistent StateDirectory=${escape(agentLogPath)} @@ -158,6 +192,10 @@ async function doBuildkiteAgent(action) { "experiment": "normalised-upload-paths,resolve-commit-after-checkout,agent-api", }; + if (isLinux && isNixInstalled()) { + options["env-path"] = "/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"; + } + let ephemeral; if (cloud) { const jobId = await getCloudMetadataTag("buildkite:job-uuid"); @@ -184,6 +222,7 @@ async function doBuildkiteAgent(action) { "distro": getDistro(), "distro-version": getDistroVersion(), "cloud": cloud, + "nix": isNixInstalled() ? "true" : undefined, }; if (cloud) { diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index a5970b4cd8c5ca..aa1dc591477c4a 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -131,11 +131,21 @@ set -euxo pipefail cd /var/lib/buildkite-agent/bun # Source Nix -. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh +if ! . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh; then + echo "Failed to source Nix environment" + exit 1 +fi # Update flake lock and evaluate the environment -nix flake update -nix develop .#ci-${flakeTarget} -c true +if ! nix flake update; then + echo "Failed to update flake" + exit 1 +fi + +if ! 
nix develop .#ci-${flakeTarget} -c true; then + echo "Failed to evaluate environment" + exit 1 +fi # Create a marker to indicate environment is ready touch .nix-env-ready @@ -153,7 +163,7 @@ set -euo pipefail cd "\$BUILDKITE_BUILD_DIR" # Use Nix to evaluate and run the command in the proper environment -nix develop .#ci-${flakeTarget} -c eval "\$BUILDKITE_COMMAND" +nix develop .#ci-${flakeTarget} --command bash -c "\$BUILDKITE_COMMAND" EOF sudo chmod +x /etc/buildkite-agent/hooks/command From 38f79d8d1b05afbb93f1b2a4ab61c354665cfcf0 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 22:21:59 -0800 Subject: [PATCH 114/176] [build images] --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index 2fd3b6c52b5b28..42b758c8159354 100644 --- a/flake.nix +++ b/flake.nix @@ -22,7 +22,7 @@ }; }; - outputs = { self, nixpkgs, flake-utils, rust-overlay }: + outputs = { self, nixpkgs, flake-utils, rust-overlay, olderBunVersion }: flake-utils.lib.eachDefaultSystem (system: let overlays = [ (import rust-overlay) ]; From 458cc88c7ce74839869a8a4e65ba98598642c417 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 22:23:31 -0800 Subject: [PATCH 115/176] [build images] --- flake.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.nix b/flake.nix index 42b758c8159354..28a850058a9313 100644 --- a/flake.nix +++ b/flake.nix @@ -9,7 +9,7 @@ inputs.nixpkgs.follows = "nixpkgs"; }; olderBunVersion = { - "1.1.38" = { + default = { x64 = { url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-x64.zip?nix=true"; sha256 = "sha256-e5OtTccoPG7xKQVvZiuvo3VSBC8mRteOj1d0GF+nEtk="; @@ -38,9 +38,9 @@ mkdir -p $out/bin cp ${pkgs.fetchzip { name = "bun-binary-${arch}"; - url = olderBunVersion.${bunBuildVersion}.${arch}.url; + url = olderBunVersion.default.${arch}.url; stripRoot = false; - sha256 = olderBunVersion.${bunBuildVersion}.${arch}.sha256; + sha256 = olderBunVersion.default.${arch}.sha256; }}/bun $out/bin/ chmod +x $out/bin/bun ''; From 972dfb0d4c42fe9c719a73e01ce9be81d86bdb27 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 22:31:39 -0800 Subject: [PATCH 116/176] [build images] --- flake.nix | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/flake.nix b/flake.nix index 28a850058a9313..9a94e68fae9edd 100644 --- a/flake.nix +++ b/flake.nix @@ -8,23 +8,21 @@ url = "github:oxalica/rust-overlay"; inputs.nixpkgs.follows = "nixpkgs"; }; - olderBunVersion = { - default = { - x64 = { - url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-x64.zip?nix=true"; - sha256 = "sha256-e5OtTccoPG7xKQVvZiuvo3VSBC8mRteOj1d0GF+nEtk="; - }; - arm64 = { - url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-aarch64.zip?nix=true"; - sha256 = "sha256-ph2lNX4o1Jd/zNSFH+1i/02j6jOFMAXH3ZPayAvFOTI="; - }; - }; - }; }; - outputs = { self, nixpkgs, flake-utils, rust-overlay, olderBunVersion }: + outputs = { self, nixpkgs, flake-utils, rust-overlay }: flake-utils.lib.eachDefaultSystem (system: let + olderBunVersion = { + x64 = { + url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-x64.zip?nix=true"; + sha256 = "sha256-e5OtTccoPG7xKQVvZiuvo3VSBC8mRteOj1d0GF+nEtk="; + }; + arm64 = { + url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-aarch64.zip?nix=true"; + sha256 = 
"sha256-ph2lNX4o1Jd/zNSFH+1i/02j6jOFMAXH3ZPayAvFOTI="; + }; + }; overlays = [ (import rust-overlay) ]; pkgs = import nixpkgs { inherit system overlays; @@ -38,9 +36,9 @@ mkdir -p $out/bin cp ${pkgs.fetchzip { name = "bun-binary-${arch}"; - url = olderBunVersion.default.${arch}.url; + url = olderBunVersion.${arch}.url; stripRoot = false; - sha256 = olderBunVersion.default.${arch}.sha256; + sha256 = olderBunVersion.${arch}.sha256; }}/bun $out/bin/ chmod +x $out/bin/bun ''; From bd8452aae06f37da3cb99932a36685ffe62c42ad Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sat, 7 Dec 2024 22:36:35 -0800 Subject: [PATCH 117/176] [build images] --- flake.nix | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/flake.nix b/flake.nix index 9a94e68fae9edd..d785eefa5d385a 100644 --- a/flake.nix +++ b/flake.nix @@ -15,11 +15,11 @@ let olderBunVersion = { x64 = { - url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-x64.zip?nix=true"; + url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-x64.zip"; sha256 = "sha256-e5OtTccoPG7xKQVvZiuvo3VSBC8mRteOj1d0GF+nEtk="; }; arm64 = { - url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-aarch64.zip?nix=true"; + url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-aarch64.zip"; sha256 = "sha256-ph2lNX4o1Jd/zNSFH+1i/02j6jOFMAXH3ZPayAvFOTI="; }; }; @@ -153,8 +153,10 @@ ]; shellHook = '' - echo "To compile a release build of Bun:\n bun build:release" - echo "To compile a debug build of Bun:\n bun build:debug" + echo "To compile a release build of Bun:" + echo " bun build:release" + echo "To compile a debug build of Bun:" + echo " bun build:debug" ''; }; From 7c9020efd75b29a9745df6419baf5e2f7166c433 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 8 Dec 2024 07:42:08 +0100 Subject: [PATCH 118/176] [build images] --- flake.nix | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index d785eefa5d385a..0baee01700c0cb 100644 --- a/flake.nix +++ b/flake.nix @@ -15,10 +15,13 @@ let olderBunVersion = { x64 = { + dir = "bun-linux-x64"; url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-x64.zip"; sha256 = "sha256-e5OtTccoPG7xKQVvZiuvo3VSBC8mRteOj1d0GF+nEtk="; + }; arm64 = { + dir = "bun-linux-aarch64"; url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-aarch64.zip"; sha256 = "sha256-ph2lNX4o1Jd/zNSFH+1i/02j6jOFMAXH3ZPayAvFOTI="; }; @@ -39,7 +42,7 @@ url = olderBunVersion.${arch}.url; stripRoot = false; sha256 = olderBunVersion.${arch}.sha256; - }}/bun $out/bin/ + }}/${olderBunVersion.${arch}.dir}/bun $out/bin/ chmod +x $out/bin/bun ''; From b1c591257b1007174b460be8cf021d1448035345 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 8 Dec 2024 08:41:26 +0100 Subject: [PATCH 119/176] [build images] --- scripts/create-nix-amis.mjs | 218 +++++++++++++++--------------------- 1 file changed, 91 insertions(+), 127 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index aa1dc591477c4a..23c47f90fa9608 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -26,31 +26,15 @@ async function main() { const architecture = parseArch(arch); const flakeTarget = architecture === "arm64" ? 
"arm64" : "x64"; - // Read the agent.mjs content + // Read the required files let agentScript, flakeContent, utilsContent; try { agentScript = await readFile(join(process.cwd(), "scripts", "agent.mjs"), "utf8"); - console.log("Successfully read agent.mjs"); - } catch (error) { - console.error("Failed to read agent.mjs:", error); - throw error; - } - - // Read the flake.nix content - try { flakeContent = await readFile(join(process.cwd(), "flake.nix"), "utf8"); - console.log("Successfully read flake.nix"); - } catch (error) { - console.error("Failed to read flake.nix:", error); - throw error; - } - - // Read the utils.mjs content - try { utilsContent = await readFile(join(process.cwd(), "scripts", "utils.mjs"), "utf8"); - console.log("Successfully read utils.mjs"); + console.log("Successfully read configuration files"); } catch (error) { - console.error("Failed to read utils.mjs:", error); + console.error("Failed to read configuration files:", error); throw error; } @@ -80,44 +64,91 @@ trusted-users = root buildkite-agent auto-optimise-store = true EOF -echo "Installing BuildKite agent..." -# Install BuildKite agent -sudo sh -c 'echo deb https://apt.buildkite.com/buildkite-agent stable main > /etc/apt/sources.list.d/buildkite-agent.list' -sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 32A37959C2FA5C3C99EFBC32A79206696452D198 -sudo apt-get update -sudo apt-get install -y buildkite-agent - -# Create required directories with correct permissions -echo "Setting up directories..." -sudo mkdir -p /usr/local/share/bun +# Create required directories sudo mkdir -p /var/lib/buildkite-agent/bun sudo mkdir -p /var/cache/buildkite-agent sudo mkdir -p /var/log/buildkite-agent -sudo mkdir -p /etc/buildkite-agent/hooks -# Set correct ownership -sudo chown -R buildkite-agent:buildkite-agent /var/lib/buildkite-agent -sudo chown -R buildkite-agent:buildkite-agent /var/cache/buildkite-agent -sudo chown -R buildkite-agent:buildkite-agent /var/log/buildkite-agent -sudo chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent -sudo chown -R buildkite-agent:buildkite-agent /usr/local/share/bun +# Create a Nix expression for the buildkite service +sudo mkdir -p /etc/buildkite-agent +cat > /etc/buildkite-agent/service.nix << 'NIXEOF' +{ pkgs ? 
import {} }: + +let + buildkite-agent = pkgs.buildkite-agent; + flakeTarget = if pkgs.stdenv.isAarch64 then "arm64" else "x64"; +in { + systemd.services.buildkite-agent = { + description = "Buildkite Agent"; + after = [ "nix-daemon.service" "network-online.target" ]; + wants = [ "nix-daemon.service" "network-online.target" ]; + wantedBy = [ "multi-user.target" ]; + + environment = { + HOME = "/var/lib/buildkite-agent"; + USER = "buildkite-agent"; + NIX_PATH = "/nix/var/nix/profiles/per-user/root/channels"; + PATH = "\${pkgs.lib.makeBinPath [ pkgs.bash pkgs.nix pkgs.nodejs_20 ]}"; + }; + + serviceConfig = { + ExecStart = "/usr/local/share/bun/agent.mjs start"; + User = "buildkite-agent"; + Group = "buildkite-agent"; + RestartSec = "5"; + Restart = "always"; + TimeoutStopSec = "20"; + }; + }; + + users.users.buildkite-agent = { + isSystemUser = true; + group = "buildkite-agent"; + home = "/var/lib/buildkite-agent"; + createHome = true; + }; + + users.groups.buildkite-agent = {}; +} +NIXEOF + +# Install and configure buildkite-agent using Nix +nix-env -if /etc/buildkite-agent/service.nix + +# Configure buildkite-agent +cat > /etc/buildkite-agent/buildkite-agent.cfg << 'EOF' +name="%hostname-%n" +tags="queue=build-linux,os=linux,arch=${architecture}" +build-path=/var/lib/buildkite-agent/builds +hooks-path=/etc/buildkite-agent/hooks +experiment=git-mirrors,normalize-build-paths +debug=true +disconnect-after-job=true +EOF -echo "Writing agent.mjs and utils.mjs..." -sudo -u buildkite-agent tee /usr/local/share/bun/agent.mjs > /dev/null << 'EOF' -${agentScript} +# Set up hooks +sudo mkdir -p /etc/buildkite-agent/hooks +cat > /etc/buildkite-agent/hooks/environment << 'EOF' +#!/bin/bash +. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh +export PATH="/nix/var/nix/profiles/default/bin:$PATH" +export NIX_PATH="/nix/var/nix/profiles/per-user/root/channels" EOF -sudo -u buildkite-agent tee /usr/local/share/bun/utils.mjs > /dev/null << 'EOF' -${utilsContent} + +sudo tee /etc/buildkite-agent/hooks/command > /dev/null << 'EOF' +#!/bin/bash +cd "$BUILDKITE_BUILD_DIR" +exec nix develop .#ci-${flakeTarget} --command bash -c "$BUILDKITE_COMMAND" EOF -sudo chmod +x /usr/local/share/bun/agent.mjs +sudo chmod +x /etc/buildkite-agent/hooks/* -echo "Copying flake.nix..." -sudo -u buildkite-agent tee /var/lib/buildkite-agent/bun/flake.nix > /dev/null << 'EOF' +# Copy flake.nix +sudo tee /var/lib/buildkite-agent/bun/flake.nix > /dev/null << 'EOF' ${flakeContent} EOF -echo "Setting system limits..." +# Set system limits sudo tee /etc/security/limits.d/buildkite-agent.conf > /dev/null << 'EOF' buildkite-agent soft nofile 1048576 buildkite-agent hard nofile 1048576 @@ -125,83 +156,16 @@ buildkite-agent soft nproc 1048576 buildkite-agent hard nproc 1048576 EOF -echo "Setting up Nix environment..." -sudo -i -u buildkite-agent bash << EOF -set -euxo pipefail -cd /var/lib/buildkite-agent/bun - -# Source Nix -if ! . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh; then - echo "Failed to source Nix environment" - exit 1 -fi - -# Update flake lock and evaluate the environment -if ! nix flake update; then - echo "Failed to update flake" - exit 1 -fi - -if ! nix develop .#ci-${flakeTarget} -c true; then - echo "Failed to evaluate environment" - exit 1 -fi - -# Create a marker to indicate environment is ready -touch .nix-env-ready -EOF - -echo "Setting up hooks..." -sudo -u buildkite-agent tee /etc/buildkite-agent/hooks/command > /dev/null << 'EOF' -#!/bin/bash -set -euo pipefail - -# Source Nix -. 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh - -# Change to the build directory -cd "\$BUILDKITE_BUILD_DIR" - -# Use Nix to evaluate and run the command in the proper environment -nix develop .#ci-${flakeTarget} --command bash -c "\$BUILDKITE_COMMAND" -EOF -sudo chmod +x /etc/buildkite-agent/hooks/command - -sudo -u buildkite-agent tee /etc/buildkite-agent/hooks/environment > /dev/null << 'EOF' -#!/bin/bash -set -euo pipefail - -# Source Nix -. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh +# Set up permissions +sudo chown -R buildkite-agent:buildkite-agent /var/lib/buildkite-agent +sudo chown -R buildkite-agent:buildkite-agent /var/cache/buildkite-agent +sudo chown -R buildkite-agent:buildkite-agent /var/log/buildkite-agent +sudo chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent -# Add Nix to PATH -export PATH="/nix/var/nix/profiles/default/bin:\$PATH" -EOF -sudo chmod +x /etc/buildkite-agent/hooks/environment - -echo "Installing BuildKite agent service..." -if [ -f "/usr/local/share/bun/agent.mjs" ]; then - echo "Found agent.mjs, executing..." - # First run nix-shell as buildkite-agent to get the environment - sudo -i -u buildkite-agent bash << 'ENVSETUP' - set -euxo pipefail - . /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh - cd /var/lib/buildkite-agent/bun - # Instead of running install directly, create a wrapped command that we'll run as root - nix develop .#ci-${flakeTarget} -c bash -c 'echo "#!/bin/bash\nset -euxo pipefail\n\nexport PATH=\"\$PATH\"" > /tmp/agent-install.sh' - nix develop .#ci-${flakeTarget} -c bash -c 'echo "export NODE_PATH=\"\$NODE_PATH\"" >> /tmp/agent-install.sh' - nix develop .#ci-${flakeTarget} -c bash -c 'echo "node /usr/local/share/bun/agent.mjs install" >> /tmp/agent-install.sh' - chmod +x /tmp/agent-install.sh -ENVSETUP - - # Now run the wrapped command as root to handle systemd installation - sudo bash /tmp/agent-install.sh - sudo rm /tmp/agent-install.sh -else - echo "ERROR: agent.mjs not found at /usr/local/share/bun/agent.mjs" - ls -la /usr/local/share/bun/ - exit 1 -fi`; +# Enable and start service +sudo systemctl daemon-reload +sudo systemctl enable buildkite-agent +`; // Write user data to a temporary file const userDataFile = mkdtemp("user-data-", "user-data.sh"); @@ -214,13 +178,13 @@ fi`; "node", "./scripts/machine.mjs", "create-image", - `--os=linux`, + "--os=linux", `--arch=${architecture}`, - `--distro=ubuntu`, - `--release=18.04`, + "--distro=ubuntu", + "--release=18.04", `--cloud=${cloud}`, - `--ci`, - `--authorized-org=oven-sh`, + "--ci", + "--authorized-org=oven-sh", `--user-data=${userDataFile}`, "--no-bootstrap", ], @@ -229,7 +193,7 @@ fi`; }, ); } finally { - // Clean up the temporary file + // Clean up temporary files await rm(userDataFile); } } From 8832aced911e65590a5ae6019324622891131412 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 8 Dec 2024 00:36:37 -0800 Subject: [PATCH 120/176] [build images] --- .buildkite/ci.mjs | 12 +++- flake.nix | 2 +- scripts/agent.mjs | 1 - scripts/create-nix-amis.mjs | 131 ++++++++++++++++++++---------------- scripts/machine.mjs | 108 +---------------------------- scripts/orbstack.mjs | 9 ++- scripts/tart.mjs | 6 +- scripts/utils.mjs | 105 +++++++++++++++++++++++++++++ 8 files changed, 200 insertions(+), 174 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 451f8c7196e1b5..47a6ee80d978a6 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -992,11 +992,17 @@ async function getPipelineOptions() { * @param {Record} 
[options] * @returns {Step} */ -function getCreateNixAmisStep(platform) { +function getCreateNixAmisStep(platform, dryRun) { return { key: `${getImageKey(platform)}-build-image`, label: `${getBuildkiteEmoji("nix")} Create Nix AMI (${platform.arch})`, - command: ["node", "./scripts/create-nix-amis.mjs", "--arch=" + platform.arch, "--cloud=aws"].join(" "), + command: [ + "node", + "./scripts/create-nix-amis.mjs", + "--release=" + (dryRun ? "create-image" : "publish-image"), + "--arch=" + platform.arch, + "--cloud=aws", + ].join(" "), agents: { queue: "build-image", }, @@ -1046,7 +1052,7 @@ async function getPipeline(options = {}) { steps: [ ...Array.from(imagePlatforms.values()) .filter(platform => platform.nix) - .map(getCreateNixAmisStep), + .map(platform => getCreateNixAmisStep(platform, !!publishImages)), ...[...imagePlatforms.values()] .filter(platform => !platform.nix) .map(platform => getBuildImageStep(platform, !publishImages)), diff --git a/flake.nix b/flake.nix index 0baee01700c0cb..73678b35cf94a2 100644 --- a/flake.nix +++ b/flake.nix @@ -42,7 +42,7 @@ url = olderBunVersion.${arch}.url; stripRoot = false; sha256 = olderBunVersion.${arch}.sha256; - }}/${olderBunVersion.${arch}.dir}/bun $out/bin/ + }}/${olderBunVersion.${arch}.dir}/bun $out/bin/bun chmod +x $out/bin/bun ''; diff --git a/scripts/agent.mjs b/scripts/agent.mjs index ffe104c3aa5e9d..6e8f5b1f0170a6 100755 --- a/scripts/agent.mjs +++ b/scripts/agent.mjs @@ -222,7 +222,6 @@ Environment=NIX_PATH=/nix/var/nix/profiles/per-user/root/channels "distro": getDistro(), "distro-version": getDistroVersion(), "cloud": cloud, - "nix": isNixInstalled() ? "true" : undefined, }; if (cloud) { diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 23c47f90fa9608..53dbddac84143f 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -7,11 +7,12 @@ import { writeFile } from "node:fs/promises"; async function main() { const { - values: { arch, cloud }, + values: { arch, cloud, release }, } = parseArgs({ options: { arch: { type: "string" }, cloud: { type: "string" }, + release: { type: "string" }, }, }); @@ -23,6 +24,10 @@ async function main() { throw new Error("--cloud is required"); } + if (!release) { + throw new Error("--release is required"); + } + const architecture = parseArch(arch); const flakeTarget = architecture === "arm64" ? "arm64" : "x64"; @@ -47,7 +52,34 @@ export DEBIAN_FRONTEND=noninteractive echo "Installing required packages..." sudo apt-get update -qq -sudo apt-get install -y curl xz-utils git sudo --no-install-recommends +curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash - +sudo apt-get install -y curl xz-utils git sudo nodejs --no-install-recommends + +echo "Creating buildkite-agent user..." +sudo useradd -m -d /var/lib/buildkite-agent -s /bin/bash buildkite-agent + +echo "Creating required directories..." +sudo mkdir -p /var/lib/buildkite-agent/bun +sudo mkdir -p /var/cache/buildkite-agent +sudo mkdir -p /var/log/buildkite-agent +sudo mkdir -p /usr/local/share/bun +sudo mkdir -p /etc/buildkite-agent/hooks + +# Copy the agent.mjs script +sudo tee /usr/local/share/bun/agent.mjs > /dev/null << 'EOF' +${agentScript} +EOF + +sudo tee /usr/local/share/bun/utils.mjs > /dev/null << 'EOF' +${utilsContent} +EOF + +sudo chmod +x /usr/local/share/bun/agent.mjs + +# Copy flake.nix +sudo tee /var/lib/buildkite-agent/bun/flake.nix > /dev/null << 'EOF' +${flakeContent} +EOF echo "Installing Nix..." 
sh <(curl -L https://nixos.org/nix/install) --daemon @@ -64,59 +96,45 @@ trusted-users = root buildkite-agent auto-optimise-store = true EOF +# Create systemd service for our agent +sudo tee /etc/systemd/system/buildkite-agent.service > /dev/null << EOF +[Unit] +Description=Buildkite Agent +After=network-online.target nix-daemon.service +Wants=network-online.target nix-daemon.service + +[Service] +Type=simple +User=buildkite-agent +Group=buildkite-agent +Environment="HOME=/var/lib/buildkite-agent" +Environment="USER=buildkite-agent" +Environment="PATH=/nix/var/nix/profiles/default/bin:/usr/local/bin:/usr/bin:/bin" +Environment="NIX_PATH=/nix/var/nix/profiles/per-user/root/channels" +ExecStart=/usr/bin/node /usr/local/share/bun/agent.mjs start +Restart=always +RestartSec=5 +TimeoutStopSec=20 + +# Set max open files +LimitNOFILE=1048576 + +[Install] +WantedBy=multi-user.target +EOF + +curl -fsSL https://keys.openpgp.org/vks/v1/by-fingerprint/32A37959C2FA5C3C99EFBC32A79206696452D198 | sudo gpg --dearmor -o /usr/share/keyrings/buildkite-agent-archive-keyring.gpg +echo "deb [signed-by=/usr/share/keyrings/buildkite-agent-archive-keyring.gpg] https://apt.buildkite.com/buildkite-agent stable main" | sudo tee /etc/apt/sources.list.d/buildkite-agent.list +sudo apt-get update -qq +sudo apt-get install -y buildkite-agent + # Create required directories sudo mkdir -p /var/lib/buildkite-agent/bun sudo mkdir -p /var/cache/buildkite-agent sudo mkdir -p /var/log/buildkite-agent -# Create a Nix expression for the buildkite service -sudo mkdir -p /etc/buildkite-agent -cat > /etc/buildkite-agent/service.nix << 'NIXEOF' -{ pkgs ? import {} }: - -let - buildkite-agent = pkgs.buildkite-agent; - flakeTarget = if pkgs.stdenv.isAarch64 then "arm64" else "x64"; -in { - systemd.services.buildkite-agent = { - description = "Buildkite Agent"; - after = [ "nix-daemon.service" "network-online.target" ]; - wants = [ "nix-daemon.service" "network-online.target" ]; - wantedBy = [ "multi-user.target" ]; - - environment = { - HOME = "/var/lib/buildkite-agent"; - USER = "buildkite-agent"; - NIX_PATH = "/nix/var/nix/profiles/per-user/root/channels"; - PATH = "\${pkgs.lib.makeBinPath [ pkgs.bash pkgs.nix pkgs.nodejs_20 ]}"; - }; - - serviceConfig = { - ExecStart = "/usr/local/share/bun/agent.mjs start"; - User = "buildkite-agent"; - Group = "buildkite-agent"; - RestartSec = "5"; - Restart = "always"; - TimeoutStopSec = "20"; - }; - }; - - users.users.buildkite-agent = { - isSystemUser = true; - group = "buildkite-agent"; - home = "/var/lib/buildkite-agent"; - createHome = true; - }; - - users.groups.buildkite-agent = {}; -} -NIXEOF - -# Install and configure buildkite-agent using Nix -nix-env -if /etc/buildkite-agent/service.nix - # Configure buildkite-agent -cat > /etc/buildkite-agent/buildkite-agent.cfg << 'EOF' +sudo tee /etc/buildkite-agent/buildkite-agent.cfg > /dev/null << 'EOF' name="%hostname-%n" tags="queue=build-linux,os=linux,arch=${architecture}" build-path=/var/lib/buildkite-agent/builds @@ -128,7 +146,7 @@ EOF # Set up hooks sudo mkdir -p /etc/buildkite-agent/hooks -cat > /etc/buildkite-agent/hooks/environment << 'EOF' +sudo tee /etc/buildkite-agent/hooks/environment > /dev/null << 'EOF' #!/bin/bash . 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh export PATH="/nix/var/nix/profiles/default/bin:$PATH" @@ -143,11 +161,6 @@ EOF sudo chmod +x /etc/buildkite-agent/hooks/* -# Copy flake.nix -sudo tee /var/lib/buildkite-agent/bun/flake.nix > /dev/null << 'EOF' -${flakeContent} -EOF - # Set system limits sudo tee /etc/security/limits.d/buildkite-agent.conf > /dev/null << 'EOF' buildkite-agent soft nofile 1048576 @@ -165,6 +178,9 @@ sudo chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent # Enable and start service sudo systemctl daemon-reload sudo systemctl enable buildkite-agent + +cd /var/lib/buildkite-agent/bun +sudo -u buildkite-agent bash -c "source /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh && nix develop .#ci-${flakeTarget} -c echo 'Build environment ready for ${release} - ${architecture}'" `; // Write user data to a temporary file @@ -177,11 +193,12 @@ sudo systemctl enable buildkite-agent [ "node", "./scripts/machine.mjs", - "create-image", + release, "--os=linux", `--arch=${architecture}`, "--distro=ubuntu", - "--release=18.04", + // Orbstack requires 20.04+. + "--release=" + (cloud === "orbstack" ? "20.04" : "18.04"), `--cloud=${cloud}`, "--ci", "--authorized-org=oven-sh", diff --git a/scripts/machine.mjs b/scripts/machine.mjs index 7a4db05cd1d0d8..6d5cbc0b9db2b6 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -14,6 +14,8 @@ import { spawnSafe, spawnSyncSafe, startGroup, + spawnSshSafe, + spawnSsh, tmpdir, waitForPort, which, @@ -29,6 +31,7 @@ import { rm, homedir, isWindows, + setupUserData, sha256, isPrivileged, getUsernameForDistro, @@ -636,37 +639,6 @@ export function getUserData(cloudInit) { return getCloudInit(cloudInit); } -/** - * @param {MachineOptions} options - * @returns {Promise} - */ -async function setupUserData(machine, options) { - const { os, userData } = options; - if (!userData) { - return; - } - - // Write user data to a temporary file - const tmpFile = mkdtemp("user-data-", os === "windows" ? "setup.ps1" : "setup.sh"); - await writeFile(tmpFile, userData); - - try { - // Upload the script - const remotePath = os === "windows" ? 
"C:\\Windows\\Temp\\setup.ps1" : "/tmp/setup.sh"; - await machine.upload(tmpFile, remotePath); - - // Execute the script - if (os === "windows") { - await machine.spawnSafe(["powershell", remotePath], { stdio: "inherit" }); - } else { - await machine.spawnSafe(["bash", remotePath], { stdio: "inherit" }); - } - } finally { - // Clean up the temporary file - rm(tmpFile); - } -} - /** * @param {CloudInit} cloudInit * @returns {string} @@ -947,80 +919,6 @@ async function getGithubOrgSshKeys(organization) { * @property {number} [retries] */ -/** - * @param {SshOptions} options - * @param {import("./utils.mjs").SpawnOptions} [spawnOptions] - * @returns {Promise} - */ -async function spawnSsh(options, spawnOptions = {}) { - const { hostname, port, username, identityPaths, password, retries = 10, command: spawnCommand } = options; - - if (!hostname.includes("@")) { - await waitForPort({ - hostname, - port: port || 22, - }); - } - - const logPath = mkdtemp("ssh-", "ssh.log"); - const command = ["ssh", hostname, "-v", "-C", "-E", logPath, "-o", "StrictHostKeyChecking=no"]; - if (!password) { - command.push("-o", "BatchMode=yes"); - } - if (port) { - command.push("-p", port); - } - if (username) { - command.push("-l", username); - } - if (password) { - const sshPass = which("sshpass", { required: true }); - command.unshift(sshPass, "-p", password); - } else if (identityPaths) { - command.push(...identityPaths.flatMap(path => ["-i", path])); - } - const stdio = spawnCommand ? "pipe" : "inherit"; - if (spawnCommand) { - command.push(...spawnCommand); - } - - /** @type {import("./utils.mjs").SpawnResult} */ - let result; - for (let i = 0; i < retries; i++) { - result = await spawn(command, { stdio, ...spawnOptions, throwOnError: undefined }); - - const { exitCode } = result; - if (exitCode !== 255) { - break; - } - - const sshLogs = readFile(logPath, { encoding: "utf-8" }); - if (sshLogs.includes("Authenticated")) { - break; - } - - await new Promise(resolve => setTimeout(resolve, (i + 1) * 15000)); - } - - if (spawnOptions?.throwOnError) { - const { error } = result; - if (error) { - throw error; - } - } - - return result; -} - -/** - * @param {SshOptions} options - * @param {import("./utils.mjs").SpawnOptions} [spawnOptions] - * @returns {Promise} - */ -async function spawnSshSafe(options, spawnOptions = {}) { - return spawnSsh(options, { throwOnError: true, ...spawnOptions }); -} - /** * @typedef ScpOptions * @property {string} hostname diff --git a/scripts/orbstack.mjs b/scripts/orbstack.mjs index ca60e2e0396582..905c3a40a59142 100644 --- a/scripts/orbstack.mjs +++ b/scripts/orbstack.mjs @@ -1,5 +1,5 @@ import { inspect } from "node:util"; -import { $, mkdtemp, rm, spawnSafe, writeFile, getUsernameForDistro } from "./utils.mjs"; +import { $, mkdtemp, rm, spawnSafe, writeFile, getUsernameForDistro, spawnSshSafe, setupUserData } from "./utils.mjs"; import { getUserData } from "./machine.mjs"; /** @@ -78,6 +78,7 @@ export const orbstack = { let userDataPath; if (userData) { userDataPath = mkdtemp("orbstack-user-data-", "user-data.txt"); + console.log("User data path:", userData); writeFile(userDataPath, userData); args.push(`--user-data=${userDataPath}`); } @@ -139,7 +140,11 @@ export const orbstack = { userData, }); - return this.toMachine(vm, options); + const machine = this.toMachine(vm, options); + + await setupUserData(machine, options); + + return machine; }, /** diff --git a/scripts/tart.mjs b/scripts/tart.mjs index 123a463236b17e..e8701b2a48ce6f 100644 --- a/scripts/tart.mjs +++ 
b/scripts/tart.mjs @@ -1,9 +1,5 @@ import { inspect } from "node:util"; -import { - isPrivileged, - spawnSafe, - which -} from "./utils.mjs"; +import { isPrivileged, spawnSafe, which } from "./utils.mjs"; /** * @link https://tart.run/ diff --git a/scripts/utils.mjs b/scripts/utils.mjs index 93c874fb9929b8..d664231507ede3 100755 --- a/scripts/utils.mjs +++ b/scripts/utils.mjs @@ -2775,3 +2775,108 @@ export function getBuildkiteEmoji(emoji) { const [, name] = emojiMap[emoji] || []; return name ? `:${name}:` : ""; } + +/** + * @param {SshOptions} options + * @param {import("./utils.mjs").SpawnOptions} [spawnOptions] + * @returns {Promise} + */ +export async function spawnSshSafe(options, spawnOptions = {}) { + return spawnSsh(options, { throwOnError: true, ...spawnOptions }); +} + +/** + * @param {SshOptions} options + * @param {import("./utils.mjs").SpawnOptions} [spawnOptions] + * @returns {Promise} + */ +export async function spawnSsh(options, spawnOptions = {}) { + const { hostname, port, username, identityPaths, password, retries = 10, command: spawnCommand } = options; + + if (!hostname.includes("@")) { + await waitForPort({ + hostname, + port: port || 22, + }); + } + + const logPath = mkdtemp("ssh-", "ssh.log"); + const command = ["ssh", hostname, "-v", "-C", "-E", logPath, "-o", "StrictHostKeyChecking=no"]; + if (!password) { + command.push("-o", "BatchMode=yes"); + } + if (port) { + command.push("-p", port); + } + if (username) { + command.push("-l", username); + } + if (password) { + const sshPass = which("sshpass", { required: true }); + command.unshift(sshPass, "-p", password); + } else if (identityPaths) { + command.push(...identityPaths.flatMap(path => ["-i", path])); + } + const stdio = spawnCommand ? "pipe" : "inherit"; + if (spawnCommand) { + command.push(...spawnCommand); + } + + /** @type {import("./utils.mjs").SpawnResult} */ + let result; + for (let i = 0; i < retries; i++) { + result = await spawn(command, { stdio, ...spawnOptions, throwOnError: undefined }); + + const { exitCode } = result; + if (exitCode !== 255) { + break; + } + + const sshLogs = readFile(logPath, { encoding: "utf-8" }); + if (sshLogs.includes("Authenticated")) { + break; + } + + await new Promise(resolve => setTimeout(resolve, (i + 1) * 15000)); + } + + if (spawnOptions?.throwOnError) { + const { error } = result; + if (error) { + throw error; + } + } + + return result; +} + +/** + * @param {MachineOptions} options + * @returns {Promise} + */ +export async function setupUserData(machine, options) { + const { os, userData } = options; + if (!userData) { + return; + } + + // Write user data to a temporary file + const tmpFile = mkdtemp("user-data-", os === "windows" ? "setup.ps1" : "setup.sh"); + await writeFile(tmpFile, userData); + + try { + // Upload the script + const remotePath = os === "windows" ? 
"C:\\Windows\\Temp\\setup.ps1" : "/tmp/setup.sh"; + await machine.upload(tmpFile, remotePath); + + // Execute the script + if (os === "windows") { + await machine.spawnSafe(["powershell", remotePath], { stdio: "inherit" }); + } else { + await machine.spawnSafe(["bash", remotePath], { stdio: "inherit" }); + } + } finally { + // Clean up the temporary file + rm(tmpFile); + } +} From de6d4173e9c2eef73c8ab73fd586053bdca4c7f5 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 8 Dec 2024 00:40:24 -0800 Subject: [PATCH 121/176] [build images] --- scripts/create-nix-amis.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 53dbddac84143f..674c5d1d11885f 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -52,7 +52,7 @@ export DEBIAN_FRONTEND=noninteractive echo "Installing required packages..." sudo apt-get update -qq -curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash - +curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - sudo apt-get install -y curl xz-utils git sudo nodejs --no-install-recommends echo "Creating buildkite-agent user..." From 15609e2228edadd0e2ea6a32a879b8f71787b5af Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 8 Dec 2024 01:00:40 -0800 Subject: [PATCH 122/176] [build images] --- .buildkite/ci.mjs | 2 +- scripts/create-nix-amis.mjs | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 47a6ee80d978a6..a45561d6c5a8f0 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -999,7 +999,7 @@ function getCreateNixAmisStep(platform, dryRun) { command: [ "node", "./scripts/create-nix-amis.mjs", - "--release=" + (dryRun ? "create-image" : "publish-image"), + "--release=publish-image", "--arch=" + platform.arch, "--cloud=aws", ].join(" "), diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 674c5d1d11885f..7b243da080f1aa 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -94,6 +94,16 @@ sudo tee /etc/nix/nix.conf > /dev/null << 'EOF' experimental-features = nix-command flakes trusted-users = root buildkite-agent auto-optimise-store = true + +# Disable documentation to save space +documentation.enable = false +documentation.doc.enable = false +documentation.man.enable = false +documentation.info.enable = false + +# Global profile settings +keep-derivations = true +keep-outputs = true EOF # Create systemd service for our agent From b60abb05ec820a7f7355d87183e4537a3713105a Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 8 Dec 2024 01:02:53 -0800 Subject: [PATCH 123/176] [build images] --- scripts/create-nix-amis.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 7b243da080f1aa..c94e12c6d06f26 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -121,7 +121,7 @@ Environment="HOME=/var/lib/buildkite-agent" Environment="USER=buildkite-agent" Environment="PATH=/nix/var/nix/profiles/default/bin:/usr/local/bin:/usr/bin:/bin" Environment="NIX_PATH=/nix/var/nix/profiles/per-user/root/channels" -ExecStart=/usr/bin/node /usr/local/share/bun/agent.mjs start +ExecStart=nix develop /var/lib/buildkite-agent/bun#ci-${flakeTarget} --command bash -c "node /usr/local/share/bun/agent.mjs start" Restart=always RestartSec=5 TimeoutStopSec=20 From 19ec02124a58f1d1e2047aeb79eafa895efef1a1 Mon Sep 17 00:00:00 2001 From: Jarred Sumner 
Date: Sun, 8 Dec 2024 01:17:51 -0800 Subject: [PATCH 124/176] [build images] --- .buildkite/ci.mjs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index a45561d6c5a8f0..c99560b18e3946 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -999,7 +999,7 @@ function getCreateNixAmisStep(platform, dryRun) { command: [ "node", "./scripts/create-nix-amis.mjs", - "--release=publish-image", + "--release=" + (dryRun ? "create-image" : "publish-image"), "--arch=" + platform.arch, "--cloud=aws", ].join(" "), @@ -1052,7 +1052,7 @@ async function getPipeline(options = {}) { steps: [ ...Array.from(imagePlatforms.values()) .filter(platform => platform.nix) - .map(platform => getCreateNixAmisStep(platform, !!publishImages)), + .map(platform => getCreateNixAmisStep(platform, !publishImages)), ...[...imagePlatforms.values()] .filter(platform => !platform.nix) .map(platform => getBuildImageStep(platform, !publishImages)), From 7cad480e5ac3cbe9f89c2abc3d7cb09d045d521c Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Mon, 9 Dec 2024 15:04:58 -0800 Subject: [PATCH 125/176] [build images] --- .buildkite/ci.mjs | 64 ++++++++++++++----------------------- scripts/create-nix-amis.mjs | 16 +++------- scripts/utils.mjs | 1 + 3 files changed, 30 insertions(+), 51 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index c99560b18e3946..2ad8562d600a39 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -96,6 +96,7 @@ function getTargetLabel(target) { * @property {Distro} [distro] * @property {string} release * @property {Tier} [tier] + * @property {boolean} [nix] */ /** @@ -566,20 +567,29 @@ function getTestBunStep(platform, options = {}) { * @returns {Step} */ function getBuildImageStep(platform, dryRun) { - const { os, arch, distro, release } = platform; + const { os, arch, distro, release, nix } = platform; const action = dryRun ? "create-image" : "publish-image"; - const command = [ - "node", - "./scripts/machine.mjs", - action, - `--os=${os}`, - `--arch=${arch}`, - distro && `--distro=${distro}`, - `--release=${release}`, - "--cloud=aws", - "--ci", - "--authorized-org=oven-sh", - ]; + + /** @type {string[]} */ + let command; + if (nix) { + // TODO: move login into scripts/machine.mjs + command = ["node", "./scripts/create-nix-amis.mjs", `--release=${action}`, `--arch=${arch}`, "--cloud=aws"]; + } else { + command = [ + "node", + "./scripts/machine.mjs", + action, + `--os=${os}`, + `--arch=${arch}`, + distro && `--distro=${distro}`, + `--release=${release}`, + "--cloud=aws", + "--ci", + "--authorized-org=oven-sh", + ]; + } + return { key: `${getImageKey(platform)}-build-image`, label: `${getImageLabel(platform)} - build-image`, @@ -988,32 +998,6 @@ async function getPipelineOptions() { }; } -/** - * @param {Record} [options] - * @returns {Step} - */ -function getCreateNixAmisStep(platform, dryRun) { - return { - key: `${getImageKey(platform)}-build-image`, - label: `${getBuildkiteEmoji("nix")} Create Nix AMI (${platform.arch})`, - command: [ - "node", - "./scripts/create-nix-amis.mjs", - "--release=" + (dryRun ? 
"create-image" : "publish-image"), - "--arch=" + platform.arch, - "--cloud=aws", - ].join(" "), - agents: { - queue: "build-image", - }, - env: { - DEBUG: "1", - }, - retry: getRetry(), - timeout_in_minutes: 3 * 60, - }; -} - /** * @param {PipelineOptions} [options] * @returns {Promise} @@ -1052,7 +1036,7 @@ async function getPipeline(options = {}) { steps: [ ...Array.from(imagePlatforms.values()) .filter(platform => platform.nix) - .map(platform => getCreateNixAmisStep(platform, !publishImages)), + .map(platform => getBuildNixImageStep(platform, !publishImages)), ...[...imagePlatforms.values()] .filter(platform => !platform.nix) .map(platform => getBuildImageStep(platform, !publishImages)), diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index c94e12c6d06f26..3ba3a8e24dde88 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -34,9 +34,9 @@ async function main() { // Read the required files let agentScript, flakeContent, utilsContent; try { - agentScript = await readFile(join(process.cwd(), "scripts", "agent.mjs"), "utf8"); - flakeContent = await readFile(join(process.cwd(), "flake.nix"), "utf8"); - utilsContent = await readFile(join(process.cwd(), "scripts", "utils.mjs"), "utf8"); + agentScript = readFile(join(process.cwd(), "scripts", "agent.mjs"), "utf8"); + flakeContent = readFile(join(process.cwd(), "flake.nix"), "utf8"); + utilsContent = readFile(join(process.cwd(), "scripts", "utils.mjs"), "utf8"); console.log("Successfully read configuration files"); } catch (error) { console.error("Failed to read configuration files:", error); @@ -95,12 +95,6 @@ experimental-features = nix-command flakes trusted-users = root buildkite-agent auto-optimise-store = true -# Disable documentation to save space -documentation.enable = false -documentation.doc.enable = false -documentation.man.enable = false -documentation.info.enable = false - # Global profile settings keep-derivations = true keep-outputs = true @@ -121,7 +115,7 @@ Environment="HOME=/var/lib/buildkite-agent" Environment="USER=buildkite-agent" Environment="PATH=/nix/var/nix/profiles/default/bin:/usr/local/bin:/usr/bin:/bin" Environment="NIX_PATH=/nix/var/nix/profiles/per-user/root/channels" -ExecStart=nix develop /var/lib/buildkite-agent/bun#ci-${flakeTarget} --command bash -c "node /usr/local/share/bun/agent.mjs start" +ExecStart=/bin/sh -c 'exec /nix/var/nix/profiles/default/bin/nix develop /var/lib/buildkite-agent/bun#ci --command bash -c "node /usr/local/share/bun/agent.mjs start"' Restart=always RestartSec=5 TimeoutStopSec=20 @@ -166,7 +160,7 @@ EOF sudo tee /etc/buildkite-agent/hooks/command > /dev/null << 'EOF' #!/bin/bash cd "$BUILDKITE_BUILD_DIR" -exec nix develop .#ci-${flakeTarget} --command bash -c "$BUILDKITE_COMMAND" +exec nix develop .#ci --command bash -c "$BUILDKITE_COMMAND" EOF sudo chmod +x /etc/buildkite-agent/hooks/* diff --git a/scripts/utils.mjs b/scripts/utils.mjs index d664231507ede3..a0431e7fea8fd1 100755 --- a/scripts/utils.mjs +++ b/scripts/utils.mjs @@ -2746,6 +2746,7 @@ const emojiMap = { alpine: ["🐧", "alpine"], aws: ["☁️", "aws"], amazonlinux: ["🐧", "aws"], + nix: ["🐧", "nix"], windows: ["🪟", "windows"], true: ["✅", "white_check_mark"], false: ["❌", "x"], From c6a3e79f74c4482c031b600f2eac84f886aa5df0 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Mon, 9 Dec 2024 15:06:14 -0800 Subject: [PATCH 126/176] [build images] --- .buildkite/ci.mjs | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs 
index 2ad8562d600a39..0f036f67556e66 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -107,7 +107,7 @@ const buildPlatforms = [ { os: "darwin", arch: "x64", release: "14" }, { os: "linux", arch: "aarch64", distro: "ubuntu", release: "18.04", nix: true }, { os: "linux", arch: "x64", distro: "ubuntu", release: "18.04", nix: true }, - { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", nix: true, release: "18.04" }, + { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "18.04", nix: true }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" }, @@ -1033,14 +1033,7 @@ async function getPipeline(options = {}) { steps.push({ key: "build-images", group: getBuildkiteEmoji("aws"), - steps: [ - ...Array.from(imagePlatforms.values()) - .filter(platform => platform.nix) - .map(platform => getBuildNixImageStep(platform, !publishImages)), - ...[...imagePlatforms.values()] - .filter(platform => !platform.nix) - .map(platform => getBuildImageStep(platform, !publishImages)), - ], + steps: [...Array.from(imagePlatforms.values()).map(platform => getBuildImageStep(platform, !publishImages))], }); } From 201e40c73cd496f5d65b29bcf8fb7620abab93d6 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 9 Dec 2024 20:43:08 -0800 Subject: [PATCH 127/176] [build images] --- flake.nix | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/flake.nix b/flake.nix index 73678b35cf94a2..68c5c7b957ba4c 100644 --- a/flake.nix +++ b/flake.nix @@ -181,8 +181,10 @@ # Generic CI shell that defaults to x64 ci = pkgs.mkShell { buildInputs = with pkgs; [ - # Include the x64 build environment tools by default - buildEnvX64 + # Include architecture-specific build environment tools + (if stdenv.hostPlatform.isAarch64 + then buildEnvArm64 + else buildEnvX64) ]; }; }; From 1e916f66fef303838ae956235ced15a33a03da77 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 9 Dec 2024 20:44:18 -0800 Subject: [PATCH 128/176] [build images] --- scripts/create-nix-amis.mjs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs index 3ba3a8e24dde88..63375f965fc9ee 100755 --- a/scripts/create-nix-amis.mjs +++ b/scripts/create-nix-amis.mjs @@ -29,7 +29,6 @@ async function main() { } const architecture = parseArch(arch); - const flakeTarget = architecture === "arm64" ? 
"arm64" : "x64"; // Read the required files let agentScript, flakeContent, utilsContent; @@ -184,7 +183,7 @@ sudo systemctl daemon-reload sudo systemctl enable buildkite-agent cd /var/lib/buildkite-agent/bun -sudo -u buildkite-agent bash -c "source /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh && nix develop .#ci-${flakeTarget} -c echo 'Build environment ready for ${release} - ${architecture}'" +sudo -u buildkite-agent bash -c "source /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh && nix develop .#ci -c echo 'Build environment ready for ${release} - ${architecture}'" `; // Write user data to a temporary file From 0740dffc3e5d88f2d57e23e047212bfdc179b8c5 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 9 Dec 2024 21:12:07 -0800 Subject: [PATCH 129/176] Update flake.nix --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index 68c5c7b957ba4c..f0b7912fbf920c 100644 --- a/flake.nix +++ b/flake.nix @@ -23,7 +23,7 @@ arm64 = { dir = "bun-linux-aarch64"; url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-aarch64.zip"; - sha256 = "sha256-ph2lNX4o1Jd/zNSFH+1i/02j6jOFMAXH3ZPayAvFOTI="; + sha256 = "sha256-iE+uoF4+18shNqlPM19WfbqXwxC2CG72RS8++KGXkH4="; }; }; overlays = [ (import rust-overlay) ]; From 148d23e31dd85ac761d6152a9664fe3d7cb8644d Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Mon, 9 Dec 2024 23:48:10 -0800 Subject: [PATCH 130/176] [build images] --- scripts/agent.mjs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/scripts/agent.mjs b/scripts/agent.mjs index 6e8f5b1f0170a6..56995505852826 100755 --- a/scripts/agent.mjs +++ b/scripts/agent.mjs @@ -192,10 +192,6 @@ Environment=NIX_PATH=/nix/var/nix/profiles/per-user/root/channels "experiment": "normalised-upload-paths,resolve-commit-after-checkout,agent-api", }; - if (isLinux && isNixInstalled()) { - options["env-path"] = "/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"; - } - let ephemeral; if (cloud) { const jobId = await getCloudMetadataTag("buildkite:job-uuid"); From 3cfe70e7648173016a072ff0708a9a5ed185cdc1 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Tue, 10 Dec 2024 00:12:40 -0800 Subject: [PATCH 131/176] Fix tests [build images] --- src/bun.js/bindings/BunProcess.cpp | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/bun.js/bindings/BunProcess.cpp b/src/bun.js/bindings/BunProcess.cpp index 628240b2543ab8..1d58a9a3e13fb0 100644 --- a/src/bun.js/bindings/BunProcess.cpp +++ b/src/bun.js/bindings/BunProcess.cpp @@ -1563,7 +1563,11 @@ static JSValue constructReportObjectComplete(VM& vm, Zig::GlobalObject* globalOb { char cwd[PATH_MAX] = { 0 }; - getcwd(cwd, PATH_MAX); + + if (getcwd(cwd, PATH_MAX) == nullptr) { + cwd[0] = '.'; + cwd[1] = '\0'; + } header->putDirect(vm, JSC::Identifier::fromString(vm, "cwd"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(std::span { reinterpret_cast(cwd), strlen(cwd) })), 0); } @@ -1579,7 +1583,9 @@ static JSValue constructReportObjectComplete(VM& vm, Zig::GlobalObject* globalOb { // uname struct utsname buf; - uname(&buf); + if (uname(&buf) != 0) { + memset(&buf, 0, sizeof(buf)); + } header->putDirect(vm, JSC::Identifier::fromString(vm, "osName"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(std::span { reinterpret_cast(buf.sysname), strlen(buf.sysname) })), 0); header->putDirect(vm, JSC::Identifier::fromString(vm, "osRelease"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(std::span 
{ reinterpret_cast(buf.release), strlen(buf.release) })), 0); @@ -1591,7 +1597,9 @@ static JSValue constructReportObjectComplete(VM& vm, Zig::GlobalObject* globalOb { // TODO: use HOSTNAME_MAX char host[1024] = { 0 }; - gethostname(host, 1024); + if (gethostname(host, 1024) != 0) { + host[0] = '0'; + } header->putDirect(vm, JSC::Identifier::fromString(vm, "host"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(std::span { reinterpret_cast(host), strlen(host) })), 0); } From 89f94dfdb7066425a9a81f04dfb994212de79a5c Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Tue, 10 Dec 2024 00:54:04 -0800 Subject: [PATCH 132/176] [build images] --- src/bun.js/bindings/c-bindings.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/bun.js/bindings/c-bindings.cpp b/src/bun.js/bindings/c-bindings.cpp index b952248edc5301..38f74c731f46d6 100644 --- a/src/bun.js/bindings/c-bindings.cpp +++ b/src/bun.js/bindings/c-bindings.cpp @@ -37,7 +37,7 @@ extern "C" void bun_warn_avx_missing(const char* url) strcpy(buf, str); strcpy(buf + len, url); strcpy(buf + len + strlen(url), "\n\0"); - write(STDERR_FILENO, buf, strlen(buf)); + [[maybe_unused]] auto _ = write(STDERR_FILENO, buf, strlen(buf)); } #endif From d77df3694c6574d065fb8000e4d68b1fdb8df8b1 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Tue, 10 Dec 2024 17:46:24 -0800 Subject: [PATCH 133/176] Testing [build images] --- .buildkite/ci.mjs | 41 ++- cmake/tools/SetupCcache.cmake | 3 +- flake.lock | 82 ----- flake.nix | 192 ----------- scripts/bootstrap.sh | 605 +++++++++++++++++++--------------- scripts/create-nix-amis.mjs | 221 ------------- scripts/machine.mjs | 7 +- 7 files changed, 365 insertions(+), 786 deletions(-) delete mode 100644 flake.lock delete mode 100644 flake.nix delete mode 100755 scripts/create-nix-amis.mjs diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 0f036f67556e66..f3cfa66796300b 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -96,7 +96,7 @@ function getTargetLabel(target) { * @property {Distro} [distro] * @property {string} release * @property {Tier} [tier] - * @property {boolean} [nix] + * @property {string[]} [features] */ /** @@ -105,9 +105,9 @@ function getTargetLabel(target) { const buildPlatforms = [ { os: "darwin", arch: "aarch64", release: "14" }, { os: "darwin", arch: "x64", release: "14" }, - { os: "linux", arch: "aarch64", distro: "ubuntu", release: "18.04", nix: true }, - { os: "linux", arch: "x64", distro: "ubuntu", release: "18.04", nix: true }, - { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "18.04", nix: true }, + { os: "linux", arch: "aarch64", distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, + { os: "linux", arch: "x64", distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, + { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" }, @@ -567,27 +567,24 @@ function getTestBunStep(platform, options = {}) { * @returns {Step} */ function getBuildImageStep(platform, dryRun) { - const { os, arch, distro, release, nix } = platform; + const { os, arch, distro, release, features } = platform; const action = dryRun ? 
"create-image" : "publish-image"; /** @type {string[]} */ - let command; - if (nix) { - // TODO: move login into scripts/machine.mjs - command = ["node", "./scripts/create-nix-amis.mjs", `--release=${action}`, `--arch=${arch}`, "--cloud=aws"]; - } else { - command = [ - "node", - "./scripts/machine.mjs", - action, - `--os=${os}`, - `--arch=${arch}`, - distro && `--distro=${distro}`, - `--release=${release}`, - "--cloud=aws", - "--ci", - "--authorized-org=oven-sh", - ]; + const command = [ + "node", + "./scripts/machine.mjs", + action, + `--os=${os}`, + `--arch=${arch}`, + distro && `--distro=${distro}`, + `--release=${release}`, + "--cloud=aws", + "--ci", + "--authorized-org=oven-sh", + ]; + for (const feature of features || []) { + command.push(`--feature=${feature}`); } return { diff --git a/cmake/tools/SetupCcache.cmake b/cmake/tools/SetupCcache.cmake index 720a7acc53feba..a128fac98bd690 100644 --- a/cmake/tools/SetupCcache.cmake +++ b/cmake/tools/SetupCcache.cmake @@ -43,7 +43,8 @@ setenv(CCACHE_FILECLONE 1) setenv(CCACHE_STATSLOG ${BUILD_PATH}/ccache.log) if(CI) - setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,clang_index_store,gcno_cwd,include_file_ctime,include_file_mtime") + # FIXME: Does not work on Ubuntu 18.04 + # setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,clang_index_store,gcno_cwd,include_file_ctime,include_file_mtime") else() setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,random_seed,clang_index_store,gcno_cwd") endif() diff --git a/flake.lock b/flake.lock deleted file mode 100644 index a0af77b8304c3a..00000000000000 --- a/flake.lock +++ /dev/null @@ -1,82 +0,0 @@ -{ - "nodes": { - "flake-utils": { - "inputs": { - "systems": "systems" - }, - "locked": { - "lastModified": 1731533236, - "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, - "nixpkgs": { - "locked": { - "lastModified": 1733392399, - "narHash": "sha256-kEsTJTUQfQFIJOcLYFt/RvNxIK653ZkTBIs4DG+cBns=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "d0797a04b81caeae77bcff10a9dde78bc17f5661", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixos-unstable", - "repo": "nixpkgs", - "type": "github" - } - }, - "root": { - "inputs": { - "flake-utils": "flake-utils", - "nixpkgs": "nixpkgs", - "rust-overlay": "rust-overlay" - } - }, - "rust-overlay": { - "inputs": { - "nixpkgs": [ - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1733538766, - "narHash": "sha256-FEDfBpM82XGdHDbLDJC4lV+QXSVN1rERt1MqtBGJZds=", - "owner": "oxalica", - "repo": "rust-overlay", - "rev": "66526479b295ad238843a8a7367d2da7ec102757", - "type": "github" - }, - "original": { - "owner": "oxalica", - "repo": "rust-overlay", - "type": "github" - } - }, - "systems": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - } - }, - "root": "root", - "version": 7 -} diff --git a/flake.nix b/flake.nix deleted file mode 100644 index f0b7912fbf920c..00000000000000 --- a/flake.nix +++ /dev/null @@ -1,192 +0,0 @@ -{ - description = "Bun flake and build environment"; - - inputs = { - nixpkgs.url = 
"github:NixOS/nixpkgs/nixos-unstable"; - flake-utils.url = "github:numtide/flake-utils"; - rust-overlay = { - url = "github:oxalica/rust-overlay"; - inputs.nixpkgs.follows = "nixpkgs"; - }; - }; - - outputs = { self, nixpkgs, flake-utils, rust-overlay }: - flake-utils.lib.eachDefaultSystem (system: - let - olderBunVersion = { - x64 = { - dir = "bun-linux-x64"; - url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-x64.zip"; - sha256 = "sha256-e5OtTccoPG7xKQVvZiuvo3VSBC8mRteOj1d0GF+nEtk="; - - }; - arm64 = { - dir = "bun-linux-aarch64"; - url = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v1.1.38/bun-linux-aarch64.zip"; - sha256 = "sha256-iE+uoF4+18shNqlPM19WfbqXwxC2CG72RS8++KGXkH4="; - }; - }; - overlays = [ (import rust-overlay) ]; - pkgs = import nixpkgs { - inherit system overlays; - config = { - allowUnfree = true; - }; - }; - - # Function to create a derivation for downloading Bun binary - getBunBinary = arch: pkgs.runCommand "bun-binary-${arch}" {} '' - mkdir -p $out/bin - cp ${pkgs.fetchzip { - name = "bun-binary-${arch}"; - url = olderBunVersion.${arch}.url; - stripRoot = false; - sha256 = olderBunVersion.${arch}.sha256; - }}/${olderBunVersion.${arch}.dir}/bun $out/bin/bun - chmod +x $out/bin/bun - ''; - - # Function to create build environment for a specific architecture - makeBuildEnv = arch: pkgs.buildEnv { - name = "bun-build-tools-${arch}"; - paths = with pkgs; [ - # Core build tools - bash - coreutils - gcc13 - # Full LLVM 18 toolchain - llvmPackages_18.llvm - llvmPackages_18.clang - llvmPackages_18.lld - llvmPackages_18.lldb - llvmPackages_18.bintools - cmake - ninja - pkg-config - gnumake - binutils - file - - # Languages needed for build - nodejs_22 - python3 - go - (rust-bin.stable.latest.default.override { - extensions = [ "rust-src" "rust-analysis" ]; - }) - (perl.withPackages (p: with p; [ - MathBigInt - JSON - DataDumper - FileSlurp - ])) - - # Development tools - git - curl - wget - unzip - xz - ccache - - # SSL Certificates - cacert - - # Libraries - zlib - openssl - libffi - - # Include the Bun binary - (getBunBinary arch) - ]; - - pathsToLink = [ "/bin" "/lib" "/lib64" "/include" "/share" "/etc/ssl" ]; - extraOutputsToInstall = [ "dev" "out" "bin" ]; - ignoreCollisions = true; - }; - - # Create both x64 and arm64 environments - buildEnvX64 = makeBuildEnv "x64"; - buildEnvArm64 = makeBuildEnv "arm64"; - - # Function to build Bun for release - buildBun = arch: pkgs.stdenv.mkDerivation { - pname = "bun"; - version = "latest"; - - src = ./.; - - nativeBuildInputs = [ - (if arch == "x64" then buildEnvX64 else buildEnvArm64) - ]; - - buildPhase = '' - export HOME=$TMPDIR - bun build:release - ''; - - installPhase = '' - mkdir -p $out/bin - cp build/release/bun $out/bin/ - chmod +x $out/bin/bun - ''; - - meta = with pkgs.lib; { - description = "Incredibly fast JavaScript runtime, bundler, transpiler and package manager"; - homepage = "https://bun.sh"; - license = licenses.mit; - platforms = platforms.linux; - }; - }; - - in - { - packages = { - default = buildEnvX64; - build-x64 = buildEnvX64; - build-arm64 = buildEnvArm64; - x64 = buildBun "x64"; - arm64 = buildBun "arm64"; - }; - - devShells = { - default = pkgs.mkShell { - buildInputs = with pkgs; [ - ]; - - shellHook = '' - echo "To compile a release build of Bun:" - echo " bun build:release" - echo "To compile a debug build of Bun:" - echo " bun build:debug" - ''; - }; - - # CI shells for different architectures - ci-x64 = pkgs.mkShell { - buildInputs = with 
pkgs; [ - # Include the x64 build environment tools - buildEnvX64 - ]; - }; - - ci-arm64 = pkgs.mkShell { - buildInputs = with pkgs; [ - # Include the arm64 build environment tools - buildEnvArm64 - ]; - }; - - # Generic CI shell that defaults to x64 - ci = pkgs.mkShell { - buildInputs = with pkgs; [ - # Include architecture-specific build environment tools - (if stdenv.hostPlatform.isAarch64 - then buildEnvArm64 - else buildEnvX64) - ]; - }; - }; - }); -} \ No newline at end of file diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 5d99ffbc481c11..a232ce6f8460d8 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -1,5 +1,5 @@ #!/bin/sh -# Version: 7 +# Version: 8 # A script that installs the dependencies needed to build and test Bun. # This should work on macOS and Linux with a POSIX shell. @@ -11,15 +11,17 @@ # increment the version comment to indicate that a new image should be built. # Otherwise, the existing image will be retroactively updated. -pid=$$ +pid="$$" print() { echo "$@" } error() { - echo "error: $@" >&2 - kill -s TERM "$pid" + print "error: $@" >&2 + if ! [ "$$" = "$pid" ]; then + kill -s TERM "$pid" + fi exit 1 } @@ -39,24 +41,44 @@ execute_sudo() { } execute_as_user() { + sh="$(require sh)" + if [ "$sudo" = "1" ] || [ "$can_sudo" = "1" ]; then if [ -f "$(which sudo)" ]; then - execute sudo -H -n -u "$user" /bin/sh -c "$*" + execute sudo -n -u "$user" "$sh" -c "$*" elif [ -f "$(which doas)" ]; then - execute doas -u "$user" /bin/sh -c "$*" + execute doas -u "$user" "$sh" -c "$*" elif [ -f "$(which su)" ]; then - execute su -s /bin/sh "$user" -c "$*" + execute su -s "$sh" "$user" -c "$*" else - execute /bin/sh -c "$*" + execute "$sh" -c "$*" fi else - execute /bin/sh -c "$*" + execute "$sh" -c "$*" fi } grant_to_user() { path="$1" - execute_sudo chown -R "$user:$group" "$path" + if ! [ -f "$path" ] && ! [ -d "$path" ]; then + error "Could not find file or directory: \"$path\"" + fi + + chown="$(require chown)" + execute_sudo "$chown" -R "$user:$group" "$path" + if ! [ "$user" = "$current_user" ] || ! [ "$group" = "$current_group" ]; then + execute_sudo "$chown" -R "$current_user:$current_group" "$path" + fi +} + +grant_to_everyone() { + path="$1" + if ! [ -f "$path" ] && ! [ -d "$path" ]; then + error "Could not find file or directory: \"$path\"" + fi + + chmod="$(require chmod)" + execute_sudo "$chmod" 777 "$path" } which() { @@ -68,15 +90,15 @@ require() { if ! [ -f "$path" ]; then error "Command \"$1\" is required, but is not installed." fi - echo "$path" + print "$path" } fetch() { - curl=$(which curl) + curl="$(which curl)" if [ -f "$curl" ]; then execute "$curl" -fsSL "$1" else - wget=$(which wget) + wget="$(which wget)" if [ -f "$wget" ]; then execute "$wget" -qO- "$1" else @@ -85,149 +107,115 @@ fetch() { fi } +compare_version() { + if [ "$1" = "$2" ]; then + print "0" + elif [ "$1" = "$(echo -e "$1\n$2" | sort -V | head -n1)" ]; then + print "-1" + else + print "1" + fi +} -install_gcc13_ubuntu18() { - if ! [ "$distro" = "ubuntu" ]; then - return - fi - if ! { [ "$release" = "18.04" ] && [ "$arch" = "x64" ] || [ "$release" = "20.04" ] && [ "$arch" = "aarch64" ]; }; then - return - fi - - print "Installing GCC 13 toolchain for Ubuntu 18.04..." 
- - # Add the Ubuntu Toolchain PPA - execute_sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test - execute_sudo apt-get update - - # Install GCC 13 and related packages - install_packages \ - gcc-13 \ - g++-13 \ - libgcc-13-dev \ - libstdc++-13-dev \ - libasan6 \ - libubsan1 \ - libatomic1 \ - libtsan0 \ - liblsan0 \ - libgfortran5 \ - libc6-dev - - # Set up GCC 13 as the default compiler - execute_sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \ - --slave /usr/bin/g++ g++ /usr/bin/g++-13 \ - --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \ - --slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-13 \ - --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-13 - - # Get system triplet dynamically - triplet=$(gcc -dumpmachine | sed 's/-pc-/-/') # Remove -pc- if present - - # Configure library paths for Ubuntu 18.04 - execute_sudo mkdir -p "/usr/lib/gcc/${triplet}/13" - execute_sudo ln -sf "/usr/lib/${triplet}/libstdc++.so.6" "/usr/lib/gcc/${triplet}/13/" - - # Update library paths configuration - execute_sudo sh -c "echo '/usr/lib/gcc/${triplet}/13' > /etc/ld.so.conf.d/gcc-13.conf" - execute_sudo sh -c "echo '/usr/lib/${triplet}' >> /etc/ld.so.conf.d/gcc-13.conf" - execute_sudo ldconfig - - # Set environment variables for the toolchain - # append_to_profile "export LD_LIBRARY_PATH=\"/usr/lib/gcc/${triplet}/13:/usr/lib/${triplet}:\$LD_LIBRARY_PATH\"" - # append_to_profile "export LIBRARY_PATH=\"/usr/lib/gcc/${triplet}/13:/usr/lib/${triplet}:\$LIBRARY_PATH\"" - # append_to_profile "export CPLUS_INCLUDE_PATH=\"/usr/include/c++/13:/usr/include/${triplet}/c++/13:\$CPLUS_INCLUDE_PATH\"" - # append_to_profile "export C_INCLUDE_PATH=\"/usr/lib/gcc/${triplet}/13/include:\$C_INCLUDE_PATH\"" - - append_to_profile "CC=clang-$(llvm_version)" - append_to_profile "CXX=clang++-$(llvm_version)" - append_to_profile "AR=llvm-ar-$(llvm_version)" - append_to_profile "RANLIB=llvm-ranlib-$(llvm_version)" - append_to_profile "LD=lld-$(llvm_version)" - append_to_profile "LTO_FLAG=\"-flto=full -fwhole-program-vtables -fforce-emit-vtables\"" - append_to_profile "LD_LIBRARY_PATH=/usr/lib/gcc/${triplet}/13:/usr/lib/${triplet}:\$LD_LIBRARY_PATH" - append_to_profile "LIBRARY_PATH=/usr/lib/gcc/${triplet}/13:/usr/lib/${triplet}:\$LIBRARY_PATH" - append_to_profile "CPLUS_INCLUDE_PATH=/usr/include/c++/13:/usr/include/${triplet}/c++/13:\$CPLUS_INCLUDE_PATH" - append_to_profile "C_INCLUDE_PATH=/usr/lib/gcc/${triplet}/13/include:\$C_INCLUDE_PATH" - append_to_profile "DEFAULT_CFLAGS=\"-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables -DU_STATIC_IMPLEMENTATION=1\"" - append_to_profile "CFLAGS=\"\$DEFAULT_CFLAGS \$CFLAGS -stdlib=libstdc++\"" - append_to_profile "CXXFLAGS=\"\$DEFAULT_CFLAGS \$CXXFLAGS -stdlib=libstdc++\"" - append_to_profile "LDFLAGS=\"-fuse-ld=lld -L/usr/lib/gcc/x86_64-linux-gnu/13 -L/usr/lib/x86_64-linux-gnu\"" +create_directory() { + path="$1" + path_dir="$path" + while ! [ -d "$path_dir" ]; do + path_dir="$(dirname "$path_dir")" + done -} + path_needs_sudo="0" + if ! [ -r "$path_dir" ] || ! 
[ -w "$path_dir" ]; then + path_needs_sudo="1" + fi -download_file() { - url="$1" - filename="${2:-$(basename "$url")}" - tmp="$(execute mktemp -d)" - execute chmod 755 "$tmp" + mkdir="$(require mkdir)" + if [ "$path_needs_sudo" = "1" ]; then + execute_sudo "$mkdir" -p "$path" + else + execute "$mkdir" -p "$path" + fi - path="$tmp/$filename" - fetch "$url" >"$path" - execute chmod 644 "$path" + grant_to_user "$path" +} +create_tmp_directory() { + mktemp="$(require mktemp)" + path="$(execute "$mktemp" -d)" + grant_to_everyone "$path" print "$path" } -compare_version() { - if [ "$1" = "$2" ]; then - echo "0" - elif [ "$1" = "$(echo -e "$1\n$2" | sort -V | head -n1)" ]; then - echo "-1" - else - echo "1" +create_file() { + path="$1" + path_dir="$(dirname "$path")" + if ! [ -d "$path_dir" ]; then + create_directory "$path_dir" fi -} -append_to_file() { - file="$1" - content="$2" + path_needs_sudo="0" + if ! [ -r "$path" ] || ! [ -w "$path" ]; then + path_needs_sudo="1" + fi - file_needs_sudo="0" - if [ -f "$file" ]; then - if ! [ -r "$file" ] || ! [ -w "$file" ]; then - file_needs_sudo="1" - fi + if [ "$path_needs_sudo" = "1" ]; then + execute_sudo touch "$path" else - execute_as_user mkdir -p "$(dirname "$file")" - execute_as_user touch "$file" + execute touch "$path" fi - echo "$content" | while read -r line; do - if ! grep -q "$line" "$file"; then - if [ "$file_needs_sudo" = "1" ]; then - execute_sudo sh -c "echo '$line' >> '$file'" - else - echo "$line" >>"$file" - fi - fi - done + content="$2" + if [ -n "$content" ]; then + append_file "$path" "$content" + fi + + grant_to_user "$path" } -append_to_file_sudo() { - file="$1" - content="$2" +append_file() { + path="$1" + if ! [ -f "$path" ]; then + create_file "$path" + fi - if ! [ -f "$file" ]; then - execute_sudo mkdir -p "$(dirname "$file")" - execute_sudo touch "$file" + path_needs_sudo="0" + if ! [ -r "$path" ] || ! [ -w "$path" ]; then + path_needs_sudo="1" fi - echo "$content" | while read -r line; do - if ! grep -q "$line" "$file"; then - echo "$line" | execute_sudo tee "$file" >/dev/null + content="$2" + print "$content" | while read -r line; do + if ! grep -q "$line" "$path"; then + sh="$(require sh)" + if [ "$path_needs_sudo" = "1" ]; then + execute_sudo "$sh" -c "echo '$line' >> '$path'" + else + execute "$sh" -c "echo '$line' >> '$path'" + fi fi done } +download_file() { + file_url="$1" + file_tmp_dir="$(create_tmp_directory)" + file_tmp_path="$file_tmp_dir/$(basename "$file_url")" + + fetch "$file_url" >"$file_tmp_path" + grant_to_everyone "$file_tmp_path" + + print "$file_tmp_path" +} + append_to_profile() { content="$1" - profiles=".profile" + profiles=".profile .zprofile .bash_profile .bashrc .zshrc" for profile in $profiles; do - file="$home/$profile" - if [ "$ci" = "1" ] || [ -f "$file" ]; then - append_to_file "$file" "$content" - fi + for profile_path in "$current_home/$profile" "$home/$profile"; do + if [ "$ci" = "1" ] || [ -f "$profile_path" ]; then + append_file "$profile_path" "$content" + fi + done done } @@ -238,7 +226,7 @@ append_to_path() { fi append_to_profile "export PATH=\"$path:\$PATH\"" - export PATH="$path:$PATH" + # export PATH="$path:$PATH" } move_to_bin() { @@ -261,19 +249,22 @@ move_to_bin() { check_features() { print "Checking features..." 
- case "$CI" in - true | 1) - ci=1 - print "CI: enabled" - ;; - esac - - case "$@" in - *--ci*) - ci=1 - print "CI: enabled" - ;; - esac + for arg in "$@"; do + case "$arg" in + *--ci*) + ci=1 + print "CI: enabled" + ;; + *--osxcross*) + osxcross=1 + print "Cross-compiling to macOS: enabled" + ;; + *--gcc-13*) + gcc_version="13" + print "GCC 13: enabled" + ;; + esac + done } check_operating_system() { @@ -282,17 +273,29 @@ check_operating_system() { os="$("$uname" -s)" case "$os" in - Linux*) os="linux" ;; - Darwin*) os="darwin" ;; - *) error "Unsupported operating system: $os" ;; + Linux*) + os="linux" + ;; + Darwin*) + os="darwin" + ;; + *) + error "Unsupported operating system: $os" + ;; esac print "Operating System: $os" arch="$("$uname" -m)" case "$arch" in - x86_64 | x64 | amd64) arch="x64" ;; - aarch64 | arm64) arch="aarch64" ;; - *) error "Unsupported architecture: $arch" ;; + x86_64 | x64 | amd64) + arch="x64" + ;; + aarch64 | arm64) + arch="aarch64" + ;; + *) + error "Unsupported architecture: $arch" + ;; esac print "Architecture: $arch" @@ -306,7 +309,7 @@ check_operating_system() { abi="musl" alpine="$(cat /etc/alpine-release)" if [ "$alpine" ~ "_" ]; then - release="$(echo "$alpine" | cut -d_ -f1)-edge" + release="$(print "$alpine" | cut -d_ -f1)-edge" else release="$alpine" fi @@ -326,6 +329,7 @@ check_operating_system() { distro="$("$sw_vers" -productName)" release="$("$sw_vers" -productVersion)" fi + case "$arch" in x64) sysctl="$(which sysctl)" @@ -348,7 +352,7 @@ check_operating_system() { ldd="$(which ldd)" if [ -f "$ldd" ]; then ldd_version="$($ldd --version 2>&1)" - abi_version="$(echo "$ldd_version" | grep -o -E '[0-9]+\.[0-9]+(\.[0-9]+)?' | head -n 1)" + abi_version="$(print "$ldd_version" | grep -o -E '[0-9]+\.[0-9]+(\.[0-9]+)?' | head -n 1)" case "$ldd_version" in *musl*) abi="musl" @@ -465,6 +469,10 @@ check_user() { can_sudo=1 print "Sudo: can be used" fi + + current_user="$user" + current_group="$group" + current_home="$home" } check_ulimit() { @@ -476,15 +484,12 @@ check_ulimit() { systemd_conf="/etc/systemd/system.conf" if [ -f "$systemd_conf" ]; then limits_conf="/etc/security/limits.d/99-unlimited.conf" - if ! 
[ -f "$limits_conf" ]; then - execute_sudo mkdir -p "$(dirname "$limits_conf")" - execute_sudo touch "$limits_conf" - fi + create_file "$limits_conf" fi limits="core data fsize memlock nofile rss stack cpu nproc as locks sigpending msgqueue" for limit in $limits; do - limit_upper="$(echo "$limit" | tr '[:lower:]' '[:upper:]')" + limit_upper="$(print "$limit" | tr '[:lower:]' '[:upper:]')" limit_value="unlimited" case "$limit" in @@ -496,13 +501,13 @@ check_ulimit() { if [ -f "$limits_conf" ]; then limit_users="root *" for limit_user in $limit_users; do - append_to_file "$limits_conf" "$limit_user soft $limit $limit_value" - append_to_file "$limits_conf" "$limit_user hard $limit $limit_value" + append_file "$limits_conf" "$limit_user soft $limit $limit_value" + append_file "$limits_conf" "$limit_user hard $limit $limit_value" done fi if [ -f "$systemd_conf" ]; then - append_to_file "$systemd_conf" "DefaultLimit$limit_upper=$limit_value" + append_file "$systemd_conf" "DefaultLimit$limit_upper=$limit_value" fi done @@ -519,13 +524,13 @@ check_ulimit() { esac rc_ulimit="$rc_ulimit -$limit_flag $limit_value" done - append_to_file "$rc_conf" "rc_ulimit=\"$rc_ulimit\"" + append_file "$rc_conf" "rc_ulimit=\"$rc_ulimit\"" fi pam_confs="/etc/pam.d/common-session /etc/pam.d/common-session-noninteractive" for pam_conf in $pam_confs; do if [ -f "$pam_conf" ]; then - append_to_file "$pam_conf" "session optional pam_limits.so" + append_file "$pam_conf" "session optional pam_limits.so" fi done @@ -623,30 +628,12 @@ install_packages() { esac } -clean_packagemanager() { - case "$pm" in - apt) - package_manager autoremove - package_manager clean - ;; - apk) - package_manager cache clean - ;; - brew) - package_manager cleanup - ;; - dnf) - package_manager clean all - ;; - esac -} - install_brew() { print "Installing Homebrew..." bash="$(require bash)" script=$(download_file "https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh") - NONINTERACTIVE=1 execute_as_user "$bash" "$script" + execute_as_user "$bash" -c "NONINTERACTIVE=1 $script" case "$arch" in x64) @@ -712,9 +699,9 @@ install_common_software() { install_rosetta install_nodejs + install_bun install_tailscale install_buildkite - install_bun } nodejs_version_exact() { @@ -727,7 +714,7 @@ nodejs_version_exact() { } nodejs_version() { - echo "$(nodejs_version_exact)" | cut -d. -f1 + print "$(nodejs_version_exact)" | cut -d. 
-f1 } install_nodejs() { @@ -763,14 +750,21 @@ install_nodejs() { } install_nodejs_headers() { - headers_tar="$(download_file "https://nodejs.org/download/release/v$(nodejs_version_exact)/node-v$(nodejs_version_exact)-headers.tar.gz")" - headers_dir="$(dirname "$headers_tar")" - execute tar -xzf "$headers_tar" -C "$headers_dir" - headers_include="$headers_dir/node-v$(nodejs_version_exact)/include" - execute_sudo cp -R "$headers_include/" "/usr" + nodejs_headers_tar="$(download_file "https://nodejs.org/download/release/v$(nodejs_version_exact)/node-v$(nodejs_version_exact)-headers.tar.gz")" + nodejs_headers_dir="$(dirname "$nodejs_headers_tar")" + execute tar -xzf "$nodejs_headers_tar" -C "$nodejs_headers_dir" + + nodejs_headers_include="$nodejs_headers_dir/node-v$(nodejs_version_exact)/include" + execute_sudo cp -R "$nodejs_headers_include/" "/usr" +} + +bun_version_exact() { + print "1.1.38" } install_bun() { + install_packages unzip + case "$pm" in apk) install_packages \ @@ -779,44 +773,28 @@ install_bun() { ;; esac - bash="$(require bash)" - script=$(download_file "https://bun.sh/install") - # export BUN_INSTALL="$home/.bun" - # rm -rf "$BUN_INSTALL" - # mkdir -p "$BUN_INSTALL" - # chown -R "$user:$group" "$BUN_INSTALL" - - version="${1:-"latest"}" - case "$version" in - latest) - execute_as_user "$bash" "$script" + case "$abi" in + musl) + bun_triplet="bun-$os-$arch-$abi" ;; *) - execute_as_user "$bash" "$script" -s "$version" + bun_triplet="bun-$os-$arch" ;; esac - move_to_bin "$home/.bun/bin/bun" - - # bunabi="" - # if [ "$abi" = "musl" ]; then - # bunabi="-musl" - # fi - # buntarget="bun-${os}-${arch}${bunabi}" - # sudo chown -R $user:$group $home - # curl -LO "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest/${buntarget}.zip" --retry 5 - # unzip ${buntarget}.zip - # sudo mkdir -p "$home/.bun/bin" - # sudo mv ${buntarget}/bun "$home/.bun/bin" - # sudo chmod +x $home/.bun/bin/bun - # sudo chown -R $user:$group $home/.bun - # # echo "export PATH=\$PATH:$home/.bun/bin" | sudo tee $home/.profile - # append_to_path "$home/.profile" - # export PATH=$PATH:$home/.bun/bin - - bun_path="$(which bun)" - bunx_path="$(dirname "$bun_path")/bunx" - execute_sudo ln -sf "$bun_path" "$bunx_path" + unzip="$(require unzip)" + bun_download_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v$(bun_version_exact)/$bun_triplet.zip" + bun_zip="$(download_file "$bun_download_url")" + bun_tmpdir="$(dirname "$bun_zip")" + execute "$unzip" -o "$bun_zip" -d "$bun_tmpdir" + + bun_path="/opt/bun" + create_directory "$bun_path/bin" + execute mv "$bun_tmpdir/$bun_triplet/bun" "$bun_path/bin/bun" + execute ln -sf "$bun_path/bin/bun" "$bun_path/bin/bunx" + + append_to_path "$bun_path/bin" + append_to_profile "export BUN_INSTALL=$bun_path" } install_cmake() { @@ -858,17 +836,6 @@ install_rosetta() { install_build_essentials() { case "$pm" in apt) - - # Install modern CMake for Ubuntu 18.04 - if [ "$distro" = "ubuntu" ]; then - # Add Kitware's CMake repository - wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | execute_sudo tee /etc/apt/trusted.gpg.d/kitware.gpg >/dev/null - execute_sudo apt-add-repository "deb https://apt.kitware.com/ubuntu/ $(lsb_release -cs) main" - execute_sudo apt-get update - - append_to_profile "export DEBIAN_FRONTEND=noninteractive" - fi - install_packages \ build-essential \ ninja-build \ @@ -918,11 +885,10 @@ install_build_essentials() { ruby \ perl - # Install GCC 13 specifically for Ubuntu 18.04 - 
install_gcc13_ubuntu18 - install_cmake install_llvm + install_osxcross + install_gcc install_ccache install_rust install_docker @@ -933,7 +899,7 @@ llvm_version_exact() { } llvm_version() { - echo "$(llvm_version_exact)" | cut -d. -f1 + print "$(llvm_version_exact)" | cut -d. -f1 } install_llvm() { @@ -941,7 +907,7 @@ install_llvm() { apt) bash="$(require bash)" llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")" - execute_sudo "$bash" "$llvm_script" "$(llvm_version)" + execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all ;; brew) install_packages "llvm@$(llvm_version)" @@ -959,6 +925,55 @@ install_llvm() { esac } +install_gcc() { + if ! [ "$os" = "linux" ] || ! [ "$distro" = "ubuntu" ] || [ -z "$gcc_version" ]; then + return + fi + + # Taken from WebKit's Dockerfile. + # https://github.com/oven-sh/WebKit/blob/816a3c02e0f8b53f8eec06b5ed911192589b51e2/Dockerfile + + execute_sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y + execute_sudo apt update -y + install_packages \ + "gcc-$gcc_version" \ + "g++-$gcc_version" \ + "libgcc-$gcc_version-dev" \ + "libstdc++-$gcc_version-dev" \ + libasan6 \ + libubsan1 \ + libatomic1 \ + libtsan0 \ + liblsan0 \ + libgfortran5 \ + libc6-dev + + execute_sudo update-alternatives \ + --install /usr/bin/gcc gcc "/usr/bin/gcc-$gcc_version" 130 \ + --slave /usr/bin/g++ g++ "/usr/bin/g++-$gcc_version" \ + --slave /usr/bin/gcc-ar gcc-ar "/usr/bin/gcc-ar-$gcc_version" \ + --slave /usr/bin/gcc-nm gcc-nm "/usr/bin/gcc-nm-$gcc_version" \ + --slave /usr/bin/gcc-ranlib gcc-ranlib "/usr/bin/gcc-ranlib-$gcc_version" + + case "$arch" in + x64) + arch_path="x86_64-linux-gnu" + ;; + aarch64) + arch_path="aarch64-linux-gnu" + ;; + esac + + gcc_path="/usr/lib/gcc/$arch_path/$gcc_version" + create_directory "$gcc_path" + execute_sudo ln -sf /usr/lib/$arch_path/libstdc++.so.6 "$gcc_path/libstdc++.so.6" + + ld_conf_path="/etc/ld.so.conf.d/gcc-$gcc_version.conf" + append_file "$ld_conf_path" "$gcc_path" + append_file "$ld_conf_path" "/usr/lib/$arch_path" + execute_sudo ldconfig +} + install_ccache() { case "$pm" in apt | apk | brew) @@ -975,9 +990,23 @@ install_rust() { cargo ;; *) + rust_home="/opt/rust" + create_directory "$rust_home" + append_to_profile "export RUSTUP_HOME=$rust_home" + append_to_profile "export CARGO_HOME=$rust_home" + sh="$(require sh)" - script=$(download_file "https://sh.rustup.rs") - execute_as_user "$sh" "$script" -y + rustup_script=$(download_file "https://sh.rustup.rs") + execute "$sh" -c "RUSTUP_HOME=$rust_home CARGO_HOME=$rust_home $rustup_script -y --no-modify-path" + append_to_path "$rust_home/bin" + ;; + esac + + case "$osxcross" in + 1) + rustup="$(require rustup)" + execute_as_user "$rustup" target add aarch64-apple-darwin + execute_as_user "$rustup" target add x86_64-apple-darwin ;; esac } @@ -1020,6 +1049,46 @@ install_docker() { fi } +macos_sdk_version() { + # https://github.com/alexey-lysiuk/macos-sdk/releases + print "13.3" +} + +install_osxcross() { + if ! [ "$os" = "linux" ] || ! 
[ "$osxcross" = "1" ]; then + return + fi + + install_packages \ + libssl-dev \ + lzma-dev \ + libxml2-dev \ + zlib1g-dev \ + bzip2 \ + cpio + + osxcross_path="/opt/osxcross" + create_directory "$osxcross_path" + + osxcross_commit="29fe6dd35522073c9df5800f8cd1feb4b9a993a8" + osxcross_tar="$(download_file "https://github.com/tpoechtrager/osxcross/archive/$osxcross_commit.tar.gz")" + execute tar -xzf "$osxcross_tar" -C "$osxcross_path" + + osxcross_build_path="$osxcross_path/build" + execute mv "$osxcross_path/osxcross-$osxcross_commit" "$osxcross_build_path" + + osxcross_sdk_tar="$(download_file "https://github.com/alexey-lysiuk/macos-sdk/releases/download/$(macos_sdk_version)/MacOSX$(macos_sdk_version).tar.xz")" + execute mv "$osxcross_sdk_tar" "$osxcross_build_path/tarballs/MacOSX$(macos_sdk_version).sdk.tar.xz" + + bash="$(require bash)" + execute_sudo ln -sf "$(which clang-$(llvm_version))" /usr/bin/clang + execute_sudo ln -sf "$(which clang++-$(llvm_version))" /usr/bin/clang++ + execute_sudo "$bash" -c "UNATTENDED=1 TARGET_DIR='$osxcross_path' $osxcross_build_path/build.sh" + + execute_sudo rm -rf "$osxcross_build_path" + grant_to_user "$osxcross_path" +} + install_tailscale() { if [ "$docker" = "1" ]; then return @@ -1085,14 +1154,12 @@ create_buildkite_user() { buildkite_paths="$home /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /var/run/buildkite-agent/buildkite-agent.sock" for path in $buildkite_paths; do - execute_sudo mkdir -p "$path" - execute_sudo chown -R "$user:$group" "$path" + create_directory "$path" done - buildkite_files="/var/run/buildkite-agent/buildkite-agent.pid /var/run/buildkite-agent/.profile" + buildkite_files="/var/run/buildkite-agent/buildkite-agent.pid" for file in $buildkite_files; do - execute_sudo touch "$file" - execute_sudo chown "$user:$group" "$file" + create_file "$file" done } @@ -1102,27 +1169,22 @@ install_buildkite() { fi buildkite_version="3.87.0" - case "$os-$arch" in - linux-aarch64) - buildkite_filename="buildkite-agent-linux-arm64-$buildkite_version.tar.gz" - ;; - linux-x64) - buildkite_filename="buildkite-agent-linux-amd64-$buildkite_version.tar.gz" - ;; - darwin-aarch64) - buildkite_filename="buildkite-agent-darwin-arm64-$buildkite_version.tar.gz" + case "$arch" in + aarch64) + buildkite_arch="arm64" ;; - darwin-x64) - buildkite_filename="buildkite-agent-darwin-amd64-$buildkite_version.tar.gz" + x64) + buildkite_arch="amd64" ;; esac + + buildkite_filename="buildkite-agent-$os-$buildkite_arch-$buildkite_version.tar.gz" buildkite_url="https://github.com/buildkite/agent/releases/download/v$buildkite_version/$buildkite_filename" - buildkite_filepath="$(download_file "$buildkite_url" "$buildkite_filename")" - buildkite_tmpdir="$(dirname "$buildkite_filepath")" + buildkite_tar="$(download_file "$buildkite_url")" + buildkite_tmpdir="$(dirname "$buildkite_tar")" - execute tar -xzf "$buildkite_filepath" -C "$buildkite_tmpdir" + execute tar -xzf "$buildkite_tar" -C "$buildkite_tmpdir" move_to_bin "$buildkite_tmpdir/buildkite-agent" - execute rm -rf "$buildkite_tmpdir" } install_chromium() { @@ -1213,13 +1275,23 @@ install_chromium() { esac } -shrink_filesystem() { - clean_packagemanager -} +clean_system() { + if ! [ "$ci" = "1" ]; then + return + fi + + print "Cleaning system..." 
+ + tmp_paths="/tmp /var/tmp" + for path in $tmp_paths; do + execute_sudo rm -rf "$path"/* + done -zero_free_space() { - sudo dd if=/dev/zero of=/zero bs=1M || true - execute_sudo rm -f /zero + case "$pm" in + apt | apk | yum | dnf | brew) + package_manager clean + ;; + esac } main() { @@ -1233,8 +1305,7 @@ main() { install_common_software install_build_essentials install_chromium - shrink_filesystem - zero_free_space + clean_system } main "$@" diff --git a/scripts/create-nix-amis.mjs b/scripts/create-nix-amis.mjs deleted file mode 100755 index 63375f965fc9ee..00000000000000 --- a/scripts/create-nix-amis.mjs +++ /dev/null @@ -1,221 +0,0 @@ -#!/usr/bin/env node - -import { parseArgs } from "node:util"; -import { getBuildNumber, getSecret, isCI, parseArch, spawnSafe, startGroup, readFile, mkdtemp, rm } from "./utils.mjs"; -import { join } from "node:path"; -import { writeFile } from "node:fs/promises"; - -async function main() { - const { - values: { arch, cloud, release }, - } = parseArgs({ - options: { - arch: { type: "string" }, - cloud: { type: "string" }, - release: { type: "string" }, - }, - }); - - if (!arch) { - throw new Error("--arch is required"); - } - - if (!cloud) { - throw new Error("--cloud is required"); - } - - if (!release) { - throw new Error("--release is required"); - } - - const architecture = parseArch(arch); - - // Read the required files - let agentScript, flakeContent, utilsContent; - try { - agentScript = readFile(join(process.cwd(), "scripts", "agent.mjs"), "utf8"); - flakeContent = readFile(join(process.cwd(), "flake.nix"), "utf8"); - utilsContent = readFile(join(process.cwd(), "scripts", "utils.mjs"), "utf8"); - console.log("Successfully read configuration files"); - } catch (error) { - console.error("Failed to read configuration files:", error); - throw error; - } - - // Create user data script - const userData = `#!/bin/bash -set -euxo pipefail - -echo "Setting up environment..." -export DEBIAN_FRONTEND=noninteractive - -echo "Installing required packages..." -sudo apt-get update -qq -curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash - -sudo apt-get install -y curl xz-utils git sudo nodejs --no-install-recommends - -echo "Creating buildkite-agent user..." -sudo useradd -m -d /var/lib/buildkite-agent -s /bin/bash buildkite-agent - -echo "Creating required directories..." -sudo mkdir -p /var/lib/buildkite-agent/bun -sudo mkdir -p /var/cache/buildkite-agent -sudo mkdir -p /var/log/buildkite-agent -sudo mkdir -p /usr/local/share/bun -sudo mkdir -p /etc/buildkite-agent/hooks - -# Copy the agent.mjs script -sudo tee /usr/local/share/bun/agent.mjs > /dev/null << 'EOF' -${agentScript} -EOF - -sudo tee /usr/local/share/bun/utils.mjs > /dev/null << 'EOF' -${utilsContent} -EOF - -sudo chmod +x /usr/local/share/bun/agent.mjs - -# Copy flake.nix -sudo tee /var/lib/buildkite-agent/bun/flake.nix > /dev/null << 'EOF' -${flakeContent} -EOF - -echo "Installing Nix..." -sh <(curl -L https://nixos.org/nix/install) --daemon - -echo "Configuring Nix..." -# Source Nix in this shell -. 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh - -# Enable flakes -sudo mkdir -p /etc/nix -sudo tee /etc/nix/nix.conf > /dev/null << 'EOF' -experimental-features = nix-command flakes -trusted-users = root buildkite-agent -auto-optimise-store = true - -# Global profile settings -keep-derivations = true -keep-outputs = true -EOF - -# Create systemd service for our agent -sudo tee /etc/systemd/system/buildkite-agent.service > /dev/null << EOF -[Unit] -Description=Buildkite Agent -After=network-online.target nix-daemon.service -Wants=network-online.target nix-daemon.service - -[Service] -Type=simple -User=buildkite-agent -Group=buildkite-agent -Environment="HOME=/var/lib/buildkite-agent" -Environment="USER=buildkite-agent" -Environment="PATH=/nix/var/nix/profiles/default/bin:/usr/local/bin:/usr/bin:/bin" -Environment="NIX_PATH=/nix/var/nix/profiles/per-user/root/channels" -ExecStart=/bin/sh -c 'exec /nix/var/nix/profiles/default/bin/nix develop /var/lib/buildkite-agent/bun#ci --command bash -c "node /usr/local/share/bun/agent.mjs start"' -Restart=always -RestartSec=5 -TimeoutStopSec=20 - -# Set max open files -LimitNOFILE=1048576 - -[Install] -WantedBy=multi-user.target -EOF - -curl -fsSL https://keys.openpgp.org/vks/v1/by-fingerprint/32A37959C2FA5C3C99EFBC32A79206696452D198 | sudo gpg --dearmor -o /usr/share/keyrings/buildkite-agent-archive-keyring.gpg -echo "deb [signed-by=/usr/share/keyrings/buildkite-agent-archive-keyring.gpg] https://apt.buildkite.com/buildkite-agent stable main" | sudo tee /etc/apt/sources.list.d/buildkite-agent.list -sudo apt-get update -qq -sudo apt-get install -y buildkite-agent - -# Create required directories -sudo mkdir -p /var/lib/buildkite-agent/bun -sudo mkdir -p /var/cache/buildkite-agent -sudo mkdir -p /var/log/buildkite-agent - -# Configure buildkite-agent -sudo tee /etc/buildkite-agent/buildkite-agent.cfg > /dev/null << 'EOF' -name="%hostname-%n" -tags="queue=build-linux,os=linux,arch=${architecture}" -build-path=/var/lib/buildkite-agent/builds -hooks-path=/etc/buildkite-agent/hooks -experiment=git-mirrors,normalize-build-paths -debug=true -disconnect-after-job=true -EOF - -# Set up hooks -sudo mkdir -p /etc/buildkite-agent/hooks -sudo tee /etc/buildkite-agent/hooks/environment > /dev/null << 'EOF' -#!/bin/bash -. 
/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh -export PATH="/nix/var/nix/profiles/default/bin:$PATH" -export NIX_PATH="/nix/var/nix/profiles/per-user/root/channels" -EOF - -sudo tee /etc/buildkite-agent/hooks/command > /dev/null << 'EOF' -#!/bin/bash -cd "$BUILDKITE_BUILD_DIR" -exec nix develop .#ci --command bash -c "$BUILDKITE_COMMAND" -EOF - -sudo chmod +x /etc/buildkite-agent/hooks/* - -# Set system limits -sudo tee /etc/security/limits.d/buildkite-agent.conf > /dev/null << 'EOF' -buildkite-agent soft nofile 1048576 -buildkite-agent hard nofile 1048576 -buildkite-agent soft nproc 1048576 -buildkite-agent hard nproc 1048576 -EOF - -# Set up permissions -sudo chown -R buildkite-agent:buildkite-agent /var/lib/buildkite-agent -sudo chown -R buildkite-agent:buildkite-agent /var/cache/buildkite-agent -sudo chown -R buildkite-agent:buildkite-agent /var/log/buildkite-agent -sudo chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent - -# Enable and start service -sudo systemctl daemon-reload -sudo systemctl enable buildkite-agent - -cd /var/lib/buildkite-agent/bun -sudo -u buildkite-agent bash -c "source /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh && nix develop .#ci -c echo 'Build environment ready for ${release} - ${architecture}'" -`; - - // Write user data to a temporary file - const userDataFile = mkdtemp("user-data-", "user-data.sh"); - await writeFile(userDataFile, userData); - - try { - // Use machine.mjs to create the AMI with the user data - await spawnSafe( - [ - "node", - "./scripts/machine.mjs", - release, - "--os=linux", - `--arch=${architecture}`, - "--distro=ubuntu", - // Orbstack requires 20.04+. - "--release=" + (cloud === "orbstack" ? "20.04" : "18.04"), - `--cloud=${cloud}`, - "--ci", - "--authorized-org=oven-sh", - `--user-data=${userDataFile}`, - "--no-bootstrap", - ], - { - stdio: "inherit", - }, - ); - } finally { - // Clean up temporary files - await rm(userDataFile); - } -} - -await main(); diff --git a/scripts/machine.mjs b/scripts/machine.mjs index 6d5cbc0b9db2b6..ccd1ce833d7476 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -1147,6 +1147,7 @@ async function main() { "ci": { type: "boolean" }, "rdp": { type: "boolean" }, "vnc": { type: "boolean" }, + "feature-flags": { type: "string", multiple: true }, "user-data": { type: "string" }, "authorized-user": { type: "string", multiple: true }, "authorized-org": { type: "string", multiple: true }, @@ -1195,12 +1196,13 @@ async function main() { detached: !!args["detached"], bootstrap: args["no-bootstrap"] !== true, ci: !!args["ci"], + features: args["feature"], rdp: !!args["rdp"] || !!args["vnc"], sshKeys, userData: args["user-data"] ? readFile(args["user-data"]) : undefined, }; - const { detached, bootstrap, ci, os, arch, distro, release } = options; + const { detached, bootstrap, ci, os, arch, distro, release, features } = options; const name = distro ? `${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`; let bootstrapPath, agentPath; @@ -1310,6 +1312,9 @@ async function main() { } else { const remotePath = "/tmp/bootstrap.sh"; const args = ci ? 
["--ci"] : []; + for (const feature of features || []) { + args.push(`--feature=${feature}`); + } await startGroup("Running bootstrap...", async () => { await machine.upload(bootstrapPath, remotePath); await machine.spawnSafe(["sh", remotePath, ...args], { stdio: "inherit" }); From 4ae7896c300a04493dd7ec5fb92521cefd2bf466 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Tue, 10 Dec 2024 17:49:16 -0800 Subject: [PATCH 134/176] Try symbol --- src/bun.js/bindings/workaround-missing-symbols.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index bd244cf1674c00..9d16741ae6ec93 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -104,6 +104,9 @@ float __wrap_expf(float x) { return expf(x); } } // extern "C" +extern "C" __attribute__((used)) char _libc_single_threaded = 0; +extern "C" __attribute__((used)) char __libc_single_threaded = 0; + #endif // glibc // musl From a9b1737f21a36ceb692af94e50fa98c5b94aa550 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Tue, 10 Dec 2024 17:49:49 -0800 Subject: [PATCH 135/176] [build images] --- CONTRIBUTING.md | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1a7f41ae5c0080..d483bf06844619 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,11 +2,6 @@ Configuring a development environment for Bun can take 10-30 minutes depending o If you are using Windows, please refer to [this guide](/docs/project/building-windows.md) -{% details summary="For Ubuntu users" %} -TL;DR: Ubuntu 22.04 is suggested. -Bun currently requires `glibc >=2.32` in development which means if you're on Ubuntu 20.04 (glibc == 2.31), you may likely meet `error: undefined symbol: __libc_single_threaded `. You need to take extra configurations. Also, according to this [issue](https://github.com/llvm/llvm-project/issues/97314), LLVM 16 is no longer maintained on Ubuntu 24.04 (noble). And instead, you might want `brew` to install LLVM 16 for your Ubuntu 24.04. -{% /details %} - ## Install Dependencies Using your system's package manager, install Bun's dependencies: @@ -58,7 +53,7 @@ $ brew install bun ## Install LLVM -Bun requires LLVM 16 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager: +Bun requires LLVM 18 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager: {% codetabs group="os" %} @@ -89,7 +84,7 @@ $ sudo zypper install clang16 lld16 llvm16 If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-16.0.6). 
-Make sure Clang/LLVM 16 is in your path: +Make sure Clang/LLVM 18 is in your path: ```bash $ which clang-16 From ba64df8c47b8ed723d16216c0cacf1f0aff01d62 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Tue, 10 Dec 2024 17:53:28 -0800 Subject: [PATCH 136/176] Fix type [build images] --- scripts/machine.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/machine.mjs b/scripts/machine.mjs index ccd1ce833d7476..9a69ec41386f9f 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -1147,7 +1147,7 @@ async function main() { "ci": { type: "boolean" }, "rdp": { type: "boolean" }, "vnc": { type: "boolean" }, - "feature-flags": { type: "string", multiple: true }, + "feature": { type: "string", multiple: true }, "user-data": { type: "string" }, "authorized-user": { type: "string", multiple: true }, "authorized-org": { type: "string", multiple: true }, From a4572e769333daaf4d2c0286d47b142e15552b20 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Tue, 10 Dec 2024 17:58:05 -0800 Subject: [PATCH 137/176] Fix [build images] --- scripts/bootstrap.sh | 6 ------ 1 file changed, 6 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index a232ce6f8460d8..cb55244b94233a 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -1286,12 +1286,6 @@ clean_system() { for path in $tmp_paths; do execute_sudo rm -rf "$path"/* done - - case "$pm" in - apt | apk | yum | dnf | brew) - package_manager clean - ;; - esac } main() { From aaa229e925a72f6ef450dbdf2f9c658a475f3712 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Tue, 10 Dec 2024 18:08:48 -0800 Subject: [PATCH 138/176] Fix [build images] --- .buildkite/ci.mjs | 2 +- scripts/bootstrap.sh | 10 +++------- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index f3cfa66796300b..213062b609a2c5 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -105,7 +105,7 @@ function getTargetLabel(target) { const buildPlatforms = [ { os: "darwin", arch: "aarch64", release: "14" }, { os: "darwin", arch: "x64", release: "14" }, - { os: "linux", arch: "aarch64", distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, + { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04", features: ["gcc-13"] }, { os: "linux", arch: "x64", distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index cb55244b94233a..97274cd0e851c5 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -788,13 +788,9 @@ install_bun() { bun_tmpdir="$(dirname "$bun_zip")" execute "$unzip" -o "$bun_zip" -d "$bun_tmpdir" - bun_path="/opt/bun" - create_directory "$bun_path/bin" - execute mv "$bun_tmpdir/$bun_triplet/bun" "$bun_path/bin/bun" - execute ln -sf "$bun_path/bin/bun" "$bun_path/bin/bunx" - - append_to_path "$bun_path/bin" - append_to_profile "export BUN_INSTALL=$bun_path" + move_to_bin "$bun_tmpdir/$bun_triplet/bun" + bun_path="$(require bun)" + execute ln -sf "$bun_path" "$(dirname "$bun_path")/bunx" } install_cmake() { From 9c4a9dc8f463b0a7e5601490bbadef8f436ab602 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Tue, 10 Dec 2024 18:11:42 -0800 Subject: [PATCH 139/176] Fix [build images] --- scripts/bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootstrap.sh 
b/scripts/bootstrap.sh index 97274cd0e851c5..d8997ade23675d 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -790,7 +790,7 @@ install_bun() { move_to_bin "$bun_tmpdir/$bun_triplet/bun" bun_path="$(require bun)" - execute ln -sf "$bun_path" "$(dirname "$bun_path")/bunx" + execute_sudo ln -sf "$bun_path" "$(dirname "$bun_path")/bunx" } install_cmake() { From 6f99d5510be56da2d42fdb1ef8900912e917707a Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Tue, 10 Dec 2024 18:29:20 -0800 Subject: [PATCH 140/176] Remove last usages of nix [build images] --- scripts/agent.mjs | 38 +------------------------------------- 1 file changed, 1 insertion(+), 37 deletions(-) diff --git a/scripts/agent.mjs b/scripts/agent.mjs index 56995505852826..601e1c36a49c4f 100755 --- a/scripts/agent.mjs +++ b/scripts/agent.mjs @@ -3,7 +3,7 @@ // An agent that starts buildkite-agent and runs others services. import { join } from "node:path"; -import { existsSync, realpathSync } from "node:fs"; +import { realpathSync } from "node:fs"; import { isWindows, getOs, @@ -20,19 +20,10 @@ import { getEnv, writeFile, spawnSafe, - spawn, mkdir, - isLinux, } from "./utils.mjs"; import { parseArgs } from "node:util"; -/** - * @returns {boolean} - */ -function isNixInstalled() { - return existsSync("/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"); -} - /** * @param {"install" | "start"} action */ @@ -76,12 +67,6 @@ async function doBuildkiteAgent(action) { if (isOpenRc()) { const servicePath = "/etc/init.d/buildkite-agent"; - let nixEnv = ""; - - if (isNixInstalled()) { - nixEnv = `. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh;`; - } - const service = `#!/sbin/openrc-run name="buildkite-agent" description="Buildkite Agent" @@ -96,41 +81,22 @@ async function doBuildkiteAgent(action) { --stdout ${escape(agentLogPath)} \\ --stderr ${escape(agentLogPath)}" - start_pre() { - # Source Nix environment if it exists - ${nixEnv} - } - depend() { need net use dns logger - ${nixEnv ? "use nix-daemon" : ""} } `; writeFile(servicePath, service, { mode: 0o755 }); - writeFile(`/etc/conf.d/buildkite-agent`, `rc_ulimit="-n 262144"`); await spawnSafe(["rc-update", "add", "buildkite-agent", "default"], { stdio: "inherit", privileged: true }); } if (isSystemd()) { const servicePath = "/etc/systemd/system/buildkite-agent.service"; - let nix = ""; - - if (isNixInstalled()) { - nix = ` -# Source Nix environment if it exists -ExecStartPre=/bin/sh -c '. /nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh' -Environment=PATH=/nix/var/nix/profiles/default/bin:${process.env.PATH.replaceAll(" ", "\\ ")} -Environment=NIX_PATH=/nix/var/nix/profiles/per-user/root/channels - `; - } - const service = ` [Unit] Description=Buildkite Agent After=syslog.target After=network-online.target - ${nix ? 
"Wants=nix-daemon.service" : ""} [Service] Type=simple @@ -140,8 +106,6 @@ Environment=NIX_PATH=/nix/var/nix/profiles/per-user/root/channels Restart=on-failure KillMode=process - ${nix} - [Journal] Storage=persistent StateDirectory=${escape(agentLogPath)} From 7828790db8c7373150f951222bd87b74165b58ad Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Tue, 10 Dec 2024 18:55:50 -0800 Subject: [PATCH 141/176] Fix permissions [build images] --- scripts/bootstrap.sh | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index d8997ade23675d..2958a9c28f58f7 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -66,19 +66,7 @@ grant_to_user() { chown="$(require chown)" execute_sudo "$chown" -R "$user:$group" "$path" - if ! [ "$user" = "$current_user" ] || ! [ "$group" = "$current_group" ]; then - execute_sudo "$chown" -R "$current_user:$current_group" "$path" - fi -} - -grant_to_everyone() { - path="$1" - if ! [ -f "$path" ] && ! [ -d "$path" ]; then - error "Could not find file or directory: \"$path\"" - fi - - chmod="$(require chmod)" - execute_sudo "$chmod" 777 "$path" + execute_sudo chmod -R 777 "$path" } which() { @@ -142,7 +130,7 @@ create_directory() { create_tmp_directory() { mktemp="$(require mktemp)" path="$(execute "$mktemp" -d)" - grant_to_everyone "$path" + grant_to_user "$path" print "$path" } @@ -202,7 +190,7 @@ download_file() { file_tmp_path="$file_tmp_dir/$(basename "$file_url")" fetch "$file_url" >"$file_tmp_path" - grant_to_everyone "$file_tmp_path" + grant_to_user "$file_tmp_path" print "$file_tmp_path" } From 099696aee19ff26a543edbe49b1844f857f2e47f Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Tue, 10 Dec 2024 19:14:32 -0800 Subject: [PATCH 142/176] Fix shell path? 
[build images] --- scripts/agent.mjs | 2 +- scripts/bootstrap.sh | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/agent.mjs b/scripts/agent.mjs index 601e1c36a49c4f..ece3359cc79111 100755 --- a/scripts/agent.mjs +++ b/scripts/agent.mjs @@ -141,7 +141,7 @@ async function doBuildkiteAgent(action) { shell = `"${cmd}" /S /C`; } else { const sh = which("sh", { required: true }); - shell = `${sh} -e -c`; + shell = `${sh} -elc`; } const flags = ["enable-job-log-tmpfile", "no-feature-reporting"]; diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 2958a9c28f58f7..f58d4a61780897 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -45,16 +45,16 @@ execute_as_user() { if [ "$sudo" = "1" ] || [ "$can_sudo" = "1" ]; then if [ -f "$(which sudo)" ]; then - execute sudo -n -u "$user" "$sh" -c "$*" + execute sudo -n -u "$user" "$sh" -lc "$*" elif [ -f "$(which doas)" ]; then - execute doas -u "$user" "$sh" -c "$*" + execute doas -u "$user" "$sh" -lc "$*" elif [ -f "$(which su)" ]; then - execute su -s "$sh" "$user" -c "$*" + execute su -s "$sh" "$user" -lc "$*" else - execute "$sh" -c "$*" + execute "$sh" -lc "$*" fi else - execute "$sh" -c "$*" + execute "$sh" -lc "$*" fi } @@ -621,7 +621,7 @@ install_brew() { bash="$(require bash)" script=$(download_file "https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh") - execute_as_user "$bash" -c "NONINTERACTIVE=1 $script" + execute_as_user "$bash" -lc "NONINTERACTIVE=1 $script" case "$arch" in x64) @@ -981,7 +981,7 @@ install_rust() { sh="$(require sh)" rustup_script=$(download_file "https://sh.rustup.rs") - execute "$sh" -c "RUSTUP_HOME=$rust_home CARGO_HOME=$rust_home $rustup_script -y --no-modify-path" + execute "$sh" -lc "$rustup_script -y --no-modify-path" append_to_path "$rust_home/bin" ;; esac @@ -1067,7 +1067,7 @@ install_osxcross() { bash="$(require bash)" execute_sudo ln -sf "$(which clang-$(llvm_version))" /usr/bin/clang execute_sudo ln -sf "$(which clang++-$(llvm_version))" /usr/bin/clang++ - execute_sudo "$bash" -c "UNATTENDED=1 TARGET_DIR='$osxcross_path' $osxcross_build_path/build.sh" + execute_sudo "$bash" -lc "UNATTENDED=1 TARGET_DIR='$osxcross_path' $osxcross_build_path/build.sh" execute_sudo rm -rf "$osxcross_build_path" grant_to_user "$osxcross_path" From f210496ee24ca99dd9d5f619a40b41f013ccafa6 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Tue, 10 Dec 2024 19:51:49 -0800 Subject: [PATCH 143/176] Fix typo [build images] --- .buildkite/ci.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 213062b609a2c5..c59aaea492a2c5 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -584,7 +584,7 @@ function getBuildImageStep(platform, dryRun) { "--authorized-org=oven-sh", ]; for (const feature of features || []) { - command.push(`--feature=${feature}`); + command.push(`--${feature}`); } return { From a8d26b31905fb4118e38c491680b8230cf62a4dc Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Tue, 10 Dec 2024 19:54:31 -0800 Subject: [PATCH 144/176] Fix typo [build images] --- .buildkite/ci.mjs | 2 +- scripts/machine.mjs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index c59aaea492a2c5..213062b609a2c5 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -584,7 +584,7 @@ function getBuildImageStep(platform, dryRun) { "--authorized-org=oven-sh", ]; for (const feature of features || []) { - command.push(`--${feature}`); + 
command.push(`--feature=${feature}`); } return { diff --git a/scripts/machine.mjs b/scripts/machine.mjs index 9a69ec41386f9f..c83fcb57fd23fa 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -1313,7 +1313,7 @@ async function main() { const remotePath = "/tmp/bootstrap.sh"; const args = ci ? ["--ci"] : []; for (const feature of features || []) { - args.push(`--feature=${feature}`); + args.push(`--${feature}`); } await startGroup("Running bootstrap...", async () => { await machine.upload(bootstrapPath, remotePath); From 629ba98c804c8ce1f09d2ff70e5c77e098f9654f Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Tue, 10 Dec 2024 23:32:30 -0800 Subject: [PATCH 145/176] [build images] --- scripts/bootstrap.sh | 44 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 42 insertions(+), 2 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index f58d4a61780897..a5abc2f4c331eb 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -214,7 +214,7 @@ append_to_path() { fi append_to_profile "export PATH=\"$path:\$PATH\"" - # export PATH="$path:$PATH" + export PATH="$path:$PATH" } move_to_bin() { @@ -526,6 +526,24 @@ check_ulimit() { if [ -f "$systemctl" ]; then execute_sudo "$systemctl" daemon-reload fi + + # Configure dpkg and apt for faster operation in CI environments + if [ "$ci" = "1" ] && [ "$pm" = "apt" ]; then + dpkg_conf="/etc/dpkg/dpkg.cfg.d/01-ci-options" + execute_sudo create_directory "$(dirname "$dpkg_conf")" + append_file "$dpkg_conf" "force-unsafe-io" + append_file "$dpkg_conf" "no-debsig" + + apt_conf="/etc/apt/apt.conf.d/99-ci-options" + execute_sudo create_directory "$(dirname "$apt_conf")" + append_file "$apt_conf" 'Acquire::Languages "none";' + append_file "$apt_conf" 'Acquire::GzipIndexes "true";' + append_file "$apt_conf" 'Acquire::CompressionTypes::Order:: "gz";' + append_file "$apt_conf" 'APT::Get::Install-Recommends "false";' + append_file "$apt_conf" 'APT::Get::Install-Suggests "false";' + append_file "$apt_conf" 'Dpkg::Options { "--force-confdef"; "--force-confold"; }' + fi + } package_manager() { @@ -922,7 +940,7 @@ install_gcc() { install_packages \ "gcc-$gcc_version" \ "g++-$gcc_version" \ - "libgcc-$gcc_version-dev" \ + "libgcc-$gcc_version-dev" \ "libstdc++-$gcc_version-dev" \ libasan6 \ libubsan1 \ @@ -948,6 +966,18 @@ install_gcc() { ;; esac + llvm_v="18" + + append_to_profile "CC=clang-${llvm_v}" + append_to_profile "CXX=clang++-${llvm_v}" + append_to_profile "AR=llvm-ar-${llvm_v}" + append_to_profile "RANLIB=llvm-ranlib-${llvm_v}" + append_to_profile "LD=lld-${llvm_v}" + append_to_profile "LD_LIBRARY_PATH=/usr/lib/gcc/${arch_triplet}/${gcc_version}:/usr/lib/${arch_triplet}" + append_to_profile "LIBRARY_PATH=/usr/lib/gcc/${arch_triplet}/${gcc_version}:/usr/lib/${arch_triplet}" + append_to_profile "CPLUS_INCLUDE_PATH=/usr/include/c++/${gcc_version}:/usr/include/${arch_triplet}/c++/${gcc_version}" + append_to_profile "C_INCLUDE_PATH=/usr/lib/gcc/${arch_triplet}/${gcc_version}/include" + gcc_path="/usr/lib/gcc/$arch_path/$gcc_version" create_directory "$gcc_path" execute_sudo ln -sf /usr/lib/$arch_path/libstdc++.so.6 "$gcc_path/libstdc++.so.6" @@ -956,6 +986,16 @@ install_gcc() { append_file "$ld_conf_path" "$gcc_path" append_file "$ld_conf_path" "/usr/lib/$arch_path" execute_sudo ldconfig + + execute_sudo ln -sf $(which clang-${llvm_v}) /usr/bin/clang + execute_sudo ln -sf $(which clang+${llvm_v}) /usr/bin/clang++ + execute_sudo ln -sf $(which lld-${llvm_v}) /usr/bin/lld + execute_sudo ln -sf $(which lldb-${llvm_v}) 
/usr/bin/lldb + execute_sudo ln -sf $(which clangd-${llvm_v}) /usr/bin/clangd + execute_sudo ln -sf $(which llvm-ar-${llvm_v}) /usr/bin/llvm-ar + execute_sudo ln -sf $(which ld.lld-${llvm_v}) /usr/bin/ld + execute_sudo ln -sf $(which clang) /usr/bin/cc + execute_sudo ln -sf $(which clang++) /usr/bin/c++ } install_ccache() { From 9f3558bd589cea47d57c365dfc026f0d7fca5fed Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Tue, 10 Dec 2024 23:33:25 -0800 Subject: [PATCH 146/176] [build images] --- scripts/bootstrap.sh | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index a5abc2f4c331eb..bc48c423db6a9b 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -968,15 +968,15 @@ install_gcc() { llvm_v="18" - append_to_profile "CC=clang-${llvm_v}" - append_to_profile "CXX=clang++-${llvm_v}" - append_to_profile "AR=llvm-ar-${llvm_v}" - append_to_profile "RANLIB=llvm-ranlib-${llvm_v}" - append_to_profile "LD=lld-${llvm_v}" - append_to_profile "LD_LIBRARY_PATH=/usr/lib/gcc/${arch_triplet}/${gcc_version}:/usr/lib/${arch_triplet}" - append_to_profile "LIBRARY_PATH=/usr/lib/gcc/${arch_triplet}/${gcc_version}:/usr/lib/${arch_triplet}" - append_to_profile "CPLUS_INCLUDE_PATH=/usr/include/c++/${gcc_version}:/usr/include/${arch_triplet}/c++/${gcc_version}" - append_to_profile "C_INCLUDE_PATH=/usr/lib/gcc/${arch_triplet}/${gcc_version}/include" + append_to_profile "export CC=clang-${llvm_v}" + append_to_profile "export CXX=clang++-${llvm_v}" + append_to_profile "export AR=llvm-ar-${llvm_v}" + append_to_profile "export RANLIB=llvm-ranlib-${llvm_v}" + append_to_profile "export LD=lld-${llvm_v}" + append_to_profile "export LD_LIBRARY_PATH=/usr/lib/gcc/${arch_triplet}/${gcc_version}:/usr/lib/${arch_triplet}" + append_to_profile "export LIBRARY_PATH=/usr/lib/gcc/${arch_triplet}/${gcc_version}:/usr/lib/${arch_triplet}" + append_to_profile "export CPLUS_INCLUDE_PATH=/usr/include/c++/${gcc_version}:/usr/include/${arch_triplet}/c++/${gcc_version}" + append_to_profile "export C_INCLUDE_PATH=/usr/lib/gcc/${arch_triplet}/${gcc_version}/include" gcc_path="/usr/lib/gcc/$arch_path/$gcc_version" create_directory "$gcc_path" From 2b81932bb91f4c1facd356ab704278a4ca981462 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Tue, 10 Dec 2024 23:35:07 -0800 Subject: [PATCH 147/176] [build images] --- scripts/bootstrap.sh | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index bc48c423db6a9b..851ed4495e0f6f 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -973,10 +973,10 @@ install_gcc() { append_to_profile "export AR=llvm-ar-${llvm_v}" append_to_profile "export RANLIB=llvm-ranlib-${llvm_v}" append_to_profile "export LD=lld-${llvm_v}" - append_to_profile "export LD_LIBRARY_PATH=/usr/lib/gcc/${arch_triplet}/${gcc_version}:/usr/lib/${arch_triplet}" - append_to_profile "export LIBRARY_PATH=/usr/lib/gcc/${arch_triplet}/${gcc_version}:/usr/lib/${arch_triplet}" - append_to_profile "export CPLUS_INCLUDE_PATH=/usr/include/c++/${gcc_version}:/usr/include/${arch_triplet}/c++/${gcc_version}" - append_to_profile "export C_INCLUDE_PATH=/usr/lib/gcc/${arch_triplet}/${gcc_version}/include" + append_to_profile "export LD_LIBRARY_PATH=/usr/lib/gcc/${arch_path}/${gcc_version}:/usr/lib/${arch_path}" + append_to_profile "export LIBRARY_PATH=/usr/lib/gcc/${arch_path}/${gcc_version}:/usr/lib/${arch_path}" + append_to_profile "export 
CPLUS_INCLUDE_PATH=/usr/include/c++/${gcc_version}:/usr/include/${arch_path}/c++/${gcc_version}" + append_to_profile "export C_INCLUDE_PATH=/usr/lib/gcc/${arch_path}/${gcc_version}/include" gcc_path="/usr/lib/gcc/$arch_path/$gcc_version" create_directory "$gcc_path" @@ -987,13 +987,13 @@ install_gcc() { append_file "$ld_conf_path" "/usr/lib/$arch_path" execute_sudo ldconfig - execute_sudo ln -sf $(which clang-${llvm_v}) /usr/bin/clang - execute_sudo ln -sf $(which clang+${llvm_v}) /usr/bin/clang++ - execute_sudo ln -sf $(which lld-${llvm_v}) /usr/bin/lld - execute_sudo ln -sf $(which lldb-${llvm_v}) /usr/bin/lldb - execute_sudo ln -sf $(which clangd-${llvm_v}) /usr/bin/clangd - execute_sudo ln -sf $(which llvm-ar-${llvm_v}) /usr/bin/llvm-ar - execute_sudo ln -sf $(which ld.lld-${llvm_v}) /usr/bin/ld + execute_sudo ln -sf $(which clang-$llvm_v) /usr/bin/clang + execute_sudo ln -sf $(which clang++-$llvm_v) /usr/bin/clang++ + execute_sudo ln -sf $(which lld-$llvm_v) /usr/bin/lld + execute_sudo ln -sf $(which lldb-$llvm_v) /usr/bin/lldb + execute_sudo ln -sf $(which clangd-$llvm_v) /usr/bin/clangd + execute_sudo ln -sf $(which llvm-ar-$llvm_v) /usr/bin/llvm-ar + execute_sudo ln -sf $(which ld.lld-$llvm_v) /usr/bin/ld execute_sudo ln -sf $(which clang) /usr/bin/cc execute_sudo ln -sf $(which clang++) /usr/bin/c++ } From 3279cd5162f1bf75aef4a72ca6180e19d902fbd6 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Tue, 10 Dec 2024 23:42:50 -0800 Subject: [PATCH 148/176] [build images] --- src/codegen/bindgen.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/codegen/bindgen.ts b/src/codegen/bindgen.ts index d548a9a7cde09f..ef6d870908043e 100644 --- a/src/codegen/bindgen.ts +++ b/src/codegen/bindgen.ts @@ -683,7 +683,7 @@ function emitConvertEnumFunction(w: CodeWriter, type: TypeImpl) { w.line(`{`); w.line(` static constexpr std::pair mappings[] = {`); for (const value of type.data) { - w.line(` { ${str(value)}, ${name}::${pascal(value)} },`); + w.line(` { ${str(value)}_s, ${name}::${pascal(value)} },`); } w.line(` };`); w.line(` static constexpr SortedArrayMap enumerationMapping { mappings };`); From b1551c5398f485d33b3644b52f9b74aa56477ffb Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 06:23:03 -0800 Subject: [PATCH 149/176] [build images] --- .buildkite/Dockerfile | 157 +++++++++++++++++++++++++++++ .buildkite/Dockerfile-bootstrap.sh | 70 +++++++++++++ .buildkite/ci.mjs | 10 +- .dockerignore | 3 + scripts/machine.mjs | 57 ++++++++--- 5 files changed, 281 insertions(+), 16 deletions(-) create mode 100644 .buildkite/Dockerfile create mode 100644 .buildkite/Dockerfile-bootstrap.sh diff --git a/.buildkite/Dockerfile b/.buildkite/Dockerfile new file mode 100644 index 00000000000000..1f65b2b108ece9 --- /dev/null +++ b/.buildkite/Dockerfile @@ -0,0 +1,157 @@ +ARG LLVM_VERSION="18" +ARG REPORTED_LLVM_VERSION="18.1.8" +ARG OLD_BUN_VERSION="1.1.38" +ARG DEFAULT_CFLAGS="-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables" +ARG DEFAULT_CXXFLAGS="-flto=full -fwhole-program-vtables -fforce-emit-vtables" +ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}" + +FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64 +FROM --platform=$BUILDPLATFORM ubuntu:18.04 as base-amd64 +FROM base-$TARGETARCH as base + +ARG LLVM_VERSION +ARG OLD_BUN_VERSION +ARG DEFAULT_CFLAGS +ARG TARGETARCH +ARG DEFAULT_CXXFLAGS +ARG DEFAULT_CFLAGS +ARG REPORTED_LLVM_VERSION + +ENV 
DEBIAN_FRONTEND=noninteractive \ + CI=true \ + DOCKER=true + +RUN echo "Acquire::Queue-Mode \"host\";" > /etc/apt/apt.conf.d/99-apt-queue-mode.conf \ + && echo "Acquire::Timeout \"120\";" >> /etc/apt/apt.conf.d/99-apt-timeout.conf \ + && echo "Acquire::Retries \"3\";" >> /etc/apt/apt.conf.d/99-apt-retries.conf \ + && echo "APT::Install-Recommends \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-recommends.conf \ + && echo "APT::Install-Suggests \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-suggests.conf \ + && echo "Acquire::Pipeline::Depth \"5\";" >> /etc/apt/apt.conf.d/99-apt-pipeline-depth.conf + +RUN apt-get update && apt-get install -y --no-install-recommends \ + wget curl git python3 python3-pip ninja-build \ + software-properties-common apt-transport-https \ + ca-certificates gnupg lsb-release unzip \ + libxml2-dev ruby ruby-dev bison gawk perl make golang \ + && wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | tee /etc/apt/trusted.gpg.d/kitware.gpg >/dev/null \ + && apt-add-repository "deb https://apt.kitware.com/ubuntu/ $(lsb_release -cs) main" \ + && add-apt-repository ppa:ubuntu-toolchain-r/test \ + && apt-get update \ + && apt-get install -y cmake gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \ + libasan6 libubsan1 libatomic1 libtsan0 liblsan0 \ + libgfortran5 libc6-dev \ + && wget https://apt.llvm.org/llvm.sh \ + && chmod +x llvm.sh \ + && ./llvm.sh ${LLVM_VERSION} all \ + && rm llvm.sh + +RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \ + --slave /usr/bin/g++ g++ /usr/bin/g++-13 \ + --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \ + --slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-13 \ + --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-13 + +RUN echo "ARCH_PATH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64-linux-gnu" || echo "x86_64-linux-gnu")" >> /etc/environment \ + && echo "BUN_ARCH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64" || echo "x64")" >> /etc/environment + +ENV LD_LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \ + LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \ + CPLUS_INCLUDE_PATH=/usr/include/c++/13:/usr/include/${ARCH_PATH}/c++/13 \ + C_INCLUDE_PATH=/usr/lib/gcc/${ARCH_PATH}/13/include \ + CFLAGS=${DEFAULT_CFLAGS} \ + CXXFLAGS="${DEFAULT_CFLAGS} ${DEFAULT_CXXFLAGS}" + +RUN if [ "$TARGETARCH" = "arm64" ]; then \ + export ARCH_PATH="aarch64-linux-gnu"; \ + else \ + export ARCH_PATH="x86_64-linux-gnu"; \ + fi \ + && mkdir -p /usr/lib/gcc/${ARCH_PATH}/13 \ + && ln -sf /usr/lib/${ARCH_PATH}/libstdc++.so.6 /usr/lib/gcc/${ARCH_PATH}/13/ \ + && echo "/usr/lib/gcc/${ARCH_PATH}/13" > /etc/ld.so.conf.d/gcc-13.conf \ + && echo "/usr/lib/${ARCH_PATH}" >> /etc/ld.so.conf.d/gcc-13.conf \ + && ldconfig + +RUN for f in /usr/lib/llvm-${LLVM_VERSION}/bin/*; do ln -sf "$f" /usr/bin; done \ + && ln -sf /usr/bin/clang-${LLVM_VERSION} /usr/bin/clang \ + && ln -sf /usr/bin/clang++-${LLVM_VERSION} /usr/bin/clang++ \ + && ln -sf /usr/bin/lld-${LLVM_VERSION} /usr/bin/lld \ + && ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \ + && ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \ + && ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \ + && ln -sf /usr/bin/ld.lld /usr/bin/ld \ + && ln -sf /usr/bin/clang /usr/bin/cc \ + && ln -sf /usr/bin/clang++ /usr/bin/c++ + +ENV CC="clang-${LLVM_VERSION}" \ + CXX="clang++-${LLVM_VERSION}" \ + AR="llvm-ar-${LLVM_VERSION}" \ + RANLIB="llvm-ranlib-${LLVM_VERSION}" \ + LD="lld-${LLVM_VERSION}" + +RUN 
--mount=type=tmpfs,target=/tmp \ + bash -c '\ + set -euxo pipefail && \ + source /etc/environment && \ + echo "Downloading bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip from https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip" && \ + curl -fsSL https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip -o /tmp/bun.zip && \ + unzip /tmp/bun.zip -d /tmp/bun && \ + mv /tmp/bun/*/bun /usr/bin/bun && \ + chmod +x /usr/bin/bun' + +ENV LLVM_VERSION=${REPORTED_LLVM_VERSION} + +WORKDIR /workspace + + +FROM --platform=$BUILDPLATFORM base as buildkite +ARG BUILDKITE_AGENT_TAGS + +RUN curl -sL https://raw.githubusercontent.com/buildkite/agent/master/install.sh | bash -s -- "3.50.3" \ + && useradd -r -m -d /var/lib/buildkite-agent buildkite-agent \ + && mkdir -p /etc/buildkite-agent \ + && chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent && mkdir -p /var/lib/buildkite-agent/cache/bun + +COPY --chown=buildkite-agent:buildkite-agent ../scripts/agent.mjs /var/bun/scripts/ + +ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun +ENV BUILDKITE_AGENT_TAGS=${BUILDKITE_AGENT_TAGS} + + +WORKDIR /var/bun/scripts +USER buildkite-agent + +# Install Rust nightly +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \ + && export PATH=$HOME/.cargo/bin:$PATH \ + && rustup install nightly \ + && rustup default nightly + +ENV PATH=/root/.cargo/bin:$PATH + + +CMD ["bun", "/var/bun/scripts/agent.mjs", "start"] + +FROM --platform=$BUILDPLATFORM base as bun-build-linux-local + +ARG LLVM_VERSION +WORKDIR /workspace/bun + +COPY . /workspace/bun + + +# Install Rust nightly +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \ + && export PATH=$HOME/.cargo/bin:$PATH \ + && rustup install nightly \ + && rustup default nightly + +ENV PATH=/root/.cargo/bin:$PATH + +RUN --mount=type=tmpfs,target=/workspace/bun/build \ + && ls -la \ + && bun run build:release \ + && mkdir -p /target \ + && cp -r /workspace/bun/build/release/bun /target/bun + + diff --git a/.buildkite/Dockerfile-bootstrap.sh b/.buildkite/Dockerfile-bootstrap.sh new file mode 100644 index 00000000000000..393fd3fe821237 --- /dev/null +++ b/.buildkite/Dockerfile-bootstrap.sh @@ -0,0 +1,70 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Ensure /tmp/agent.mjs, /tmp/Dockerfile are present +if [ ! -f /tmp/agent.mjs ] || [ ! -f /tmp/Dockerfile ]; then + # Print each missing file + if [ ! -f /tmp/agent.mjs ]; then + echo "error: /tmp/agent.mjs is missing" + fi + if [ ! 
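# A hedged usage sketch for this bootstrap script: the host name below is a
# placeholder, and the destinations mirror what scripts/machine.mjs uploads
# later in this series. Stage the two required files in /tmp, run the script
# as root, then start the agent service when ready.
scp scripts/agent.mjs                  ec2-host:/tmp/agent.mjs
scp .buildkite/Dockerfile              ec2-host:/tmp/Dockerfile
scp .buildkite/Dockerfile-bootstrap.sh ec2-host:/tmp/bootstrap.sh
ssh ec2-host 'sudo bash /tmp/bootstrap.sh'
ssh ec2-host 'sudo systemctl start buildkite-agent'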
-f /tmp/Dockerfile ]; then + echo "error: /tmp/Dockerfile is missing" + fi + exit 1 +fi + +# Install Docker +dnf update -y + +dnf install -y docker +systemctl enable docker +systemctl start docker + +# Create builder +docker buildx create --name builder --driver docker-container --bootstrap --use + +# Set up Docker to start on boot +cat << 'EOF' > /etc/systemd/system/buildkite-agent.service +[Unit] +Description=Buildkite Docker Container +After=docker.service +Requires=docker.service + +[Service] +TimeoutStartSec=0 +Restart=always +ExecStartPre=-/usr/bin/docker stop buildkite +ExecStartPre=-/usr/bin/docker rm buildkite +ExecStart=/usr/bin/docker run \ + --name buildkite \ + --restart=unless-stopped \ + buildkite:latest + +[Install] +WantedBy=multi-user.target + +EOF + +echo "Building Buildkite image" + +# Make the directory match up with the Dockerfile +mkdir -p /tmp/fakebun/scripts /tmp/fakebun/.buildkite +cp /tmp/agent.mjs /tmp/fakebun/scripts/ +cp /tmp/Dockerfile /tmp/fakebun/.buildkite/Dockerfile + +cd /tmp/fakebun + +# Build the Buildkite image +docker buildx build \ + --platform $(uname -m | sed 's/aarch64/linux\/arm64/;s/x86_64/linux\/amd64/') \ + --tag buildkite:latest \ + --target buildkite \ + -f .buildkite/Dockerfile \ + . + +# Enable the service, but don't start it yet +systemctl enable buildkite-agent + +echo "Bootstrap complete" +echo "To start the Buildkite agent, run: " +echo " systemctl start buildkite-agent" \ No newline at end of file diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 5205c373039b95..4082f986ddf516 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -105,9 +105,9 @@ function getTargetLabel(target) { const buildPlatforms = [ { os: "darwin", arch: "aarch64", release: "14" }, { os: "darwin", arch: "x64", release: "14" }, - { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04", features: ["gcc-13"] }, - { os: "linux", arch: "x64", distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, - { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, + { os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023", docker: true }, + { os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", docker: true }, + { os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", docker: true }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" }, @@ -559,7 +559,8 @@ function getTestBunStep(platform, options = {}) { * @returns {Step} */ function getBuildImageStep(platform, dryRun) { - const { os, arch, distro, release, features } = platform; + const { os, arch, distro, release, docker } = platform; + const action = dryRun ? "create-image" : "publish-image"; /** @type {string[]} */ @@ -574,6 +575,7 @@ function getBuildImageStep(platform, dryRun) { "--cloud=aws", "--ci", "--authorized-org=oven-sh", + docker ? 
"--docker" : "", ]; for (const feature of features || []) { command.push(`--feature=${feature}`); diff --git a/.dockerignore b/.dockerignore index 6a0ae98134ec53..d76783768281be 100644 --- a/.dockerignore +++ b/.dockerignore @@ -16,3 +16,6 @@ zig-out build vendor node_modules +*.trace + +packages/bun-uws/fuzzing \ No newline at end of file diff --git a/scripts/machine.mjs b/scripts/machine.mjs index c83fcb57fd23fa..eaed5314c0663f 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -1154,6 +1154,7 @@ async function main() { "no-bootstrap": { type: "boolean" }, "buildkite-token": { type: "string" }, "tailscale-authkey": { type: "string" }, + "docker": { type: "boolean" }, }, }); @@ -1200,14 +1201,22 @@ async function main() { rdp: !!args["rdp"] || !!args["vnc"], sshKeys, userData: args["user-data"] ? readFile(args["user-data"]) : undefined, + isDockerImage: !!args["docker"], }; - const { detached, bootstrap, ci, os, arch, distro, release, features } = options; + let { detached, bootstrap, ci, os, arch, distro, release, features, isDockerImage } = options; const name = distro ? `${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`; - let bootstrapPath, agentPath; + let bootstrapPath, agentPath, dockerfilePath; if (bootstrap) { - bootstrapPath = resolve(import.meta.dirname, os === "windows" ? "bootstrap.ps1" : "bootstrap.sh"); + bootstrapPath = resolve( + import.meta.dirname, + os === "windows" + ? "bootstrap.ps1" + : isDockerImage && os === "linux" && distro === "amazonlinux" + ? "../.buildkite/Dockerfile-bootstrap.sh" + : "bootstrap.sh", + ); if (!existsSync(bootstrapPath)) { throw new Error(`Script not found: ${bootstrapPath}`); } @@ -1221,6 +1230,14 @@ async function main() { agentPath = join(tmpPath, "agent.mjs"); await spawnSafe($`${npx} esbuild ${entryPath} --bundle --platform=node --format=esm --outfile=${agentPath}`); } + + if (isDockerImage) { + dockerfilePath = resolve(import.meta.dirname, "../.buildkite/Dockerfile"); + + if (!existsSync(dockerfilePath)) { + throw new Error(`Dockerfile not found: ${dockerfilePath}`); + } + } } /** @type {Machine} */ @@ -1310,15 +1327,31 @@ async function main() { await machine.spawnSafe(["powershell", remotePath, ...args], { stdio: "inherit" }); }); } else { - const remotePath = "/tmp/bootstrap.sh"; - const args = ci ? ["--ci"] : []; - for (const feature of features || []) { - args.push(`--${feature}`); + if (!isDockerImage) { + const remotePath = "/tmp/bootstrap.sh"; + const args = ci ? 
["--ci"] : []; + for (const feature of features || []) { + args.push(`--${feature}`); + } + await startGroup("Running bootstrap...", async () => { + await machine.upload(bootstrapPath, remotePath); + await machine.spawnSafe(["sh", remotePath, ...args], { stdio: "inherit" }); + }); + } else if (dockerfilePath) { + const remotePath = "/tmp/bootstrap.sh"; + + await startGroup("Running Docker bootstrap...", async () => { + await machine.upload(bootstrapPath, remotePath); + console.log("Uploaded bootstrap.sh"); + await machine.upload(dockerfilePath, "/tmp/Dockerfile"); + console.log("Uploaded Dockerfile"); + await machine.upload(agentPath, "/tmp/agent.mjs"); + console.log("Uploaded agent.mjs"); + agentPath = ""; + bootstrapPath = ""; + await machine.spawnSafe(["sudo", "bash", remotePath], { stdio: "inherit", cwd: "/tmp" }); + }); } - await startGroup("Running bootstrap...", async () => { - await machine.upload(bootstrapPath, remotePath); - await machine.spawnSafe(["sh", remotePath, ...args], { stdio: "inherit" }); - }); } } @@ -1327,7 +1360,7 @@ async function main() { const remotePath = "C:\\buildkite-agent\\agent.mjs"; await startGroup("Installing agent...", async () => { await machine.upload(agentPath, remotePath); - if (cloud.name === "docker") { + if (cloud.name === "docker" || isDockerImage) { return; } await machine.spawnSafe(["node", remotePath, "install"], { stdio: "inherit" }); From e9ab841da576b21ea5478d77a07ca90c365f39df Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 06:32:02 -0800 Subject: [PATCH 150/176] [build images] --- .buildkite/ci.mjs | 3 --- 1 file changed, 3 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 4082f986ddf516..af2429721d8010 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -577,9 +577,6 @@ function getBuildImageStep(platform, dryRun) { "--authorized-org=oven-sh", docker ? 
"--docker" : "", ]; - for (const feature of features || []) { - command.push(`--feature=${feature}`); - } return { key: `${getImageKey(platform)}-build-image`, From a25bfae76dcc9062558c046ab1a9757edff27420 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 06:40:11 -0800 Subject: [PATCH 151/176] [build images] --- .buildkite/Dockerfile | 9 ++-- .buildkite/Dockerfile-bootstrap.sh | 78 +++++++++++++++++++++++++----- 2 files changed, 69 insertions(+), 18 deletions(-) diff --git a/.buildkite/Dockerfile b/.buildkite/Dockerfile index 1f65b2b108ece9..a06b5a5c9dd8fa 100644 --- a/.buildkite/Dockerfile +++ b/.buildkite/Dockerfile @@ -25,8 +25,7 @@ RUN echo "Acquire::Queue-Mode \"host\";" > /etc/apt/apt.conf.d/99-apt-queue-mode && echo "Acquire::Timeout \"120\";" >> /etc/apt/apt.conf.d/99-apt-timeout.conf \ && echo "Acquire::Retries \"3\";" >> /etc/apt/apt.conf.d/99-apt-retries.conf \ && echo "APT::Install-Recommends \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-recommends.conf \ - && echo "APT::Install-Suggests \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-suggests.conf \ - && echo "Acquire::Pipeline::Depth \"5\";" >> /etc/apt/apt.conf.d/99-apt-pipeline-depth.conf + && echo "APT::Install-Suggests \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-suggests.conf RUN apt-get update && apt-get install -y --no-install-recommends \ wget curl git python3 python3-pip ninja-build \ @@ -149,9 +148,7 @@ RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \ ENV PATH=/root/.cargo/bin:$PATH RUN --mount=type=tmpfs,target=/workspace/bun/build \ - && ls -la \ + ls -la \ && bun run build:release \ && mkdir -p /target \ - && cp -r /workspace/bun/build/release/bun /target/bun - - + && cp -r /workspace/bun/build/release/bun /target/bun \ No newline at end of file diff --git a/.buildkite/Dockerfile-bootstrap.sh b/.buildkite/Dockerfile-bootstrap.sh index 393fd3fe821237..0c55a273e39a6c 100644 --- a/.buildkite/Dockerfile-bootstrap.sh +++ b/.buildkite/Dockerfile-bootstrap.sh @@ -1,6 +1,18 @@ #!/usr/bin/env bash set -euo pipefail +# Check if running as root +if [ "$EUID" -ne 0 ]; then + echo "error: must run as root" + exit 1 +fi + +# Check OS compatibility +if ! command -v dnf &> /dev/null; then + echo "error: this script requires dnf (RHEL/Fedora/CentOS)" + exit 1 +fi + # Ensure /tmp/agent.mjs, /tmp/Dockerfile are present if [ ! -f /tmp/agent.mjs ] || [ ! 
-f /tmp/Dockerfile ]; then # Print each missing file @@ -15,44 +27,65 @@ fi # Install Docker dnf update -y - dnf install -y docker + systemctl enable docker -systemctl start docker +systemctl start docker || { + echo "error: failed to start Docker" + exit 1 +} # Create builder -docker buildx create --name builder --driver docker-container --bootstrap --use +docker buildx create --name builder --driver docker-container --bootstrap --use || { + echo "error: failed to create Docker buildx builder" + exit 1 +} # Set up Docker to start on boot cat << 'EOF' > /etc/systemd/system/buildkite-agent.service [Unit] Description=Buildkite Docker Container -After=docker.service -Requires=docker.service +After=docker.service network-online.target +Requires=docker.service network-online.target [Service] TimeoutStartSec=0 Restart=always +RestartSec=5 ExecStartPre=-/usr/bin/docker stop buildkite ExecStartPre=-/usr/bin/docker rm buildkite ExecStart=/usr/bin/docker run \ --name buildkite \ --restart=unless-stopped \ + --network host \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ buildkite:latest [Install] WantedBy=multi-user.target - EOF echo "Building Buildkite image" -# Make the directory match up with the Dockerfile +# Clean up any previous build artifacts +rm -rf /tmp/fakebun mkdir -p /tmp/fakebun/scripts /tmp/fakebun/.buildkite -cp /tmp/agent.mjs /tmp/fakebun/scripts/ -cp /tmp/Dockerfile /tmp/fakebun/.buildkite/Dockerfile -cd /tmp/fakebun +# Copy required files +cp /tmp/agent.mjs /tmp/fakebun/scripts/ || { + echo "error: failed to copy agent.mjs" + exit 1 +} +cp /tmp/Dockerfile /tmp/fakebun/.buildkite/Dockerfile || { + echo "error: failed to copy Dockerfile" + exit 1 +} + +cd /tmp/fakebun || { + echo "error: failed to change directory" + exit 1 +} # Build the Buildkite image docker buildx build \ @@ -60,10 +93,31 @@ docker buildx build \ --tag buildkite:latest \ --target buildkite \ -f .buildkite/Dockerfile \ - . + . 
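# A quick sketch (not part of the patch) of what the --platform mapping in the
# buildx invocation above evaluates to; it can be checked without real hardware:
for m in aarch64 x86_64; do
  echo "$m -> $(echo "$m" | sed 's/aarch64/linux\/arm64/;s/x86_64/linux\/amd64/')"
done
# aarch64 -> linux/arm64
# x86_64  -> linux/amd64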
|| { + echo "error: Docker build failed" + exit 1 +} + +# Create but don't start the container +docker container create \ + --name buildkite \ + --restart=unless-stopped \ + --network host \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + buildkite:latest || { + echo "error: failed to create buildkite container" + exit 1 +} + +# Reload systemd to pick up new service +systemctl daemon-reload # Enable the service, but don't start it yet -systemctl enable buildkite-agent +systemctl enable buildkite-agent || { + echo "error: failed to enable buildkite-agent service" + exit 1 +} echo "Bootstrap complete" echo "To start the Buildkite agent, run: " From 774a352ccd218b3d27c9fcfd83f55982ec75b104 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 06:54:39 -0800 Subject: [PATCH 152/176] [build images] --- .buildkite/Dockerfile | 7 +++++-- .buildkite/Dockerfile-bootstrap.sh | 7 ++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.buildkite/Dockerfile b/.buildkite/Dockerfile index a06b5a5c9dd8fa..836f31ef2bb1d0 100644 --- a/.buildkite/Dockerfile +++ b/.buildkite/Dockerfile @@ -82,8 +82,8 @@ RUN for f in /usr/lib/llvm-${LLVM_VERSION}/bin/*; do ln -sf "$f" /usr/bin; done && ln -sf /usr/bin/clang /usr/bin/cc \ && ln -sf /usr/bin/clang++ /usr/bin/c++ -ENV CC="clang-${LLVM_VERSION}" \ - CXX="clang++-${LLVM_VERSION}" \ +ENV CC="clang" \ + CXX="clang++" \ AR="llvm-ar-${LLVM_VERSION}" \ RANLIB="llvm-ranlib-${LLVM_VERSION}" \ LD="lld-${LLVM_VERSION}" @@ -147,6 +147,9 @@ RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \ ENV PATH=/root/.cargo/bin:$PATH +ENV LLVM_VERSION=${REPORTED_LLVM_VERSION} + + RUN --mount=type=tmpfs,target=/workspace/bun/build \ ls -la \ && bun run build:release \ diff --git a/.buildkite/Dockerfile-bootstrap.sh b/.buildkite/Dockerfile-bootstrap.sh index 0c55a273e39a6c..b7beaa43cbd7b2 100644 --- a/.buildkite/Dockerfile-bootstrap.sh +++ b/.buildkite/Dockerfile-bootstrap.sh @@ -27,7 +27,7 @@ fi # Install Docker dnf update -y -dnf install -y docker +dnf install -y ddocker systemctl enable docker systemctl start docker || { @@ -98,13 +98,10 @@ docker buildx build \ exit 1 } -# Create but don't start the container +# Create container to ensure image is cached in AMI docker container create \ --name buildkite \ --restart=unless-stopped \ - --network host \ - -v /var/run/docker.sock:/var/run/docker.sock \ - -v /tmp:/tmp \ buildkite:latest || { echo "error: failed to create buildkite container" exit 1 From 0838807e86e3c36134f1f33413e4b2a6bc6d7240 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 07:00:54 -0800 Subject: [PATCH 153/176] [build images] --- .buildkite/Dockerfile-bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/Dockerfile-bootstrap.sh b/.buildkite/Dockerfile-bootstrap.sh index b7beaa43cbd7b2..112ad3d51d7d6d 100644 --- a/.buildkite/Dockerfile-bootstrap.sh +++ b/.buildkite/Dockerfile-bootstrap.sh @@ -27,7 +27,7 @@ fi # Install Docker dnf update -y -dnf install -y ddocker +dnf install -y docker systemctl enable docker systemctl start docker || { From 121d843c71f51ded793833a93c76764725e15a3d Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 07:12:48 -0800 Subject: [PATCH 154/176] [build images] --- .buildkite/Dockerfile | 1 - .buildkite/Dockerfile-bootstrap.sh | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/Dockerfile b/.buildkite/Dockerfile index 836f31ef2bb1d0..4f03cd61ec7bef 100644 --- 
a/.buildkite/Dockerfile +++ b/.buildkite/Dockerfile @@ -11,7 +11,6 @@ FROM base-$TARGETARCH as base ARG LLVM_VERSION ARG OLD_BUN_VERSION -ARG DEFAULT_CFLAGS ARG TARGETARCH ARG DEFAULT_CXXFLAGS ARG DEFAULT_CFLAGS diff --git a/.buildkite/Dockerfile-bootstrap.sh b/.buildkite/Dockerfile-bootstrap.sh index 112ad3d51d7d6d..b8ccba7d17dded 100644 --- a/.buildkite/Dockerfile-bootstrap.sh +++ b/.buildkite/Dockerfile-bootstrap.sh @@ -93,6 +93,7 @@ docker buildx build \ --tag buildkite:latest \ --target buildkite \ -f .buildkite/Dockerfile \ + --load \ . || { echo "error: Docker build failed" exit 1 From b9cb65c95b2329a7f7b72854551d8bda0c24c56f Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 08:47:05 -0800 Subject: [PATCH 155/176] [build images] --- .buildkite/Dockerfile | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/.buildkite/Dockerfile b/.buildkite/Dockerfile index 4f03cd61ec7bef..ddd14db657b55d 100644 --- a/.buildkite/Dockerfile +++ b/.buildkite/Dockerfile @@ -105,12 +105,24 @@ WORKDIR /workspace FROM --platform=$BUILDPLATFORM base as buildkite ARG BUILDKITE_AGENT_TAGS -RUN curl -sL https://raw.githubusercontent.com/buildkite/agent/master/install.sh | bash -s -- "3.50.3" \ - && useradd -r -m -d /var/lib/buildkite-agent buildkite-agent \ - && mkdir -p /etc/buildkite-agent \ - && chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent && mkdir -p /var/lib/buildkite-agent/cache/bun -COPY --chown=buildkite-agent:buildkite-agent ../scripts/agent.mjs /var/bun/scripts/ +# Install Rust nightly +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \ + && export PATH=$HOME/.cargo/bin:$PATH \ + && rustup install nightly \ + && rustup default nightly + + +RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; fi) && \ + echo "Downloading buildkite-cli-v3.4.1/bk_3.4.1_linux_$ARCH.deb from https://github.com/buildkite/cli/releases/download/v3.4.1/bk_3.4.1_linux_$ARCH.deb" && \ + curl -fsSL https://github.com/buildkite/cli/releases/download/v3.4.1/bk_3.4.1_linux_$ARCH.deb -o /tmp/bk.deb && \ + dpkg -i /tmp/bk.deb + +RUN useradd -r -m -d /var/lib/buildkite-agent buildkite-agent \ + && mkdir -p /etc/buildkite-agent \ + && chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent && mkdir -p /var/lib/buildkite-agent/cache/bun + +COPY --chown=buildkite-agent:buildkite-agent ../*/agent.mjs /var/bun/scripts/ ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun ENV BUILDKITE_AGENT_TAGS=${BUILDKITE_AGENT_TAGS} @@ -119,11 +131,6 @@ ENV BUILDKITE_AGENT_TAGS=${BUILDKITE_AGENT_TAGS} WORKDIR /var/bun/scripts USER buildkite-agent -# Install Rust nightly -RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \ - && export PATH=$HOME/.cargo/bin:$PATH \ - && rustup install nightly \ - && rustup default nightly ENV PATH=/root/.cargo/bin:$PATH From 49f6a9b4d4c5c500e01dd9d5c994ea3211162375 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 08:48:57 -0800 Subject: [PATCH 156/176] [build images] --- .buildkite/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/Dockerfile b/.buildkite/Dockerfile index ddd14db657b55d..d7899834da4718 100644 --- a/.buildkite/Dockerfile +++ b/.buildkite/Dockerfile @@ -116,7 +116,7 @@ RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \ RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; fi) && \ echo "Downloading 
buildkite-cli-v3.4.1/bk_3.4.1_linux_$ARCH.deb from https://github.com/buildkite/cli/releases/download/v3.4.1/bk_3.4.1_linux_$ARCH.deb" && \ curl -fsSL https://github.com/buildkite/cli/releases/download/v3.4.1/bk_3.4.1_linux_$ARCH.deb -o /tmp/bk.deb && \ - dpkg -i /tmp/bk.deb + dpkg -i /tmp/bk.deb && cp $(which bk) /usr/bin/buildkite-agent RUN useradd -r -m -d /var/lib/buildkite-agent buildkite-agent \ && mkdir -p /etc/buildkite-agent \ From d988fd6226a0bb469a26068b4afc14ed7e299b3a Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Wed, 11 Dec 2024 13:58:22 -0800 Subject: [PATCH 157/176] [build images] --- scripts/machine.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/machine.mjs b/scripts/machine.mjs index eaed5314c0663f..f7022f0edc7df9 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -1201,7 +1201,7 @@ async function main() { rdp: !!args["rdp"] || !!args["vnc"], sshKeys, userData: args["user-data"] ? readFile(args["user-data"]) : undefined, - isDockerImage: !!args["docker"], + // isDockerImage: !!args["docker"], }; let { detached, bootstrap, ci, os, arch, distro, release, features, isDockerImage } = options; From 58f539938c8005e1eed5cd8cbd127d4cd58177f6 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Wed, 11 Dec 2024 14:02:00 -0800 Subject: [PATCH 158/176] [build images] --- .buildkite/ci.mjs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index af2429721d8010..772e425e261cab 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -105,9 +105,9 @@ function getTargetLabel(target) { const buildPlatforms = [ { os: "darwin", arch: "aarch64", release: "14" }, { os: "darwin", arch: "x64", release: "14" }, - { os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023", docker: true }, - { os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", docker: true }, - { os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", docker: true }, + { os: "linux", arch: "aarch64", distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, + { os: "linux", arch: "x64", distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, + { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" }, From dcbf4132235b36e950b8266ccf16dfcef875c13b Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Wed, 11 Dec 2024 14:02:40 -0800 Subject: [PATCH 159/176] [build images] --- .buildkite/ci.mjs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 772e425e261cab..7f01c53bf50a03 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -559,7 +559,7 @@ function getTestBunStep(platform, options = {}) { * @returns {Step} */ function getBuildImageStep(platform, dryRun) { - const { os, arch, distro, release, docker } = platform; + const { os, arch, distro, release, features } = platform; const action = dryRun ? "create-image" : "publish-image"; @@ -575,8 +575,10 @@ function getBuildImageStep(platform, dryRun) { "--cloud=aws", "--ci", "--authorized-org=oven-sh", - docker ? 
"--docker" : "", ]; + for (const feature of features || []) { + command.push(`--feature=${feature}`); + } return { key: `${getImageKey(platform)}-build-image`, From 7603f58eee6cb237cfa6863111e885ed0edb5d7a Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Wed, 11 Dec 2024 14:05:03 -0800 Subject: [PATCH 160/176] [build images] --- .buildkite/ci.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 7f01c53bf50a03..1e1541717a5582 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -105,7 +105,7 @@ function getTargetLabel(target) { const buildPlatforms = [ { os: "darwin", arch: "aarch64", release: "14" }, { os: "darwin", arch: "x64", release: "14" }, - { os: "linux", arch: "aarch64", distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, + { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04", features: ["gcc-13"] }, { os: "linux", arch: "x64", distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, From 83b602aefc5775c808402dfa7cbd34484fc7b73b Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Wed, 11 Dec 2024 14:24:18 -0800 Subject: [PATCH 161/176] [build images] --- .buildkite/ci.mjs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 1e1541717a5582..c820e8c9803113 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -177,12 +177,16 @@ function getPlatformLabel(platform) { * @returns {string} */ function getImageKey(platform) { - const { os, arch, distro, release } = platform; + const { os, arch, distro, release, features } = platform; const version = release.replace(/\./g, ""); + let key = `${os}-${arch}-${version}`; if (distro) { - return `${os}-${arch}-${distro}-${version}`; + key += `-${distro}`; } - return `${os}-${arch}-${version}`; + if (features?.length) { + key += `-with-${features.join("-")}`; + } + return key; } /** From b17ba8d7a521e0c5eb002df34fdef79d1b9b2176 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Wed, 11 Dec 2024 14:25:06 -0800 Subject: [PATCH 162/176] [build images] --- .buildkite/ci.mjs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index c820e8c9803113..d090c17cc495e6 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -204,8 +204,14 @@ function getImageLabel(platform) { * @returns {string} */ function getImageName(platform, dryRun) { - const { os, arch, distro, release } = platform; - const name = distro ? 
`${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`; + const { os, arch, distro, release, features } = platform; + let name = `${os}-${arch}-${release}`; + if (distro) { + name += `-${distro}`; + } + if (features?.length) { + name += `-with-${features.join("-")}`; + } if (dryRun) { return `${name}-build-${getBuildNumber()}`; } From 31aeb97e584c2cf08e7a4912ccc675fd01805fa6 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Wed, 11 Dec 2024 14:33:56 -0800 Subject: [PATCH 163/176] [build images] --- scripts/machine.mjs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/scripts/machine.mjs b/scripts/machine.mjs index f7022f0edc7df9..38afd9ddd871b7 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -1205,7 +1205,14 @@ async function main() { }; let { detached, bootstrap, ci, os, arch, distro, release, features, isDockerImage } = options; - const name = distro ? `${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`; + + let name = `${os}-${arch}-${release}`; + if (distro) { + name += `-${distro}`; + } + if (features?.length) { + name += `-with-${features.join("-")}`; + } let bootstrapPath, agentPath, dockerfilePath; if (bootstrap) { From 91ba75bfcdece8bb61715c55cde7cf08bdf396c7 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Wed, 11 Dec 2024 15:08:35 -0800 Subject: [PATCH 164/176] [build images] --- scripts/bootstrap.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 851ed4495e0f6f..42988c4f338f60 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -937,7 +937,7 @@ install_gcc() { execute_sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y execute_sudo apt update -y - install_packages \ + execute_sudo apt install -y \ "gcc-$gcc_version" \ "g++-$gcc_version" \ "libgcc-$gcc_version-dev" \ From bad87b873ebf1aba9c07257b75c17f1d2390a81d Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Wed, 11 Dec 2024 15:40:01 -0800 Subject: [PATCH 165/176] Try this [build images] --- .buildkite/ci.mjs | 6 +++--- scripts/machine.mjs | 11 +++++------ 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index d090c17cc495e6..7d12333ad4211b 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -105,9 +105,9 @@ function getTargetLabel(target) { const buildPlatforms = [ { os: "darwin", arch: "aarch64", release: "14" }, { os: "darwin", arch: "x64", release: "14" }, - { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04", features: ["gcc-13"] }, - { os: "linux", arch: "x64", distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, - { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "18.04", features: ["gcc-13"] }, + { os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023", features: ["docker"] }, + { os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] }, + { os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" }, { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" }, diff --git a/scripts/machine.mjs b/scripts/machine.mjs index 38afd9ddd871b7..ec42d2f01cdeac 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -1201,10 +1201,9 @@ async function main() { rdp: !!args["rdp"] || !!args["vnc"], 
sshKeys, userData: args["user-data"] ? readFile(args["user-data"]) : undefined, - // isDockerImage: !!args["docker"], }; - let { detached, bootstrap, ci, os, arch, distro, release, features, isDockerImage } = options; + let { detached, bootstrap, ci, os, arch, distro, release, features } = options; let name = `${os}-${arch}-${release}`; if (distro) { @@ -1220,7 +1219,7 @@ async function main() { import.meta.dirname, os === "windows" ? "bootstrap.ps1" - : isDockerImage && os === "linux" && distro === "amazonlinux" + : features?.includes("docker") ? "../.buildkite/Dockerfile-bootstrap.sh" : "bootstrap.sh", ); @@ -1238,7 +1237,7 @@ async function main() { await spawnSafe($`${npx} esbuild ${entryPath} --bundle --platform=node --format=esm --outfile=${agentPath}`); } - if (isDockerImage) { + if (features?.includes("docker")) { dockerfilePath = resolve(import.meta.dirname, "../.buildkite/Dockerfile"); if (!existsSync(dockerfilePath)) { @@ -1334,7 +1333,7 @@ async function main() { await machine.spawnSafe(["powershell", remotePath, ...args], { stdio: "inherit" }); }); } else { - if (!isDockerImage) { + if (!features?.includes("docker")) { const remotePath = "/tmp/bootstrap.sh"; const args = ci ? ["--ci"] : []; for (const feature of features || []) { @@ -1367,7 +1366,7 @@ async function main() { const remotePath = "C:\\buildkite-agent\\agent.mjs"; await startGroup("Installing agent...", async () => { await machine.upload(agentPath, remotePath); - if (cloud.name === "docker" || isDockerImage) { + if (cloud.name === "docker" || features?.includes("docker")) { return; } await machine.spawnSafe(["node", remotePath, "install"], { stdio: "inherit" }); From 73b0ebaffa70d5ecbf90d99b75d7af3ba41c10c2 Mon Sep 17 00:00:00 2001 From: Ashcon Partovi Date: Wed, 11 Dec 2024 16:23:49 -0800 Subject: [PATCH 166/176] Fix image [build image] --- .buildkite/Dockerfile | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.buildkite/Dockerfile b/.buildkite/Dockerfile index d7899834da4718..cbea7b391f6ea5 100644 --- a/.buildkite/Dockerfile +++ b/.buildkite/Dockerfile @@ -114,9 +114,11 @@ RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \ RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; fi) && \ - echo "Downloading buildkite-cli-v3.4.1/bk_3.4.1_linux_$ARCH.deb from https://github.com/buildkite/cli/releases/download/v3.4.1/bk_3.4.1_linux_$ARCH.deb" && \ - curl -fsSL https://github.com/buildkite/cli/releases/download/v3.4.1/bk_3.4.1_linux_$ARCH.deb -o /tmp/bk.deb && \ - dpkg -i /tmp/bk.deb && cp $(which bk) /usr/bin/buildkite-agent + echo "Downloading buildkite" && \ + curl -fsSL "https://github.com/buildkite/agent/releases/download/v3.87.0/buildkite-agent-linux-${ARCH}-3.87.0.tar.gz" -o /tmp/buildkite-agent.tar.gz && \ + mkdir -p /tmp/buildkite-agent && \ + tar -xzf /tmp/buildkite-agent.tar.gz -C /tmp/buildkite-agent && \ + mv /tmp/buildkite-agent/buildkite-agent /usr/bin/buildkite-agent RUN useradd -r -m -d /var/lib/buildkite-agent buildkite-agent \ && mkdir -p /etc/buildkite-agent \ From 9ff6e446bc5acd648b1fa8f7e0e428c0f06a5e8b Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 18:20:59 -0800 Subject: [PATCH 167/176] [build images] --- .buildkite/Dockerfile | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/.buildkite/Dockerfile b/.buildkite/Dockerfile index cbea7b391f6ea5..74dd78fa7d050a 100644 --- a/.buildkite/Dockerfile +++ b/.buildkite/Dockerfile @@ -120,19 +120,15 @@ RUN ARCH=$(if [ "$TARGETARCH" = 
"arm64" ]; then echo "arm64"; else echo "amd64"; tar -xzf /tmp/buildkite-agent.tar.gz -C /tmp/buildkite-agent && \ mv /tmp/buildkite-agent/buildkite-agent /usr/bin/buildkite-agent -RUN useradd -r -m -d /var/lib/buildkite-agent buildkite-agent \ - && mkdir -p /etc/buildkite-agent \ - && chown -R buildkite-agent:buildkite-agent /etc/buildkite-agent && mkdir -p /var/lib/buildkite-agent/cache/bun +RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun -COPY --chown=buildkite-agent:buildkite-agent ../*/agent.mjs /var/bun/scripts/ +COPY ../*/agent.mjs /var/bun/scripts/ ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun ENV BUILDKITE_AGENT_TAGS=${BUILDKITE_AGENT_TAGS} WORKDIR /var/bun/scripts -USER buildkite-agent - ENV PATH=/root/.cargo/bin:$PATH From a9b6504824846f739da570362a2101d86ffda1c5 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 19:26:24 -0800 Subject: [PATCH 168/176] [build images] --- .buildkite/Dockerfile | 15 ++- cmake/targets/BuildBun.cmake | 10 ++ .../bindings/workaround-missing-symbols.cpp | 126 ++++++++++++++++++ 3 files changed, 148 insertions(+), 3 deletions(-) diff --git a/.buildkite/Dockerfile b/.buildkite/Dockerfile index 74dd78fa7d050a..2ace3c0f5052ee 100644 --- a/.buildkite/Dockerfile +++ b/.buildkite/Dockerfile @@ -31,11 +31,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ software-properties-common apt-transport-https \ ca-certificates gnupg lsb-release unzip \ libxml2-dev ruby ruby-dev bison gawk perl make golang \ - && wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | tee /etc/apt/trusted.gpg.d/kitware.gpg >/dev/null \ - && apt-add-repository "deb https://apt.kitware.com/ubuntu/ $(lsb_release -cs) main" \ && add-apt-repository ppa:ubuntu-toolchain-r/test \ && apt-get update \ - && apt-get install -y cmake gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \ + && apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \ libasan6 libubsan1 libatomic1 libtsan0 liblsan0 \ libgfortran5 libc6-dev \ && wget https://apt.llvm.org/llvm.sh \ @@ -43,6 +41,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ && ./llvm.sh ${LLVM_VERSION} all \ && rm llvm.sh + +RUN --mount=type=tmpfs,target=/tmp \ + cmake_version="3.30.5" && \ + if [ "$TARGETARCH" = "arm64" ]; then \ + cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-aarch64.sh"; \ + else \ + cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-x86_64.sh"; \ + fi && \ + wget -O /tmp/cmake.sh "$cmake_url" && \ + sh /tmp/cmake.sh --skip-license --prefix=/usr + RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \ --slave /usr/bin/g++ g++ /usr/bin/g++-13 \ --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \ diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index d398f66edec1c5..6e9eab917f8d95 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -884,8 +884,18 @@ endif() if(LINUX) if(NOT ABI STREQUAL "musl") + # on arm64 + if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64") target_link_options(${bun} PUBLIC + -Wl,--wrap=exp -Wl,--wrap=expf + -Wl,--wrap=log + -Wl,--wrap=log2 + -Wl,--wrap=log2f + -Wl,--wrap=logf + -Wl,--wrap=pow + -Wl,--wrap=powf + -Wl,--wrap=fcntl64 ) endif() diff --git 
a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index 9d16741ae6ec93..3cfb58408616d1 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -84,6 +84,13 @@ extern "C" int kill(int pid, int sig) __asm__(".symver expf,expf@GLIBC_2.2.5"); #elif defined(__aarch64__) __asm__(".symver expf,expf@GLIBC_2.17"); +__asm__(".symver powf,powf@GLIBC_2.17"); +__asm__(".symver pow,pow@GLIBC_2.17"); +__asm__(".symver log,log@GLIBC_2.17"); +__asm__(".symver exp,exp@GLIBC_2.17"); +__asm__(".symver logf,logf@GLIBC_2.17"); +__asm__(".symver log2f,log2f@GLIBC_2.17"); +__asm__(".symver log2,log2@GLIBC_2.17"); #endif #if defined(__x86_64__) || defined(__aarch64__) @@ -96,14 +103,133 @@ extern "C" { float BUN_WRAP_GLIBC_SYMBOL(expf)(float); +#if defined(__aarch64__) + +float BUN_WRAP_GLIBC_SYMBOL(powf)(float, float); +double BUN_WRAP_GLIBC_SYMBOL(pow)(double, double); +double BUN_WRAP_GLIBC_SYMBOL(log)(double); +double BUN_WRAP_GLIBC_SYMBOL(exp)(double); +float BUN_WRAP_GLIBC_SYMBOL(logf)(float); +float BUN_WRAP_GLIBC_SYMBOL(log2f)(float); +double BUN_WRAP_GLIBC_SYMBOL(log2)(double); +int BUN_WRAP_GLIBC_SYMBOL(fcntl64)(int, int, ...); + +#endif + #if defined(__x86_64__) || defined(__aarch64__) float __wrap_expf(float x) { return expf(x); } +#if defined(__aarch64__) + +float __wrap_powf(float x, float y) { return powf(x, y); } +double __wrap_pow(double x, double y) { return pow(x, y); } +double __wrap_log(double x) { return log(x); } +double __wrap_exp(double x) { return exp(x); } +float __wrap_logf(float x) { return logf(x); } +float __wrap_log2f(float x) { return log2f(x); } +double __wrap_log2(double x) { return log2(x); } + +#endif + #endif // x86_64 or aarch64 } // extern "C" +#if defined(__aarch64__) + +typedef int (*fcntl64_func)(int fd, int cmd, ...); + +enum arg_type { + NO_ARG, + INT_ARG, + PTR_ARG +}; + +static enum arg_type get_arg_type(int cmd) +{ + switch (cmd) { + // Commands that take no argument + case F_GETFD: + case F_GETFL: + case F_GETOWN: + case F_GETSIG: + case F_GETLEASE: + case F_GETPIPE_SZ: +#ifdef F_GET_SEALS + case F_GET_SEALS: +#endif + return NO_ARG; + + // Commands that take an integer argument + case F_DUPFD: + case F_DUPFD_CLOEXEC: + case F_SETFD: + case F_SETFL: + case F_SETOWN: + case F_SETSIG: + case F_SETLEASE: + case F_NOTIFY: + case F_SETPIPE_SZ: +#ifdef F_ADD_SEALS + case F_ADD_SEALS: +#endif + return INT_ARG; + + // Commands that take a pointer argument + case F_GETLK: + case F_SETLK: + case F_SETLKW: + case F_GETOWN_EX: + case F_SETOWN_EX: + return PTR_ARG; + + default: + return PTR_ARG; // Default to pointer for unknown commands + } +} + +extern "C" int __wrap_fcntl64(int fd, int cmd, ...) 
+{ + va_list ap; + enum arg_type type = get_arg_type(cmd); + + static fcntl64_func real_fcntl64; + static std::once_flag real_fcntl64_initialized; + std::call_once(real_fcntl64_initialized, []() { + real_fcntl64 = (fcntl64_func)dlsym(RTLD_NEXT, "fcntl64"); + if (!real_fcntl64) { + real_fcntl64 = (fcntl64_func)dlsym(RTLD_NEXT, "fcntl"); + } + }); + + switch (type) { + case NO_ARG: + return real_fcntl64(fd, cmd); + + case INT_ARG: { + va_start(ap, cmd); + int arg = va_arg(ap, int); + va_end(ap); + return real_fcntl64(fd, cmd, arg); + } + + case PTR_ARG: { + va_start(ap, cmd); + void* arg = va_arg(ap, void*); + va_end(ap); + return real_fcntl64(fd, cmd, arg); + } + + default: + va_end(ap); + errno = EINVAL; + return -1; + } +} + +#endif + extern "C" __attribute__((used)) char _libc_single_threaded = 0; extern "C" __attribute__((used)) char __libc_single_threaded = 0; From 9c431797683d0c1711b554be2ea497fe91ea3e55 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 19:35:09 -0800 Subject: [PATCH 169/176] [build images] --- cmake/targets/BuildBun.cmake | 1 + 1 file changed, 1 insertion(+) diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 9a0b625bbc0365..575e01f1103613 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -939,6 +939,7 @@ if(WIN32) set(BUN_SYMBOLS_PATH ${CWD}/src/symbols.def) target_link_options(${bun} PUBLIC /DEF:${BUN_SYMBOLS_PATH}) elseif(APPLE) + set(BUN_SYMBOLS_PATH ${CWD}/src/symbols.txt) target_link_options(${bun} PUBLIC -exported_symbols_list ${BUN_SYMBOLS_PATH}) else() From 63722b95f9ee8fba9e168cb0db3d1ad98e649b18 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 19:35:38 -0800 Subject: [PATCH 170/176] [build images] --- .buildkite/ci.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index fcb57bfa493241..43723ba3aec643 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -203,7 +203,7 @@ function getImageLabel(platform) { * @returns {string} */ function getImageName(platform, options) { - const { os, arch, distro, release } = platform; + const { os, arch, distro, release, features = [] } = platform; const { buildImages, publishImages } = options; const name = distro ? `${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`; From 7bba103a3dd4b2db337ed18af908fd9d71750ef8 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 19:45:13 -0800 Subject: [PATCH 171/176] [build image] --- .buildkite/ci.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 43723ba3aec643..d76acbe3df8e6a 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -205,7 +205,7 @@ function getImageLabel(platform) { function getImageName(platform, options) { const { os, arch, distro, release, features = [] } = platform; const { buildImages, publishImages } = options; - const name = distro ? `${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`; + let name = distro ? 
`${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`; if (features?.length) { name += `-with-${features.join("-")}`; From f7ee0b6daa3fefd34fc8e1388dcc4991552a9559 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 19:46:47 -0800 Subject: [PATCH 172/176] [build images] --- cmake/targets/BuildBun.cmake | 1 + 1 file changed, 1 insertion(+) diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 575e01f1103613..a846750ffad0cb 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -900,6 +900,7 @@ if(LINUX) -Wl,--wrap=fcntl64 ) endif() + endif() if(NOT ABI STREQUAL "musl") target_link_options(${bun} PUBLIC From 8a08ee519811985796944f75d56f704ba1b67988 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 20:25:29 -0800 Subject: [PATCH 173/176] [build images] --- .buildkite/ci.mjs | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index d76acbe3df8e6a..2b9b502bdf51e5 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -176,7 +176,7 @@ function getPlatformLabel(platform) { * @returns {string} */ function getImageKey(platform) { - const { os, arch, distro, release, features } = platform; + const { os, arch, distro, release, features, abi } = platform; const version = release.replace(/\./g, ""); let key = `${os}-${arch}-${version}`; if (distro) { @@ -185,6 +185,11 @@ function getImageKey(platform) { if (features?.length) { key += `-with-${features.join("-")}`; } + + if (abi) { + key += `-${abi}`; + } + return key; } @@ -203,17 +208,15 @@ function getImageLabel(platform) { * @returns {string} */ function getImageName(platform, options) { - const { os, arch, distro, release, features = [] } = platform; + const { os } = platform; const { buildImages, publishImages } = options; - let name = distro ? 
`${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`; - if (features?.length) { - name += `-with-${features.join("-")}`; - } + const name = getImageBasename(platform); if (buildImages && !publishImages) { return `${name}-build-${getBuildNumber()}`; } + return `${name}-v${getBootstrapVersion(os)}`; } From 26213e08f86cb42c3493107772c7654539002c42 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 20:25:45 -0800 Subject: [PATCH 174/176] [build images] --- .buildkite/ci.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 2b9b502bdf51e5..9f39a72a3d0948 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -211,7 +211,7 @@ function getImageName(platform, options) { const { os } = platform; const { buildImages, publishImages } = options; - const name = getImageBasename(platform); + const name = getImageKey(platform); if (buildImages && !publishImages) { return `${name}-build-${getBuildNumber()}`; From 9181842111c3a446521500dc6fc1d69b48954efb Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Wed, 11 Dec 2024 20:59:15 -0800 Subject: [PATCH 175/176] [build images] --- scripts/machine.mjs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/scripts/machine.mjs b/scripts/machine.mjs index ec42d2f01cdeac..8d0ae49ec0dbd5 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -1205,10 +1205,16 @@ async function main() { let { detached, bootstrap, ci, os, arch, distro, release, features } = options; - let name = `${os}-${arch}-${release}`; + let name = `${os}-${arch}-${(release || "").replace(/\./g, "")}`; + if (distro) { name += `-${distro}`; } + + if (distro === "alpine") { + name += `-musl`; + } + if (features?.length) { name += `-with-${features.join("-")}`; } From 2998a8102ea58968fb0e1b7d3495096ed937e8de Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Thu, 12 Dec 2024 01:24:47 -0800 Subject: [PATCH 176/176] [build images] --- scripts/utils.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/utils.mjs b/scripts/utils.mjs index a04da78aa98876..18aa8c51c83a27 100755 --- a/scripts/utils.mjs +++ b/scripts/utils.mjs @@ -2751,7 +2751,7 @@ export function getLoggedInUserCountOrDetails() { return 0; } - let message = users.length + " currently logged in users:"; + let message = `${users.length} currently logged in users:`; for (const user of users) { message += `\n- ${user.username} on ${user.terminal} since ${user.datetime}${user.ip ? ` from ${user.ip}` : ""}`;