repo
stringlengths
7
37
pull_number
stringlengths
3
6
instance_id
stringlengths
13
43
language
stringclasses
7 values
issue_numbers
listlengths
1
3
base_commit
stringlengths
40
40
patch
stringlengths
410
63.3k
test_patch
stringlengths
373
58.6k
problem_statement
stringlengths
224
7.81k
hints_text
stringlengths
2
23k
all_hints_text
stringlengths
2
27.7k
commit_urls
listlengths
1
102
created_at
stringdate
2024-12-27 11:29:35
2025-08-27 02:54:41
commit_url
stringlengths
72
102
rebuild_cmds
listlengths
1
1
test_cmds
listlengths
1
1
print_cmds
listlengths
1
1
log_parser
stringlengths
1.97k
12.1k
FAIL_TO_PASS
listlengths
1
2.76k
PASS_TO_PASS
listlengths
0
36k
docker_image
stringlengths
43
73
libsdl-org/SDL
11761
libsdl-org__SDL-11761
C
[ "8963" ]
028107124358a3d256c8c3acf4e4db6f20e4c348
diff --git a/.github/workflows/generic.yml b/.github/workflows/generic.yml index 1398bca8381d5..84e1d97fe7a2f 100644 --- a/.github/workflows/generic.yml +++ b/.github/workflows/generic.yml @@ -207,7 +207,7 @@ jobs: ctest -VV --test-dir build/ -j2 - name: "Build test apk's (CMake)" id: apks - if: ${{ steps.build.outcome == 'success' && matrix.platform.android-apks != '' }} + if: ${{ always() && steps.build.outcome == 'success' && matrix.platform.android-apks != '' }} # shell: ${{ matrix.platform.shell }} run: | ${{ matrix.platform.source-cmd }} @@ -218,7 +218,7 @@ jobs: -- ${{ matrix.platform.cmake-build-arguments }} - name: 'Install (CMake)' id: install - if: ${{ steps.build.outcome == 'success' }} + if: ${{ always() && steps.build.outcome == 'success' }} # shell: ${{ matrix.platform.shell }} run: | ${{ matrix.platform.source-cmd }} @@ -227,7 +227,7 @@ jobs: ( cd prefix; find . ) | LC_ALL=C sort -u - name: 'Package (CPack)' id: package - if: ${{ steps.build.outcome == 'success' }} + if: ${{ always() && steps.build.outcome == 'success' }} # shell: ${{ matrix.platform.shell }} run: | # DMG creation on macOS occasionally fails, so try multiple times diff --git a/CMakeLists.txt b/CMakeLists.txt index 94b1c9bfe0531..5c4f9cbd412f9 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -498,16 +498,21 @@ else() sdl_include_directories(NO_EXPORT SYSTEM PRIVATE "$<BUILD_INTERFACE:${SDL3_SOURCE_DIR}/src/video/khronos>") endif() -if(MSVC AND TARGET SDL3-shared) +if(MSVC AND TARGET SDL3-shared AND NOT SDL_LIBC) if(SDL_CPU_X64) enable_language(ASM_MASM) set(asm_src "${SDL3_SOURCE_DIR}/src/stdlib/SDL_mslibc_x64.masm") target_compile_options(SDL3-shared PRIVATE "$<$<COMPILE_LANGUAGE:ASM_MASM>:/nologo>") set_property(SOURCE "${asm_src}" PROPERTY LANGUAGE "ASM_MASM") target_sources(SDL3-shared PRIVATE "${asm_src}") - elseif(SDL_CPU_ARM32 OR SDL_CPU_ARM64) - # FIXME: ARM assembler (armasm.exe/armasm64.exe) is NOT ASM_MASM, and does currently not work with CMake - # 
(https://gitlab.kitware.com/cmake/cmake/-/issues/18912) + elseif(SDL_CPU_ARM64) + enable_language(ASM_MARMASM) + set(asm_src "${SDL3_SOURCE_DIR}/src/stdlib/SDL_mslibc_arm64.masm") + target_compile_options(SDL3-shared PRIVATE "$<$<COMPILE_LANGUAGE:ASM_MARMASM>:/nologo>") + set_property(SOURCE "${asm_src}" PROPERTY LANGUAGE "ASM_MARMASM") + target_sources(SDL3-shared PRIVATE "${asm_src}") + elseif(SDL_CPU_ARM32) + # FIXME endif() endif() @@ -1889,8 +1894,10 @@ elseif(WINDOWS) if(TARGET SDL3-shared AND MSVC AND NOT SDL_LIBC) # Prevent codegen that would use the VC runtime libraries. target_compile_options(SDL3-shared PRIVATE $<$<COMPILE_LANGUAGE:C,CXX>:/GS-> $<$<COMPILE_LANGUAGE:C,CXX>:/Gs1048576>) + target_compile_options(SDL_uclibc PRIVATE $<$<COMPILE_LANGUAGE:C,CXX>:/GS-> $<$<COMPILE_LANGUAGE:C,CXX>:/Gs1048576>) if(SDL_CPU_X86) target_compile_options(SDL3-shared PRIVATE "/arch:SSE") + target_compile_options(SDL3-SDL_uclibc PRIVATE "/arch:SSE") endif() endif() @@ -3388,6 +3395,17 @@ if(SDL_SHARED) # (__rt_sdiv, __rt_udiv, __rt_sdiv64, _rt_udiv64, __dtou64, __u64tod, __i64tos) target_link_libraries(SDL3-shared PRIVATE msvcrt.lib) endif() + find_library(HAVE_ONECORE_LIB NAMES "onecore.lib") + if(HAVE_ONECORE_LIB) + # SDL_malloc.c: __imp_MapViewOfFileNuma2 referenced in function MapViewOfFile2 + target_link_libraries(SDL3-shared PRIVATE onecore.lib) + endif() + find_library(HAVE_VOLATILEACCESSU_LIB NAMES "volatileaccessu.lib") + if(HAVE_VOLATILEACCESSU_LIB) + # SDL_malloc.c : RtlSetVolatileMemory referenced in function RtlFillVolatileMemory + # SDL_malloc.c : RtlFillDeviceMemory referenced in function RtlZeroDeviceMemory + target_link_libraries(SDL3-shared PRIVATE volatileaccessu.lib) + endif() endif() if(HAS_Q_NO_USE_LIBIRC) target_compile_options(SDL3-shared PRIVATE /Q_no-use-libirc) diff --git a/VisualC/SDL/SDL.vcxproj b/VisualC/SDL/SDL.vcxproj index e860035ae625a..ada62e856d8c7 100644 --- a/VisualC/SDL/SDL.vcxproj +++ b/VisualC/SDL/SDL.vcxproj @@ -43,7 +43,6 @@ 
</PropertyGroup> <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" /> <ImportGroup Label="ExtensionSettings"> - <Import Project="$(VCTargetsPath)\BuildCustomizations\masm.props" /> </ImportGroup> <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets"> <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" /> @@ -647,9 +646,6 @@ <ClCompile Include="..\..\src\stdlib\SDL_memmove.c" /> <ClCompile Include="..\..\src\stdlib\SDL_memset.c" /> <ClCompile Include="..\..\src\stdlib\SDL_mslibc.c" /> - <MASM Condition="'$(Platform)'=='x64'" Include="..\..\src\stdlib\SDL_mslibc_x64.masm"> - <PrecompiledHeader>NotUsing</PrecompiledHeader> - </MASM> <ClCompile Include="..\..\src\stdlib\SDL_murmur3.c" /> <ClCompile Include="..\..\src\stdlib\SDL_qsort.c" /> <ClCompile Include="..\..\src\stdlib\SDL_random.c" /> @@ -729,6 +725,5 @@ </ItemGroup> <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" /> <ImportGroup Label="ExtensionTargets"> - <Import Project="$(VCTargetsPath)\BuildCustomizations\masm.targets" /> </ImportGroup> </Project> diff --git a/cmake/sdlcompilers.cmake b/cmake/sdlcompilers.cmake index ec3b6124cade0..c3d8c4702e6ad 100644 --- a/cmake/sdlcompilers.cmake +++ b/cmake/sdlcompilers.cmake @@ -34,7 +34,7 @@ function(SDL_AddCommonCompilerFlags TARGET) cmake_push_check_state() check_c_compiler_flag("/W3" COMPILER_SUPPORTS_W3) if(COMPILER_SUPPORTS_W3) - target_compile_options(${TARGET} PRIVATE "/W3") + target_compile_options(${TARGET} PRIVATE "$<$<COMPILE_LANGUAGE:C,CXX>:/W3>") endif() cmake_pop_check_state() endif() @@ -131,7 +131,7 @@ function(SDL_AddCommonCompilerFlags TARGET) if(MSVC) check_c_compiler_flag(/WX HAVE_WX) if(HAVE_WX) - target_compile_options(${TARGET} PRIVATE "/WX") + target_compile_options(${TARGET} PRIVATE "$<$<COMPILE_LANGUAGE:C,CXX>:/WX>") endif() elseif(USE_GCC OR USE_CLANG OR USE_INTELCC 
OR USE_QNX) check_c_compiler_flag(-Werror HAVE_WERROR) diff --git a/src/SDL_hashtable.c b/src/SDL_hashtable.c index a3f6613cd0096..571e376974177 100644 --- a/src/SDL_hashtable.c +++ b/src/SDL_hashtable.c @@ -22,8 +22,31 @@ #include "SDL_hashtable.h" // XXX: We can't use SDL_assert here because it's going to call into hashtable code -#include <assert.h> -#define HT_ASSERT(x) assert(x) +#ifdef NDEBUG +#define HT_ASSERT(x) (void)(0) +#else +#if (defined(_WIN32) || defined(SDL_PLATFORM_CYGWIN)) && !defined(SDL_PLATFORM_XBOXONE) && !defined(SDL_PLATFORM_XBOXSERIES) +#include <windows.h> +#endif +/* This is not declared in any header, although it is shared between some + parts of SDL, because we don't want anything calling it without an + extremely good reason. */ +extern SDL_NORETURN void SDL_ExitProcess(int exitcode); +static void HT_ASSERT_FAIL(const char *msg) +{ + const char *caption = "SDL_HashTable Assertion Failure!"; + (void)caption; +#if (defined(_WIN32) || defined(SDL_PLATFORM_CYGWIN)) && !defined(SDL_PLATFORM_XBOXONE) && !defined(SDL_PLATFORM_XBOXSERIES) + MessageBoxA(NULL, msg, caption, MB_OK | MB_ICONERROR); +#elif defined(HAVE_STDIO_H) + fprintf(stderr, "\n\n%s\n%s\n\n", caption, msg); + fflush(stderr); +#endif + SDL_TriggerBreakpoint(); + SDL_ExitProcess(-1); +} +#define HT_ASSERT(x) if (!(x)) HT_ASSERT_FAIL("SDL_HashTable Assertion Failure: " #x) +#endif typedef struct SDL_HashItem { diff --git a/src/dynapi/SDL_dynapi.c b/src/dynapi/SDL_dynapi.c index 2fa85ec5f9920..f35f3e8bcb564 100644 --- a/src/dynapi/SDL_dynapi.c +++ b/src/dynapi/SDL_dynapi.c @@ -520,6 +520,9 @@ static void SDL_InitDynamicAPILocked(void) if (initialize_jumptable(SDL_DYNAPI_VERSION, &jump_table, sizeof(jump_table)) < 0) { // Now we're screwed. Should definitely abort now. dynapi_warn("Failed to initialize internal SDL dynapi. 
As this would otherwise crash, we have to abort now."); +#ifndef NDEBUG + SDL_TriggerBreakpoint(); +#endif SDL_ExitProcess(86); } } diff --git a/src/stdlib/SDL_malloc.c b/src/stdlib/SDL_malloc.c index a4de7d24ac545..ea09d1e5cbf77 100644 --- a/src/stdlib/SDL_malloc.c +++ b/src/stdlib/SDL_malloc.c @@ -28,8 +28,8 @@ #define LACKS_STRINGS_H #define LACKS_STRING_H #define LACKS_STDLIB_H -#define FORCEINLINE #define ABORT +#define NO_MALLOC_STATS 1 #define USE_LOCKS 1 #define USE_DL_PREFIX @@ -816,6 +816,7 @@ struct mallinfo { inlining are defined as macros, so these aren't used for them. */ +#if 0 /* SDL */ #ifndef FORCEINLINE #if defined(__GNUC__) #define FORCEINLINE __inline __attribute__ ((always_inline)) @@ -823,6 +824,7 @@ struct mallinfo { #define FORCEINLINE __forceinline #endif #endif +#endif /* SDL */ #ifndef NOINLINE #if defined(__GNUC__) #define NOINLINE __attribute__ ((noinline)) @@ -835,13 +837,17 @@ struct mallinfo { #ifdef __cplusplus extern "C" { +#if 0 /* SDL */ #ifndef FORCEINLINE #define FORCEINLINE inline #endif +#endif /* SDL */ #endif /* __cplusplus */ +#if 0 /* SDL */ #ifndef FORCEINLINE #define FORCEINLINE #endif +#endif /* SDL_FORCE_INLINE */ #if !ONLY_MSPACES @@ -1697,20 +1703,20 @@ static int dev_zero_fd = -1; /* Cached file descriptor for /dev/zero. */ #else /* WIN32 */ /* Win32 MMAP via VirtualAlloc */ -static FORCEINLINE void* win32mmap(size_t size) { +SDL_FORCE_INLINE void* win32mmap(size_t size) { void* ptr = VirtualAlloc(0, size, MEM_RESERVE|MEM_COMMIT, PAGE_READWRITE); return (ptr != 0)? ptr: MFAIL; } /* For direct MMAP, use MEM_TOP_DOWN to minimize interference */ -static FORCEINLINE void* win32direct_mmap(size_t size) { +SDL_FORCE_INLINE void* win32direct_mmap(size_t size) { void* ptr = VirtualAlloc(0, size, MEM_RESERVE|MEM_COMMIT|MEM_TOP_DOWN, PAGE_READWRITE); return (ptr != 0)? 
ptr: MFAIL; } /* This function supports releasing coalesed segments */ -static FORCEINLINE int win32munmap(void* ptr, size_t size) { +SDL_FORCE_INLINE int win32munmap(void* ptr, size_t size) { MEMORY_BASIC_INFORMATION minfo; char* cptr = (char*)ptr; while (size) { @@ -1863,7 +1869,7 @@ static FORCEINLINE int win32munmap(void* ptr, size_t size) { #elif (defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))) /* Custom spin locks for older gcc on x86 */ -static FORCEINLINE int x86_cas_lock(int *sl) { +SDL_FORCE_INLINE int x86_cas_lock(int *sl) { int ret; int val = 1; int cmp = 0; @@ -1874,7 +1880,7 @@ static FORCEINLINE int x86_cas_lock(int *sl) { return ret; } -static FORCEINLINE void x86_clear_lock(int* sl) { +SDL_FORCE_INLINE void x86_clear_lock(int* sl) { assert(*sl != 0); int prev = 0; int ret; @@ -1952,14 +1958,14 @@ struct malloc_recursive_lock { #define MLOCK_T struct malloc_recursive_lock static MLOCK_T malloc_global_mutex = { 0, 0, (THREAD_ID_T)0}; -static FORCEINLINE void recursive_release_lock(MLOCK_T *lk) { +SDL_FORCE_INLINE void recursive_release_lock(MLOCK_T *lk) { assert(lk->sl != 0); if (--lk->c == 0) { CLEAR_LOCK(&lk->sl); } } -static FORCEINLINE int recursive_acquire_lock(MLOCK_T *lk) { +SDL_FORCE_INLINE int recursive_acquire_lock(MLOCK_T *lk) { THREAD_ID_T mythreadid = CURRENT_THREAD; int spins = 0; for (;;) { @@ -1980,7 +1986,7 @@ static FORCEINLINE int recursive_acquire_lock(MLOCK_T *lk) { } } -static FORCEINLINE int recursive_try_lock(MLOCK_T *lk) { +SDL_FORCE_INLINE int recursive_try_lock(MLOCK_T *lk) { THREAD_ID_T mythreadid = CURRENT_THREAD; if (*((volatile int *)(&lk->sl)) == 0) { if (!CAS_LOCK(&lk->sl)) { diff --git a/src/stdlib/SDL_mslibc.c b/src/stdlib/SDL_mslibc.c index a527bf75d6bdf..6698403fe8904 100644 --- a/src/stdlib/SDL_mslibc.c +++ b/src/stdlib/SDL_mslibc.c @@ -728,14 +728,6 @@ void __declspec(naked) _alloca_probe_16(void) #endif // _M_IX86 -#ifdef _M_ARM64 - -void __chkstk(void); -void __chkstk() { -} - -#endif - #endif 
// MSC_VER #ifdef __ICL diff --git a/src/stdlib/SDL_mslibc_arm64.masm b/src/stdlib/SDL_mslibc_arm64.masm new file mode 100644 index 0000000000000..a769cc106f5ff --- /dev/null +++ b/src/stdlib/SDL_mslibc_arm64.masm @@ -0,0 +1,26 @@ +TeStackLimit EQU 0x00010 +PAGE_SIZE equ 0x1000 + + AREA CODE, READONLY + + EXPORT __chkstk + +__chkstk PROC + ldr x17,[x18, #TeStackLimit] + subs x16,sp,x15, LSL #0x4 + csel x16,xzr,x16,cc + cmp x16,x17 + b.cc chkstk_start_loop + ret +chkstk_start_loop + and x16,x16,#-PAGE_SIZE +chkstk_loop + sub x17,x17,#0x1, LSL #12 + ldr xzr,[x17] + cmp x17,x16 + b.ne chkstk_loop + ret + + ENDP + + END diff --git a/src/stdlib/SDL_string.c b/src/stdlib/SDL_string.c index ab43c13d3a850..50da685f7eb57 100644 --- a/src/stdlib/SDL_string.c +++ b/src/stdlib/SDL_string.c @@ -368,7 +368,7 @@ static size_t SDL_ScanUnsignedLongLongInternal(const char *text, int count, int negative = *text == '-'; ++text; } - if ((radix == 0 || radix == 16) && *text == '0') { + if ((radix == 0 || radix == 16) && *text == '0' && text[1] != '\0') { ++text; if (*text == 'x' || *text == 'X') { radix = 16;
diff --git a/test/testautomation_stdlib.c b/test/testautomation_stdlib.c index a2ac6719884b7..26317584f19bc 100644 --- a/test/testautomation_stdlib.c +++ b/test/testautomation_stdlib.c @@ -1324,6 +1324,9 @@ static int SDLCALL stdlib_strtox(void *arg) STRTOX_TEST_CASE(SDL_strtoull, unsigned long long, FMT_PRILLu, "-uvwxyz", 32, -991, 3); STRTOX_TEST_CASE(SDL_strtoull, unsigned long long, FMT_PRILLu, "ZzZzZzZzZzZzZzZzZzZzZzZzZ", 36, ullong_max, 25); + STRTOX_TEST_CASE(SDL_strtoull, unsigned long long, FMT_PRILLu, "0", 0, 0, 1); + STRTOX_TEST_CASE(SDL_strtoull, unsigned long long, FMT_PRILLu, "0", 10, 0, 1); + STRTOX_TEST_CASE(SDL_strtoull, unsigned long long, FMT_PRILLu, "-0", 0, 0, 2); STRTOX_TEST_CASE(SDL_strtoull, unsigned long long, FMT_PRILLu, "-0", 10, 0, 2); STRTOX_TEST_CASE(SDL_strtoull, unsigned long long, FMT_PRILLu, " - 1", 0, 0, 0); // invalid input @@ -1335,6 +1338,10 @@ static int SDLCALL stdlib_strtox(void *arg) // Since the CI runs the tests against a variety of targets, this should be fine in practice. 
if (sizeof(long) == 4) { + STRTOX_TEST_CASE(SDL_strtol, long, "%ld", "0", 0, 0, 1); + STRTOX_TEST_CASE(SDL_strtol, long, "%ld", "0", 10, 0, 1); + STRTOX_TEST_CASE(SDL_strtol, long, "%ld", "-0", 0, 0, 2); + STRTOX_TEST_CASE(SDL_strtol, long, "%ld", "-0", 10, 0, 2); STRTOX_TEST_CASE(SDL_strtol, long, "%ld", "2147483647", 10, 2147483647, 10); STRTOX_TEST_CASE(SDL_strtol, long, "%ld", "2147483648", 10, 2147483647, 10); STRTOX_TEST_CASE(SDL_strtol, long, "%ld", "-2147483648", 10, -2147483647L - 1, 11); @@ -1347,6 +1354,10 @@ static int SDLCALL stdlib_strtox(void *arg) } if (sizeof(long long) == 8) { + STRTOX_TEST_CASE(SDL_strtoll, long long, FMT_PRILLd, "0", 0, 0LL, 1); + STRTOX_TEST_CASE(SDL_strtoll, long long, FMT_PRILLd, "0", 10, 0LL, 1); + STRTOX_TEST_CASE(SDL_strtoll, long long, FMT_PRILLd, "-0", 0, 0LL, 2); + STRTOX_TEST_CASE(SDL_strtoll, long long, FMT_PRILLd, "-0", 10, 0LL, 2); STRTOX_TEST_CASE(SDL_strtoll, long long, FMT_PRILLd, "9223372036854775807", 10, 9223372036854775807LL, 19); STRTOX_TEST_CASE(SDL_strtoll, long long, FMT_PRILLd, "9223372036854775808", 10, 9223372036854775807LL, 19); STRTOX_TEST_CASE(SDL_strtoll, long long, FMT_PRILLd, "-9223372036854775808", 10, -9223372036854775807LL - 1, 20);
arm64 asm, __chkstk, and cmake Our current MSVC code is missing `__chkstk` for arm64 and we are blaming cmake for it referencing cmake issue reported at https://gitlab.kitware.com/cmake/cmake/-/issues/18912 Any progress on this? I found armasm support merges among cmake gitlab PRs. along with some open issues - other and newer than the one above: https://gitlab.kitware.com/cmake/cmake/-/merge_requests/7793 https://gitlab.kitware.com/cmake/cmake/-/issues/24317 @madebr ?
Any progress on this? We are scoping work for the SDL 3.2.0 release, so please let us know if this is a showstopper for you. Well, it _can_ be a showstopper. I don't have anything to test with, though. Me either... let's leave this open until someone has a repro environment. #11761 addresses this. I copied `__chkstk` from a arm64 SDL3.dll. But it's 100% not tested.
Any progress on this? We are scoping work for the SDL 3.2.0 release, so please let us know if this is a showstopper for you. Well, it _can_ be a showstopper. I don't have anything to test with, though. Me either... let's leave this open until someone has a repro environment. #11761 addresses this. I copied `__chkstk` from a arm64 SDL3.dll. But it's 100% not tested.
[ "https://github.com/libsdl-org/SDL/commit/ae6c9370c1fca3db47ed089049c84e9c0c0c7133", "https://github.com/libsdl-org/SDL/commit/4d8946171bf7189afa3317ffdc59a956bdcf1d29", "https://github.com/libsdl-org/SDL/commit/4613773fc1ab19cdf2c8ecfcba6c1557be53e328", "https://github.com/libsdl-org/SDL/commit/ad5b75f503f6b...
2024-12-28T14:28:37Z
https://github.com/libsdl-org/SDL/tree/028107124358a3d256c8c3acf4e4db6f20e4c348
[ "$vs=\"${env:ProgramFiles(x86)}\\Microsoft Visual Studio\\2022\\BuildTools\\Common7\\Tools\\VsDevCmd.bat\"; cmd.exe /c \"call `\"$vs`\" -arch=x64 && cmake --build build --parallel && cmake --install build\"" ]
[ "$env:SDL_AUDIODRIVER=\"\"dummy\"\"; $env:SDL_VIDEODRIVER=\"\"offscreen\"\"; ctest --test-dir build --no-tests=error --output-on-failure -VV -C Release 2>&1 | Tee-Object -FilePath test-output.log" ]
[ "cat test-output.log" ]
def parser(log: str) -> dict[str, str]: # Improved parser for CTest and internal testautomation output with wrapped lines import re results: dict[str, str] = {} def set_status(name: str, status: str): status = (status or "").strip().lower() if status in ("passed", "pass", "success", "ok"): new = "pass" elif status in ("skipped", "skip", "not run", "notrun", "disabled", "ignored"): new = "skip" else: # Treat anything else, including 'failed', 'error', 'timeout', etc., as fail new = "fail" prev = results.get(name) if prev == "fail": return if (prev is None) or (new == "fail") or (prev == "skip" and new == "pass"): results[name] = new # Regex to capture CTest per-test summary lines # Example: " 1/23 Test #1: testaudioinfo .................... Passed 0.02 sec" ctest_line_re = re.compile( r"\bTest\s+#\s*\d+:\s+(\S+).*?\b(Passed|Failed|Skipped|Not Run|Disabled|Timeout)\b", re.IGNORECASE, ) # "The following tests FAILED:" block entries: # e.g. " 2 - testautomation (Failed)" or "(Timeout)" failed_entry_re = re.compile( r"^\s*\d+\s*-\s*([^\s]+)\s+\(([^)]+)\)", re.IGNORECASE, ) # Internal testautomation granular results can be wrapped across lines. # We'll parse from a normalized buffer without CTest/timestamp prefixes. 
internal_test_re = re.compile( r">>>\s*Test\s*'([^']+)'\s*:\s*(Passed|Failed|Skipped|Not Run|Disabled|Timeout)\b", re.IGNORECASE, ) # Helper to strip CTest channel prefix and optional timestamp prefix chan_prefix_re = re.compile(r"^\s*\d+:\s+") ts_prefix_re = re.compile(r"^\d{2}/\d{2}/\d{2}\s+\d{2}:\d{2}:\d{2}:\s+") def strip_prefixes(s: str) -> str: s2 = chan_prefix_re.sub("", s, count=1) s2 = ts_prefix_re.sub("", s2, count=1) return s2 # Buffer to accumulate possibly wrapped internal test lines buf = "" buf_lines = 0 max_buf_lines = 4 # safety cap # Process the log line-by-line for raw_line in log.splitlines(): line = raw_line.rstrip("\r\n") # 1) CTest per-test summary lines on raw content m = ctest_line_re.search(line) if m: name, status = m.groups() set_status(name, status) # don't "continue" because same line might also contain something else (unlikely). # 2) Failed block entries on raw content f = failed_entry_re.search(line) if f: name = f.group(1) # Treat anything in parentheses here as failure (Failed, Timeout, etc.) 
set_status(name, "fail") # 3) Internal ">>> Test '...': Status" parsing with wrapping norm = strip_prefixes(line) # If a new internal test marker appears, (re)start the buffer from that marker idx = norm.find(">>> Test") if idx >= 0: buf = norm[idx:].strip() buf_lines = 1 elif buf: # Continue accumulating if buffer is active # Append stripped current line content (without prefixes) seg = norm.strip() if seg: # Deduplicate a single overlapping character at the boundary when the next # segment begins with the same char followed by a closing quote (e.g., "s':") if buf and len(seg) >= 2 and seg[0] == buf[-1] and seg[1] == "'": seg = seg[1:] # Concatenate directly to avoid inserting spaces inside quoted names buf += seg buf_lines += 1 # Try to match the internal test result in the buffer (if any) if buf: it = internal_test_re.search(buf) if it: name, status = it.groups() set_status(name, status) buf = "" buf_lines = 0 elif buf_lines >= max_buf_lines: # Give up on this buffer if it doesn't resolve in N lines buf = "" buf_lines = 0 return results
[ "video_getClosestDisplayModeRandomResolution" ]
[ "video_getFullscreenDisplayModes", "round_roundNumberCases", "stdlib_strnlen", "testqsort", "hints_getHint", "pixels_saveLoadBMP", "log10_baseCases", "exp_infCases", "pow_baseNZeroExpOddCases", "asin_outOfDomainCases", "audio_buildAudioStream", "pixels_getPixelFormatDetails", "rect_testEnclo...
starryzhang/sweb.eval.win.libsdl-org_1776_sdl-11761
libsdl-org/SDL
12806
libsdl-org__SDL-12806
C
[ "12802" ]
f35a2736b708408bd5e6273255c03513cf60605a
diff --git a/CMakeLists.txt b/CMakeLists.txt index 364cb5c0fd359..0db59b3eec21c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1893,11 +1893,13 @@ elseif(UNIX AND NOT APPLE AND NOT RISCOS AND NOT HAIKU) set (USE_POSIX_SPAWN 1) endif() elseif(WINDOWS) + enable_language(CXX) check_c_source_compiles(" #include <windows.h> int main(int argc, char **argv) { return 0; }" HAVE_WIN32_CC) sdl_glob_sources("${SDL3_SOURCE_DIR}/src/core/windows/*.c") + sdl_glob_sources("${SDL3_SOURCE_DIR}/src/core/windows/*.cpp") sdl_glob_sources("${SDL3_SOURCE_DIR}/src/main/windows/*.c") sdl_glob_sources("${SDL3_SOURCE_DIR}/src/io/windows/*.c") @@ -2008,6 +2010,7 @@ elseif(WINDOWS) if(SDL_VIDEO) set(SDL_VIDEO_DRIVER_WINDOWS 1) sdl_glob_sources("${SDL3_SOURCE_DIR}/src/video/windows/*.c") + sdl_glob_sources("${SDL3_SOURCE_DIR}/src/video/windows/*.cpp") CheckOpenVR() @@ -2135,7 +2138,7 @@ elseif(WINDOWS) set(SDL_JOYSTICK_WGI 1) endif() if(HAVE_GAMEINPUT_H) - sdl_glob_sources("${SDL3_SOURCE_DIR}/src/joystick/gdk/*.c") + sdl_glob_sources("${SDL3_SOURCE_DIR}/src/joystick/gdk/*.cpp") set(SDL_JOYSTICK_GAMEINPUT 1) endif() set(HAVE_SDL_JOYSTICK TRUE) diff --git a/VisualC-GDK/SDL/SDL.vcxproj b/VisualC-GDK/SDL/SDL.vcxproj index d15619c802ee2..baa2fffe2c2af 100644 --- a/VisualC-GDK/SDL/SDL.vcxproj +++ b/VisualC-GDK/SDL/SDL.vcxproj @@ -645,7 +645,7 @@ <ClCompile Include="..\..\src\audio\SDL_wave.c" /> <ClCompile Include="..\..\src\audio\wasapi\SDL_wasapi.c" /> <ClCompile Include="..\..\src\core\SDL_core_unsupported.c" /> - <ClCompile Include="..\..\src\core\windows\SDL_gameinput.c"/> + <ClCompile Include="..\..\src\core\windows\SDL_gameinput.cpp"/> <ClCompile Include="..\..\src\core\windows\SDL_hid.c" /> <ClCompile Include="..\..\src\core\windows\SDL_immdevice.c" /> <ClCompile Include="..\..\src\core\windows\SDL_windows.c" /> @@ -710,7 +710,7 @@ <ClCompile Include="..\..\src\hidapi\SDL_hidapi.c" /> <ClCompile Include="..\..\src\joystick\controller_type.c" /> <ClCompile 
Include="..\..\src\joystick\dummy\SDL_sysjoystick.c" /> - <ClCompile Include="..\..\src\joystick\gdk\SDL_gameinputjoystick.c" /> + <ClCompile Include="..\..\src\joystick\gdk\SDL_gameinputjoystick.cpp" /> <ClCompile Include="..\..\src\joystick\hidapi\SDL_hidapijoystick.c" /> <ClCompile Include="..\..\src\joystick\hidapi\SDL_hidapi_8bitdo.c" /> <ClCompile Include="..\..\src\joystick\hidapi\SDL_hidapi_combined.c" /> @@ -891,7 +891,7 @@ <ClCompile Include="..\..\src\video\windows\SDL_windowsevents.c" /> <ClCompile Include="..\..\src\video\windows\SDL_windowsframebuffer.c" /> <ClCompile Include="..\..\src\video\windows\SDL_windowskeyboard.c" /> - <ClCompile Include="..\..\src\video\windows\SDL_windowsgameinput.c" /> + <ClCompile Include="..\..\src\video\windows\SDL_windowsgameinput.cpp" /> <ClCompile Include="..\..\src\video\windows\SDL_windowsmessagebox.c" /> <ClCompile Include="..\..\src\video\windows\SDL_windowsmodes.c" /> <ClCompile Include="..\..\src\video\windows\SDL_windowsmouse.c" /> diff --git a/VisualC-GDK/SDL/SDL.vcxproj.filters b/VisualC-GDK/SDL/SDL.vcxproj.filters index 03b5a6a8f817d..26f228826c966 100644 --- a/VisualC-GDK/SDL/SDL.vcxproj.filters +++ b/VisualC-GDK/SDL/SDL.vcxproj.filters @@ -27,7 +27,7 @@ <ClCompile Include="..\..\src\audio\SDL_wave.c" /> <ClCompile Include="..\..\src\audio\wasapi\SDL_wasapi.c" /> <ClCompile Include="..\..\src\core\SDL_core_unsupported.c" /> - <ClCompile Include="..\..\src\core\windows\SDL_gameinput.c" /> + <ClCompile Include="..\..\src\core\windows\SDL_gameinput.cpp" /> <ClCompile Include="..\..\src\core\windows\SDL_hid.c" /> <ClCompile Include="..\..\src\core\windows\SDL_immdevice.c" /> <ClCompile Include="..\..\src\core\windows\SDL_windows.c" /> @@ -61,7 +61,7 @@ <ClCompile Include="..\..\src\hidapi\SDL_hidapi.c" /> <ClCompile Include="..\..\src\joystick\controller_type.c" /> <ClCompile Include="..\..\src\joystick\dummy\SDL_sysjoystick.c" /> - <ClCompile Include="..\..\src\joystick\gdk\SDL_gameinputjoystick.c" /> + 
<ClCompile Include="..\..\src\joystick\gdk\SDL_gameinputjoystick.cpp" /> <ClCompile Include="..\..\src\joystick\hidapi\SDL_hidapijoystick.c" /> <ClCompile Include="..\..\src\joystick\hidapi\SDL_hidapi_8bitdo.c" /> <ClCompile Include="..\..\src\joystick\hidapi\SDL_hidapi_combined.c" /> @@ -193,7 +193,7 @@ <ClCompile Include="..\..\src\video\windows\SDL_windowsevents.c" /> <ClCompile Include="..\..\src\video\windows\SDL_windowsframebuffer.c" /> <ClCompile Include="..\..\src\video\windows\SDL_windowskeyboard.c" /> - <ClCompile Include="..\..\src\video\windows\SDL_windowsgameinput.c" /> + <ClCompile Include="..\..\src\video\windows\SDL_windowsgameinput.cpp" /> <ClCompile Include="..\..\src\video\windows\SDL_windowsmessagebox.c" /> <ClCompile Include="..\..\src\video\windows\SDL_windowsmodes.c" /> <ClCompile Include="..\..\src\video\windows\SDL_windowsmouse.c" /> diff --git a/VisualC/SDL/SDL.vcxproj b/VisualC/SDL/SDL.vcxproj index 738a4110fbc8f..c0a4d8643665d 100644 --- a/VisualC/SDL/SDL.vcxproj +++ b/VisualC/SDL/SDL.vcxproj @@ -424,6 +424,16 @@ <ClCompile Include="..\..\src\camera\dummy\SDL_camera_dummy.c" /> <ClCompile Include="..\..\src\camera\mediafoundation\SDL_camera_mediafoundation.c" /> <ClCompile Include="..\..\src\camera\SDL_camera.c" /> + <ClCompile Include="..\..\src\core\windows\pch_cpp.cpp"> + <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">Create</PrecompiledHeader> + <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">Create</PrecompiledHeader> + <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">Create</PrecompiledHeader> + <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">Create</PrecompiledHeader> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + <PrecompiledHeaderOutputFile 
Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + </ClCompile> <ClCompile Include="..\..\src\dialog\SDL_dialog.c" /> <ClCompile Include="..\..\src\dialog\SDL_dialog_utils.c" /> <ClCompile Include="..\..\src\filesystem\SDL_filesystem.c" /> @@ -543,7 +553,12 @@ <ClCompile Include="..\..\src\audio\SDL_wave.c" /> <ClCompile Include="..\..\src\audio\wasapi\SDL_wasapi.c" /> <ClCompile Include="..\..\src\core\SDL_core_unsupported.c" /> - <ClCompile Include="..\..\src\core\windows\SDL_gameinput.c" /> + <ClCompile Include="..\..\src\core\windows\SDL_gameinput.cpp"> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + </ClCompile> <ClCompile Include="..\..\src\core\windows\SDL_hid.c" /> <ClCompile Include="..\..\src\core\windows\SDL_immdevice.c" /> <ClCompile Include="..\..\src\core\windows\SDL_windows.c" /> @@ -580,7 +595,12 @@ <ClCompile Include="..\..\src\hidapi\SDL_hidapi.c" /> <ClCompile Include="..\..\src\joystick\controller_type.c" /> <ClCompile Include="..\..\src\joystick\dummy\SDL_sysjoystick.c" /> - <ClCompile 
Include="..\..\src\joystick\gdk\SDL_gameinputjoystick.c" /> + <ClCompile Include="..\..\src\joystick\gdk\SDL_gameinputjoystick.cpp"> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + </ClCompile> <ClCompile Include="..\..\src\joystick\hidapi\SDL_hidapijoystick.c" /> <ClCompile Include="..\..\src\joystick\hidapi\SDL_hidapi_8bitdo.c" /> <ClCompile Include="..\..\src\joystick\hidapi\SDL_hidapi_combined.c" /> @@ -726,7 +746,12 @@ <ClCompile Include="..\..\src\video\windows\SDL_windowsevents.c" /> <ClCompile Include="..\..\src\video\windows\SDL_windowsframebuffer.c" /> <ClCompile Include="..\..\src\video\windows\SDL_windowskeyboard.c" /> - <ClCompile Include="..\..\src\video\windows\SDL_windowsgameinput.c" /> + <ClCompile Include="..\..\src\video\windows\SDL_windowsgameinput.cpp"> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + <PrecompiledHeaderOutputFile Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(IntDir)$(TargetName)_cpp.pch</PrecompiledHeaderOutputFile> + </ClCompile> <ClCompile 
Include="..\..\src\video\windows\SDL_windowsmessagebox.c" /> <ClCompile Include="..\..\src\video\windows\SDL_windowsmodes.c" /> <ClCompile Include="..\..\src\video\windows\SDL_windowsmouse.c" /> diff --git a/VisualC/SDL/SDL.vcxproj.filters b/VisualC/SDL/SDL.vcxproj.filters index a5b201ed6d5e2..2583c9f3791ea 100644 --- a/VisualC/SDL/SDL.vcxproj.filters +++ b/VisualC/SDL/SDL.vcxproj.filters @@ -537,9 +537,6 @@ <ClInclude Include="..\..\src\events\SDL_events_c.h"> <Filter>events</Filter> </ClInclude> - <ClInclude Include="..\..\src\events\SDL_eventfilter_c.h"> - <Filter>events</Filter> - </ClInclude> <ClInclude Include="..\..\src\events\SDL_keyboard_c.h"> <Filter>events</Filter> </ClInclude> @@ -962,6 +959,7 @@ <ClInclude Include="..\..\include\SDL3\SDL_storage.h" /> <ClInclude Include="..\..\include\SDL3\SDL_time.h" /> <ClInclude Include="..\..\src\events\SDL_categories_c.h" /> + <ClInclude Include="..\..\src\events\SDL_eventwatch_c.h" /> </ItemGroup> <ItemGroup> <ClCompile Include="..\..\src\audio\wasapi\SDL_wasapi.c" /> @@ -1049,7 +1047,7 @@ <ClCompile Include="..\..\src\core\SDL_core_unsupported.c"> <Filter>core</Filter> </ClCompile> - <ClCompile Include="..\..\src\core\windows\SDL_gameinput.c"> + <ClCompile Include="..\..\src\core\windows\SDL_gameinput.cpp"> <Filter>core\windows</Filter> </ClCompile> <ClCompile Include="..\..\src\core\windows\SDL_hid.c"> @@ -1088,9 +1086,6 @@ <ClCompile Include="..\..\src\events\SDL_events.c"> <Filter>events</Filter> </ClCompile> - <ClCompile Include="..\..\src\events\SDL_eventfilter.c"> - <Filter>events</Filter> - </ClCompile> <ClCompile Include="..\..\src\events\SDL_keyboard.c"> <Filter>events</Filter> </ClCompile> @@ -1184,7 +1179,7 @@ <ClCompile Include="..\..\src\joystick\dummy\SDL_sysjoystick.c"> <Filter>joystick\dummy</Filter> </ClCompile> - <ClCompile Include="..\..\src\joystick\gdk\SDL_gameinputjoystick.c"> + <ClCompile Include="..\..\src\joystick\gdk\SDL_gameinputjoystick.cpp"> <Filter>joystick\gdk</Filter> </ClCompile> 
<ClCompile Include="..\..\src\joystick\hidapi\SDL_hidapi_8bitdo.c"> @@ -1367,7 +1362,7 @@ <ClCompile Include="..\..\src\video\windows\SDL_windowskeyboard.c"> <Filter>video\windows</Filter> </ClCompile> - <ClCompile Include="..\..\src\video\windows\SDL_windowsgameinput.c"> + <ClCompile Include="..\..\src\video\windows\SDL_windowsgameinput.cpp"> <Filter>video\windows</Filter> </ClCompile> <ClCompile Include="..\..\src\video\windows\SDL_windowsmessagebox.c"> @@ -1606,11 +1601,12 @@ <ClCompile Include="..\..\src\storage\generic\SDL_genericstorage.c" /> <ClCompile Include="..\..\src\storage\steam\SDL_steamstorage.c" /> <ClCompile Include="..\..\src\storage\SDL_storage.c" /> + <ClCompile Include="..\..\src\events\SDL_eventwatch.c" /> + <ClCompile Include="..\..\src\core\windows\pch_cpp.cpp"> + <Filter>core\windows</Filter> + </ClCompile> </ItemGroup> <ItemGroup> <ResourceCompile Include="..\..\src\core\windows\version.rc" /> </ItemGroup> - <ItemGroup> - <MASM Include="..\..\src\stdlib\SDL_mslibc_x64.masm" /> - </ItemGroup> </Project> diff --git a/src/core/windows/SDL_gameinput.c b/src/core/windows/SDL_gameinput.cpp similarity index 85% rename from src/core/windows/SDL_gameinput.c rename to src/core/windows/SDL_gameinput.cpp index 9ac5912db9d06..e2ea3fb4ab285 100644 --- a/src/core/windows/SDL_gameinput.c +++ b/src/core/windows/SDL_gameinput.cpp @@ -25,16 +25,11 @@ #include "SDL_windows.h" #include "SDL_gameinput.h" -#ifdef SDL_PLATFORM_WIN32 -#include <initguid.h> -// {11BE2A7E-4254-445A-9C09-FFC40F006918} -DEFINE_GUID(SDL_IID_GameInput, 0x11BE2A7E, 0x4254, 0x445A, 0x9C, 0x09, 0xFF, 0xC4, 0x0F, 0x00, 0x69, 0x18); -#endif - static SDL_SharedObject *g_hGameInputDLL; static IGameInput *g_pGameInput; static int g_nGameInputRefCount; + bool SDL_InitGameInput(IGameInput **ppGameInput) { if (g_nGameInputRefCount == 0) { @@ -43,7 +38,7 @@ bool SDL_InitGameInput(IGameInput **ppGameInput) return false; } - typedef HRESULT (WINAPI *GameInputCreate_t)(IGameInput * *gameInput); + 
typedef HRESULT (WINAPI *GameInputCreate_t)(IGameInput **gameInput); GameInputCreate_t GameInputCreateFunc = (GameInputCreate_t)SDL_LoadFunction(g_hGameInputDLL, "GameInputCreate"); if (!GameInputCreateFunc) { SDL_UnloadObject(g_hGameInputDLL); @@ -58,15 +53,19 @@ bool SDL_InitGameInput(IGameInput **ppGameInput) } #ifdef SDL_PLATFORM_WIN32 - hr = IGameInput_QueryInterface(pGameInput, &SDL_IID_GameInput, (void **)&g_pGameInput); - IGameInput_Release(pGameInput); +#if GAMEINPUT_API_VERSION >= 1 + hr = pGameInput->QueryInterface(IID_IGameInput, (void **)&g_pGameInput); +#else + // We require GameInput v1.1 or newer + hr = E_NOINTERFACE; +#endif + pGameInput->Release(); if (FAILED(hr)) { SDL_UnloadObject(g_hGameInputDLL); return WIN_SetErrorFromHRESULT("GameInput QueryInterface failed", hr); } #else // Assume that the version we get is compatible with the current SDK - // If that isn't the case, define the correct GUID for SDL_IID_GameInput above g_pGameInput = pGameInput; #endif } @@ -85,7 +84,7 @@ void SDL_QuitGameInput(void) --g_nGameInputRefCount; if (g_nGameInputRefCount == 0) { if (g_pGameInput) { - IGameInput_Release(g_pGameInput); + g_pGameInput->Release(); g_pGameInput = NULL; } if (g_hGameInputDLL) { diff --git a/src/core/windows/SDL_gameinput.h b/src/core/windows/SDL_gameinput.h index 0022c0bdde163..4d2beb5647005 100644 --- a/src/core/windows/SDL_gameinput.h +++ b/src/core/windows/SDL_gameinput.h @@ -25,9 +25,16 @@ #ifdef HAVE_GAMEINPUT_H -#define COBJMACROS #include <gameinput.h> +#ifndef GAMEINPUT_API_VERSION +#define GAMEINPUT_API_VERSION 0 +#endif + +#if GAMEINPUT_API_VERSION == 1 +using namespace GameInput::v1; +#endif + extern bool SDL_InitGameInput(IGameInput **ppGameInput); extern void SDL_QuitGameInput(void); diff --git a/src/joystick/gdk/SDL_gameinputjoystick.c b/src/joystick/gdk/SDL_gameinputjoystick.cpp similarity index 89% rename from src/joystick/gdk/SDL_gameinputjoystick.c rename to src/joystick/gdk/SDL_gameinputjoystick.cpp index 
6cf0a902b7f17..46d4ecc5f117a 100644 --- a/src/joystick/gdk/SDL_gameinputjoystick.c +++ b/src/joystick/gdk/SDL_gameinputjoystick.cpp @@ -24,6 +24,7 @@ #include "../SDL_sysjoystick.h" #include "../usb_ids.h" +#include "../../core/windows/SDL_windows.h" #include "../../core/windows/SDL_gameinput.h" // Default value for SDL_HINT_JOYSTICK_GAMEINPUT @@ -66,7 +67,7 @@ typedef struct joystick_hwdata static GAMEINPUT_InternalList g_GameInputList = { NULL }; static IGameInput *g_pGameInput = NULL; -static GameInputCallbackToken g_GameInputCallbackToken = GAMEINPUT_INVALID_CALLBACK_TOKEN_VALUE; +static GameInputCallbackToken g_GameInputCallbackToken = 0; static Uint64 g_GameInputTimestampOffset; static bool GAMEINPUT_InternalIsGamepad(const GameInputDeviceInfo *info) @@ -93,15 +94,22 @@ static bool GAMEINPUT_InternalAddOrFind(IGameInputDevice *pDevice) SDL_AssertJoysticksLocked(); - info = IGameInputDevice_GetDeviceInfo(pDevice); - if (info->capabilities & GameInputDeviceCapabilityWireless) { +#if GAMEINPUT_API_VERSION >= 1 + HRESULT hr = pDevice->GetDeviceInfo(&info); + if (FAILED(hr)) { + return WIN_SetErrorFromHRESULT("IGameInputDevice::GetDeviceInfo", hr); + } +#else + info = pDevice->GetDeviceInfo(); +#endif + if (false /*info->capabilities & GameInputDeviceCapabilityWireless*/) { bus = SDL_HARDWARE_BUS_BLUETOOTH; } else { bus = SDL_HARDWARE_BUS_USB; } vendor = info->vendorId; product = info->productId; - version = (info->firmwareVersion.major << 8) | info->firmwareVersion.minor; + //version = (info->firmwareVersion.major << 8) | info->firmwareVersion.minor; if (SDL_JoystickHandledByAnotherDriver(&SDL_GAMEINPUT_JoystickDriver, vendor, product, version, "")) { return true; @@ -130,18 +138,20 @@ static bool GAMEINPUT_InternalAddOrFind(IGameInputDevice *pDevice) // Generate a device path for (idx = 0; idx < APP_LOCAL_DEVICE_ID_SIZE; ++idx) { SDL_snprintf(tmp, SDL_arraysize(tmp), "%02hhX", info->deviceId.value[idx]); - SDL_strlcat(elem->path, tmp, SDL_arraysize(tmp)); + 
SDL_strlcat(elem->path, tmp, SDL_arraysize(elem->path)); } - if (info->deviceStrings) { - // In theory we could get the manufacturer and product strings here, but they're NULL for all the controllers I've tested +#if GAMEINPUT_API_VERSION >= 1 + if (info->displayName) { + product_string = info->displayName; } - +#else if (info->displayName) { - // This could give us a product string, but it's NULL for all the controllers I've tested + product_string = info->displayName->data; } +#endif - IGameInputDevice_AddRef(pDevice); + pDevice->AddRef(); elem->device = pDevice; elem->name = SDL_CreateJoystickName(vendor, product, manufacturer_string, product_string); elem->guid = SDL_CreateJoystickGUID(bus, vendor, product, version, manufacturer_string, product_string, 'g', 0); @@ -168,7 +178,7 @@ static bool GAMEINPUT_InternalRemoveByIndex(int idx) elem = g_GameInputList.devices[idx]; if (elem) { - IGameInputDevice_Release(elem->device); + elem->device->Release(); SDL_free(elem->name); SDL_free(elem); } @@ -232,10 +242,11 @@ static void CALLBACK GAMEINPUT_InternalJoystickDeviceCallback( } static void GAMEINPUT_JoystickDetect(void); +static void GAMEINPUT_JoystickQuit(void); static bool GAMEINPUT_JoystickInit(void) { - HRESULT hR; + HRESULT hr; if (!SDL_GetHintBoolean(SDL_HINT_JOYSTICK_GAMEINPUT, SDL_GAMEINPUT_DEFAULT)) { return true; @@ -245,21 +256,21 @@ static bool GAMEINPUT_JoystickInit(void) return false; } - hR = IGameInput_RegisterDeviceCallback(g_pGameInput, - NULL, + hr = g_pGameInput->RegisterDeviceCallback(NULL, GameInputKindController, GameInputDeviceConnected, GameInputBlockingEnumeration, NULL, GAMEINPUT_InternalJoystickDeviceCallback, &g_GameInputCallbackToken); - if (FAILED(hR)) { - return SDL_SetError("IGameInput::RegisterDeviceCallback failure with HRESULT of %08lX", hR); + if (FAILED(hr)) { + GAMEINPUT_JoystickQuit(); + return WIN_SetErrorFromHRESULT("IGameInput::RegisterDeviceCallback", hr); } // Calculate the relative offset between SDL timestamps and 
GameInput timestamps Uint64 now = SDL_GetTicksNS(); - uint64_t timestampUS = IGameInput_GetCurrentTimestamp(g_pGameInput); + uint64_t timestampUS = g_pGameInput->GetCurrentTimestamp(); g_GameInputTimestampOffset = (SDL_NS_TO_US(now) - timestampUS); GAMEINPUT_JoystickDetect(); @@ -292,7 +303,7 @@ static void GAMEINPUT_JoystickDetect(void) elem->isAdded = true; } - if (elem->isDeleteRequested || !(IGameInputDevice_GetDeviceStatus(elem->device) & GameInputDeviceConnected)) { + if (elem->isDeleteRequested || !(elem->device->GetDeviceStatus() & GameInputDeviceConnected)) { SDL_PrivateJoystickRemoved(elem->device_instance); GAMEINPUT_InternalRemoveByIndex(idx--); } @@ -357,6 +368,7 @@ static SDL_JoystickID GAMEINPUT_JoystickGetDeviceInstanceID(int device_index) static void GAMEINPUT_UpdatePowerInfo(SDL_Joystick *joystick, IGameInputDevice *device) { +#if 0 GameInputBatteryState battery_state; SDL_PowerState state; int percent = 0; @@ -385,10 +397,10 @@ static void GAMEINPUT_UpdatePowerInfo(SDL_Joystick *joystick, IGameInputDevice * percent = (int)SDL_roundf((battery_state.remainingCapacity / battery_state.fullChargeCapacity) * 100.0f); } SDL_SendJoystickPowerInfo(joystick, state, percent); +#endif } -#ifdef IGameInput_RegisterSystemButtonCallback - +#if GAMEINPUT_API_VERSION >= 1 static void CALLBACK GAMEINPUT_InternalSystemButtonCallback( _In_ GameInputCallbackToken callbackToken, _In_ void * context, @@ -415,8 +427,7 @@ static void CALLBACK GAMEINPUT_InternalSystemButtonCallback( SDL_UnlockJoysticks(); } } - -#endif // IGameInput_RegisterSystemButtonCallback +#endif // GAMEINPUT_API_VERSION >= 1 static bool GAMEINPUT_JoystickOpen(SDL_Joystick *joystick, int device_index) { @@ -441,19 +452,15 @@ static bool GAMEINPUT_JoystickOpen(SDL_Joystick *joystick, int device_index) joystick->nbuttons = 11; joystick->nhats = 1; -#ifdef IGameInput_RegisterSystemButtonCallback +#if GAMEINPUT_API_VERSION >= 1 if (info->supportedSystemButtons != GameInputSystemButtonNone) { if 
(info->supportedSystemButtons & GameInputSystemButtonShare) { ++joystick->nbuttons; } -#if 1 // The C macro in GameInput.h version 10.0.26100 refers to a focus policy which I guess has been removed from the final API? -#undef IGameInput_RegisterSystemButtonCallback -#define IGameInput_RegisterSystemButtonCallback(This, device, buttonFilter, context, callbackFunc, callbackToken) ((This)->lpVtbl->RegisterSystemButtonCallback(This, device, buttonFilter, context, callbackFunc, callbackToken)) -#endif - IGameInput_RegisterSystemButtonCallback(g_pGameInput, elem->device, (GameInputSystemButtonGuide | GameInputSystemButtonShare), joystick, GAMEINPUT_InternalSystemButtonCallback, &hwdata->system_button_callback_token); + g_pGameInput->RegisterSystemButtonCallback(elem->device, (GameInputSystemButtonGuide | GameInputSystemButtonShare), joystick, GAMEINPUT_InternalSystemButtonCallback, &hwdata->system_button_callback_token); } -#endif // IGameInput_RegisterSystemButtonCallback +#endif // GAMEINPUT_API_VERSION >= 1 } else { joystick->naxes = info->controllerAxisCount; joystick->nbuttons = info->controllerButtonCount; @@ -467,6 +474,7 @@ static bool GAMEINPUT_JoystickOpen(SDL_Joystick *joystick, int device_index) SDL_SetBooleanProperty(SDL_GetJoystickProperties(joystick), SDL_PROP_JOYSTICK_CAP_TRIGGER_RUMBLE_BOOLEAN, true); } +#if 0 if (info->supportedInput & GameInputKindTouch) { SDL_PrivateJoystickAddTouchpad(joystick, info->touchPointCount); } @@ -482,6 +490,7 @@ static bool GAMEINPUT_JoystickOpen(SDL_Joystick *joystick, int device_index) } else { joystick->connection_state = SDL_JOYSTICK_CONNECTION_WIRED; } +#endif return true; } @@ -492,7 +501,7 @@ static bool GAMEINPUT_JoystickRumble(SDL_Joystick *joystick, Uint16 low_frequenc GameInputRumbleParams *params = &hwdata->rumbleParams; params->lowFrequency = (float)low_frequency_rumble / (float)SDL_MAX_UINT16; params->highFrequency = (float)high_frequency_rumble / (float)SDL_MAX_UINT16; - 
IGameInputDevice_SetRumbleState(hwdata->devref->device, params); + hwdata->devref->device->SetRumbleState(params); return true; } @@ -503,7 +512,7 @@ static bool GAMEINPUT_JoystickRumbleTriggers(SDL_Joystick *joystick, Uint16 left GameInputRumbleParams *params = &hwdata->rumbleParams; params->leftTrigger = (float)left_rumble / (float)SDL_MAX_UINT16; params->rightTrigger = (float)right_rumble / (float)SDL_MAX_UINT16; - IGameInputDevice_SetRumbleState(hwdata->devref->device, params); + hwdata->devref->device->SetRumbleState(params); return true; } @@ -531,15 +540,15 @@ static void GAMEINPUT_JoystickUpdate(SDL_Joystick *joystick) IGameInputReading *reading = NULL; Uint64 timestamp; GameInputGamepadState state; - HRESULT hR; + HRESULT hr; - hR = IGameInput_GetCurrentReading(g_pGameInput, info->supportedInput, device, &reading); - if (FAILED(hR)) { + hr = g_pGameInput->GetCurrentReading(info->supportedInput, device, &reading); + if (FAILED(hr)) { // don't SetError here since there can be a legitimate case when there's no reading avail return; } - timestamp = SDL_US_TO_NS(IGameInputReading_GetTimestamp(reading) + g_GameInputTimestampOffset); + timestamp = SDL_US_TO_NS(reading->GetTimestamp() + g_GameInputTimestampOffset); if (GAMEINPUT_InternalIsGamepad(info)) { static WORD s_XInputButtons[] = { @@ -557,7 +566,7 @@ static void GAMEINPUT_JoystickUpdate(SDL_Joystick *joystick) }; Uint8 btnidx = 0, hat = 0; - if (IGameInputReading_GetGamepadState(reading, &state)) { + if (reading->GetGamepadState(&state)) { for (btnidx = 0; btnidx < SDL_arraysize(s_XInputButtons); ++btnidx) { WORD button_mask = s_XInputButtons[btnidx]; if (!button_mask) { @@ -599,7 +608,7 @@ static void GAMEINPUT_JoystickUpdate(SDL_Joystick *joystick) if (button_state) { uint32_t i; - uint32_t button_count = IGameInputReading_GetControllerButtonState(reading, info->controllerButtonCount, button_state); + uint32_t button_count = reading->GetControllerButtonState(info->controllerButtonCount, button_state); 
for (i = 0; i < button_count; ++i) { SDL_SendJoystickButton(timestamp, joystick, (Uint8)i, button_state[i]); } @@ -609,7 +618,7 @@ static void GAMEINPUT_JoystickUpdate(SDL_Joystick *joystick) #define CONVERT_AXIS(v) (Sint16)((v)*65535.0f - 32768.0f) if (axis_state) { uint32_t i; - uint32_t axis_count = IGameInputReading_GetControllerAxisState(reading, info->controllerAxisCount, axis_state); + uint32_t axis_count = reading->GetControllerAxisState(info->controllerAxisCount, axis_state); for (i = 0; i < axis_count; ++i) { SDL_SendJoystickAxis(timestamp, joystick, (Uint8)i, CONVERT_AXIS(axis_state[i])); } @@ -619,7 +628,7 @@ static void GAMEINPUT_JoystickUpdate(SDL_Joystick *joystick) if (switch_state) { uint32_t i; - uint32_t switch_count = IGameInputReading_GetControllerSwitchState(reading, info->controllerSwitchCount, switch_state); + uint32_t switch_count = reading->GetControllerSwitchState(info->controllerSwitchCount, switch_state); for (i = 0; i < switch_count; ++i) { Uint8 hat; switch (switch_state[i]) { @@ -658,6 +667,7 @@ static void GAMEINPUT_JoystickUpdate(SDL_Joystick *joystick) } } +#if 0 if (info->supportedInput & GameInputKindTouch) { GameInputTouchState *touch_state = SDL_stack_alloc(GameInputTouchState, info->touchPointCount); if (touch_state) { @@ -679,8 +689,9 @@ static void GAMEINPUT_JoystickUpdate(SDL_Joystick *joystick) // FIXME: How do we interpret the motion data? 
} } +#endif - IGameInputReading_Release(reading); + reading->Release(); // FIXME: We can poll this at a much lower rate GAMEINPUT_UpdatePowerInfo(joystick, device); @@ -691,7 +702,11 @@ static void GAMEINPUT_JoystickClose(SDL_Joystick* joystick) GAMEINPUT_InternalJoystickHwdata *hwdata = joystick->hwdata; if (hwdata->system_button_callback_token) { - IGameInput_UnregisterCallback(g_pGameInput, hwdata->system_button_callback_token, 5000); +#if GAMEINPUT_API_VERSION >= 1 + g_pGameInput->UnregisterCallback(hwdata->system_button_callback_token); +#else + g_pGameInput->UnregisterCallback(hwdata->system_button_callback_token, 10000); +#endif } SDL_free(hwdata); @@ -702,8 +717,14 @@ static void GAMEINPUT_JoystickQuit(void) { if (g_pGameInput) { // free the callback - IGameInput_UnregisterCallback(g_pGameInput, g_GameInputCallbackToken, /*timeoutInUs:*/ 10000); - g_GameInputCallbackToken = GAMEINPUT_INVALID_CALLBACK_TOKEN_VALUE; + if (g_GameInputCallbackToken) { +#if GAMEINPUT_API_VERSION >= 1 + g_pGameInput->UnregisterCallback(g_GameInputCallbackToken); +#else + g_pGameInput->UnregisterCallback(g_GameInputCallbackToken, 10000); +#endif + g_GameInputCallbackToken = 0; + } // free the list while (g_GameInputList.count > 0) { @@ -738,7 +759,7 @@ static bool GAMEINPUT_JoystickGetGamepadMapping(int device_index, SDL_GamepadMap out->back.kind = EMappingKind_Button; out->back.target = SDL_GAMEPAD_BUTTON_BACK; -#ifdef IGameInput_RegisterSystemButtonCallback +#if GAMEINPUT_API_VERSION >= 1 if (elem->info->supportedSystemButtons & GameInputSystemButtonGuide) { out->guide.kind = EMappingKind_Button; out->guide.target = SDL_GAMEPAD_BUTTON_GUIDE; @@ -748,7 +769,7 @@ static bool GAMEINPUT_JoystickGetGamepadMapping(int device_index, SDL_GamepadMap out->misc1.kind = EMappingKind_Button; out->misc1.target = SDL_GAMEPAD_BUTTON_GAMEINPUT_SHARE; } -#endif +#endif // GAMEINPUT_API_VERSION >= 1 out->start.kind = EMappingKind_Button; out->start.target = SDL_GAMEPAD_BUTTON_START; diff --git 
a/src/video/windows/SDL_windowsgameinput.c b/src/video/windows/SDL_windowsgameinput.cpp similarity index 81% rename from src/video/windows/SDL_windowsgameinput.c rename to src/video/windows/SDL_windowsgameinput.cpp index 183733a380470..7ea77ecc558b4 100644 --- a/src/video/windows/SDL_windowsgameinput.c +++ b/src/video/windows/SDL_windowsgameinput.cpp @@ -22,16 +22,14 @@ #include "SDL_windowsvideo.h" -// GameInput currently has a bug with keys stuck on focus change, and crashes on initialization on some systems, so we'll disable it until these issues are fixed. -#undef HAVE_GAMEINPUT_H - #ifdef HAVE_GAMEINPUT_H #include "../../core/windows/SDL_gameinput.h" +extern "C" { #include "../../events/SDL_mouse_c.h" #include "../../events/SDL_keyboard_c.h" #include "../../events/scancodes_windows.h" - +} #define MAX_GAMEINPUT_BUTTONS 7 // GameInputMouseWheelTiltRight is the highest button @@ -75,7 +73,14 @@ static bool GAMEINPUT_InternalAddOrFind(WIN_GameInputData *data, IGameInputDevic const GameInputDeviceInfo *info; bool result = false; - info = IGameInputDevice_GetDeviceInfo(pDevice); +#if GAMEINPUT_API_VERSION >= 1 + HRESULT hr = pDevice->GetDeviceInfo(&info); + if (FAILED(hr)) { + return WIN_SetErrorFromHRESULT("IGameInputDevice_GetDeviceInfo", hr); + } +#else + info = pDevice->GetDeviceInfo(); +#endif SDL_LockMutex(data->lock); { @@ -100,15 +105,11 @@ static bool GAMEINPUT_InternalAddOrFind(WIN_GameInputData *data, IGameInputDevic goto done; } - if (info->deviceStrings) { - // In theory we could get the manufacturer and product strings here, but they're NULL for all the devices I've tested - } - if (info->displayName) { // This could give us a product string, but it's NULL for all the devices I've tested } - IGameInputDevice_AddRef(pDevice); + pDevice->AddRef(); device->pDevice = pDevice; device->instance_id = SDL_GetNextObjectID(); device->info = info; @@ -147,15 +148,15 @@ static bool GAMEINPUT_InternalRemoveByIndex(WIN_GameInputData *data, int idx) 
SDL_RemoveKeyboard(device->instance_id, true); } if (device->last_mouse_reading) { - IGameInputReading_Release(device->last_mouse_reading); + device->last_mouse_reading->Release(); device->last_mouse_reading = NULL; } if (device->last_keyboard_reading) { - IGameInputReading_Release(device->last_keyboard_reading); + device->last_keyboard_reading->Release(); device->last_keyboard_reading = NULL; } } - IGameInputDevice_Release(device->pDevice); + device->pDevice->Release(); SDL_free(device->name); SDL_free(device); } @@ -217,6 +218,8 @@ bool WIN_InitGameInput(SDL_VideoDevice *_this) { WIN_GameInputData *data; HRESULT hr; + Uint64 now; + uint64_t timestampUS; bool result = false; if (_this->internal->gameinput_context) { @@ -238,22 +241,21 @@ bool WIN_InitGameInput(SDL_VideoDevice *_this) goto done; } - hr = IGameInput_RegisterDeviceCallback(data->pGameInput, - NULL, - (GameInputKindMouse | GameInputKindKeyboard), - GameInputDeviceConnected, - GameInputBlockingEnumeration, - data, - GAMEINPUT_InternalDeviceCallback, - &data->gameinput_callback_token); + hr = data->pGameInput->RegisterDeviceCallback(NULL, + (GameInputKindMouse | GameInputKindKeyboard), + GameInputDeviceConnected, + GameInputBlockingEnumeration, + data, + GAMEINPUT_InternalDeviceCallback, + &data->gameinput_callback_token); if (FAILED(hr)) { - SDL_SetError("IGameInput::RegisterDeviceCallback failure with HRESULT of %08X", hr); + WIN_SetErrorFromHRESULT("IGameInput::RegisterDeviceCallback", hr); goto done; } // Calculate the relative offset between SDL timestamps and GameInput timestamps - Uint64 now = SDL_GetTicksNS(); - uint64_t timestampUS = IGameInput_GetCurrentTimestamp(data->pGameInput); + now = SDL_GetTicksNS(); + timestampUS = data->pGameInput->GetCurrentTimestamp(); data->timestamp_offset = (SDL_NS_TO_US(now) - timestampUS); result = true; @@ -268,12 +270,12 @@ bool WIN_InitGameInput(SDL_VideoDevice *_this) static void GAMEINPUT_InitialMouseReading(WIN_GameInputData *data, SDL_Window *window, 
GAMEINPUT_Device *device, IGameInputReading *reading) { GameInputMouseState state; - if (SUCCEEDED(IGameInputReading_GetMouseState(reading, &state))) { - Uint64 timestamp = SDL_US_TO_NS(IGameInputReading_GetTimestamp(reading) + data->timestamp_offset); + if (reading->GetMouseState(&state)) { + Uint64 timestamp = SDL_US_TO_NS(reading->GetTimestamp() + data->timestamp_offset); SDL_MouseID mouseID = device->instance_id; for (int i = 0; i < MAX_GAMEINPUT_BUTTONS; ++i) { - const GameInputMouseButtons mask = (1 << i); + const GameInputMouseButtons mask = GameInputMouseButtons(1 << i); bool down = ((state.buttons & mask) != 0); SDL_SendMouseButton(timestamp, window, mouseID, GAMEINPUT_button_map[i], down); } @@ -284,9 +286,8 @@ static void GAMEINPUT_HandleMouseDelta(WIN_GameInputData *data, SDL_Window *wind { GameInputMouseState last; GameInputMouseState state; - if (SUCCEEDED(IGameInputReading_GetMouseState(last_reading, &last)) && - SUCCEEDED(IGameInputReading_GetMouseState(reading, &state))) { - Uint64 timestamp = SDL_US_TO_NS(IGameInputReading_GetTimestamp(reading) + data->timestamp_offset); + if (last_reading->GetMouseState(&last) && reading->GetMouseState(&state)) { + Uint64 timestamp = SDL_US_TO_NS(reading->GetTimestamp() + data->timestamp_offset); SDL_MouseID mouseID = device->instance_id; GameInputMouseState delta; @@ -301,7 +302,7 @@ static void GAMEINPUT_HandleMouseDelta(WIN_GameInputData *data, SDL_Window *wind } if (delta.buttons) { for (int i = 0; i < MAX_GAMEINPUT_BUTTONS; ++i) { - const GameInputMouseButtons mask = (1 << i); + const GameInputMouseButtons mask = GameInputMouseButtons(1 << i); if (delta.buttons & mask) { bool down = ((state.buttons & mask) != 0); SDL_SendMouseButton(timestamp, window, mouseID, GAMEINPUT_button_map[i], down); @@ -337,7 +338,7 @@ static bool KeysHaveScancode(const GameInputKeyState *keys, uint32_t count, SDL_ static void GAMEINPUT_InitialKeyboardReading(WIN_GameInputData *data, SDL_Window *window, GAMEINPUT_Device *device, 
IGameInputReading *reading) { - Uint64 timestamp = SDL_US_TO_NS(IGameInputReading_GetTimestamp(reading) + data->timestamp_offset); + Uint64 timestamp = SDL_US_TO_NS(reading->GetTimestamp() + data->timestamp_offset); SDL_KeyboardID keyboardID = device->instance_id; uint32_t max_keys = device->info->keyboardInfo->maxSimultaneousKeys; @@ -346,7 +347,7 @@ static void GAMEINPUT_InitialKeyboardReading(WIN_GameInputData *data, SDL_Window return; } - uint32_t num_keys = IGameInputReading_GetKeyState(reading, max_keys, keys); + uint32_t num_keys = reading->GetKeyState(max_keys, keys); if (!num_keys) { // FIXME: We probably need to track key state by keyboardID SDL_ResetKeyboard(); @@ -382,7 +383,7 @@ static void DumpKeys(const char *prefix, GameInputKeyState *keys, uint32_t count static void GAMEINPUT_HandleKeyboardDelta(WIN_GameInputData *data, SDL_Window *window, GAMEINPUT_Device *device, IGameInputReading *last_reading, IGameInputReading *reading) { - Uint64 timestamp = SDL_US_TO_NS(IGameInputReading_GetTimestamp(reading) + data->timestamp_offset); + Uint64 timestamp = SDL_US_TO_NS(reading->GetTimestamp() + data->timestamp_offset); SDL_KeyboardID keyboardID = device->instance_id; uint32_t max_keys = device->info->keyboardInfo->maxSimultaneousKeys; @@ -394,8 +395,8 @@ static void GAMEINPUT_HandleKeyboardDelta(WIN_GameInputData *data, SDL_Window *w uint32_t index_last = 0; uint32_t index_keys = 0; - uint32_t num_last = IGameInputReading_GetKeyState(last_reading, max_keys, last); - uint32_t num_keys = IGameInputReading_GetKeyState(reading, max_keys, keys); + uint32_t num_last = last_reading->GetKeyState(max_keys, last); + uint32_t num_keys = reading->GetKeyState(max_keys, keys); #ifdef DEBUG_KEYS SDL_Log("Timestamp: %llu", timestamp); DumpKeys("Last keys:", last, num_last); @@ -463,20 +464,20 @@ void WIN_UpdateGameInput(SDL_VideoDevice *_this) if (data->enabled_input & GameInputKindMouse) { if (device->last_mouse_reading) { HRESULT hr; - while (SUCCEEDED(hr = 
IGameInput_GetNextReading(data->pGameInput, device->last_mouse_reading, GameInputKindMouse, device->pDevice, &reading))) { + while (SUCCEEDED(hr = data->pGameInput->GetNextReading(device->last_mouse_reading, GameInputKindMouse, device->pDevice, &reading))) { GAMEINPUT_HandleMouseDelta(data, window, device, device->last_mouse_reading, reading); - IGameInputReading_Release(device->last_mouse_reading); + device->last_mouse_reading->Release(); device->last_mouse_reading = reading; } if (hr != GAMEINPUT_E_READING_NOT_FOUND) { - if (SUCCEEDED(IGameInput_GetCurrentReading(data->pGameInput, GameInputKindMouse, device->pDevice, &reading))) { + if (SUCCEEDED(data->pGameInput->GetCurrentReading(GameInputKindMouse, device->pDevice, &reading))) { GAMEINPUT_HandleMouseDelta(data, window, device, device->last_mouse_reading, reading); - IGameInputReading_Release(device->last_mouse_reading); + device->last_mouse_reading->Release(); device->last_mouse_reading = reading; } } } else { - if (SUCCEEDED(IGameInput_GetCurrentReading(data->pGameInput, GameInputKindMouse, device->pDevice, &reading))) { + if (SUCCEEDED(data->pGameInput->GetCurrentReading(GameInputKindMouse, device->pDevice, &reading))) { GAMEINPUT_InitialMouseReading(data, window, device, reading); device->last_mouse_reading = reading; } @@ -487,26 +488,26 @@ void WIN_UpdateGameInput(SDL_VideoDevice *_this) if (window->text_input_active) { // Reset raw input while text input is active if (device->last_keyboard_reading) { - IGameInputReading_Release(device->last_keyboard_reading); + device->last_keyboard_reading->Release(); device->last_keyboard_reading = NULL; } } else { if (device->last_keyboard_reading) { HRESULT hr; - while (SUCCEEDED(hr = IGameInput_GetNextReading(data->pGameInput, device->last_keyboard_reading, GameInputKindKeyboard, device->pDevice, &reading))) { + while (SUCCEEDED(hr = data->pGameInput->GetNextReading(device->last_keyboard_reading, GameInputKindKeyboard, device->pDevice, &reading))) { 
GAMEINPUT_HandleKeyboardDelta(data, window, device, device->last_keyboard_reading, reading); - IGameInputReading_Release(device->last_keyboard_reading); + device->last_keyboard_reading->Release(); device->last_keyboard_reading = reading; } if (hr != GAMEINPUT_E_READING_NOT_FOUND) { - if (SUCCEEDED(IGameInput_GetCurrentReading(data->pGameInput, GameInputKindKeyboard, device->pDevice, &reading))) { + if (SUCCEEDED(data->pGameInput->GetCurrentReading(GameInputKindKeyboard, device->pDevice, &reading))) { GAMEINPUT_HandleKeyboardDelta(data, window, device, device->last_keyboard_reading, reading); - IGameInputReading_Release(device->last_keyboard_reading); + device->last_keyboard_reading->Release(); device->last_keyboard_reading = reading; } } } else { - if (SUCCEEDED(IGameInput_GetCurrentReading(data->pGameInput, GameInputKindKeyboard, device->pDevice, &reading))) { + if (SUCCEEDED(data->pGameInput->GetCurrentReading(GameInputKindKeyboard, device->pDevice, &reading))) { GAMEINPUT_InitialKeyboardReading(data, window, device, reading); device->last_keyboard_reading = reading; } @@ -534,12 +535,12 @@ bool WIN_UpdateGameInputEnabled(SDL_VideoDevice *_this) GAMEINPUT_Device *device = data->devices[i]; if (device->last_mouse_reading && !raw_mouse_enabled) { - IGameInputReading_Release(device->last_mouse_reading); + device->last_mouse_reading->Release(); device->last_mouse_reading = NULL; } if (device->last_keyboard_reading && !raw_keyboard_enabled) { - IGameInputReading_Release(device->last_keyboard_reading); + device->last_keyboard_reading->Release(); device->last_keyboard_reading = NULL; } } @@ -559,9 +560,13 @@ void WIN_QuitGameInput(SDL_VideoDevice *_this) if (data->pGameInput) { // free the callback - if (data->gameinput_callback_token != GAMEINPUT_INVALID_CALLBACK_TOKEN_VALUE) { - IGameInput_UnregisterCallback(data->pGameInput, data->gameinput_callback_token, /*timeoutInUs:*/ 10000); - data->gameinput_callback_token = GAMEINPUT_INVALID_CALLBACK_TOKEN_VALUE; + if 
(data->gameinput_callback_token) { +#if GAMEINPUT_API_VERSION >= 1 + data->pGameInput->UnregisterCallback(data->gameinput_callback_token); +#else + data->pGameInput->UnregisterCallback(data->gameinput_callback_token, 10000); +#endif + data->gameinput_callback_token = 0; } // free the list @@ -569,7 +574,7 @@ void WIN_QuitGameInput(SDL_VideoDevice *_this) GAMEINPUT_InternalRemoveByIndex(data, 0); } - IGameInput_Release(data->pGameInput); + data->pGameInput->Release(); data->pGameInput = NULL; } diff --git a/src/video/windows/SDL_windowsgameinput.h b/src/video/windows/SDL_windowsgameinput.h index 561de9c4e0c89..e6f1afa2d62ff 100644 --- a/src/video/windows/SDL_windowsgameinput.h +++ b/src/video/windows/SDL_windowsgameinput.h @@ -22,8 +22,17 @@ typedef struct WIN_GameInputData WIN_GameInputData; +// Set up for C function definitions, even when using C++ +#ifdef __cplusplus +extern "C" { +#endif + extern bool WIN_InitGameInput(SDL_VideoDevice *_this); extern bool WIN_UpdateGameInputEnabled(SDL_VideoDevice *_this); extern void WIN_UpdateGameInput(SDL_VideoDevice *_this); extern void WIN_QuitGameInput(SDL_VideoDevice *_this); +// Ends C function definitions when using C++ +#ifdef __cplusplus +} +#endif diff --git a/src/video/windows/SDL_windowsvideo.h b/src/video/windows/SDL_windowsvideo.h index 8a0edf5ae0243..0e9c50eb86932 100644 --- a/src/video/windows/SDL_windowsvideo.h +++ b/src/video/windows/SDL_windowsvideo.h @@ -28,8 +28,10 @@ #include "../SDL_sysvideo.h" #ifdef HAVE_DXGI_H +#ifndef __cplusplus #define CINTERFACE #define COBJMACROS +#endif #include <dxgi.h> #endif
diff --git a/.github/workflows/create-test-plan.py b/.github/workflows/create-test-plan.py index a137889c81a7d..8048e2bc3229f 100755 --- a/.github/workflows/create-test-plan.py +++ b/.github/workflows/create-test-plan.py @@ -381,9 +381,11 @@ def spec_to_job(spec: JobSpec, key: str, trackmem_symbol_names: bool) -> JobDeta match spec.msvc_arch: case MsvcArch.X86: job.cflags.append("/clang:-m32") + job.cxxflags.append("/clang:-m32") job.ldflags.append("/MACHINE:X86") case MsvcArch.X64: job.cflags.append("/clang:-m64") + job.cxxflags.append("/clang:-m64") job.ldflags.append("/MACHINE:X64") case _: raise ValueError(f"Unsupported clang-cl architecture (arch={spec.msvc_arch})")
GameInput v1 header changes On Windows, the `gameinput.h` file (taken from [NuGet](https://www.nuget.org/packages/Microsoft.GameInput)) is C++ instead of C. Since the source files that include it are C, the compiler tries to interpret the header file as C and errors. Also, there are other differences that make it incompatible with the v0 API on console
[ "https://github.com/libsdl-org/SDL/commit/53a4365c12dfbc6189e0e684d17f726300ae2621" ]
2025-04-11T19:03:32Z
https://github.com/libsdl-org/SDL/tree/f35a2736b708408bd5e6273255c03513cf60605a
[ "cmd /c '\"C:\\Program Files (x86)\\Microsoft Visual Studio\\2022\\BuildTools\\Common7\\Tools\\VsDevCmd.bat\" -arch=x64 && cmake --build build --parallel && cmake --install build --prefix prefix'" ]
[ "$env:SDL_TESTS_QUICK=\"1\"; ctest --test-dir build --output-on-failure -VV 2>&1 | Tee-Object -FilePath build\\test-output.log" ]
[ "type build\\test-output.log" ]
def parser(log: str) -> dict[str, str]:
    """Parse a captured test-run log and map each test name to a status.

    Recognizes per-test result lines from several frameworks/harnesses
    (CTest, pytest, unittest, Go test, GoogleTest, TAP, Jest, cargo test,
    plus a couple of harness-specific patterns). When the same test is
    reported more than once, statuses are merged with the precedence
    pass < skip < fail (the worst observed status wins).

    :param log: raw log text; may contain ANSI escapes and CR characters.
    :return: dict mapping test name -> "pass", "skip" or "fail".
    """
    import re

    def strip_ansi(s: str) -> str:
        # Remove CSI sequences, OSC sequences (terminated by BEL or ST),
        # and bare two-character ESC codes so they don't break the regexes.
        s = re.sub(r"\x1B\[[0-9;?]*[ -/]*[@-~]", "", s)
        s = re.sub(r"\x1B\][^\x07]*(\x07|\x1B\\)", "", s)  # OSC ... BEL or ST
        s = re.sub(r"\x1B[@-Z\\-_]", "", s)  # 2-char sequences
        return s

    log = strip_ansi(log).replace("\r", "")

    # Precedence for merging duplicate sightings of the same test.
    rank = {"pass": 1, "skip": 2, "fail": 3}

    def set_status(d: dict, name: str, status: str) -> None:
        # Record name -> status, keeping the highest-ranked status seen so far.
        if not name:
            return
        status = status.lower()
        if status not in rank:
            return
        prev = d.get(name)
        if prev is None or rank[status] > rank[prev]:
            d[name] = status

    def norm_status(s: str) -> str:
        # Map a framework-specific status word onto pass/skip/fail.
        s = s.strip().lower()
        if s in ("passed", "ok", "success", "succeeded", "pass"):
            return "pass"
        if s in ("skipped", "skip", "ignored", "xfail", "not run", "not-run"):
            return "skip"
        # Everything else counts as fail: failed, error, timeout,
        # xpass (unexpected pass), etc.
        return "fail"

    results: dict[str, str] = {}

    # -------------------------
    # 1) CTest per-test result lines (with fraction prefix)
    # Examples:
    #   1/10 Test #1: testname .................... Passed   0.01 sec
    #   2/10 Test #2: long name ................... ***Failed 0.02 sec
    ctest_frac_re = re.compile(
        r"^\s*\d+/\d+\s+Test\s+#\d+:\s+([^\n]+?)\s+(?:\.*\s*)?(?:\*{0,3}\s*)?\b(Passed|Failed|Not Run|Skipped|Timeout)\b",
        re.IGNORECASE | re.MULTILINE,
    )
    for m in ctest_frac_re.finditer(log):
        name = m.group(1).strip()
        status = norm_status(m.group(2))
        set_status(results, name, status)

    # 2) CTest per-test result lines (without fraction prefix)
    ctest_simple_re = re.compile(
        r"^\s*Test\s+#\d+:\s+([^\n]+?)\s+(?:\.*\s*)?(?:\*{0,3}\s*)?\b(Passed|Failed|Not Run|Skipped|Timeout)\b",
        re.IGNORECASE | re.MULTILINE,
    )
    for m in ctest_simple_re.finditer(log):
        name = m.group(1).strip()
        status = norm_status(m.group(2))
        set_status(results, name, status)

    # 3) CTest: "The following tests FAILED:" block
    # Accept various failure reasons in parentheses:
    #   N - name (Failed|Timeout|SEGFAULT|ChildKilled|OTHER_FAULT|Not Run|...)
    # Only scan AFTER the header so stray "N - x (y)" lines earlier in the
    # log are not misread as failures.
    failed_block_re = re.compile(
        r"^\s*\d+\s*-\s*(.+?)\s*\(([^)]+)\)",
        re.IGNORECASE | re.MULTILINE,
    )
    failed_header = re.search(r"The following tests\s+FAILED", log, re.IGNORECASE)
    if failed_header:
        for m in failed_block_re.finditer(log, failed_header.end()):
            name = m.group(1).strip()
            reason = m.group(2).strip().lower()
            status = "skip" if "not run" in reason else "fail"
            set_status(results, name, status)

    # 4) CTest: "The following tests did not run:" block (skips)
    did_not_run_header = re.search(r"The following tests\s+did\s+not\s+run\s*:", log, re.IGNORECASE)
    if did_not_run_header:
        # Skip the remainder of the header line itself before iterating;
        # otherwise the empty tail of the header line terminates the block
        # immediately and no "did not run" entries are ever collected.
        parts = log[did_not_run_header.end():].split("\n", 1)
        body = parts[1] if len(parts) > 1 else ""
        for line in body.splitlines():
            if not line.strip():
                break
            m = re.match(r"^\s*\d+\s*-\s*(.+?)\s*$", line)
            if m:
                set_status(results, m.group(1).strip(), "skip")
            elif re.match(r"^\S", line):
                # Stop once the "  N - name" pattern breaks (end of block).
                break

    # 5) Inner unit tests from an automation harness:
    #    >>> Test 'name': Passed
    inner_test_re = re.compile(
        r">>>\s*Test\s+'((?:[^']|\n)+?)'\s*:\s*(Passed|Failed|Skipped|Timeout|Error)",
        re.IGNORECASE,
    )
    for m in inner_test_re.finditer(log):
        name = re.sub(r"\s+", " ", m.group(1)).strip()
        status = norm_status(m.group(2))
        set_status(results, name, status)

    # 6) Pytest item lines: "file.py::test_name STATUS"
    pytest_item_re = re.compile(
        r"^\s*([^\s][^\n]*?::[^\s]+)\s+(PASSED|FAILED|ERROR|SKIPPED|XFAIL|XPASS)",
        re.IGNORECASE | re.MULTILINE,
    )
    for m in pytest_item_re.finditer(log):
        name = m.group(1).strip()
        status_word = m.group(2).upper()
        if status_word == "XPASS":
            # Unexpected pass counts as a failure.
            status = "fail"
        else:
            status = norm_status(status_word)
        set_status(results, name, status)

    # Pytest summary-style failures: "FAILED file::test - ..." / "ERROR file::test - ..."
    pytest_sum_fail_re = re.compile(r"^\s*(FAILED|ERROR)\s+([^\s].*?::[^\s]+)\b", re.MULTILINE)
    for m in pytest_sum_fail_re.finditer(log):
        name = m.group(2).strip()
        set_status(results, name, "fail")

    # 7) unittest patterns:
    # a) "test_method (package.ClassName) ... ok/FAIL/ERROR/skipped"
    unittest_paren_re = re.compile(
        r"^\s*(\S+)\s*\(([^)]+)\)\s+\.\.\.\s+(ok|FAIL|ERROR|skipped|SKIPPED)",
        re.IGNORECASE | re.MULTILINE,
    )
    for m in unittest_paren_re.finditer(log):
        method = m.group(1).strip()
        cls = m.group(2).strip()
        name = f"{cls}.{method}"
        status = norm_status(m.group(3))
        set_status(results, name, status)

    # b) "ClassName.test_method ... ok/FAIL/ERROR/skipped"
    unittest_dots_re = re.compile(
        r"^\s*([A-Za-z_]\w+(?:\.[A-Za-z_]\w+)+)\s+\.\.\.\s+(ok|FAIL|ERROR|skipped|SKIPPED)",
        re.IGNORECASE | re.MULTILINE,
    )
    for m in unittest_dots_re.finditer(log):
        name = m.group(1).strip()
        status = norm_status(m.group(2))
        set_status(results, name, status)

    # 8) Go test: "--- PASS: TestName", etc.
    go_test_re = re.compile(r"^\s*---\s+(PASS|FAIL|SKIP):\s+([^\s(]+)", re.MULTILINE)
    for m in go_test_re.finditer(log):
        status = norm_status(m.group(1))
        name = m.group(2).strip()
        set_status(results, name, status)

    # 9) GoogleTest: "[  OK  ] Suite.Test" / "[  FAILED  ] Suite.Test" / "[  SKIPPED ] Suite.Test"
    gtest_re = re.compile(
        r"^\s*\[\s*(OK|PASSED|FAILED|SKIPPED)\s*\]\s+([^\n]+)",
        re.IGNORECASE | re.MULTILINE,
    )
    for m in gtest_re.finditer(log):
        status = norm_status(m.group(1))
        # strip trailing time "(xx ms)" if present
        name = re.sub(r"\s+\([^)]*\)\s*$", "", m.group(2)).strip()
        # Skip GoogleTest summary lines such as "[  PASSED  ] 2 tests." or
        # "[  FAILED  ] 1 test, listed below:" — they are counts, not names.
        if re.match(r"^\d+\s+tests?\b", name):
            continue
        set_status(results, name, status)

    # 10) TAP: "ok 1 - name", "not ok 2 - name", with optional "# SKIP"
    tap_re = re.compile(
        r"^\s*(ok|not ok)\s+\d+\s*(?:-\s*)?(.+?)(?:\s+#\s*(SKIP|TODO).*)?$",
        re.IGNORECASE | re.MULTILINE,
    )
    for m in tap_re.finditer(log):
        oknot = m.group(1).lower()
        name = m.group(2).strip()
        directive = (m.group(3) or "").lower()
        if oknot == "ok":
            status = "skip" if directive in ("skip", "todo") else "pass"
        else:
            status = "fail"
        set_status(results, name, status)

    # 11) Jest symbol-style per-test assertions: "✓ name", "✕ name", "○ name"
    # Avoid matching file-level "PASS path/file" lines.
    jest_sym_re = re.compile(r"^\s*([✓✔✕×○])\s+(.+)$", re.MULTILINE)
    for m in jest_sym_re.finditer(log):
        sym = m.group(1)
        name = m.group(2).strip()
        if sym in ("✓", "✔"):
            status = "pass"
        elif sym in ("✕", "×"):
            status = "fail"
        else:
            status = "skip"
        set_status(results, name, status)

    # 12) Rust cargo test: "test path::name ... ok/FAILED/ignored"
    rust_re = re.compile(r"^\s*test\s+([^\s]+)\s+\.\.\.\s+(ok|FAILED|ignored)", re.IGNORECASE | re.MULTILINE)
    for m in rust_re.finditer(log):
        name = m.group(1).strip()
        status = norm_status(m.group(2))
        set_status(results, name, status)

    # 13) Repro filter hints; treat as failed tests if shown.
    # Broaden to allow dots, dashes, colons, and underscores.
    repro_filter_re = re.compile(r"--filter\s+([^\s]+)")
    for m in repro_filter_re.finditer(log):
        name = m.group(1).strip()
        set_status(results, name, "fail")

    return results
[ "testplatform-no-simd" ]
[ "video_getFullscreenDisplayModes", "round_roundNumberCases", "audio_enumerateAndNameAudioDevicesNegativeTest ts", "stdlib_strnlen", "testqsort", "hints_getHint", "pixels_saveLoadBMP", "log10_baseCases", "exp_infCases", "pow_baseNZeroExpOddCases", "asin_outOfDomainCases", "audio_buildAudioStrea...
starryzhang/sweb.eval.win.libsdl-org_1776_sdl-12806
microsoft/ebpf-for-windows
4117
microsoft__ebpf-for-windows-4117
C
[ "3602", "4129" ]
30fdb1f81aa7d4eaceb26a72d72920aeac79bebb
diff --git a/netebpfext/net_ebpf_ext.c b/netebpfext/net_ebpf_ext.c index 451fb361b4..05e70e467e 100644 --- a/netebpfext/net_ebpf_ext.c +++ b/netebpfext/net_ebpf_ext.c @@ -34,12 +34,6 @@ static bool _net_ebpf_xdp_providers_registered = false; static bool _net_ebpf_bind_providers_registered = false; static bool _net_ebpf_sock_addr_providers_registered = false; static bool _net_ebpf_sock_ops_providers_registered = false; -#if !defined(NDEBUG) -// Global objects used to store filter contexts that are being cleaned up. This is currently only used in debug -// contexts. -EX_SPIN_LOCK _net_ebpf_filter_zombie_list_lock = {0}; -_Guarded_by_(_net_ebpf_filter_zombie_list_lock) static LIST_ENTRY _net_ebpf_filter_zombie_list = {0}; -#endif static net_ebpf_ext_sublayer_info_t _net_ebpf_ext_sublayers[] = { {&EBPF_DEFAULT_SUBLAYER, L"EBPF Sub-Layer", L"Sub-Layer for use by eBPF callouts", 0, SUBLAYER_WEIGHT_MAXIMUM}, @@ -910,10 +904,6 @@ net_ebpf_ext_register_providers() } _net_ebpf_sock_ops_providers_registered = true; -#if !defined(NDEBUG) - InitializeListHead(&_net_ebpf_filter_zombie_list); -#endif - Exit: if (!NT_SUCCESS(status)) { net_ebpf_ext_unregister_providers(); @@ -1008,24 +998,4 @@ net_ebpf_ext_remove_client_context( } ExReleaseSpinLockExclusive(&filter_context->lock, old_irql); -} - -#if !defined(NDEBUG) -void -net_ebpf_ext_add_filter_context_to_zombie_list(_Inout_ net_ebpf_extension_wfp_filter_context_t* filter_context) -{ - KIRQL old_irql = ExAcquireSpinLockExclusive(&_net_ebpf_filter_zombie_list_lock); - InsertHeadList(&_net_ebpf_filter_zombie_list, &filter_context->link); - ExReleaseSpinLockExclusive(&_net_ebpf_filter_zombie_list_lock, old_irql); -} - -void -net_ebpf_ext_remove_filter_context_from_zombie_list(_Inout_ net_ebpf_extension_wfp_filter_context_t* filter_context) -{ - if (!IsListEmpty(&filter_context->link)) { - KIRQL old_irql = ExAcquireSpinLockExclusive(&_net_ebpf_filter_zombie_list_lock); - RemoveEntryList(&filter_context->link); - 
ExReleaseSpinLockExclusive(&_net_ebpf_filter_zombie_list_lock, old_irql); - } -} -#endif \ No newline at end of file +} \ No newline at end of file diff --git a/netebpfext/net_ebpf_ext.h b/netebpfext/net_ebpf_ext.h index f2483026c1..7e3c716755 100644 --- a/netebpfext/net_ebpf_ext.h +++ b/netebpfext/net_ebpf_ext.h @@ -150,21 +150,20 @@ typedef struct _net_ebpf_extension_wfp_filter_context #define PRAGMA_WARNING_SUPPRESS_26100 _Pragma("warning(suppress: 26100)") #define PRAGMA_WARNING_POP _Pragma("warning(pop)") -#define CLEAN_UP_FILTER_CONTEXT(filter_context) \ - ASSERT((filter_context) != NULL); \ - net_ebpf_extension_hook_provider_remove_filter_context_from_zombie_list((filter_context)); \ - if ((filter_context)->filter_ids != NULL) { \ - ExFreePool((filter_context)->filter_ids); \ - } \ - PRAGMA_WARNING_PUSH \ - PRAGMA_WARNING_SUPPRESS_26100 \ - if ((filter_context)->client_contexts != NULL) { \ - ExFreePool((filter_context)->client_contexts); \ - } \ - PRAGMA_WARNING_POP \ - if ((filter_context)->wfp_engine_handle != NULL) { \ - FwpmEngineClose((filter_context)->wfp_engine_handle); \ - } \ +#define CLEAN_UP_FILTER_CONTEXT(filter_context) \ + ASSERT((filter_context) != NULL); \ + if ((filter_context)->filter_ids != NULL) { \ + ExFreePool((filter_context)->filter_ids); \ + } \ + PRAGMA_WARNING_PUSH \ + PRAGMA_WARNING_SUPPRESS_26100 \ + if ((filter_context)->client_contexts != NULL) { \ + ExFreePool((filter_context)->client_contexts); \ + } \ + PRAGMA_WARNING_POP \ + if ((filter_context)->wfp_engine_handle != NULL) { \ + FwpmEngineClose((filter_context)->wfp_engine_handle); \ + } \ ExFreePool((filter_context)); #define REFERENCE_FILTER_CONTEXT(filter_context) \ @@ -389,15 +388,4 @@ net_ebpf_ext_remove_client_context( ebpf_result_t net_ebpf_ext_add_client_context( _Inout_ net_ebpf_extension_wfp_filter_context_t* filter_context, - _In_ const struct _net_ebpf_extension_hook_client* hook_client); - -#if !defined(NDEBUG) -/** - * @brief Add the filter context to the 
zombie list. - * - * @param filter_context Filter context to add to the zombie list. - */ -void -net_ebpf_ext_add_filter_context_to_zombie_list(_Inout_ net_ebpf_extension_wfp_filter_context_t* filter_context); - -#endif + _In_ const struct _net_ebpf_extension_hook_client* hook_client); \ No newline at end of file diff --git a/netebpfext/net_ebpf_ext_hook_provider.c b/netebpfext/net_ebpf_ext_hook_provider.c index fd25bf69d4..24a994ceeb 100644 --- a/netebpfext/net_ebpf_ext_hook_provider.c +++ b/netebpfext/net_ebpf_ext_hook_provider.c @@ -10,6 +10,7 @@ typedef struct _net_ebpf_ext_hook_client_rundown { EX_RUNDOWN_REF protection; bool rundown_occurred; + bool rundown_initialized; } net_ebpf_ext_hook_rundown_t; struct _net_ebpf_extension_hook_provider; @@ -41,9 +42,6 @@ typedef struct _net_ebpf_extension_hook_provider const void* custom_data; ///< Opaque pointer to hook specific data associated for this provider. _Guarded_by_(lock) LIST_ENTRY filter_context_list; ///< Linked list of filter contexts that are attached to this provider. - EX_PUSH_LOCK zombie_list_lock; ///< Lock for zombie filters list. - _Guarded_by_(zombie_list_lock) - LIST_ENTRY zombie_filter_context_list; ///< Linked list of filter contexts that are attached to this provider. } net_ebpf_extension_hook_provider_t; typedef struct _net_ebpf_extension_invoke_programs_parameters @@ -54,6 +52,21 @@ typedef struct _net_ebpf_extension_invoke_programs_parameters ebpf_result_t result; } net_ebpf_extension_invoke_programs_parameters_t; +/** + * @brief Initialize the hook rundown state. + * + * @param[in, out] rundown Pointer to the rundown object to initialize. + */ +static void +_ebpf_ext_init_hook_rundown(_Inout_ net_ebpf_ext_hook_rundown_t* rundown) +{ + ASSERT(rundown->rundown_initialized == FALSE); + + ExInitializeRundownProtection(&rundown->protection); + rundown->rundown_occurred = FALSE; + rundown->rundown_initialized = TRUE; +} + /** * @brief Initialize the hook client rundown state. 
* @@ -81,8 +94,7 @@ _ebpf_ext_attach_init_rundown(net_ebpf_extension_hook_client_t* hook_client) } // Initialize the rundown and disable new references. - ExInitializeRundownProtection(&rundown->protection); - rundown->rundown_occurred = FALSE; + _ebpf_ext_init_hook_rundown(rundown); Exit: NET_EBPF_EXT_RETURN_NTSTATUS(status); @@ -95,10 +107,11 @@ _ebpf_ext_attach_init_rundown(net_ebpf_extension_hook_client_t* hook_client) * */ static void -_ebpf_ext_attach_wait_for_rundown(_Inout_ net_ebpf_ext_hook_rundown_t* rundown) +_ebpf_ext_wait_for_rundown(_Inout_ net_ebpf_ext_hook_rundown_t* rundown) { NET_EBPF_EXT_LOG_ENTRY(); + ASSERT(rundown->rundown_initialized == TRUE); ExWaitForRundownProtectionRelease(&rundown->protection); rundown->rundown_occurred = TRUE; @@ -138,7 +151,7 @@ _net_ebpf_extension_detach_client_completion(_In_ DEVICE_OBJECT* device_object, // Issue: https://github.com/microsoft/ebpf-for-windows/issues/1854 // Wait for any in progress callbacks to complete. - _ebpf_ext_attach_wait_for_rundown(&hook_client->rundown); + _ebpf_ext_wait_for_rundown(&hook_client->rundown); IoFreeWorkItem(work_item); @@ -148,26 +161,42 @@ _net_ebpf_extension_detach_client_completion(_In_ DEVICE_OBJECT* device_object, NET_EBPF_EXT_LOG_EXIT(); } +_Must_inspect_result_ bool +_net_ebpf_ext_enter_rundown(_Inout_ net_ebpf_ext_hook_rundown_t* rundown) +{ + ASSERT(rundown->rundown_initialized == TRUE); + return ExAcquireRundownProtection(&rundown->protection); +} + +void +_net_ebpf_ext_leave_rundown(_Inout_ net_ebpf_ext_hook_rundown_t* rundown) +{ + ASSERT(rundown->rundown_initialized == TRUE); + ExReleaseRundownProtection(&rundown->protection); +} + _Must_inspect_result_ bool net_ebpf_extension_hook_client_enter_rundown(_Inout_ net_ebpf_extension_hook_client_t* hook_client) { - net_ebpf_ext_hook_rundown_t* rundown = &hook_client->rundown; - bool status = ExAcquireRundownProtection(&rundown->protection); - return status; + return _net_ebpf_ext_enter_rundown(&hook_client->rundown); } 
void net_ebpf_extension_hook_client_leave_rundown(_Inout_ net_ebpf_extension_hook_client_t* hook_client) { - net_ebpf_ext_hook_rundown_t* rundown = &hook_client->rundown; - ExReleaseRundownProtection(&rundown->protection); + _net_ebpf_ext_leave_rundown(&hook_client->rundown); +} + +_Must_inspect_result_ bool +net_ebpf_extension_hook_provider_enter_rundown(_Inout_ net_ebpf_extension_hook_provider_t* provider_context) +{ + return _net_ebpf_ext_enter_rundown(&provider_context->rundown); } void net_ebpf_extension_hook_provider_leave_rundown(_Inout_ net_ebpf_extension_hook_provider_t* provider_context) { - net_ebpf_ext_hook_rundown_t* rundown = &provider_context->rundown; - ExReleaseRundownProtection(&rundown->protection); + _net_ebpf_ext_leave_rundown(&provider_context->rundown); } const ebpf_extension_data_t* @@ -547,7 +576,7 @@ _net_ebpf_extension_hook_provider_attach_client( // No matching filter context found. Need to create a new filter context. // Acquire rundown reference on provider context. This will be released when the filter context is deleted. - rundown_acquired = ExAcquireRundownProtection(&local_provider_context->rundown.protection); + rundown_acquired = net_ebpf_extension_hook_provider_enter_rundown(local_provider_context); if (!rundown_acquired) { NET_EBPF_EXT_LOG_MESSAGE( NET_EBPF_EXT_TRACELOG_LEVEL_ERROR, @@ -600,7 +629,7 @@ _net_ebpf_extension_hook_provider_attach_client( if (status != STATUS_SUCCESS) { if (rundown_acquired) { - ExReleaseRundownProtection(&local_provider_context->rundown.protection); + net_ebpf_extension_hook_provider_leave_rundown(local_provider_context); } } @@ -615,10 +644,6 @@ _Requires_exclusive_lock_held_(provider_context->lock) static void _net_ebpf_ext // Remove the list entry from the provider's list of filter contexts. RemoveEntryList(&filter_context->link); - // Insert the list entry to the list of zombie filter contexts. 
- ACQUIRE_PUSH_LOCK_EXCLUSIVE(&provider_context->zombie_list_lock); - InsertTailList(&provider_context->zombie_filter_context_list, &filter_context->link); - RELEASE_PUSH_LOCK_EXCLUSIVE(&provider_context->zombie_list_lock); // Release the filter context. provider_context->dispatch.delete_filter_context(filter_context); @@ -716,7 +741,7 @@ net_ebpf_extension_hook_provider_unregister( } // Wait for rundown reference to become 0. This will ensure all filter contexts, hence all // filter are cleaned up. - _ebpf_ext_attach_wait_for_rundown(&provider_context->rundown); + _ebpf_ext_wait_for_rundown(&provider_context->rundown); ExFreePool(provider_context); } NET_EBPF_EXT_LOG_EXIT(); @@ -735,6 +760,9 @@ net_ebpf_extension_hook_provider_register( NPI_PROVIDER_CHARACTERISTICS* characteristics; NET_EBPF_EXT_LOG_ENTRY(); + + *provider_context = NULL; + local_provider_context = (net_ebpf_extension_hook_provider_t*)ExAllocatePoolUninitialized( NonPagedPoolNx, sizeof(net_ebpf_extension_hook_provider_t), NET_EBPF_EXTENSION_POOL_TAG); NET_EBPF_EXT_BAIL_ON_ALLOC_FAILURE_STATUS( @@ -743,8 +771,7 @@ net_ebpf_extension_hook_provider_register( memset(local_provider_context, 0, sizeof(net_ebpf_extension_hook_provider_t)); ExInitializePushLock(&local_provider_context->lock); InitializeListHead(&local_provider_context->filter_context_list); - ExInitializePushLock(&local_provider_context->zombie_list_lock); - InitializeListHead(&local_provider_context->zombie_filter_context_list); + _ebpf_ext_init_hook_rundown(&local_provider_context->rundown); characteristics = &local_provider_context->characteristics; characteristics->Length = sizeof(NPI_PROVIDER_CHARACTERISTICS); @@ -771,10 +798,6 @@ net_ebpf_extension_hook_provider_register( goto Exit; } - // Initialize rundown protection for the provider context. 
- ExInitializeRundownProtection(&local_provider_context->rundown.protection); - local_provider_context->rundown.rundown_occurred = FALSE; - *provider_context = local_provider_context; local_provider_context = NULL; @@ -785,15 +808,3 @@ net_ebpf_extension_hook_provider_register( NET_EBPF_EXT_RETURN_NTSTATUS(status); } - -void -net_ebpf_extension_hook_provider_remove_filter_context_from_zombie_list( - _Inout_ net_ebpf_extension_wfp_filter_context_t* filter_context) -{ - net_ebpf_extension_hook_provider_t* local_provider_context = - (net_ebpf_extension_hook_provider_t*)filter_context->provider_context; - ASSERT(local_provider_context != NULL); - ACQUIRE_PUSH_LOCK_EXCLUSIVE(&local_provider_context->zombie_list_lock); - RemoveEntryList(&filter_context->link); - RELEASE_PUSH_LOCK_EXCLUSIVE(&local_provider_context->zombie_list_lock); -} diff --git a/netebpfext/net_ebpf_ext_hook_provider.h b/netebpfext/net_ebpf_ext_hook_provider.h index 9dd6a63dec..2319d51db5 100644 --- a/netebpfext/net_ebpf_ext_hook_provider.h +++ b/netebpfext/net_ebpf_ext_hook_provider.h @@ -208,12 +208,3 @@ net_ebpf_extension_hook_expand_stack_and_invoke_programs( */ net_ebpf_extension_hook_attach_capability_t net_ebpf_extension_hook_provider_get_attach_capability(_In_ const net_ebpf_extension_hook_provider_t* provider_context); - -/** - * @brief Remove the filter context from the zombie list. - * - * @param filter_context Filter context to remove from the zombie list. - */ -void -net_ebpf_extension_hook_provider_remove_filter_context_from_zombie_list( - _Inout_ net_ebpf_extension_wfp_filter_context_t* filter_context);
diff --git a/scripts/execute_ebpf_cicd_tests.ps1 b/scripts/execute_ebpf_cicd_tests.ps1 index 396072e2d2..535a0868c9 100644 --- a/scripts/execute_ebpf_cicd_tests.ps1 +++ b/scripts/execute_ebpf_cicd_tests.ps1 @@ -77,8 +77,10 @@ $Job = Start-Job -ScriptBlock { if ($_.CategoryInfo.Reason -eq "TimeoutException") { Generate-KernelDumpOnVM($TestVMName) } - } + # Throw to ensure the job is marked as failed. + throw $_.Exception.Message + } Pop-Location } -ArgumentList ( diff --git a/scripts/run_driver_tests.psm1 b/scripts/run_driver_tests.psm1 index 6e0ff61e2f..ae5f7c2686 100644 --- a/scripts/run_driver_tests.psm1 +++ b/scripts/run_driver_tests.psm1 @@ -292,6 +292,7 @@ function Invoke-Test Write-Log "Test `"$TestName $TestArgs`" Passed" -ForegroundColor Green Write-Log "`n==============================`n" + } # Function to create a tuple with default values for Arguments and Timeout @@ -451,7 +452,7 @@ function Invoke-ConnectRedirectTest function Invoke-CICDStressTests { param([parameter(Mandatory = $true)][bool] $VerboseLogs, - [parameter(Mandatory = $false)][int] $TestHangTimeout = 3600, + [parameter(Mandatory = $false)][int] $TestHangTimeout = (60*60), [parameter(Mandatory = $false)][string] $UserModeDumpFolder = "C:\Dumps", [parameter(Mandatory = $false)][bool] $NeedKernelDump = $true, [parameter(Mandatory = $false)][bool] $RestartExtension = $false) diff --git a/tests/stress/km/stress_tests_km.cpp b/tests/stress/km/stress_tests_km.cpp index 53a9e00edd..1dcd622670 100644 --- a/tests/stress/km/stress_tests_km.cpp +++ b/tests/stress/km/stress_tests_km.cpp @@ -23,6 +23,69 @@ static const std::map<std::string, test_program_attributes> _test_program_info = BPF_PROG_TYPE_UNSPEC, EBPF_EXECUTION_ANY}}}; +// Structure to store bpf_object_ptr elements. A fixed-size table of these entries is shared between the 'creator', +// 'attacher' and the 'destroyer' threads. 
+struct object_table_entry +{ + std::unique_ptr<std::mutex> lock{nullptr}; + _Guarded_by_(lock) bool available{true}; + _Guarded_by_(lock) bpf_object_ptr object{nullptr}; + _Guarded_by_(lock) bool loaded{false}; + bool attach{false}; + + // The following fields are for debugging this test itself. + uint32_t index{0}; + uint32_t reuse_count{0}; + uint32_t tag{0}; +}; + +// Possible roles for each thread. A thread is assigned a specific role at creation and it does not change thereafter. +// +// 'Creator' threads create as many ebpf program objects as they can, gated by the size of the object_table array. +// +// The 'Attacher' threads will alternatively 'attach' or 'detach' the program objects created by the 'Creator' threads +// w/o considering if the objects have already been attached or detached. Any errors returned by the ebpfapi are +// ignored. +// +// 'Destroyer' threads close as many 'opened' eBPF program objects as then can. These threads synchronize access to +// the object table entries with the 'Creator' and 'Attacher' threads as the destroyer threads can only destroy program +// objects that were created by the creator threads in the first place. +// +// The intent here is to cause the maximum nondeterministic multi-threaded stress scenarios as possible. Note that we +// do not care about user mode failures in, or errors returned from, ebpfapi and focus only on exercing the in-kernel +// eBPF components' ability to deal with such situations w/o causing a kernel hang or crash. The primary test goal here +// is to ensure that such races do not cause hangs or crashes in the in-kernel eBPF sub-system components +// (ebpfcore, netebpfext drivers). + +enum class thread_role_type : uint32_t +{ + ROLE_NOT_SET = 0, + CREATOR, + ATTACHER, + DESTROYER, + MONITOR_IPV4, + MONITOR_IPV6 +}; + +// Context for each test thread. This is a superset and field usage varies by test. 
+struct thread_context +{ + std::string program_name{}; + std::string file_name{}; + bool is_native_program{}; + std::string map_name{}; + thread_role_type role{}; + uint32_t thread_index{}; + uint32_t compartment_id{}; + uint32_t duration_minutes{}; + bool extension_restart_enabled{}; + fd_t map_fd; + fd_t program_fd; + std::vector<object_table_entry>& object_table; + std::string extension_name{}; + bool succeeded{true}; +}; + // This call is called by the common test initialization code to get a list of programs supported by the user mode // or kernel mode test suites. (For example, some programs could be meant for kernel mode stress testing only.) const std::vector<std::string> @@ -62,7 +125,7 @@ _km_test_init() _global_test_control_info.programs.begin(), _global_test_control_info.programs.end(), program) == _global_test_control_info.programs.end()) { - LOG_INFO("ERROR: Uexpected program: {}", program); + LOG_ERROR("ERROR: Uexpected program: {}", program); REQUIRE(0); } } @@ -165,7 +228,10 @@ _restart_extension(const std::string& extension_name, uint32_t timeout) SC_HANDLE scm_handle = nullptr; SC_HANDLE service_handle = nullptr; - REQUIRE(extension_name.size() != 0); + if (extension_name.size() == 0) { + LOG_ERROR("FATAL ERROR: Extension name is empty."); + return false; + } // Get a handle to the SCM database. 
scm_handle = OpenSCManager(nullptr, nullptr, SC_MANAGER_ALL_ACCESS); @@ -211,7 +277,10 @@ _restart_extension(const std::string& extension_name, uint32_t timeout) static std::thread _start_extension_restart_thread( - const std::string& extension_name, uint32_t restart_delay_ms, uint32_t thread_lifetime_minutes) + thread_context& context, + const std::string& extension_name, + uint32_t restart_delay_ms, + uint32_t thread_lifetime_minutes) { return std::thread( [&](uint32_t local_restart_delay_ms, uint32_t local_thread_lifetime_minutes) { @@ -235,7 +304,8 @@ _start_extension_restart_thread( constexpr uint32_t RESTART_TIMEOUT_SECONDS = 10; LOG_VERBOSE("Toggling extension state for {} extension...", extension_name); if (!_restart_extension(extension_name, RESTART_TIMEOUT_SECONDS)) { - exit(-1); + LOG_ERROR("FATAL ERROR: Failed to restart extension: {}", extension_name); + context.succeeded = false; } LOG_VERBOSE( @@ -248,67 +318,6 @@ _start_extension_restart_thread( thread_lifetime_minutes); } -// Structure to store bpf_object_ptr elements. A fixed-size table of these entries is shared between the 'creator', -// 'attacher' and the 'destroyer' threads. -struct object_table_entry -{ - std::unique_ptr<std::mutex> lock{nullptr}; - _Guarded_by_(lock) bool available { true }; - _Guarded_by_(lock) bpf_object_ptr object { nullptr }; - _Guarded_by_(lock) bool loaded { false }; - bool attach{false}; - - // The following fields are for debugging this test itself. - uint32_t index{0}; - uint32_t reuse_count{0}; - uint32_t tag{0}; -}; - -// Possible roles for each thread. A thread is assigned a specific role at creation and it does not change thereafter. -// -// 'Creator' threads create as many ebpf program objects as they can, gated by the size of the object_table array. -// -// The 'Attacher' threads will alternatively 'attach' or 'detach' the program objects created by the 'Creator' threads -// w/o considering if the objects have already been attached or detached. 
Any errors returned by the ebpfapi are -// ignored. -// -// 'Destroyer' threads close as many 'opened' eBPF program objects as then can. These threads synchronize access to -// the object table entries with the 'Creator' and 'Attacher' threads as the destroyer threads can only destroy program -// objects that were created by the creator threads in the first place. -// -// The intent here is to cause the maximum nondeterministic multi-threaded stress scenarios as possible. Note that we -// do not care about user mode failures in, or errors returned from, ebpfapi and focus only on exercing the in-kernel -// eBPF components' ability to deal with such situations w/o causing a kernel hang or crash. The primary test goal here -// is to ensure that such races do not cause hangs or crashes in the in-kernel eBPF sub-system components -// (ebpfcore, netebpfext drivers). - -enum class thread_role_type : uint32_t -{ - ROLE_NOT_SET = 0, - CREATOR, - ATTACHER, - DESTROYER, - MONITOR_IPV4, - MONITOR_IPV6 -}; - -// Context for each test thread. This is a superset and field usage varies by test. -struct thread_context -{ - std::string program_name{}; - std::string file_name{}; - bool is_native_program{}; - std::string map_name{}; - thread_role_type role{}; - uint32_t thread_index{}; - uint32_t compartment_id{}; - uint32_t duration_minutes{}; - bool extension_restart_enabled{}; - fd_t map_fd; - fd_t program_fd; - std::vector<object_table_entry>& object_table; -}; - static void _do_creator_work(thread_context& context, std::time_t endtime_seconds) { @@ -350,13 +359,13 @@ _do_creator_work(thread_context& context, std::time_t endtime_seconds) } LOG_ERROR( - "(CREATOR)[{}][{}] - FATAL ERROR: bpf_object__open() failed. errno: {}", + "(CREATOR)[{}][{}] - FATAL ERROR: bpf_object__open() failed for {}. 
errno: {}", context.thread_index, entry.index, + context.file_name.c_str(), errno); - // We can't use a REQUIRE here as it invokes the 'catch' handler below and gives a misleading - // error message. + context.succeeded = false; exit(-1); } @@ -379,10 +388,13 @@ _do_creator_work(thread_context& context, std::time_t endtime_seconds) // program objects if they don't exist in the first place, so there's no point in letting the test // continue execution. LOG_ERROR( - "(CREATOR)[{}][{}] - FATAL ERROR: Unexpected exception caught (bpf_object__open). errno: {}", + "(CREATOR)[{}][{}] - FATAL ERROR: Unexpected exception caught (bpf_object__open). file: {} " + "errno: {}", context.thread_index, entry.index, + context.file_name.c_str(), errno); + context.succeeded = false; exit(-1); } LOG_VERBOSE("(CREATOR)[{}][{}] - Object created.", context.thread_index, entry.index); @@ -418,16 +430,23 @@ _do_creator_work(thread_context& context, std::time_t endtime_seconds) if (!context.is_native_program) { LOG_ERROR( - "(CREATOR)[{}][{}] - FATAL ERROR: bpf_object__load() failed. result: {}, errno: {}", + "(CREATOR)[{}][{}] - FATAL ERROR: bpf_object__load() failed. result: {}, errno: {} " + "progname: {}", context.thread_index, entry.index, result, - errno); + errno, + context.file_name.c_str()); - // We can't use a REQUIRE here as it invokes the 'catch' handler below and gives a - // misleading error message. + context.succeeded = false; exit(-1); } + } else { + LOG_VERBOSE( + "(CREATOR)[{}][{}] - bpf_object__load() succeeded progname: {}", + context.thread_index, + entry.index, + context.file_name.c_str()); } } catch (...) { @@ -442,14 +461,21 @@ _do_creator_work(thread_context& context, std::time_t endtime_seconds) // program objects if they don't exist in the first place, so there's no point in letting the test // continue execution. LOG_ERROR( - "(CREATOR)[{}][{}] - FATAL ERROR: Unexpected exception caught (bpf_object__load). 
errno: {}", + "(CREATOR)[{}][{}] - FATAL ERROR: Unexpected exception caught (bpf_object__load). errno: {} " + "filename: {}", context.thread_index, entry.index, - errno); + errno, + context.file_name.c_str()); + context.succeeded = false; exit(-1); } entry.loaded = true; - LOG_VERBOSE("(CREATOR)[{}][{}] - Object loaded.", context.thread_index, entry.index); + LOG_VERBOSE( + "(CREATOR)[{}][{}][{}] - Object loaded.", + context.thread_index, + entry.index, + context.file_name.c_str()); } } } @@ -601,9 +627,12 @@ _test_thread_function(thread_context& context) _do_creator_work(context, endtime_seconds); } else if (context.role == thread_role_type::ATTACHER) { _do_attacher_work(context, endtime_seconds); - } else { - REQUIRE(context.role == thread_role_type::DESTROYER); + } else if (context.role == thread_role_type::DESTROYER) { _do_destroyer_work(context, endtime_seconds); + } else { + LOG_ERROR("FATAL ERROR: Unknown thread role: {}", (uint32_t)context.role); + context.succeeded = false; + exit(-1); } timenow = sc::now(); @@ -618,22 +647,124 @@ _test_thread_function(thread_context& context) context.thread_index); } -static _Must_inspect_result_ std::string -_make_unique_file_copy(const std::string& file_name, uint32_t token) +static std::string +_generate_random_string() +{ + size_t string_length = 10; + const std::string characters = "0123456789abcdefghijklmnopqrstuvwxyz"; + + std::string random_string; + random_string.reserve(string_length); + for (size_t i = 0; i < string_length; ++i) { + random_string += characters[rand() % characters.size()]; + } + + return random_string; +} + +static std::string +_get_unique_file_name(const std::string& file_name) { // Generate the new (unique) file name. 
std::filesystem::path file_spec = file_name; - std::string new_file_name = file_spec.stem().string(); - new_file_name += (std::to_string(token) + file_spec.extension().string()); - REQUIRE(new_file_name.size() != file_name.size()); + return (file_spec.stem().string() + "_" + _generate_random_string() + file_spec.extension().string()); +} + +static _Must_inspect_result_ std::string +_make_unique_file_copy(const std::string& file_name) +{ + uint32_t max_retries = 10; + while (max_retries--) { + try { + std::string new_file_name = _get_unique_file_name(file_name); + bool result = + std::filesystem::copy_file(file_name, new_file_name, std::filesystem::copy_options::overwrite_existing); + if (result) { + LOG_VERBOSE("Copied {} to {}", file_name, new_file_name); + return new_file_name; + } else { + LOG_ERROR("Failed to copy {} to {}", file_name, new_file_name); + if (max_retries == 0) { + LOG_ERROR("Max retries exceeded."); + break; + } + } + } catch (...) { + LOG_ERROR("Exception caught while copying {} to a unique file name.", file_name); + if (max_retries == 0) { + LOG_ERROR("Max retries exceeded."); + break; + } + } + } + + LOG_ERROR("Failed to copy {} to a unique file name.", file_name); + REQUIRE(0); + return ""; +} + +static void +configure_extension_restart( + const test_control_info& test_control_info, + const std::vector<std::string>& extension_names, + std::vector<std::thread>& extension_restart_thread_table, + std::vector<thread_context>& extension_restart_thread_context_table, + std::vector<object_table_entry>& object_table) +{ + for (uint32_t i = 0; i < extension_names.size(); i++) { + thread_context context_entry = { + {}, {}, false, {}, thread_role_type::ROLE_NOT_SET, 0, 0, 0, false, 0, 0, object_table}; + context_entry.extension_name = extension_names[i]; + extension_restart_thread_context_table.emplace_back(std::move(context_entry)); + + extension_restart_thread_table.emplace_back(std::move(_start_extension_restart_thread( + 
std::ref(extension_restart_thread_context_table.back()), + extension_names[i], + test_control_info.extension_restart_delay_ms, + test_control_info.duration_minutes))); + } +} + +static void +wait_and_verify_test_threads( + const test_control_info& test_control_info, + std::vector<std::thread>& thread_table, + std::vector<thread_context>& thread_context_table, + std::vector<std::thread>& extension_restart_thread_table, + std::vector<thread_context>& extension_restart_thread_context_table) +{ + // Wait for all test threads + LOG_VERBOSE("waiting on {} test threads...", thread_table.size()); + for (auto& t : thread_table) { + t.join(); + } - // Make a copy. - LOG_VERBOSE("Copying {} to {}", file_name, new_file_name); - auto result = - std::filesystem::copy_file(file_name, new_file_name, std::filesystem::copy_options::overwrite_existing); - REQUIRE(result == true); + // Wait for all extension restart threads + if (test_control_info.extension_restart_enabled) { + LOG_VERBOSE("waiting on {} extension restart threads...", extension_restart_thread_table.size()); + for (auto& t : extension_restart_thread_table) { + t.join(); + } + } + + // Check if all test threads succeeded. + for (const auto& context : thread_context_table) { + if (!context.succeeded) { + LOG_ERROR( + "FATAL ERROR: Test thread failed. role: {}, index: {}", (uint32_t)context.role, context.thread_index); + REQUIRE(context.succeeded == true); + } + } - return new_file_name; + // Check if all extension restart threads succeeded. + if (test_control_info.extension_restart_enabled) { + for (const auto& context : extension_restart_thread_context_table) { + if (!context.succeeded) { + LOG_ERROR("FATAL ERROR: Extension restart thread failed. 
Extension: {},", context.extension_name); + REQUIRE(context.succeeded == true); + } + } + } } static void @@ -658,7 +789,9 @@ _mt_prog_load_stress_test(ebpf_execution_type_t program_type, const test_control std::vector<std::thread> test_thread_table(total_threads); // Another table for the 'extension restart' threads (1 thread per program). - std::vector<std::thread> extension_restart_thread_table{}; + std::vector<std::string> extension_names; + std::vector<std::thread> extension_restart_thread_table; + std::vector<thread_context> extension_restart_thread_context_table; // An incrementing 'compartment Id' to ensure that _each_ 'Attacher' thread gets a unique compartment id. uint32_t compartment_id{1}; @@ -685,8 +818,7 @@ _mt_prog_load_stress_test(ebpf_execution_type_t program_type, const test_control if (test_control_info.use_unique_native_programs && context_entry.role == thread_role_type::CREATOR) { // Create unique native programs for 'creator' threads only. - context_entry.file_name = - _make_unique_file_copy(program_attribs.native_file_name, (compartment_id - 1)); + context_entry.file_name = _make_unique_file_copy(program_attribs.native_file_name); } else { // Use the same file name for all 'creator' threads @@ -708,27 +840,24 @@ _mt_prog_load_stress_test(ebpf_execution_type_t program_type, const test_control } // If requested, start the 'extension stop-and-restart' thread for extension for this program type. - if (test_control_info.extension_restart_enabled) { - auto restart_thread = _start_extension_restart_thread( - std::ref(program_attribs.extension_name), - test_control_info.extension_restart_delay_ms, - test_control_info.duration_minutes); - extension_restart_thread_table.push_back(std::move(restart_thread)); - } - } - - // Wait for threads to terminate. 
- LOG_INFO("waiting on {} test threads...", test_thread_table.size()); - for (auto& t : test_thread_table) { - t.join(); + extension_names.push_back(program_attribs.extension_name); } if (test_control_info.extension_restart_enabled) { - LOG_INFO("waiting on {} extension restart threads...", extension_restart_thread_table.size()); - for (auto& t : extension_restart_thread_table) { - t.join(); - } + configure_extension_restart( + test_control_info, + extension_names, + extension_restart_thread_table, + extension_restart_thread_context_table, + object_table); } + + wait_and_verify_test_threads( + test_control_info, + test_thread_table, + thread_context_table, + extension_restart_thread_table, + extension_restart_thread_context_table); } enum class program_map_usage : uint32_t @@ -738,10 +867,12 @@ enum class program_map_usage : uint32_t }; static std::pair<bpf_object_ptr, fd_t> -_load_attach_program(const std::string& file_name, enum bpf_attach_type attach_type, uint32_t thread_index) +_load_attach_program(thread_context& context, enum bpf_attach_type attach_type) { bpf_object_ptr object_ptr; bpf_object* object_raw_ptr = nullptr; + const std::string& file_name = context.file_name; + const uint32_t thread_index = context.thread_index; // Get the 'object' ptr for the program associated with this thread. object_raw_ptr = bpf_object__open(file_name.c_str()); @@ -752,7 +883,8 @@ _load_attach_program(const std::string& file_name, enum bpf_attach_type attach_t thread_index, file_name.c_str(), errno); - REQUIRE(object_raw_ptr != nullptr); + context.succeeded = false; + return {}; } LOG_VERBOSE("{}({}) Opened file:{}", __func__, thread_index, file_name.c_str()); @@ -760,12 +892,14 @@ _load_attach_program(const std::string& file_name, enum bpf_attach_type attach_t auto result = bpf_object__load(object_raw_ptr); if (result != 0) { LOG_ERROR( - "{}({}) FATAL ERROR: bpf_object__load({}) failed. errno:{}", + "{}({}) FATAL ERROR: bpf_object__load({}) failed. 
result:{}, errno:{}", __func__, thread_index, file_name.c_str(), + result, errno); - REQUIRE(result == 0); + context.succeeded = false; + return {}; } object_ptr.reset(object_raw_ptr); LOG_VERBOSE("{}({}) loaded file:{}", __func__, thread_index, file_name.c_str()); @@ -779,7 +913,8 @@ _load_attach_program(const std::string& file_name, enum bpf_attach_type attach_t thread_index, file_name.c_str(), errno); - REQUIRE(program != nullptr); + context.succeeded = false; + return {}; } LOG_VERBOSE( "{}({}) Found program object for program:{}, file_name:{}", @@ -798,7 +933,8 @@ _load_attach_program(const std::string& file_name, enum bpf_attach_type attach_t file_name.c_str(), program->program_name, errno); - REQUIRE(program_fd >= 0); + context.succeeded = false; + return {}; } LOG_VERBOSE( "{}({}) Opened fd:{}, for program:{}, file_name:{}", @@ -819,7 +955,8 @@ _load_attach_program(const std::string& file_name, enum bpf_attach_type attach_t file_name.c_str(), program->program_name, errno); - REQUIRE(result == 0); + context.succeeded = false; + return {}; } LOG_VERBOSE( "{}({}) Attached program:{}, file_name:{}", __func__, thread_index, program->program_name, file_name.c_str()); @@ -832,7 +969,11 @@ _prep_program(thread_context& context, program_map_usage map_usage) { enum bpf_attach_type attach_type = context.role == thread_role_type::MONITOR_IPV4 ? BPF_CGROUP_INET4_CONNECT : BPF_CGROUP_INET6_CONNECT; - auto [program_object, program_fd] = _load_attach_program(context.file_name, attach_type, context.thread_index); + auto [program_object, program_fd] = _load_attach_program(context, attach_type); + if (context.succeeded == false) { + LOG_ERROR("{}({}) - FATAL ERROR: _load_attach_program() failed.", __func__, context.thread_index); + exit(-1); + } // Stash the object pointer as we'll need it at 'close' time. 
auto& entry = context.object_table[context.thread_index]; @@ -840,10 +981,9 @@ _prep_program(thread_context& context, program_map_usage map_usage) context.program_fd = program_fd; if (map_usage == program_map_usage::USE_MAP) { - // Get the map fd for the map for this program. - auto map_fd = bpf_object__find_map_fd_by_name(entry.object.get(), context.map_name.c_str()); - if (map_fd < 0) { + context.map_fd = bpf_object__find_map_fd_by_name(entry.object.get(), context.map_name.c_str()); + if (context.map_fd < 0) { LOG_ERROR( "{}({}) FATAL ERROR: bpf_object__find_map_fd_by_name({}) failed. file_name:{}, errno:{}", __func__, @@ -851,16 +991,16 @@ _prep_program(thread_context& context, program_map_usage map_usage) context.map_name.c_str(), context.file_name.c_str(), errno); - REQUIRE(map_fd >= 0); + context.succeeded = false; + exit(-1); } LOG_VERBOSE( "{}({}) Opened fd:{} for map:{}, file_name:{}", __func__, context.thread_index, - map_fd, + context.map_fd, context.map_name.c_str(), context.file_name.c_str()); - context.map_fd = map_fd; } } @@ -868,6 +1008,10 @@ void _invoke_test_thread_function(thread_context& context) { _prep_program(context, program_map_usage::USE_MAP); + if (context.succeeded == false) { + LOG_ERROR("{}({}) - FATAL ERROR: _prep_program() failed.", __func__, context.thread_index); + exit(-1); + } SOCKET socket_handle; SOCKADDR_STORAGE remote_endpoint{}; @@ -878,7 +1022,27 @@ _invoke_test_thread_function(thread_context& context) socket_handle = socket(AF_INET6, SOCK_STREAM, IPPROTO_TCP); remote_endpoint.ss_family = AF_INET6; } - REQUIRE(socket_handle != INVALID_SOCKET); + if (socket_handle == INVALID_SOCKET) { + LOG_ERROR("{}({}) - FATAL ERROR: socket() failed. 
errno:{}", __func__, context.thread_index, WSAGetLastError()); + context.succeeded = false; + exit(-1); + } + + // Set the timeout for connect attempts + timeval timeout; + timeout.tv_sec = 5; // 5 seconds + timeout.tv_usec = 0; + if (setsockopt(socket_handle, SOL_SOCKET, SO_RCVTIMEO, (const char*)&timeout, sizeof(timeout)) < 0) { + LOG_ERROR("{}({}) - ERROR: setsockopt() failed. errno:{}", __func__, context.thread_index, WSAGetLastError()); + context.succeeded = false; + exit(-1); + } + if (setsockopt(socket_handle, SOL_SOCKET, SO_SNDTIMEO, (const char*)&timeout, sizeof(timeout)) < 0) { + LOG_ERROR("{}({}) - ERROR: setsockopt() failed. errno:{}", __func__, context.thread_index, WSAGetLastError()); + context.succeeded = false; + exit(-1); + } + INETADDR_SETLOOPBACK(reinterpret_cast<PSOCKADDR>(&remote_endpoint)); constexpr uint16_t remote_port = SOCKET_TEST_PORT; (reinterpret_cast<PSOCKADDR_IN>(&remote_endpoint))->sin_port = htons(remote_port); @@ -892,13 +1056,24 @@ _invoke_test_thread_function(thread_context& context) // attempts as the program will not be invoked for connect attempts made while the extension is restarting. using sc = std::chrono::steady_clock; auto endtime = sc::now() + std::chrono::minutes(context.duration_minutes); + bool first_map_lookup = true; while (sc::now() < endtime) { uint16_t key = remote_port; uint64_t start_count = 0; + // Map lookup before the program invocation may fail if the program has not inserted the map entry yet. auto result = bpf_map_lookup_elem(context.map_fd, &key, &start_count); - if (start_count) { - REQUIRE(result == 0); + if (first_map_lookup) { + first_map_lookup = false; + } else if (result != 0) { + LOG_ERROR( + "{}({}) - FATAL ERROR: bpf_map_lookup_elem() failed for fd {} before connect. 
errno:{}", + __func__, + context.thread_index, + context.map_fd, + errno); + context.succeeded = false; + exit(-1); } constexpr uint32_t BURST_SIZE = 8192; @@ -909,14 +1084,36 @@ _invoke_test_thread_function(thread_context& context) socket_handle, reinterpret_cast<SOCKADDR*>(&remote_endpoint), static_cast<int>(sizeof(remote_endpoint))); + + if (sc::now() >= endtime) { + break; + } } uint64_t end_count = 0; result = bpf_map_lookup_elem(context.map_fd, &key, &end_count); - REQUIRE(result == 0); + if (result != 0) { + LOG_ERROR( + "{}({}) - FATAL ERROR: bpf_map_lookup_elem() failed for fd {} after connect. errno:{}", + __func__, + context.thread_index, + context.map_fd, + errno); + context.succeeded = false; + exit(-1); + } LOG_VERBOSE( "{}({}) connect start_count:{}, end_count:{}", __func__, context.thread_index, start_count, end_count); - REQUIRE(end_count > start_count); + if (end_count <= start_count) { + LOG_ERROR( + "{}({}) - FATAL ERROR: connect count mismatched. start_count:{}, end_count:{}", + __func__, + context.thread_index, + start_count, + end_count); + context.succeeded = false; + exit(-1); + } start_count = end_count; } } @@ -945,12 +1142,11 @@ _mt_invoke_prog_stress_test(ebpf_execution_type_t program_type, const test_contr std::vector<std::thread> test_thread_table(total_threads); std::vector<std::pair<std::string, std::string>> program_file_map_names = { - {{"cgroup_count_connect4.sys"}, {"connect4_count_map"}}, - {{"cgroup_count_connect6.sys"}, {"connect6_count_map"}}}; + {{_make_unique_file_copy("cgroup_count_connect4.sys")}, {"connect4_count_map"}}, + {{_make_unique_file_copy("cgroup_count_connect6.sys")}, {"connect6_count_map"}}}; ASSERT(program_file_map_names.size() == MAX_TCP_CONNECT_PROGRAMS); for (uint32_t i = 0; i < total_threads; i++) { - // First, prepare the context for this thread. 
auto& context_entry = thread_context_table[i]; auto& [file_name, map_name] = program_file_map_names[i]; @@ -968,29 +1164,25 @@ _mt_invoke_prog_stress_test(ebpf_execution_type_t program_type, const test_contr thread_entry = std::move(std::thread(_invoke_test_thread_function, std::ref(context_entry))); } - // Another table for the 'extension restart' threads. - std::vector<std::thread> extension_restart_thread_table{}; - // If requested, start the 'extension stop-and-restart' thread for extension for this program type. - std::string extension_name = {"netebpfext"}; + std::vector<std::string> extension_names = {"netebpfext"}; + std::vector<std::thread> extension_restart_thread_table; + std::vector<thread_context> extension_restart_thread_context_table; if (test_control_info.extension_restart_enabled) { - auto restart_thread = _start_extension_restart_thread( - std::ref(extension_name), test_control_info.extension_restart_delay_ms, test_control_info.duration_minutes); - extension_restart_thread_table.push_back(std::move(restart_thread)); + configure_extension_restart( + test_control_info, + extension_names, + extension_restart_thread_table, + extension_restart_thread_context_table, + object_table); } - // Wait for threads to terminate. 
- LOG_INFO("waiting on {} test threads...", test_thread_table.size()); - for (auto& t : test_thread_table) { - t.join(); - } - - if (test_control_info.extension_restart_enabled) { - LOG_INFO("waiting on {} extension restart threads...", extension_restart_thread_table.size()); - for (auto& t : extension_restart_thread_table) { - t.join(); - } - } + wait_and_verify_test_threads( + test_control_info, + test_thread_table, + thread_context_table, + extension_restart_thread_table, + extension_restart_thread_context_table); } static void @@ -1006,7 +1198,11 @@ _invoke_mt_sockaddr_thread_function(thread_context& context) socket_handle = socket(AF_INET6, SOCK_STREAM, IPPROTO_TCP); remote_endpoint.ss_family = AF_INET6; } - REQUIRE(socket_handle != INVALID_SOCKET); + if (socket_handle == INVALID_SOCKET) { + LOG_ERROR("{}({}) - FATAL ERROR: socket() failed. errno:{}", __func__, context.thread_index, WSAGetLastError()); + context.succeeded = false; + exit(-1); + } INETADDR_SETLOOPBACK(reinterpret_cast<PSOCKADDR>(&remote_endpoint)); uint16_t remote_port = SOCKET_TEST_PORT + static_cast<uint16_t>(context.thread_index); (reinterpret_cast<PSOCKADDR_IN>(&remote_endpoint))->sin_port = htons(remote_port); @@ -1042,10 +1238,13 @@ _mt_sockaddr_invoke_program_test(const test_control_info& test_control_info) auto error = WSAStartup(MAKEWORD(2, 2), &data); REQUIRE(error == 0); - auto [program_object, _] = _load_attach_program({"cgroup_mt_connect6.sys"}, BPF_CGROUP_INET6_CONNECT, 0); - - // Not used, needed for thread_context initialization. 
std::vector<object_table_entry> dummy_table(1); + thread_context program_load_context = { + {}, {}, false, {}, thread_role_type::ROLE_NOT_SET, 0, 0, 0, false, 0, 0, dummy_table}; + program_load_context.file_name = _make_unique_file_copy("cgroup_mt_connect6.sys"); + program_load_context.thread_index = 0; + auto [program_object, _] = _load_attach_program(program_load_context, BPF_CGROUP_INET6_CONNECT); + REQUIRE(program_load_context.succeeded == true); size_t total_threads = test_control_info.threads_count; std::vector<thread_context> thread_context_table( @@ -1067,28 +1266,25 @@ _mt_sockaddr_invoke_program_test(const test_control_info& test_control_info) } // Another table for the 'extension restart' threads. + std::vector<std::string> extension_names = {"netebpfext"}; std::vector<std::thread> extension_restart_thread_table{}; + std::vector<thread_context> extension_restart_thread_context_table{}; - // If requested, start the 'extension stop-and-restart' thread for extension for this program type. - std::string extension_name = {"netebpfext"}; if (test_control_info.extension_restart_enabled) { - auto restart_thread = _start_extension_restart_thread( - std::ref(extension_name), test_control_info.extension_restart_delay_ms, test_control_info.duration_minutes); - extension_restart_thread_table.push_back(std::move(restart_thread)); + configure_extension_restart( + test_control_info, + extension_names, + extension_restart_thread_table, + extension_restart_thread_context_table, + dummy_table); } - // Wait for threads to terminate. 
- LOG_INFO("waiting on {} test threads...", test_thread_table.size()); - for (auto& t : test_thread_table) { - t.join(); - } - - if (test_control_info.extension_restart_enabled) { - LOG_INFO("waiting on {} extension restart threads...", extension_restart_thread_table.size()); - for (auto& t : extension_restart_thread_table) { - t.join(); - } - } + wait_and_verify_test_threads( + test_control_info, + test_thread_table, + thread_context_table, + extension_restart_thread_table, + extension_restart_thread_context_table); } static void @@ -1187,7 +1383,11 @@ _invoke_mt_bindmonitor_tail_call_thread_function(thread_context& context) for (uint32_t i = 0; i < BURST_SIZE; i++) { // Create a socket. socket_handle = WSASocket(remote_endpoint.ss_family, SOCK_STREAM, IPPROTO_TCP, nullptr, 0, 0); - REQUIRE(socket_handle != INVALID_SOCKET); + if (socket_handle == INVALID_SOCKET) { + LOG_ERROR("Thread[{}] WSASocket() failed. errno:{}", context.thread_index, WSAGetLastError()); + context.succeeded = false; + exit(-1); + } INETADDR_SETANY(reinterpret_cast<PSOCKADDR>(&remote_endpoint)); SS_PORT(&remote_endpoint) = htons(remote_port); @@ -1204,7 +1404,8 @@ _invoke_mt_bindmonitor_tail_call_thread_function(thread_context& context) WSAGetLastError(), remote_port); closesocket(socket_handle); - REQUIRE(result == 0); + context.succeeded = false; + exit(-1); } // Bind the socket. 
@@ -1218,7 +1419,8 @@ _invoke_mt_bindmonitor_tail_call_thread_function(thread_context& context) WSAGetLastError(), remote_port); closesocket(socket_handle); - REQUIRE(result == 0); + context.succeeded = false; + exit(-1); } LOG_VERBOSE("Thread[{}] bind success to port:{}", context.thread_index, remote_port); @@ -1229,16 +1431,14 @@ _invoke_mt_bindmonitor_tail_call_thread_function(thread_context& context) } static std::pair<bpf_object_ptr, fd_t> -_load_attach_tail_program( - const std::string& file_name, - uint32_t thread_index, - ebpf_attach_type_t attach_type, - const std::string program_name, - bpf_prog_type program_type) +_load_attach_tail_program(thread_context& context, ebpf_attach_type_t attach_type, bpf_prog_type program_type) { bpf_object_ptr object_ptr; bpf_object* object_raw_ptr = nullptr; bpf_link* link = nullptr; + const std::string& file_name = context.file_name; + const std::string& program_name = context.program_name; + const uint32_t thread_index = context.thread_index; // Get the 'object' ptr for the program associated with this thread. 
object_raw_ptr = bpf_object__open(file_name.c_str()); @@ -1249,7 +1449,8 @@ _load_attach_tail_program( thread_index, file_name.c_str(), errno); - REQUIRE(object_raw_ptr != nullptr); + context.succeeded = false; + exit(-1); } LOG_VERBOSE("{}({}) Opened file:{}", __func__, thread_index, file_name.c_str()); @@ -1262,7 +1463,8 @@ _load_attach_tail_program( thread_index, file_name.c_str(), errno); - REQUIRE(result == 0); + context.succeeded = false; + exit(-1); } object_ptr.reset(object_raw_ptr); LOG_VERBOSE("{}({}) loaded file:{}", __func__, thread_index, file_name.c_str()); @@ -1276,7 +1478,8 @@ _load_attach_tail_program( thread_index, file_name.c_str(), errno); - REQUIRE(program != nullptr); + context.succeeded = false; + exit(-1); } LOG_VERBOSE( "{}({}) Found program object for program:{}, file_name:{}", @@ -1298,7 +1501,8 @@ _load_attach_tail_program( file_name.c_str(), program->program_name, errno); - REQUIRE(program_fd >= 0); + context.succeeded = false; + exit(-1); } LOG_VERBOSE( "{}({}) Opened fd:{}, for program:{}, file_name:{}", @@ -1317,7 +1521,8 @@ _load_attach_tail_program( file_name.c_str(), program->program_name, errno); - REQUIRE(result == ERROR_SUCCESS); + context.succeeded = false; + exit(-1); } LOG_VERBOSE( "{}({}) Attached program:{}, file_name:{}", __func__, thread_index, program->program_name, file_name.c_str()); @@ -1332,17 +1537,20 @@ _mt_bindmonitor_tail_call_invoke_program_test(const test_control_info& test_cont auto error = WSAStartup(MAKEWORD(2, 2), &data); REQUIRE(error == 0); - std::string file_name = "bindmonitor_mt_tailcall.sys"; - std::string map_name = "bind_tail_call_map"; - std::string program_name = "BindMonitor_Caller"; - bpf_prog_type program_type = BPF_PROG_TYPE_BIND; - // Load the program. 
+ std::vector<object_table_entry> dummy_table(1); + thread_context program_load_context = { + {}, {}, false, {}, thread_role_type::ROLE_NOT_SET, 0, 0, 0, false, 0, 0, dummy_table}; + program_load_context.program_name = "BindMonitor_Caller"; + program_load_context.file_name = _make_unique_file_copy("bindmonitor_mt_tailcall.sys"); + program_load_context.map_name = "bind_tail_call_map"; + program_load_context.thread_index = 0; auto [program_object, _] = - _load_attach_tail_program(file_name, 0, EBPF_ATTACH_TYPE_BIND, program_name, program_type); + _load_attach_tail_program(program_load_context, EBPF_ATTACH_TYPE_BIND, BPF_PROG_TYPE_BIND); + REQUIRE(program_load_context.succeeded == true); // Set up the tail call programs. - _set_up_tailcall_program(program_object.get(), map_name); + _set_up_tailcall_program(program_object.get(), program_load_context.map_name); // Needed for thread_context initialization. constexpr uint32_t MAX_BIND_PROGRAM = 1; @@ -1379,34 +1587,30 @@ _mt_bindmonitor_tail_call_invoke_program_test(const test_control_info& test_cont std::move(std::thread(_invoke_mt_bindmonitor_tail_call_thread_function, std::ref(context_entry))); } - // Another table for the 'extension restart' threads. - std::vector<std::thread> extension_restart_thread_table{}; - // If requested, start the 'extension stop-and-restart' thread for extension for this program type. 
- std::string extension_name = {"netebpfext"}; + std::vector<std::string> extension_names = {"netebpfext"}; + std::vector<std::thread> extension_restart_thread_table; + std::vector<thread_context> extension_restart_thread_context_table; if (test_control_info.extension_restart_enabled) { - auto restart_thread = _start_extension_restart_thread( - std::ref(extension_name), test_control_info.extension_restart_delay_ms, test_control_info.duration_minutes); - extension_restart_thread_table.push_back(std::move(restart_thread)); + configure_extension_restart( + test_control_info, + extension_names, + extension_restart_thread_table, + extension_restart_thread_context_table, + object_table); } - // Wait for threads to terminate. - LOG_INFO("waiting on {} test threads...", test_thread_table.size()); - for (auto& t : test_thread_table) { - t.join(); - } - - if (test_control_info.extension_restart_enabled) { - LOG_INFO("waiting on {} extension restart threads...", extension_restart_thread_table.size()); - for (auto& t : extension_restart_thread_table) { - t.join(); - } - } + wait_and_verify_test_threads( + test_control_info, + test_thread_table, + thread_context_table, + extension_restart_thread_table, + extension_restart_thread_context_table); // Clean up Winsock. WSACleanup(); } -#endif +#endif // ENABLE_TAIL_CALL_STRESS_TEST TEST_CASE("jit_load_attach_detach_unload_random_v4_test", "[jit_mt_stress_test]") { @@ -1494,9 +1698,6 @@ TEST_CASE("native_invoke_v4_v6_programs_restart_extension_test", "[native_mt_str LOG_INFO("\nStarting test *** native_invoke_v4_v6_programs_restart_extension_test ***"); test_control_info local_test_control_info = _global_test_control_info; - // TODO: Bring this test in compliance with GH Issue #3223. Until then, disable extension restart for this test. - local_test_control_info.extension_restart_enabled = false; - // This test needs only 2 threads (one per program). 
local_test_control_info.threads_count = 2; @@ -1550,4 +1751,4 @@ TEST_CASE("bindmonitor_tail_call_invoke_program_test", "[native_mt_stress_test]" _print_test_control_info(local_test_control_info); _mt_bindmonitor_tail_call_invoke_program_test(local_test_control_info); } -#endif +#endif // ENABLE_TAIL_CALL_STRESS_TEST
Workflow failed - km_mt_stress_tests_restart_extension [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9336770229) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/05a71895566eac26a9032aac6426f58f6fa5b6de) Test name - `km_mt_stress_tests_restart_extension` Workflow failed - km_mt_stress_tests [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12722566338) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d3fd4407675b77993b3e4a2a7c96f983432d532e) Test name - `km_mt_stress_tests`
[Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9234035167) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/10fdbbce29cb3e4d29b264e7cb4813354264de7f) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9544157177) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/197910d57b455db8756948f12eb40907b98d8e53) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9624031089) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8440c60ced5349482ab977edbc767722f721c893) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9692957249) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/4667783ba1ac04eec04abc4314490bb221cd3fd8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9730491421) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/4667783ba1ac04eec04abc4314490bb221cd3fd8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9817788110) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c3eac3776f77666c681003eda6a4e3051717f7b6) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10004750424) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/f46db2bbc07827b20de8ccba33116600bb03f525) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10026773058) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d7942ba3dca4361da0cf118e3d7657ee8268e22a) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10055160667) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/2448139787f618901fa64b614d30a27aabb06cae) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10158568022) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/6af442038c1c84a93068134d9aac39b0d8593518) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10176468941) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/6af442038c1c84a93068134d9aac39b0d8593518) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10212213020) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/0b4142d9d674c0dc1af214aeac102e2696d8d302) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10522233286) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/6d6aad34514218c9d70aac8b4f93edd37c4735d8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10716420571) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c73a0864dd8dde9fdec7eee329ba561fc41f8681) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10734806358) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c73a0864dd8dde9fdec7eee329ba561fc41f8681) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10749865733) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/99c4e48856d53aaee6c10172ca0a5239fb306d7c) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10758271085) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/0b23bae35d86c04a28fbb7de159856d7aea768d0) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10769153812) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/be1e786c1c1256bfdb62f638cf6e2c074e69bed7) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10788384767) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/6e8f47a7c00752470e27921cfb576978b3986de4) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10807596842) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/6e8f47a7c00752470e27921cfb576978b3986de4) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10826764830) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/0396def741cbe87a8edbdb14df1c1deb9ac015ed) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10845456054) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/0396def741cbe87a8edbdb14df1c1deb9ac015ed) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10860833120) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/a41953dc623dd46d67c5863ea0de447410b35bb6) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10869372916) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/a41953dc623dd46d67c5863ea0de447410b35bb6) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10879892129) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/a41953dc623dd46d67c5863ea0de447410b35bb6) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10899046389) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/fda10838a4da5f25daa5b433f8b0dc003cf9a4f8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10918113767) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/fda10838a4da5f25daa5b433f8b0dc003cf9a4f8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10936987992) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/edcb28cd62bfee7d78df666bf0af8eb550d301fa) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10955604827) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/6ad1063766e58ced089f658f9e150a78e32d9fb1) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10970978957) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/462b8e3cab2390eb55aec81134c13ff87eb54bf0) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10979511391) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/462b8e3cab2390eb55aec81134c13ff87eb54bf0) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10990179768) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/adb31d9d77b997d98cd7db97c5c3d56ed0764e30) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11009659346) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d712e97fd7baf62b13b6270fbdf3a78c69038d93) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11028940869) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d712e97fd7baf62b13b6270fbdf3a78c69038d93) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11048183320) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d712e97fd7baf62b13b6270fbdf3a78c69038d93) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11066946946) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/292578ef73488c0ddae393bf0e4ae38a63988395) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11082353101) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/bc299ebb07b8fcfd913226873509cc44192bb1a4) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11090986777) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/bc299ebb07b8fcfd913226873509cc44192bb1a4) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11102026053) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/e1f4ce337c9e9cd0c064b6b84cedeaa623f58089) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11121239251) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/ee6ebaf5768373cf3d27dabb23b602232d5bdfc0) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11140136719) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8743c34ab36aba305cd25603a1f3db0e3c65cc6e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11158259405) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/735ec5277f22b6e17613500ee35f5068728df1b8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11176355208) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/735ec5277f22b6e17613500ee35f5068728df1b8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11191674757) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8250e8e88f2ad662471262713d9e8718876be423) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11200202603) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8250e8e88f2ad662471262713d9e8718876be423) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11211304882) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8250e8e88f2ad662471262713d9e8718876be423) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11231241666) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/1c383d41054778a41fb492dd2b89bd8dc7b71502) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11250828887) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/ba3f95a18295f71bc2b512d9df960a4afc580417) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11269991969) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8fb3a7e307519ac172eb2b2ca72e00dfa4593534) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11288708479) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/811c06098341961c594f872c91bf37ef78609f5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11304062344) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/811c06098341961c594f872c91bf37ef78609f5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11312703982) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/811c06098341961c594f872c91bf37ef78609f5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11323478330) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/811c06098341961c594f872c91bf37ef78609f5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11341903145) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/55b387da55c03ee600fcb8789510df037b413fdd) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11361536266) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/55b387da55c03ee600fcb8789510df037b413fdd) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11380917936) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c7b9f105085841202a28bc191409589b22caf22e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11399951513) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c7b9f105085841202a28bc191409589b22caf22e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11415926253) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c7b9f105085841202a28bc191409589b22caf22e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11424693827) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c7b9f105085841202a28bc191409589b22caf22e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11435900401) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/14df17e1ce84108560619fdec944fd97c5415342) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11455945515) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/709cc9d8b50b84651776e4ada2c9f64b08e247ec) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11475730017) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/709cc9d8b50b84651776e4ada2c9f64b08e247ec) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11495210870) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/e5b27c3d12cb903607871d221c2c2f30bf59d80f) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11514385668) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/e5b27c3d12cb903607871d221c2c2f30bf59d80f) Test name - `km_mt_stress_tests_restart_extension`
[Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9234035167) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/10fdbbce29cb3e4d29b264e7cb4813354264de7f) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9544157177) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/197910d57b455db8756948f12eb40907b98d8e53) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9624031089) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8440c60ced5349482ab977edbc767722f721c893) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9692957249) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/4667783ba1ac04eec04abc4314490bb221cd3fd8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9730491421) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/4667783ba1ac04eec04abc4314490bb221cd3fd8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/9817788110) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c3eac3776f77666c681003eda6a4e3051717f7b6) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10004750424) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/f46db2bbc07827b20de8ccba33116600bb03f525) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10026773058) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d7942ba3dca4361da0cf118e3d7657ee8268e22a) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10055160667) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/2448139787f618901fa64b614d30a27aabb06cae) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10158568022) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/6af442038c1c84a93068134d9aac39b0d8593518) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10176468941) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/6af442038c1c84a93068134d9aac39b0d8593518) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10212213020) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/0b4142d9d674c0dc1af214aeac102e2696d8d302) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10522233286) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/6d6aad34514218c9d70aac8b4f93edd37c4735d8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10716420571) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c73a0864dd8dde9fdec7eee329ba561fc41f8681) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10734806358) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c73a0864dd8dde9fdec7eee329ba561fc41f8681) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10749865733) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/99c4e48856d53aaee6c10172ca0a5239fb306d7c) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10758271085) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/0b23bae35d86c04a28fbb7de159856d7aea768d0) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10769153812) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/be1e786c1c1256bfdb62f638cf6e2c074e69bed7) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10788384767) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/6e8f47a7c00752470e27921cfb576978b3986de4) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10807596842) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/6e8f47a7c00752470e27921cfb576978b3986de4) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10826764830) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/0396def741cbe87a8edbdb14df1c1deb9ac015ed) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10845456054) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/0396def741cbe87a8edbdb14df1c1deb9ac015ed) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10860833120) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/a41953dc623dd46d67c5863ea0de447410b35bb6) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10869372916) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/a41953dc623dd46d67c5863ea0de447410b35bb6) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10879892129) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/a41953dc623dd46d67c5863ea0de447410b35bb6) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10899046389) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/fda10838a4da5f25daa5b433f8b0dc003cf9a4f8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10918113767) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/fda10838a4da5f25daa5b433f8b0dc003cf9a4f8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10936987992) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/edcb28cd62bfee7d78df666bf0af8eb550d301fa) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10955604827) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/6ad1063766e58ced089f658f9e150a78e32d9fb1) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10970978957) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/462b8e3cab2390eb55aec81134c13ff87eb54bf0) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10979511391) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/462b8e3cab2390eb55aec81134c13ff87eb54bf0) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/10990179768) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/adb31d9d77b997d98cd7db97c5c3d56ed0764e30) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11009659346) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d712e97fd7baf62b13b6270fbdf3a78c69038d93) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11028940869) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d712e97fd7baf62b13b6270fbdf3a78c69038d93) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11048183320) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d712e97fd7baf62b13b6270fbdf3a78c69038d93) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11066946946) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/292578ef73488c0ddae393bf0e4ae38a63988395) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11082353101) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/bc299ebb07b8fcfd913226873509cc44192bb1a4) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11090986777) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/bc299ebb07b8fcfd913226873509cc44192bb1a4) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11102026053) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/e1f4ce337c9e9cd0c064b6b84cedeaa623f58089) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11121239251) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/ee6ebaf5768373cf3d27dabb23b602232d5bdfc0) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11140136719) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8743c34ab36aba305cd25603a1f3db0e3c65cc6e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11158259405) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/735ec5277f22b6e17613500ee35f5068728df1b8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11176355208) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/735ec5277f22b6e17613500ee35f5068728df1b8) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11191674757) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8250e8e88f2ad662471262713d9e8718876be423) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11200202603) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8250e8e88f2ad662471262713d9e8718876be423) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11211304882) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8250e8e88f2ad662471262713d9e8718876be423) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11231241666) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/1c383d41054778a41fb492dd2b89bd8dc7b71502) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11250828887) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/ba3f95a18295f71bc2b512d9df960a4afc580417) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11269991969) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8fb3a7e307519ac172eb2b2ca72e00dfa4593534) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11288708479) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/811c06098341961c594f872c91bf37ef78609f5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11304062344) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/811c06098341961c594f872c91bf37ef78609f5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11312703982) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/811c06098341961c594f872c91bf37ef78609f5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11323478330) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/811c06098341961c594f872c91bf37ef78609f5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11341903145) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/55b387da55c03ee600fcb8789510df037b413fdd) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11361536266) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/55b387da55c03ee600fcb8789510df037b413fdd) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11380917936) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c7b9f105085841202a28bc191409589b22caf22e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11399951513) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c7b9f105085841202a28bc191409589b22caf22e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11415926253) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c7b9f105085841202a28bc191409589b22caf22e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11424693827) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/c7b9f105085841202a28bc191409589b22caf22e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11435900401) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/14df17e1ce84108560619fdec944fd97c5415342) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11455945515) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/709cc9d8b50b84651776e4ada2c9f64b08e247ec) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11475730017) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/709cc9d8b50b84651776e4ada2c9f64b08e247ec) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11495210870) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/e5b27c3d12cb903607871d221c2c2f30bf59d80f) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/11514385668) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/e5b27c3d12cb903607871d221c2c2f30bf59d80f) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12722566338) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d3fd4407675b77993b3e4a2a7c96f983432d532e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12731758342) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/fad16491562d22ec15492f39dedc14dbed17454b) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12743022807) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/fad16491562d22ec15492f39dedc14dbed17454b) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12763493002) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/9d9003c39c3fd75be5225ac0fce30077d6bf0604) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12784034827) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d3329fd7040ed3b19a1585660ff84fcf35e81d4f) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12804602886) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d3329fd7040ed3b19a1585660ff84fcf35e81d4f) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12825096479) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/149cba7e873c4e683ae2a6885972a1e917485e6f) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12842294569) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/5f890b19ec605cf0507b19e2de0038e74b7fa463) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12851707536) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/24ec3199cc20e7b84d24a416c6f66cf95103c836) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12863603242) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/44f5de09ec0f3f7ad176c00a290c1cb7106cdd5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12882613770) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/44f5de09ec0f3f7ad176c00a290c1cb7106cdd5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12903845640) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/44f5de09ec0f3f7ad176c00a290c1cb7106cdd5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12924855190) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/44f5de09ec0f3f7ad176c00a290c1cb7106cdd5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12945695557) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/44f5de09ec0f3f7ad176c00a290c1cb7106cdd5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12963231412) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/44f5de09ec0f3f7ad176c00a290c1cb7106cdd5e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12972794634) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/95267a53b26c68a94145d1731e2a4c8b546034c3) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12984558378) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/95267a53b26c68a94145d1731e2a4c8b546034c3) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13006101893) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/07beca58c13b056abce9ba0c991f7b5d0d2d0270) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13027025842) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/07beca58c13b056abce9ba0c991f7b5d0d2d0270) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13048070161) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8efc93b9baa0f348f5799b7550bb3f8449e343d2) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13068786740) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/9edeb98a449e7965731f814f23b720da103e7077) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13086724202) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/9edeb98a449e7965731f814f23b720da103e7077) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13096965669) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/9edeb98a449e7965731f814f23b720da103e7077) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13109007739) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/9edeb98a449e7965731f814f23b720da103e7077) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13130963506) 
[Codebase](https://github.com/microsoft/ebpf-for-windows/tree/9510332b5d2e4f223cf93b1801dcda713954b19b) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13152871988) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/56662dbb2a6151cfd60e1eeafe01d8d4742b957e) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13174419242) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/30fdb1f81aa7d4eaceb26a72d72920aeac79bebb) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13195735600) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/29d9935ebb087bdb71c02ddf3023541d551f439d) Test name - `km_mt_stress_tests_restart_extension` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12731758342) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/fad16491562d22ec15492f39dedc14dbed17454b) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12743022807) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/fad16491562d22ec15492f39dedc14dbed17454b) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12763493002) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/9d9003c39c3fd75be5225ac0fce30077d6bf0604) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12784034827) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d3329fd7040ed3b19a1585660ff84fcf35e81d4f) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12804602886) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/d3329fd7040ed3b19a1585660ff84fcf35e81d4f) Test name - `km_mt_stress_tests` [Failed 
Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12825096479) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/149cba7e873c4e683ae2a6885972a1e917485e6f) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12842294569) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/5f890b19ec605cf0507b19e2de0038e74b7fa463) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12851707536) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/24ec3199cc20e7b84d24a416c6f66cf95103c836) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12863603242) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/44f5de09ec0f3f7ad176c00a290c1cb7106cdd5e) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12882613770) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/44f5de09ec0f3f7ad176c00a290c1cb7106cdd5e) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12903845640) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/44f5de09ec0f3f7ad176c00a290c1cb7106cdd5e) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12924855190) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/44f5de09ec0f3f7ad176c00a290c1cb7106cdd5e) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12945695557) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/44f5de09ec0f3f7ad176c00a290c1cb7106cdd5e) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12963231412) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/44f5de09ec0f3f7ad176c00a290c1cb7106cdd5e) Test name - `km_mt_stress_tests` [Failed 
Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12972794634) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/95267a53b26c68a94145d1731e2a4c8b546034c3) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/12984558378) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/95267a53b26c68a94145d1731e2a4c8b546034c3) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13006101893) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/07beca58c13b056abce9ba0c991f7b5d0d2d0270) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13027025842) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/07beca58c13b056abce9ba0c991f7b5d0d2d0270) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13048070161) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/8efc93b9baa0f348f5799b7550bb3f8449e343d2) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13068786740) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/9edeb98a449e7965731f814f23b720da103e7077) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13086724202) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/9edeb98a449e7965731f814f23b720da103e7077) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13096965669) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/9edeb98a449e7965731f814f23b720da103e7077) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13109007739) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/9edeb98a449e7965731f814f23b720da103e7077) Test name - `km_mt_stress_tests` [Failed 
Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13130963506) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/9510332b5d2e4f223cf93b1801dcda713954b19b) Test name - `km_mt_stress_tests` [Failed Run](https://github.com/microsoft/ebpf-for-windows/actions/runs/13152871988) [Codebase](https://github.com/microsoft/ebpf-for-windows/tree/56662dbb2a6151cfd60e1eeafe01d8d4742b957e) Test name - `km_mt_stress_tests`
[ "https://github.com/microsoft/ebpf-for-windows/commit/af0d832e4c1f4f98bcd9da90990bfa6dc31a83ae", "https://github.com/microsoft/ebpf-for-windows/commit/96433347ca595f762b80e14ec69c568204673d06", "https://github.com/microsoft/ebpf-for-windows/commit/af9445b434f9fed9c5bdda724e220d323361d7e8", "https://github.com...
2025-01-06T19:12:32Z
https://github.com/microsoft/ebpf-for-windows/tree/30fdb1f81aa7d4eaceb26a72d72920aeac79bebb
[ "nuget restore ebpf-for-windows.sln ; msbuild /m /p:Configuration=Debug /p:Platform=x64 /p:SolutionDir=\"C:\\testbed\\\\\" /p:SpectreMitigation=false /p:FuzzerLibs= tests\\unit\\test.vcxproj" ]
[ "Set-Location C:\\testbed\\x64\\Debug; .\\unit_tests.exe --reporter tap --durations yes *> C:\\testbed\\test-results\\catch2-tap.log" ]
[ "Get-Content C:\\testbed\\test-results\\catch2-tap.log -Raw" ]
def parser(log: str) -> dict[str, str]:
    """Parse a Catch2 TAP log into ``{test name: "pass" | "fail" | "skip"}``.

    Test names are built as ``"<context> :: <description> [#<num>]"``, where
    the context is the most recent non-noise ``# ...`` comment line and
    ``<num>`` is the TAP assertion number.  Wrapped description lines
    (continuations) are folded into the description.
    """
    import re

    results: dict[str, str] = {}
    # TAP result lines: allow an optional '-' separator and an optional
    # remainder (description and/or inline directives such as '# SKIP').
    tap_re = re.compile(r'^\s*(ok|not ok)\s+(\d+)(?:\s*(?:-\s*)?(.*))?$', re.IGNORECASE)
    # TAP context/comment lines ('# ...').
    context_re = re.compile(r'^\s*#\s*(.+?)\s*$')
    # Comment contexts that are reporter metadata, not section headers.
    noise_ctx_re = re.compile(r'(?i)^\s*(rng-seed\s*:|time\s*:)\s*')
    # Inline SKIP directive, case-insensitive, anywhere in the description.
    skip_dir_re = re.compile(r'(?i)(?:^|\s)#\s*skip\b')
    # TAP protocol framing: the plan ('1..N') and version ('TAP version 13')
    # lines.  Catch2 emits the plan *after* the last result, so without
    # treating these as boundaries the plan text would be folded into the
    # final test's name as a continuation line.
    frame_re = re.compile(r'(?i)^\s*(?:\d+\.\.\d+\b|TAP\s+version\b)')

    lines = log.splitlines()
    n = len(lines)
    i = 0
    current_context: str | None = None

    while i < n:
        line = lines[i].rstrip("\r\n")

        # Protocol framing: never a result or a description, skip outright.
        if frame_re.match(line):
            i += 1
            continue

        # Context update.
        mctx = context_re.match(line)
        if mctx:
            ctx = mctx.group(1).strip()
            if not noise_ctx_re.match(ctx):
                current_context = ctx
            i += 1
            continue

        mtap = tap_re.match(line)
        if not mtap:
            i += 1
            continue

        status_word, num, desc0 = mtap.groups()
        status_word = status_word.lower()
        num = (num or "").strip()
        desc_parts = []
        if desc0:
            desc_parts.append(desc0.strip())

        # Accumulate continuation lines that belong to this test description.
        j = i + 1
        while j < n:
            peek = lines[j].rstrip("\r\n")
            # Stop on the next TAP result, comment, framing, or empty line.
            if (tap_re.match(peek) or context_re.match(peek)
                    or frame_re.match(peek) or not peek.strip()):
                break
            desc_parts.append(peek.strip())
            j += 1
        # Advance the main loop past the consumed continuation lines.
        i = j

        full_desc = " ".join(p for p in desc_parts if p).strip()

        # Detect a SKIP directive anywhere in the accumulated description.
        is_skip = bool(skip_dir_re.search(full_desc))
        if is_skip:
            # Remove the skip annotation and any trailing text following it.
            full_desc = re.sub(r'(?i)\s*#\s*skip\b.*$', '', full_desc).strip()

        # Build the test name from context, description, and assertion number.
        parts = []
        if current_context:
            parts.append(current_context)
        parts.append(full_desc if full_desc else "assertion")
        test_name = " :: ".join(parts) + f" [#{num}]"

        if is_skip:
            results[test_name] = "skip"
        else:
            results[test_name] = "pass" if status_word == "ok" else "fail"

    return results
[ "map_crud_operations:BPF_MAP_TYPE_LRU_HASH :: ebpf_map_next_key( map.get(), sizeof(key), key == 0 ? nullptr : reint terpret_cast<const uint8_t*>(&previous_key), reinterpret_cast<uint8_t*>(&next_key y)) == EBPF_SUCCESS for: 0 == 0 [#26585]", "program :: addresses[1].address != 0 for: 140695549139024 (0x7ff63c3a585...
[ "map_crud_operations:BPF_MAP_TYPE_PERCPU_HASH :: *reinterpret_cast<uint64_t*>(value.data()) == key * key for: 219024 ( (0x35790) == 219024 (0x35790) [#13926]", "map_crud_operations:BPF_MAP_TYPE_PERCPU_ARRAY :: current_value == current_key * current_key for: 200704 (0x31000) == 2 200704 (0x31000) [#22803]", "map...
starryzhang/sweb.eval.win.microsoft_1776_ebpf-for-windows-4117
microsoft/STL
5444
microsoft__STL-5444
C++
[ "5291", "5435" ]
1e0317478eddc973e801bdacc10cee2df9943b82
diff --git a/stl/CMakeLists.txt b/stl/CMakeLists.txt index ac17fb0a55f..10d21b3173c 100644 --- a/stl/CMakeLists.txt +++ b/stl/CMakeLists.txt @@ -213,6 +213,7 @@ set(IMPLIB_SOURCES ${CMAKE_CURRENT_LIST_DIR}/src/locale0_implib.cpp ${CMAKE_CURRENT_LIST_DIR}/src/nothrow.cpp ${CMAKE_CURRENT_LIST_DIR}/src/print.cpp + ${CMAKE_CURRENT_LIST_DIR}/src/regex.cpp ${CMAKE_CURRENT_LIST_DIR}/src/sharedmutex.cpp ${CMAKE_CURRENT_LIST_DIR}/src/stacktrace.cpp ${CMAKE_CURRENT_LIST_DIR}/src/syserror_import_lib.cpp diff --git a/stl/inc/locale b/stl/inc/locale index cc08f4d4eee..7e75f4d7634 100644 --- a/stl/inc/locale +++ b/stl/inc/locale @@ -94,6 +94,9 @@ inline size_t __CRTDECL _LStrxfrm(_Out_writes_(_Last1 - _First1) _Post_readable_ } #endif // defined(_CRTBLD) +template <class _Elem> +class _Regex_traits; + _EXPORT_STD template <class _Elem> class collate : public locale::facet { // facet for ordering sequences of elements public: @@ -189,6 +192,8 @@ protected: private: _Locinfo::_Collvec _Coll; // used by _LStrcoll and _XStrxfrm + + friend _Regex_traits<_Elem>; }; #ifdef __clang__ diff --git a/stl/inc/regex b/stl/inc/regex index b63f7e83066..cdfefd0cddf 100644 --- a/stl/inc/regex +++ b/stl/inc/regex @@ -53,6 +53,17 @@ _STL_DISABLE_CLANG_WARNINGS #endif // ^^^ !defined(_DEBUG) ^^^ #endif // !defined(_ENHANCED_REGEX_VISUALIZER) +#if defined(_CPPRTTI) && !defined(_M_CEE_PURE) +extern "C" { +_STD size_t __stdcall __std_regex_transform_primary_char( + _Out_writes_(_Last1 - _First1) _Post_readable_size_(return) char* _First1, char* _Last1, + _In_reads_(_Last2 - _First2) const char* _First2, const char* _Last2, _In_opt_ const _Collvec*) noexcept; +_STD size_t __stdcall __std_regex_transform_primary_wchar_t( + _Out_writes_(_Last1 - _First1) _Post_readable_size_(return) wchar_t* _First1, wchar_t* _Last1, + _In_reads_(_Last2 - _First2) const wchar_t* _First2, const wchar_t* _Last2, _In_opt_ const _Collvec*) noexcept; +} // extern "C" +#endif // ^^^ defined(_CPPRTTI) && !defined(_M_CEE_PURE) 
^^^ + _STD_BEGIN enum _Meta_type : int { // meta character representations for parser @@ -267,6 +278,20 @@ struct _Regex_traits_base { // base of all regular expression traits using char_class_type = ctype_base::mask; }; +#if defined(_CPPRTTI) && !defined(_M_CEE_PURE) +inline size_t _Regex_transform_primary(_Out_writes_(_Last1 - _First1) _Post_readable_size_(return) char* _First1, + char* _Last1, _In_reads_(_Last2 - _First2) const char* _First2, const char* _Last2, + _In_opt_ const _Locinfo::_Collvec* _Vector) noexcept { + return __std_regex_transform_primary_char(_First1, _Last1, _First2, _Last2, _Vector); +} + +inline size_t _Regex_transform_primary(_Out_writes_(_Last1 - _First1) _Post_readable_size_(return) wchar_t* _First1, + wchar_t* _Last1, _In_reads_(_Last2 - _First2) const wchar_t* _First2, const wchar_t* _Last2, + _In_opt_ const _Locinfo::_Collvec* _Vector) noexcept { + return __std_regex_transform_primary_wchar_t(_First1, _Last1, _First2, _Last2, _Vector); +} +#endif // ^^^ defined(_CPPRTTI) && !defined(_M_CEE_PURE) ^^^ + template <class _Elem> class _Regex_traits : public _Regex_traits_base { // base class for regular expression traits public: @@ -312,13 +337,38 @@ public: string_type transform_primary(_FwdIt _First, _FwdIt _Last) const { // apply locale-specific case-insensitive transformation string_type _Res; - - if (_First != _Last) { // non-empty string, transform it - vector<_Elem> _Temp(_First, _Last); - - _Getctype()->tolower(_Temp.data(), _Temp.data() + _Temp.size()); - _Res = _Getcoll()->transform(_Temp.data(), _Temp.data() + _Temp.size()); +#if defined(_CPPRTTI) && !defined(_M_CEE_PURE) + if (_First != _Last) { + const collate<_Elem>* _Coll = _Getcoll(); + const auto& _Coll_type = typeid(*_Coll); + // TRANSITION, ABI: GH-5394: locale creates collate objects of type collate, not collate_byname. + // Depending on the resolution of LWG-2338, comparison to typeid(collate) might also become + // required by the standard. 
+ if (_Coll_type == typeid(collate_byname<_Elem>) || _Coll_type == typeid(collate<_Elem>)) { + // non-empty string with known collate facet, transform it + const string_type _Src(_First, _Last); + const auto _Src_first = _Src.data(); + const auto _Src_last = _Src_first + _Src.size(); + + size_t _Count = _Src.size(); + while (_Res.size() < _Count) { + _Res.resize(_Count); + _Count = _STD _Regex_transform_primary( + &_Res[0], &_Res[0] + _Count, _Src_first, _Src_last, &_Coll->_Coll); + + if (_Count == static_cast<size_t>(-1)) { + // return empty string in case of error + _Count = 0; + break; + } + } + _Res.resize(_Count); + } } +#else // ^^^ defined(_CPPRTTI) && !defined(_M_CEE_PURE) / !defined(_CPPRTTI) || defined(_M_CEE_PURE) vvv + (void) _First; + (void) _Last; +#endif // ^^^ !defined(_CPPRTTI) || defined(_M_CEE_PURE) ^^^ return _Res; } @@ -4211,26 +4261,30 @@ _Prs_ret _Parser<_FwdIt, _Elem, _RxTraits>::_Do_ex_class2( _Elem* const _Coll_elem_first = &_Coll_elem.front(); const _Elem* const _Coll_elem_last = _Coll_elem_first + _Size; + + if (_Size == 1 && _End_arg == _Meta_dot) { + // process single-element collating elements like individual characters + _Val = *_Coll_elem_first; + return _Prs_chr; + } + + if (_Flags & regex_constants::icase) { + for (auto _Current = _Coll_elem_first; _Current != _Coll_elem_last; ++_Current) { + *_Current = _Traits.translate_nocase(*_Current); + } + } else if (_Flags & regex_constants::collate) { + for (auto _Current = _Coll_elem_first; _Current != _Coll_elem_last; ++_Current) { + *_Current = _Traits.translate(*_Current); + } + } + if (_End_arg == _Meta_equal) { // process equivalence _Nfa._Add_equiv2(_Coll_elem_first, _Coll_elem_last); return _Prs_set; } else { // process collating element - if (_Size == 1) { - _Val = *_Coll_elem_first; - return _Prs_chr; - } // Character ranges with multi-character bounds cannot be represented in NFA nodes yet (see GH-5391). 
// Provisionally treat multi-character collating elements as character sets. - if (_Flags & regex_constants::icase) { - for (auto _Current = _Coll_elem_first; _Current != _Coll_elem_last; ++_Current) { - *_Current = _Traits.translate_nocase(*_Current); - } - } else if (_Flags & regex_constants::collate) { - for (auto _Current = _Coll_elem_first; _Current != _Coll_elem_last; ++_Current) { - *_Current = _Traits.translate(*_Current); - } - } _Nfa._Add_coll2(_Coll_elem_first, _Coll_elem_last); return _Prs_set; } diff --git a/stl/inc/yvals.h b/stl/inc/yvals.h index f6c6d4a2288..0ac04d6b4a4 100644 --- a/stl/inc/yvals.h +++ b/stl/inc/yvals.h @@ -3,7 +3,8 @@ // Copyright (c) Microsoft Corporation. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -// This header is used to compile the import library (via locale0_implib.cpp => locale0.cpp => xfacet => yvals.h). +// This header is used to compile the import library +// (via locale0_implib.cpp => locale0.cpp => xfacet => yvals.h and regex.cpp => awint.hpp => yvals.h). // MAJOR LIMITATIONS apply to what can be included here! 
// Before editing this file, read: /docs/import_library.md diff --git a/stl/msbuild/stl_base/stl.files.settings.targets b/stl/msbuild/stl_base/stl.files.settings.targets index 9b7db156955..0669a4c0a59 100644 --- a/stl/msbuild/stl_base/stl.files.settings.targets +++ b/stl/msbuild/stl_base/stl.files.settings.targets @@ -158,6 +158,7 @@ SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception $(CrtRoot)\github\stl\src\locale0_implib.cpp; $(CrtRoot)\github\stl\src\nothrow.cpp; $(CrtRoot)\github\stl\src\print.cpp; + $(CrtRoot)\github\stl\src\regex.cpp; $(CrtRoot)\github\stl\src\sharedmutex.cpp; $(CrtRoot)\github\stl\src\stacktrace.cpp; $(CrtRoot)\github\stl\src\syserror_import_lib.cpp; diff --git a/stl/src/awint.hpp b/stl/src/awint.hpp index 797801b7a98..da88c79bc7d 100644 --- a/stl/src/awint.hpp +++ b/stl/src/awint.hpp @@ -2,6 +2,11 @@ // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // Internal definitions for A&W Win32 wrapper routines. + +// This file is compiled into the import library (via regex.cpp => awint.hpp). +// MAJOR LIMITATIONS apply to what can be included here! +// Before editing this file, read: /docs/import_library.md + #pragma once #include <yvals.h> diff --git a/stl/src/regex.cpp b/stl/src/regex.cpp new file mode 100644 index 00000000000..6ecd7744cd7 --- /dev/null +++ b/stl/src/regex.cpp @@ -0,0 +1,128 @@ +// Copyright (c) Microsoft Corporation. +// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception + +// This file is compiled into the import library. +// MAJOR LIMITATIONS apply to what can be included here! 
+// Before editing this file, read: /docs/import_library.md + +#include <__msvc_xlocinfo_types.hpp> +#include <clocale> +#include <crtdefs.h> +#include <cstdlib> +#include <cstring> +#include <internal_shared.h> + +#include <Windows.h> + +#undef _ENFORCE_ONLY_CORE_HEADERS +#include "awint.hpp" + +extern "C" { + +// derived from xstrxfrm.cpp +size_t __stdcall __std_regex_transform_primary_char( + _Out_writes_(end1 - string1) _Post_readable_size_(return) char* string1, char* end1, + _In_reads_(end2 - string2) const char* string2, const char* end2, _In_opt_ const _Collvec* ploc) noexcept { + size_t n1 = end1 - string1; + size_t n2 = end2 - string2; + size_t retval = static_cast<size_t>(-1); + UINT codepage; + const wchar_t* locale_name; + + if (ploc == nullptr) { + locale_name = ___lc_locale_name_func()[LC_COLLATE]; + codepage = ___lc_collate_cp_func(); + } else { + locale_name = ploc->_LocaleName; + codepage = ploc->_Page; + } + + if (locale_name == nullptr && codepage == CP_ACP) { + if (n2 <= n1) { + memcpy(string1, string2, n2); + } + retval = n2; + } else { + // Inquire size of dst string in BYTES + const int dstlen = __crtLCMapStringA(locale_name, + LCMAP_SORTKEY | LINGUISTIC_IGNORECASE | LINGUISTIC_IGNOREDIACRITIC | NORM_IGNOREKANATYPE | NORM_IGNOREWIDTH, + string2, static_cast<int>(n2), nullptr, 0, codepage, TRUE); + + if (dstlen != 0) { + retval = dstlen; + + // if not enough room, return amount needed + if (dstlen <= static_cast<int>(n1)) { + // Map src string to dst string + __crtLCMapStringA(locale_name, + LCMAP_SORTKEY | LINGUISTIC_IGNORECASE | LINGUISTIC_IGNOREDIACRITIC | NORM_IGNOREKANATYPE + | NORM_IGNOREWIDTH, + string2, static_cast<int>(n2), string1, static_cast<int>(n1), codepage, TRUE); + } + } + } + + return retval; +} + +// derived from xwcsxfrm.cpp +size_t __stdcall __std_regex_transform_primary_wchar_t( + _Out_writes_(end1 - string1) _Post_readable_size_(return) wchar_t* string1, wchar_t* end1, + _In_reads_(end2 - string2) const wchar_t* 
string2, const wchar_t* end2, _In_opt_ const _Collvec* ploc) noexcept { + size_t n1 = end1 - string1; + size_t n2 = end2 - string2; + size_t size = static_cast<size_t>(-1); + const wchar_t* locale_name; + + if (ploc == nullptr) { + locale_name = ___lc_locale_name_func()[LC_COLLATE]; + } else { + locale_name = ploc->_LocaleName; + } + + if (locale_name == nullptr) { + if (n2 <= n1) { + memcpy(string1, string2, n2 * sizeof(wchar_t)); + } + size = n2; + } else { + // When using LCMAP_SORTKEY, LCMapStringW handles BYTES not wide + // chars. We use a byte buffer to hold bytes and then convert the + // byte string to a wide char string and return this so it can be + // compared using wcscmp(). User's buffer is n1 wide chars, so + // use an internal buffer of n1 bytes. + + auto bbuffer = _malloc_crt_t(unsigned char, n1); + + if (bbuffer) { +#pragma warning(push) +#pragma warning(disable : 6386) // PREfast doesn't understand LCMAP_SORTKEY + size = __crtLCMapStringW(locale_name, + LCMAP_SORTKEY | LINGUISTIC_IGNORECASE | LINGUISTIC_IGNOREDIACRITIC | NORM_IGNOREKANATYPE + | NORM_IGNOREWIDTH, + string2, static_cast<int>(n2), reinterpret_cast<wchar_t*>(bbuffer.get()), static_cast<int>(n1)); +#pragma warning(pop) + + if (size == 0) { + // buffer not big enough, get size required. + size = __crtLCMapStringW(locale_name, + LCMAP_SORTKEY | LINGUISTIC_IGNORECASE | LINGUISTIC_IGNOREDIACRITIC | NORM_IGNOREKANATYPE + | NORM_IGNOREWIDTH, + string2, static_cast<int>(n2), nullptr, 0); + + if (size == 0) { + size = static_cast<size_t>(-1); // default error + } + } else { + // string successfully mapped, convert to wide char + + for (size_t i = 0; i < size; ++i) { + string1[i] = static_cast<wchar_t>(bbuffer.get()[i]); + } + } + } + } + + return size; +} +} // extern "C"
diff --git a/tests/libcxx/expected_results.txt b/tests/libcxx/expected_results.txt index 64b389eaf67..c0fff7a9161 100644 --- a/tests/libcxx/expected_results.txt +++ b/tests/libcxx/expected_results.txt @@ -830,7 +830,6 @@ std/re/re.alg/re.alg.search/basic.pass.cpp FAIL std/re/re.alg/re.alg.search/ecma.pass.cpp FAIL std/re/re.alg/re.alg.search/extended.pass.cpp FAIL std/re/re.traits/lookup_collatename.pass.cpp FAIL -std/re/re.traits/transform_primary.pass.cpp FAIL # Not analyzed, likely STL bugs. Various assertions. std/numerics/complex.number/complex.ops/complex_divide_complex.pass.cpp FAIL diff --git a/tests/std/tests/GH_005204_regex_collating_ranges/test.cpp b/tests/std/tests/GH_005204_regex_collating_ranges/test.cpp index b24141a9fc3..687812ce480 100644 --- a/tests/std/tests/GH_005204_regex_collating_ranges/test.cpp +++ b/tests/std/tests/GH_005204_regex_collating_ranges/test.cpp @@ -535,31 +535,98 @@ void test_gh_994() { gh_994_should_throw("[a-[.cs.]]", error_range); gh_994_should_throw("[[.cs.]-[.dzs.]]", error_range); -#ifndef SKIP_COLLATE_TESTS +#ifndef _M_CEE_PURE g_regexTester.should_throw("[[=a=]-c]", error_range); g_regexTester.should_throw("[c-[=z=]]", error_range); g_regexTester.should_throw("[[=a=]-[=z=]]", error_range); g_regexTester.should_match("a", "[[=a=]]"); - g_regexTester.should_match("A", "[[=a=]]"); + g_regexTester.should_not_match("A", "[[=a=]]"); g_regexTester.should_not_match("b", "[[=a=]]"); g_regexTester.should_not_match("B", "[[=a=]]"); - g_regexTester.should_match("z", "[[=Z=]]"); + g_regexTester.should_not_match("z", "[[=Z=]]"); g_regexTester.should_match("Z", "[[=Z=]]"); g_regexTester.should_not_match("b", "[[=Z=]]"); g_regexTester.should_not_match("B", "[[=Z=]]"); + g_regexTester.should_match("a", "[[=a=]]", icase); + g_regexTester.should_match("A", "[[=a=]]", icase); + g_regexTester.should_not_match("b", "[[=a=]]", icase); + g_regexTester.should_not_match("B", "[[=a=]]", icase); + g_regexTester.should_match("z", "[[=Z=]]", icase); 
+ g_regexTester.should_match("Z", "[[=Z=]]", icase); + g_regexTester.should_not_match("b", "[[=Z=]]", icase); + g_regexTester.should_not_match("B", "[[=Z=]]", icase); + g_regexTester.should_match("ab", "[[=a=]]b"); - g_regexTester.should_match("Ab", "[[=a=]]b"); + g_regexTester.should_not_match("Ab", "[[=a=]]b"); g_regexTester.should_not_match("Ab", "[[=a=]]B"); g_regexTester.should_not_match("b", "[[=a=]]b"); g_regexTester.should_not_match("aab", "[[=a=]]b"); g_regexTester.should_not_match("B", "[[=a=]]b"); - - g_regexTester.should_match("AaAaaAaab", "[[=a=]]*b"); + g_regexTester.should_not_match("ab", "[[=A=]]b"); + g_regexTester.should_match("Ab", "[[=A=]]b"); + g_regexTester.should_not_match("Ab", "[[=A=]]B"); + g_regexTester.should_not_match("b", "[[=A=]]b"); + g_regexTester.should_not_match("AAb", "[[=A=]]b"); + g_regexTester.should_not_match("B", "[[=A=]]b"); + + g_regexTester.should_match("ab", "[[=a=]]b", icase); + g_regexTester.should_match("Ab", "[[=a=]]b", icase); + g_regexTester.should_match("Ab", "[[=a=]]B", icase); + g_regexTester.should_not_match("b", "[[=a=]]b", icase); + g_regexTester.should_not_match("aab", "[[=a=]]b", icase); + g_regexTester.should_not_match("B", "[[=a=]]b", icase); + g_regexTester.should_match("ab", "[[=A=]]b", icase); + g_regexTester.should_match("Ab", "[[=A=]]b", icase); + g_regexTester.should_match("Ab", "[[=A=]]B", icase); + g_regexTester.should_not_match("b", "[[=A=]]b", icase); + g_regexTester.should_not_match("AAb", "[[=A=]]b", icase); + g_regexTester.should_not_match("B", "[[=A=]]b", icase); + + g_regexTester.should_not_match("AaAaaAaab", "[[=a=]]*b"); g_regexTester.should_not_match("AaAaaAaab", "[[=a=]]*c"); - g_regexTester.should_match("AaAabcaAaad", "[[=a=]bc]*d"); -#endif // !defined(SKIP_COLLATE_TESTS) + g_regexTester.should_not_match("AaAabcaAaad", "[[=a=]bc]*d"); + g_regexTester.should_match("AaAaaAaab", "[[=a=]]*b", icase); + g_regexTester.should_not_match("AaAaaAaab", "[[=a=]]*c", icase); + 
g_regexTester.should_match("AaAabcaAaad", "[[=a=]bc]*d", icase); +#endif // ^^^ !defined(_M_CEE_PURE) ^^^ +} + +void test_gh_5435() { + // GH-5435: <regex>: Equivalence classes have unexpected behavior with std::wregex +#ifndef _M_CEE_PURE + { + test_wregex_locale eq_a_regex(&g_regexTester, L"^[[=a=]]*b$", "en-US"); + eq_a_regex.should_search_match(L"A\u00c0ab", L"A\u00c0ab"); // U+00C0 LATIN CAPITAL LETTER A WITH GRAVE + eq_a_regex.should_search_fail(L"Ab\u00c0ab"); // U+00C0 LATIN CAPITAL LETTER A WITH GRAVE + eq_a_regex.should_search_match(L"A\u00e0ab", L"A\u00e0ab"); // U+00E0 LATIN SMALL LETTER A WITH GRAVE + eq_a_regex.should_search_match(L"A\u00c1ab", L"A\u00c1ab"); // U+00C1 LATIN CAPITAL LETTER A WITH ACUTE + eq_a_regex.should_search_match(L"A\u00e1ab", L"A\u00e1ab"); // U+00E1 LATIN SMALL LETTER A WITH ACUTE + eq_a_regex.should_search_match(L"A\u00c2ab", L"A\u00c2ab"); // U+00C2 LATIN CAPITAL LETTER A WITH CIRCUMFLEX + eq_a_regex.should_search_match(L"A\u00e2ab", L"A\u00e2ab"); // U+00E2 LATIN SMALL LETTER A WITH CIRCUMFLEX + eq_a_regex.should_search_match(L"A\u00c3ab", L"A\u00c3ab"); // U+00C3 LATIN CAPITAL LETTER A WITH TILDE + eq_a_regex.should_search_match(L"A\u00e3ab", L"A\u00e3ab"); // U+00E3 LATIN SMALL LETTER A WITH TILDE + eq_a_regex.should_search_match(L"A\u00c4ab", L"A\u00c4ab"); // U+00C4 LATIN CAPITAL LETTER A WITH DIAERESIS + eq_a_regex.should_search_match(L"A\u00e4ab", L"A\u00e4ab"); // U+00E4 LATIN SMALL LETTER A WITH DIAERESIS + eq_a_regex.should_search_match(L"A\u00c5ab", L"A\u00c5ab"); // U+00C5 LATIN CAPITAL LETTER A WITH RING ABOVE + eq_a_regex.should_search_match(L"A\u00e5ab", L"A\u00e5ab"); // U+00E5 LATIN SMALL LETTER A WITH RING ABOVE + } + { + test_wregex_locale eq_e_regex(&g_regexTester, L"^[[=e=]]*b$", "en-US"); + eq_e_regex.should_search_match(L"e\u00c8Eb", L"e\u00c8Eb"); // U+00C8 LATIN CAPITAL LETTER E WITH GRAVE + eq_e_regex.should_search_fail(L"eb\u00c8Eb"); // U+00C8 LATIN CAPITAL LETTER E WITH GRAVE + 
eq_e_regex.should_search_match(L"e\u00e8Eb", L"e\u00e8Eb"); // U+00E8 LATIN SMALL LETTER E WITH GRAVE + eq_e_regex.should_search_match(L"e\u00c9Eb", L"e\u00c9Eb"); // U+00C9 LATIN CAPITAL LETTER E WITH ACUTE + eq_e_regex.should_search_match(L"e\u00e9Eb", L"e\u00e9Eb"); // U+00E9 LATIN SMALL LETTER E WITH ACUTE + eq_e_regex.should_search_match(L"e\u00caEb", L"e\u00caEb"); // U+00CA LATIN CAPITAL LETTER E WITH CIRCUMFLEX + eq_e_regex.should_search_match(L"e\u00eaEb", L"e\u00eaEb"); // U+00EA LATIN SMALL LETTER E WITH CIRCUMFLEX + eq_e_regex.should_search_match(L"e\u00cbEb", L"e\u00cbEb"); // U+00CB LATIN CAPITAL LETTER E WITH DIAERESIS + eq_e_regex.should_search_match(L"e\u00ebEb", L"e\u00ebEb"); // U+00EB LATIN SMALL LETTER E WITH DIAERESIS + eq_e_regex.should_search_fail(L"e\u00ccEb"); // U+00CC LATIN CAPITAL LETTER I WITH GRAVE + eq_e_regex.should_search_fail(L"e\u00ecEb"); // U+00EC LATIN SMALL LETTER I WITH GRAVE + } +#endif // ^^^ !defined(_M_CEE_PURE) ^^^ } void test_gh_5437_ECMAScript_or_collate(syntax_option_type ECMAScript_or_collate) { @@ -592,6 +659,7 @@ void test_gh_5437() { int main() { test_collating_ranges_german(); test_gh_994(); + test_gh_5435(); test_gh_5437(); return g_regexTester.result(); diff --git a/tests/std/tests/VSO_0000000_regex_use/test.cpp b/tests/std/tests/VSO_0000000_regex_use/test.cpp index d7a4dc65f38..3d6fe5380a7 100644 --- a/tests/std/tests/VSO_0000000_regex_use/test.cpp +++ b/tests/std/tests/VSO_0000000_regex_use/test.cpp @@ -764,9 +764,11 @@ void test_gh_4995() { g_regexTester.should_throw("[[:digit:]-e]", error_range); g_regexTester.should_throw("[e-[:digit:]]", error_range); g_regexTester.should_throw("[[:alpha:]-[:digit:]]", error_range); +#ifndef _M_CEE_PURE g_regexTester.should_throw("[[=a=]-e]", error_range, ECMAScript | regex::collate); g_regexTester.should_throw("[e-[=a=]]", error_range, ECMAScript | regex::collate); g_regexTester.should_throw("[[=a=]-[=b=]]", error_range, ECMAScript | regex::collate); +#endif // ^^^ 
!defined(_M_CEE_PURE) ^^^ // Test valid cases: g_regexTester.should_not_match("b", R"([\d-])"); diff --git a/tests/tr1/tests/regex1/test.cpp b/tests/tr1/tests/regex1/test.cpp index a9f4689b176..2aaf0ea2e7e 100644 --- a/tests/tr1/tests/regex1/test.cpp +++ b/tests/tr1/tests/regex1/test.cpp @@ -173,9 +173,11 @@ static void test_traits() { // test template regex_traits CHECKSTRING(v0.transform(carr, carr + xlen(carr)), v0.transform(carr, carr + xlen(carr))); CHECK(v0.transform(carr, carr + xlen(carr)) != v0.transform(carr0, carr0 + xlen(carr0))); CHECK(v0.transform(carr, carr + xlen(carr)) < v0.transform(carr1, carr1 + xlen(carr1))); +#ifndef _M_CEE_PURE CHECK(v0.transform_primary(carr, carr + xlen(carr)) == v0.transform_primary(carr, carr + xlen(carr))); - CHECK(v0.transform_primary(carr, carr + xlen(carr)) == v0.transform_primary(carr0, carr0 + xlen(carr0))); + CHECK(v0.transform_primary(carr, carr + xlen(carr)) != v0.transform_primary(carr0, carr0 + xlen(carr0))); CHECK(v0.transform_primary(carr0, carr0 + xlen(carr0)) < v0.transform_primary(carr1, carr1 + xlen(carr1))); +#endif // ^^^ !defined(_M_CEE_PURE) ^^^ for (size_t i = 0; i < sizeof(class_names) / sizeof(*class_names); ++i) { CHECK(v0.lookup_classname(class_names[i], class_names[i] + xlen(class_names[i])) != 0); diff --git a/tests/tr1/tests/regex2/test.cpp b/tests/tr1/tests/regex2/test.cpp index 44cb5632126..d52371192de 100644 --- a/tests/tr1/tests/regex2/test.cpp +++ b/tests/tr1/tests/regex2/test.cpp @@ -588,8 +588,10 @@ static const regex_test tests[] = { {__LINE__, T("[[:xdigit:]]"), T("g"), "0", ALL}, {__LINE__, T("[[:xdigit:]]"), T("1"), "1 0 1", ALL}, {__LINE__, T("[[:xdigit:]]"), T(" "), "0", ALL}, - {__LINE__, T("[[=x=]]"), T("X"), "1 0 1", ALL}, +#ifndef _M_CEE_PURE + {__LINE__, T("[[=x=]]"), T("X"), "0", ALL}, {__LINE__, T("[[=x=]]"), T("x"), "1 0 1", ALL}, +#endif // ^^^ !defined(_M_CEE_PURE) ^^^ // character class ranges {__LINE__, T("[-]"), T("-"), "1 0 1", ALL},
LWG-4186 `regex_traits::transform_primary` mistakenly detects `typeid` of a function LWG-4186 `regex_traits::transform_primary` mistakenly detects `typeid` of a function `<regex>`: Equivalence classes have unexpected behavior with `std::wregex` Repros with VS 2022 17.14 Preview 4 with microsoft/STL `main`, including #5392. Tracked by internal VSO-127463 / AB#127463 , originally reported by an external user through the defunct Microsoft Connect site on 2015-06-02. ``` C:\Temp>type meow.cpp ``` ```cpp #include <locale> #include <print> #include <regex> #include <string> using namespace std; [[nodiscard]] string escape_wide(const wstring& wstr) { string ret{R"(L")"}; for (const auto& wch : wstr) { ret += format(R"(\x{:x})", static_cast<unsigned short>(wch)); } ret += R"(")"; return ret; } void display_result(const wstring& wstr, const wstring& pattern) { const locale loc{"fr-FR"}; wregex rgx; rgx.imbue(loc); rgx.assign(pattern, regex_constants::icase | regex_constants::collate); const bool result = regex_match(wstr, rgx); regex_traits<wchar_t> tr; tr.imbue(loc); const wstring primary_sort_key = tr.transform_primary(wstr.begin(), wstr.end()); println("wstr: {}; result: {:>5}; primary_sort_key: {}", escape_wide(wstr), result, escape_wide(primary_sort_key)); } int main() { display_result(L"E", L"[[=e=]]"); display_result(L"\u00C8", L"[[=e=]]"); // LATIN CAPITAL LETTER E WITH GRAVE display_result(L"\u00C9", L"[[=e=]]"); // LATIN CAPITAL LETTER E WITH ACUTE display_result(L"\u00CA", L"[[=e=]]"); // LATIN CAPITAL LETTER E WITH CIRCUMFLEX display_result(L"e", L"[[=e=]]"); display_result(L"\u00E8", L"[[=e=]]"); // LATIN SMALL LETTER E WITH GRAVE display_result(L"\u00E9", L"[[=e=]]"); // LATIN SMALL LETTER E WITH ACUTE display_result(L"\u00EA", L"[[=e=]]"); // LATIN SMALL LETTER E WITH CIRCUMFLEX } ``` ``` C:\Temp>cl /EHsc /nologo /W4 /std:c++latest /MTd /Od meow.cpp && meow meow.cpp wstr: L"\x45"; result: true; primary_sort_key: L"\xe\x21\x1\x1\x1\x1\x0" wstr: L"\xc8"; 
result: false; primary_sort_key: L"\xe\x21\x1\xf\x1\x1\x1\x0" wstr: L"\xc9"; result: false; primary_sort_key: L"\xe\x21\x1\xe\x1\x1\x1\x0" wstr: L"\xca"; result: false; primary_sort_key: L"\xe\x21\x1\x12\x1\x1\x1\x0" wstr: L"\x65"; result: true; primary_sort_key: L"\xe\x21\x1\x1\x1\x1\x0" wstr: L"\xe8"; result: false; primary_sort_key: L"\xe\x21\x1\xf\x1\x1\x1\x0" wstr: L"\xe9"; result: false; primary_sort_key: L"\xe\x21\x1\xe\x1\x1\x1\x0" wstr: L"\xea"; result: false; primary_sort_key: L"\xe\x21\x1\x12\x1\x1\x1\x0" ``` The user expects `regex_match` to always return `true` here. I don't understand why [`LCMapStringEx`](https://learn.microsoft.com/en-us/windows/win32/api/winnls/nf-winnls-lcmapstringex) with `LCMAP_SORTKEY` is producing these primary sort keys. Are we supposed to be passing extra flags to ignore diacritics?
More generally, `regex_traits::transform_primary()` currently implements the general traits requirement in [\[re.req\]/20](https://eel.is/c++draft/re.req#20) and not the specified implementation in [\[re.traits\]/7](https://eel.is/c++draft/re.traits#7). Besides the missing comparison with `typeid`, this also means that diacritics are not properly handled in (most?) non-C locales. If we want to implement the specification, we should probably add variants of `_Strxfrm()` and `_Wcsxfrm()` to the import library that call `__crtLCMapStringA/W` with appropriate flags for non-C locales (maybe `LCMAP_SORTKEY | LINGUISTIC_IGNORECASE | LINGUISTIC_IGNOREDIACRITIC | NORM_IGNOREKANA | NORM_IGNOREWIDTH` or something similar). If we add these new variants of `_Strxfrm()` and `_Wcsxfrm()`, we should make sure that their return values are consistent unlike the current implementations of`_Strxfrm()` and `_Wcxsfrm()` (https://github.com/microsoft/STL/issues/5210). `std::locale` doesn't actually construct any facets of type `std::collate_byname<charT>`. It constructs facets of type `std::collate<charT>` instead. See: https://godbolt.org/z/Te4rcM173 So comparing with `typeid(std::collate_byname<charT>)` only doesn't make sense. We either have to correct the facets constructed by `std::locale` (would this break ABI?) or compare with `typeid(std::collate<charT>)` as well. > We either have to correct the facets constructed by `std::locale` (would this break ABI?) I don't understand `std::locale` well enough to say whether it would be binary-compatible. All I know is that `std::locale` is the most fragile part of the library and changes have had a high risk of damaging it, even back when we could break ABI every major version. > Are we supposed to be passing extra flags to ignore diacritics? Yes. For purposes of collation order, a character is assigned a number of sorting weights. Some of these underlying sorting weights also leak into the description of `LCMapStringEx`. 
For example, the description for the `NORM_IGNORENONSPACE` flag states: > NORM_IGNORENONSPACE ignores any secondary distinction, whether it is a diacritic or not. [...] LINGUISTIC_IGNOREDIACRITIC causes the function to ignore only actual diacritics, instead of ignoring the *second sorting weight*. `transform()` is supposed to produce a sort key that considers all of these sorting weights. `transform_primary()`, however, is supposed to return a primary sort key. A primary sort key is derived from the primary (first) sorting weight only while ignoring all the others. That said, we probably want to choose linguistically more accurate options for a particular locale than to just neglect all the non-primary weights. I already wrote in https://github.com/microsoft/STL/issues/5291#issuecomment-2745283769: > If we want to implement the specification, we should probably add variants of `_Strxfrm()` and `_Wcsxfrm()` to the import library that call __crtLCMapStringA/W with appropriate flags for non-C locales (maybe `LCMAP_SORTKEY | LINGUISTIC_IGNORECASE | LINGUISTIC_IGNOREDIACRITIC | NORM_IGNOREKANA | NORM_IGNOREWIDTH` or something similar). For the "C" locale, the best we can probably do is to convert all characters to lower or upper case and return that as the sort key. Since equivalence classes are inherited from POSIX, I checked what the POSIX standard says about the sorting weights in the C locale (or, more accurately, the POSIX locale). My reading is that all characters should be assigned a unique primary weight. This would mean that even "a" and "A" do not have the same primary sort key. A quick test with grep seems to confirm that. However, this is somewhat inconsistent with the general traits requirements of `transform_primary()` in [\[re.req\]/20](https://eel.is/c++draft/re.req#20) that require that `transform_primary()` must not consider character case. I assumed these general traits requirements when I implemented #5392. 
If these are actually not true, then the implementation has to be fixed in `icase` mode. I think the traits requirement in [\[re.req\]/20](https://eel.is/c++draft/re.req#20) is wrong for the C locale and the primary sort key according to POSIX is correct. In the C locale, this is the collation order for alphabetic characters: ``` A <= B <= ... <= Z <= ... <= a <= b <= ... <= z ``` This collation order based on all sorting weights should be consistent with the order that is based on the primary weight only. This means that all these relations should also hold when we compare primary sort keys. If we also impose that primary sort keys compare equal for lower and upper case versions of the same character, we get ``` A = B = ... = Z = ... = a = b = ... = z ``` So all alphabetic characters would be assigned the same primary sort key. This is clearly not what we want. This means that we also have to fix the processing of equivalences in the parser by adding a character translation phase before calling `transform_primary()`.
More generally, `regex_traits::transform_primary()` currently implements the general traits requirement in [\[re.req\]/20](https://eel.is/c++draft/re.req#20) and not the specified implementation in [\[re.traits\]/7](https://eel.is/c++draft/re.traits#7). Besides the missing comparison with `typeid`, this also means that diacritics are not properly handled in (most?) non-C locales. If we want to implement the specification, we should probably add variants of `_Strxfrm()` and `_Wcsxfrm()` to the import library that call `__crtLCMapStringA/W` with appropriate flags for non-C locales (maybe `LCMAP_SORTKEY | LINGUISTIC_IGNORECASE | LINGUISTIC_IGNOREDIACRITIC | NORM_IGNOREKANA | NORM_IGNOREWIDTH` or something similar). If we add these new variants of `_Strxfrm()` and `_Wcsxfrm()`, we should make sure that their return values are consistent unlike the current implementations of`_Strxfrm()` and `_Wcxsfrm()` (https://github.com/microsoft/STL/issues/5210). `std::locale` doesn't actually construct any facets of type `std::collate_byname<charT>`. It constructs facets of type `std::collate<charT>` instead. See: https://godbolt.org/z/Te4rcM173 So comparing with `typeid(std::collate_byname<charT>)` only doesn't make sense. We either have to correct the facets constructed by `std::locale` (would this break ABI?) or compare with `typeid(std::collate<charT>)` as well. > We either have to correct the facets constructed by `std::locale` (would this break ABI?) I don't understand `std::locale` well enough to say whether it would be binary-compatible. All I know is that `std::locale` is the most fragile part of the library and changes have had a high risk of damaging it, even back when we could break ABI every major version. > Are we supposed to be passing extra flags to ignore diacritics? Yes. For purposes of collation order, a character is assigned a number of sorting weights. Some of these underlying sorting weights also leak into the description of `LCMapStringEx`. 
For example, the description for the `NORM_IGNORENONSPACE` flag states: > NORM_IGNORENONSPACE ignores any secondary distinction, whether it is a diacritic or not. [...] LINGUISTIC_IGNOREDIACRITIC causes the function to ignore only actual diacritics, instead of ignoring the *second sorting weight*. `transform()` is supposed to produce a sort key that considers all of these sorting weights. `transform_primary()`, however, is supposed to return a primary sort key. A primary sort key is derived from the primary (first) sorting weight only while ignoring all the others. That said, we probably want to choose linguistically more accurate options for a particular locale than to just neglect all the non-primary weights. I already wrote in https://github.com/microsoft/STL/issues/5291#issuecomment-2745283769: > If we want to implement the specification, we should probably add variants of `_Strxfrm()` and `_Wcsxfrm()` to the import library that call __crtLCMapStringA/W with appropriate flags for non-C locales (maybe `LCMAP_SORTKEY | LINGUISTIC_IGNORECASE | LINGUISTIC_IGNOREDIACRITIC | NORM_IGNOREKANA | NORM_IGNOREWIDTH` or something similar). For the "C" locale, the best we can probably do is to convert all characters to lower or upper case and return that as the sort key. Since equivalence classes are inherited from POSIX, I checked what the POSIX standard says about the sorting weights in the C locale (or, more accurately, the POSIX locale). My reading is that all characters should be assigned a unique primary weight. This would mean that even "a" and "A" do not have the same primary sort key. A quick test with grep seems to confirm that. However, this is somewhat inconsistent with the general traits requirements of `transform_primary()` in [\[re.req\]/20](https://eel.is/c++draft/re.req#20) that require that `transform_primary()` must not consider character case. I assumed these general traits requirements when I implemented #5392. 
If these are actually not true, then the implementation has to be fixed in `icase` mode. I think the traits requirement in [\[re.req\]/20](https://eel.is/c++draft/re.req#20) is wrong for the C locale and the primary sort key according to POSIX is correct. In the C locale, this is the collation order for alphabetic characters: ``` A <= B <= ... <= Z <= ... <= a <= b <= ... <= z ``` This collation order based on all sorting weights should be consistent with the order that is based on the primary weight only. This means that all these relations should also hold when we compare primary sort keys. If we also impose that primary sort keys compare equal for lower and upper case versions of the same character, we get ``` A = B = ... = Z = ... = a = b = ... = z ``` So all alphabetic characters would be assigned the same primary sort key. This is clearly not what we want. This means that we also have to fix the processing of equivalences in the parser by adding a character translation phase before calling `transform_primary()`.
[ "https://github.com/microsoft/STL/commit/898777a69c30cbc8d564b92262db306f51c2dd8f", "https://github.com/microsoft/STL/commit/c74e118efb2302ffd9813db8a33c58db428476d2", "https://github.com/microsoft/STL/commit/413b3060909a62ee47d5a78df032ca4861d10a77", "https://github.com/microsoft/STL/commit/4697464b23d873c8d...
2025-04-26T18:40:04Z
https://github.com/microsoft/STL/tree/1e0317478eddc973e801bdacc10cee2df9943b82
[ "git config --global --add safe.directory C:/testbed ; git submodule update --init --progress ; cmd /c '\"C:\\Program Files (x86)\\Microsoft Visual Studio\\2022\\BuildTools\\Common7\\Tools\\VsDevCmd.bat\" -host_arch=x64 -arch=x64 -no_logo && cmake --build out\\x64 --parallel'" ]
[ "cmd /c '\"C:\\Program Files (x86)\\Microsoft Visual Studio\\2022\\BuildTools\\Common7\\Tools\\VsDevCmd.bat\" -host_arch=x64 -arch=x64 -no_logo && set \"PATH=C:\\Python314;C:\\Program Files\\LLVM\\bin;C:\\Program Files (x86)\\Windows Kits\\10\\bin\\10.0.22621.0\\x64;%PATH%\" && cd /d out\\x64 && python tests\\utils...
[ "cmd /c '\"C:\\Program Files (x86)\\Microsoft Visual Studio\\2022\\BuildTools\\Common7\\Tools\\VsDevCmd.bat\" -host_arch=x64 -arch=x64 -no_logo && set \"PATH=C:\\Python314;C:\\Program Files\\LLVM\\bin;C:\\Program Files (x86)\\Windows Kits\\10\\bin\\10.0.22621.0\\x64;%PATH%\" && cd /d out\\x64 && python tests\\utils...
def parser(log: str) -> dict[str, str]: import re results: dict[str, str] = {} def set_status(name: str, status: str): if not name: return n = name.strip() # normalize suite/name separator and spaces n = re.sub(r"\s*::\s*", "::", n) n = re.sub(r"[ \t]+", " ", n) severity = {"fail": 2, "pass": 1, "skip": 0} old = results.get(n) if old is None or severity.get(status, -1) > severity.get(old, -1): results[n] = status # Helper to join with duplicate-boundary suppression def join_suppress(prev: str, nxt: str) -> str: if not prev: return nxt if not nxt: return prev if prev[-1] == nxt[0]: nxt = nxt[1:] return prev + nxt # 1) Parse any JUnit XML if present open_close_re = re.compile(r"<testcase\b([^>]*)>(.*?)</testcase>", re.DOTALL | re.IGNORECASE) attrs_re = re.compile(r'(\w+)\s*=\s*"([^"]*)"', re.IGNORECASE) for m in open_close_re.finditer(log): attrs = m.group(1) or "" body = m.group(2) or "" classname = None name = None for k, v in attrs_re.findall(attrs): kl = k.lower() if kl == "classname": classname = v.strip() elif kl == "name": name = v.strip() testname = None if classname and name: testname = f"{classname}::{name}" elif name: testname = name elif classname: testname = classname if testname: bl = body.lower() if "<skipped" in bl: set_status(testname, "skip") elif "<failure" in bl or "<error" in bl: set_status(testname, "fail") else: set_status(testname, "pass") self_closing_re = re.compile(r"<testcase\b([^>]*)\s*/>", re.IGNORECASE) for m in self_closing_re.finditer(log): attrs = m.group(1) or "" classname = None name = None for k, v in attrs_re.findall(attrs): kl = k.lower() if kl == "classname": classname = v.strip() elif kl == "name": name = v.strip() testname = None if classname and name: testname = f"{classname}::{name}" elif name: testname = name elif classname: testname = classname if testname: set_status(testname, "pass") # 2) Parse failure block headers for full names, reconstruct with suppression across newlines # Pattern: **** TEST 'suite :: name:cfg' 
FAILED **** fail_block_re = re.compile(r"\*+\s*TEST\s+'(.+?)'\s+FAILED\s*\*+", re.DOTALL | re.IGNORECASE) for m in fail_block_re.finditer(log): raw = m.group(1) # Reconstruct across newlines while suppressing duplicated boundary chars parts = raw.replace("\r", "").split("\n") if parts: name_joined = parts[0] for part in parts[1:]: name_joined = join_suppress(name_joined, part) set_status(name_joined, "fail") # 3) Parse LIT lines with robust reconstruction status_prefix_re = re.compile(r"^\s*(PASS|FAIL|XFAIL|XPASS|UNSUPPORTED|SKIPPED):", re.IGNORECASE) # Accept duplicated letters in 'of' due to wrapping/duplication trailer_re = re.compile(r"\(\s*\d+\s+o+f+\s+\d+\s*\)\s*$", re.IGNORECASE) # Lines that indicate we should stop accumulating this logical line header_break_re = re.compile( r"^\*{5,}|^Exit Code:|^Standard Error:|^Build setup steps:|^Build steps:|^Command:|^--\s*$|^In file included|^-- Testing:|^Testing Time:", re.IGNORECASE, ) lines = log.splitlines() logical_lines: list[str] = [] buf = None def flush_buf(): nonlocal buf if buf is not None: # Ensure no stray CRs and collapse any remaining newlines (shouldn't be any) s = buf.replace("\r", "").replace("\n", "") logical_lines.append(s) buf = None i = 0 while i < len(lines): line = lines[i] if status_prefix_re.match(line): flush_buf() buf = line j = i + 1 while j < len(lines): nxt = lines[j] if status_prefix_re.match(nxt) or header_break_re.match(nxt): break buf = join_suppress(buf, nxt) if trailer_re.search(nxt) or trailer_re.search(buf): j += 1 break j += 1 i = j flush_buf() continue else: i += 1 lit_re = re.compile( r"^\s*(PASS|FAIL|XFAIL|XPASS|UNSUPPORTED|SKIPPED):\s+(.+?)\s+::\s+(.+?)(?:\s+\(\d+\s+o+f+\s+\d+\)\s*)?$", re.IGNORECASE, ) def map_status(s: str) -> str: s = s.upper() if s == "PASS": return "pass" if s in ("FAIL", "XPASS"): return "fail" if s == "XFAIL": return "pass" if s in ("UNSUPPORTED", "SKIPPED"): return "skip" return "fail" for ll in logical_lines: m = lit_re.match(ll) if not m: continue 
status_raw, suite, name = m.groups() testname = f"{suite}::{name}" set_status(testname, map_status(status_raw)) return results
[ "libc++::std/containers/associative/multimap/multimap.erasure/erase_if.pass.cpp:0", "libc++::std/containers/associative/multimap/multimap.erasure/erase_if.pass.cpp:2" ]
[ "libc++::std/algorithms/alg.nonmodifying/alg.find.end/find_end.pass.cpp:2", "libc++::std/algorithms/alg.sorting/alg.sort/sort/sort.pass.cpp:0", "libc++::std/algorithms/alg.sorting/alg.sort/sort/sort_constexpr.pass.cpp:2", "libc++::std/algorithms/alg.modifying.operations/alg.copy/copy.pass.cpp:0", "libc++::s...
starryzhang/sweb.eval.win.microsoft_1776_stl-5444
elastic/beats
43665
elastic__beats-43665
Go
[ "22172", "40765" ]
10858f98cf8f14e1de51b195bba607c784bc6c90
diff --git a/CHANGELOG.next.asciidoc b/CHANGELOG.next.asciidoc index 6dff17ecc0ed..83a5c19cb5c9 100644 --- a/CHANGELOG.next.asciidoc +++ b/CHANGELOG.next.asciidoc @@ -88,6 +88,7 @@ otherwise no tag is added. {issue}42208[42208] {pull}42403[42403] - Removed support for the Enterprise Search module {pull}42915[42915] - Fix the function to determine CPU cores on windows {issue}42593[42593] {pull}43409[43409] - Updated list of supported vSphere versions in the documentation. {pull}43642[43642] +- Handle permission errors while collecting data from Windows services and don't interrupt the overall collection by skipping affected services {issue}40765[40765] {pull}43665[43665] *Osquerybeat* diff --git a/metricbeat/module/windows/service/reader.go b/metricbeat/module/windows/service/reader.go index 379d4cb738f7..961d6789159e 100644 --- a/metricbeat/module/windows/service/reader.go +++ b/metricbeat/module/windows/service/reader.go @@ -28,6 +28,7 @@ import ( "golang.org/x/sys/windows/registry" + "github.com/elastic/elastic-agent-libs/logp" "github.com/elastic/elastic-agent-libs/mapstr" ) @@ -48,9 +49,10 @@ type Reader struct { guid string // Host's MachineGuid value (a unique ID for the host). ids map[string]string // Cache of service IDs. 
protectedServices map[string]struct{} + log *logp.Logger } -func NewReader() (*Reader, error) { +func NewReader(log *logp.Logger) (*Reader, error) { handle, err := openSCManager("", "", ScManagerEnumerateService|ScManagerConnect) if err != nil { return nil, fmt.Errorf("initialization failed: %w", err) @@ -67,13 +69,14 @@ func NewReader() (*Reader, error) { guid: guid, ids: map[string]string{}, protectedServices: map[string]struct{}{}, + log: log, } return r, nil } func (reader *Reader) Read() ([]mapstr.M, error) { - services, err := GetServiceStates(reader.handle, reader.state, reader.protectedServices) + services, err := GetServiceStates(reader.log, reader.handle, reader.state, reader.protectedServices) if err != nil { return nil, err } diff --git a/metricbeat/module/windows/service/service.go b/metricbeat/module/windows/service/service.go index 45e12c6574de..61b950af7454 100644 --- a/metricbeat/module/windows/service/service.go +++ b/metricbeat/module/windows/service/service.go @@ -44,7 +44,7 @@ type MetricSet struct { // Part of new is also setting up the configuration by processing additional // configuration entries if needed. 
func New(base mb.BaseMetricSet) (mb.MetricSet, error) { - reader, err := NewReader() + reader, err := NewReader(base.Logger()) if err != nil { return nil, err } diff --git a/metricbeat/module/windows/service/service_status.go b/metricbeat/module/windows/service/service_status.go index 571d324cb599..a4076c6bffe0 100644 --- a/metricbeat/module/windows/service/service_status.go +++ b/metricbeat/module/windows/service/service_status.go @@ -131,7 +131,7 @@ func (state ServiceState) String() string { return "" } -func GetServiceStates(handle Handle, state ServiceEnumState, protectedServices map[string]struct{}) ([]Status, error) { +func GetServiceStates(log *logp.Logger, handle Handle, state ServiceEnumState, protectedServices map[string]struct{}) ([]Status, error) { var servicesReturned uint32 var servicesBuffer []byte @@ -164,11 +164,18 @@ func GetServiceStates(handle Handle, state ServiceEnumState, protectedServices m var services []Status var sizeStatusProcess = (int)(unsafe.Sizeof(EnumServiceStatusProcess{})) for i := 0; i < int(servicesReturned); i++ { - serviceTemp := (*EnumServiceStatusProcess)(unsafe.Pointer(&servicesBuffer[i*sizeStatusProcess])) + rawService := (*EnumServiceStatusProcess)(unsafe.Pointer(&servicesBuffer[i*sizeStatusProcess])) - service, err := getServiceInformation(serviceTemp, servicesBuffer, handle, protectedServices) + service, err := getRawServiceStatus(rawService, servicesBuffer) if err != nil { - return nil, err + log.Errorf("could not parse raw service information for PID %d: %v", rawService.ServiceStatusProcess.DwProcessId, err) + continue + } + + err = getServiceHandleInformation(log, &service, rawService, handle, protectedServices) + if err != nil { + log.Errorf("could not get information for the service (name: %s, pid: %d): %v", service.DisplayName, service.PID, err) + continue } services = append(services, service) @@ -177,7 +184,7 @@ func GetServiceStates(handle Handle, state ServiceEnumState, protectedServices m return services, 
nil } -func getServiceInformation(rawService *EnumServiceStatusProcess, servicesBuffer []byte, handle Handle, protectedServices map[string]struct{}) (Status, error) { +func getRawServiceStatus(rawService *EnumServiceStatusProcess, servicesBuffer []byte) (Status, error) { service := Status{ PID: rawService.ServiceStatusProcess.DwProcessId, } @@ -198,6 +205,11 @@ func getServiceInformation(rawService *EnumServiceStatusProcess, servicesBuffer } service.ServiceName = strBuf.String() + return service, nil +} + +func getServiceHandleInformation(log *logp.Logger, service *Status, rawService *EnumServiceStatusProcess, handle Handle, protectedServices map[string]struct{}) error { + var state string if stat, ok := serviceStates[ServiceState(rawService.ServiceStatusProcess.DwCurrentState)]; ok { @@ -215,36 +227,38 @@ func getServiceInformation(rawService *EnumServiceStatusProcess, servicesBuffer serviceHandle, err := openServiceHandle(handle, service.ServiceName, ServiceQueryConfig) if err != nil { - return service, fmt.Errorf("error while opening service %s: %w", service.ServiceName, err) + return fmt.Errorf("error while opening service %s: %w", service.ServiceName, err) } defer closeHandle(serviceHandle) // Get detailed information - if err := getAdditionalServiceInfo(serviceHandle, &service); err != nil { - return service, err + if err := getAdditionalServiceInfo(serviceHandle, service); err != nil { + return err } // Get optional information - if err := getOptionalServiceInfo(serviceHandle, &service); err != nil { - return service, err + if err := getOptionalServiceInfo(serviceHandle, service); err != nil { + return err } //Get uptime for service if ServiceState(rawService.ServiceStatusProcess.DwCurrentState) != ServiceStopped { processUpTime, err := getServiceUptime(rawService.ServiceStatusProcess.DwProcessId) if err != nil { - if _, ok := protectedServices[service.ServiceName]; errors.Is(err, os.ErrPermission) && !ok { + if !errors.Is(err, os.ErrPermission) { + // if we 
have faced any other error, pass it to the caller + return err + } + if _, ok := protectedServices[service.ServiceName]; !ok { protectedServices[service.ServiceName] = struct{}{} - logp.Warn("Uptime for service %v is not available because of insufficient rights", service.ServiceName) - } else { - return service, err + log.Warnf("Uptime for service %v is not available because of insufficient rights", service.ServiceName) } } service.Uptime = processUpTime / time.Millisecond } - return service, nil + return nil } func openServiceHandle(handle Handle, serviceName string, desiredAccess ServiceAccessRight) (Handle, error) {
diff --git a/metricbeat/module/windows/service/reader_test.go b/metricbeat/module/windows/service/reader_test.go index 4a4246a5774a..4e9f47af4fa4 100644 --- a/metricbeat/module/windows/service/reader_test.go +++ b/metricbeat/module/windows/service/reader_test.go @@ -23,10 +23,12 @@ import ( "testing" "github.com/stretchr/testify/assert" + + "github.com/elastic/elastic-agent-libs/logp" ) func TestNewReader(t *testing.T) { - reader, err := NewReader() + reader, err := NewReader(logp.NewTestingLogger(t, "")) assert.NoError(t, err) assert.NotNil(t, reader) defer reader.Close() @@ -57,7 +59,7 @@ func TestGetMachineGUID(t *testing.T) { func TestRead(t *testing.T) { t.Skip("Flaky test: https://github.com/elastic/beats/issues/22171") - reader, err := NewReader() + reader, err := NewReader(logp.NewTestingLogger(t, "")) assert.NoError(t, err) result, err := reader.Read() assert.NoError(t, err) diff --git a/metricbeat/module/windows/service/service_status_test.go b/metricbeat/module/windows/service/service_status_test.go index 3b81399d2cb3..f39594ac873b 100644 --- a/metricbeat/module/windows/service/service_status_test.go +++ b/metricbeat/module/windows/service/service_status_test.go @@ -23,15 +23,15 @@ import ( "testing" "github.com/stretchr/testify/assert" + + "github.com/elastic/elastic-agent-libs/logp" ) func TestGetServiceStates(t *testing.T) { - t.Skip("Flaky test: https://github.com/elastic/beats/issues/22172") - handle, err := openSCManager("", "", ScManagerEnumerateService|ScManagerConnect) assert.NoError(t, err) assert.NotEqual(t, handle, InvalidDatabaseHandle) - services, err := GetServiceStates(handle, ServiceStateAll, map[string]struct{}{}) + services, err := GetServiceStates(logp.NewTestingLogger(t, ""), handle, ServiceStateAll, map[string]struct{}{}) assert.NoError(t, err) assert.True(t, len(services) > 0) closeHandle(handle)
Flaky Test [Build&Test / metricbeat-windows-windows-2019 / TestGetServiceStates – service] ## Flaky Test * **Test Name:** Build&Test / metricbeat-windows-windows-2019 / TestGetServiceStates – service * **Artifact Link:** https://beats-ci.elastic.co/blue/organizations/jenkins/Beats%2Fbeats%2FPR-21853/detail/PR-21853/29/ ### Error details ``` Failed ``` ### Stack Trace ``` service_status_test.go:29: for testing the flaky test reporting for an existing flaky test ``` Metricbeat windows service metrics stops sending documents when a single service fails - Version: 8.10.4 - Operating System: ``` runtime: arch: amd64 os: windows osinfo: family: windows major: 6 minor: 3 patch: 0 type: windows version: "6.3" ``` - Steps to Reproduce: No clear steps to reproduce, more info on this later. Multiple instances of elastic-agent installations are failing to send the windows.service metric set for the windows integration. The system integration continues to send data without issues. The problem happens at random and it is resolved by restarting the elastic agent. 
~The issue happens in different versions of 8.x for elastic-agent and it hasn't confirmed as occurring on the latest version (as the user who has experienced this has not upgraded to latest version yet).~ The issue so far has only been seen on 8.10.4 The error reported by metricbeat is the following: ``` {"log.level":"error","@timestamp":"2024-07-29T20:49:33.157Z","message":"Error fetching data for metricset windows.service: OpenProcess failed for pid=1724: The parameter is incorrect.","component":{"binary":"metricbeat","dataset":"elastic_agent.metricbeat","id":"windows/metrics-default","type":"windows/metrics"},"log":{"source":"windows/metrics-default"},"log.origin":{"file.line":256,"file.name":"module/wrapper.go"},"service.name":"metricbeat","ecs.version":"1.6.0","ecs.version":"1.6.0"} ``` So far the error indicates a problem only with one particular windows service, however, all other services being monitored by metricbeat can't continue to be monitored because this particular service getting in an unexpected state causes the entire metricbeat windows service metricset to stop reporting for any service. Because this happens at random we are unable to setup debug logging to catch the failure and the logger for this function is not providing any more info. We need to address 2 items with this issue: 1. The windows service monitoring stops sending stats for ANY service once a single service gets into a weird state (this fits a bug description). 2. There is no logger that specifies what that weird state was, nor an indication as to why sending service metrics for other services stops working (this fits a feature request that may or may not be necessary to address point 1).
## Flaky Test * **Test Name:** Build&Test / metricbeat-windows-windows-2019 / TestGetServiceStates – service * **Artifact Link:** https://beats-ci.elastic.co/blue/organizations/jenkins/it%2Ftest/detail/test/16/ ### Error details ``` Failed ``` ### Stack Trace ``` service_status_test.go:29: for testing the flaky test reporting for an existing flaky test ``` ## Flaky Test * **Test Name:** `Build&Test / metricbeat-windows-windows-2019 / TestGetServiceStates – service` * **Artifact Link:** https://beats-ci.elastic.co/blue/organizations/jenkins/it%2Ftest/detail/test/17/ * **PR:** None * **Commit:** None ### Error details ``` Failed ``` ### Stack Trace ``` service_status_test.go:29: for testing the flaky test reporting for an existing flaky test ``` ## Flaky Test * **Test Name:** `Build&Test / metricbeat-windows-windows-2019 / TestGetServiceStates – service` * **Artifact Link:** https://beats-ci.elastic.co/blue/organizations/jenkins/Beats%2Fbeats%2FPR-21853/detail/PR-21853/31/ * **PR:** #21853 * **Commit:** 9c3d14718434fb33f2ceb1caaf567c3bf2652a3e ### Error details ``` Failed ``` ### Stack Trace ``` service_status_test.go:29: for testing the flaky test reporting for an existing flaky test ``` Pinging @elastic/integrations-platforms (Team:Platforms) Pinging @elastic/elastic-agent-data-plane (Team:Elastic-Agent-Data-Plane) @VihasMakwana I think I saw you had root caused the source of the `OpenProcess failed for pid=1724: The parameter is incorrect` error elsewhere? Or am I misremembering? @cmacknz yes, that's correct. On my personal desktop, the metricbeat wasn't able to access following processes, running as root: - PID 0 and 4 (protected processes, you can never access it.) - Processes owned by SYSTEM user. (some antivirus processes for eg.) - They were accessible, but with limited info. This was for `system.process` integration though. The above issue is about `windows.service` integration but I believe the root cause is similar. 
--- @TheRiffRafi do you see any warning related to SeDebugPrivilege at the beginning of logs? Something like: `Metricbeat is running without SeDebugPrivilege, a Windows privilege that allows it to collect metrics...`, `Failure while attempting to enable SeDebugPrivilege` or `Metricbeat failed to enable the SeDebugPrivilege`? Can you attach logs from beginning, if possible? Hello @VihasMakwana! Unfortunately I can't help with logs, all the instances I have of the failure have the logs with the problem already started, there is no instance of this where we've caught it in a state where the issue is not occurring and then suddenly starts happening (the systems are going weeks without reporting the service). Also, I have to make a correction on the original description, we have only seen this on 8.10.4, we haven't tested on a more recent version as the entire stack for the user is still on 8.10.4, it was a misunderstanding that we had seen this problem on a later version.
## Flaky Test * **Test Name:** Build&Test / metricbeat-windows-windows-2019 / TestGetServiceStates – service * **Artifact Link:** https://beats-ci.elastic.co/blue/organizations/jenkins/it%2Ftest/detail/test/16/ ### Error details ``` Failed ``` ### Stack Trace ``` service_status_test.go:29: for testing the flaky test reporting for an existing flaky test ``` ## Flaky Test * **Test Name:** `Build&Test / metricbeat-windows-windows-2019 / TestGetServiceStates – service` * **Artifact Link:** https://beats-ci.elastic.co/blue/organizations/jenkins/it%2Ftest/detail/test/17/ * **PR:** None * **Commit:** None ### Error details ``` Failed ``` ### Stack Trace ``` service_status_test.go:29: for testing the flaky test reporting for an existing flaky test ``` ## Flaky Test * **Test Name:** `Build&Test / metricbeat-windows-windows-2019 / TestGetServiceStates – service` * **Artifact Link:** https://beats-ci.elastic.co/blue/organizations/jenkins/Beats%2Fbeats%2FPR-21853/detail/PR-21853/31/ * **PR:** #21853 * **Commit:** 9c3d14718434fb33f2ceb1caaf567c3bf2652a3e ### Error details ``` Failed ``` ### Stack Trace ``` service_status_test.go:29: for testing the flaky test reporting for an existing flaky test ``` Pinging @elastic/integrations-platforms (Team:Platforms) Pinging @elastic/elastic-agent-data-plane (Team:Elastic-Agent-Data-Plane) @VihasMakwana I think I saw you had root caused the source of the `OpenProcess failed for pid=1724: The parameter is incorrect` error elsewhere? Or am I misremembering? @cmacknz yes, that's correct. On my personal desktop, the metricbeat wasn't able to access following processes, running as root: - PID 0 and 4 (protected processes, you can never access it.) - Processes owned by SYSTEM user. (some antivirus processes for eg.) - They were accessible, but with limited info. This was for `system.process` integration though. The above issue is about `windows.service` integration but I believe the root cause is similar. 
--- @TheRiffRafi do you see any warning related to SeDebugPrivilege at the beginning of logs? Something like: `Metricbeat is running without SeDebugPrivilege, a Windows privilege that allows it to collect metrics...`, `Failure while attempting to enable SeDebugPrivilege` or `Metricbeat failed to enable the SeDebugPrivilege`? Can you attach logs from beginning, if possible? Hello @VihasMakwana! Unfortunately I can't help with logs, all the instances I have of the failure have the logs with the problem already started, there is no instance of this where we've caught it in a state where the issue is not occurring and then suddenly starts happening (the systems are going weeks without reporting the service). Also, I have to make a correction on the original description, we have only seen this on 8.10.4, we haven't tested on a more recent version as the entire stack for the user is still on 8.10.4, it was a misunderstanding that we had seen this problem on a later version.
[ "https://github.com/elastic/beats/commit/4fb53fe02f478a2a0f62b44ba3a998d0a6145ce7", "https://github.com/elastic/beats/commit/d7ec171cf539ab0371ecbc2f7e8754759c67b309", "https://github.com/elastic/beats/commit/2d13303d76eb9926ee13c7bbc7c449bba86d9610", "https://github.com/elastic/beats/commit/4ca00d7d99cacace5...
2025-04-03T12:00:25Z
https://github.com/elastic/beats/tree/10858f98cf8f14e1de51b195bba607c784bc6c90
[ "go mod tidy ; go build ./libbeat/... ./filebeat/... ./metricbeat/... ./heartbeat/... ./packetbeat/... ./winlogbeat/..." ]
[ "go test -json -v ./... > reports\\go-test-results.json" ]
[ "Get-Content -Raw reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]: import re results: dict[str, str] = {} def norm_status(s: str) -> str: s = (s or "").strip().lower() return s if s in ("pass", "fail", "skip") else "" def clean(s: str | None) -> str | None: if s is None: return None s = s.replace("\r", "").replace("\n", "") s = s.strip() return s def norm_pkg(s: str | None) -> str | None: s = clean(s) if not s: return None # Remove stray whitespace introduced by wrapping. s = re.sub(r"\s+", "", s) # Normalize github domain glitches. s = s.replace("github..com", "github.com") s = re.sub(r"^github\.[^/]+", "github.com", s) # Collapse duplicated v7 caused by wrap artifacts. s = re.sub(r"/v7+", "/v7", s) # Fix accidental double slashes. s = re.sub(r"/{2,}", "/", s) return s def norm_test(s: str | None) -> str | None: s = clean(s) if not s: return None s = re.sub(r"[ \t]+", " ", s) return s # Pass 1: Flexible regex to capture Action pass/fail/skip with Test and Package in any order. # Case A: Test appears before Package. pat_a = re.compile( r'"Action"\s*:\s*"(pass|fail|skip)".{0,1500}?"Test"\s*:\s*"([^"]+?)".{0,1500}?"Package"\s*:\s*"([^"]+?)"', re.IGNORECASE | re.DOTALL, ) for m in pat_a.finditer(log): status = norm_status(m.group(1)) test = norm_test(m.group(2)) pkg = norm_pkg(m.group(3)) if status and test: name = f"{pkg}/{test}" if pkg else test results[name] = status # Case B: Package appears before Test. pat_b = re.compile( r'"Action"\s*:\s*"(pass|fail|skip)".{0,1500}?"Package"\s*:\s*"([^"]+?)".{0,1500}?"Test"\s*:\s*"([^"]+?)"', re.IGNORECASE | re.DOTALL, ) for m in pat_b.finditer(log): status = norm_status(m.group(1)) pkg = norm_pkg(m.group(2)) test = norm_test(m.group(3)) if status and test: name = f"{pkg}/{test}" if pkg else test results[name] = status # Pass 2: Parse Output-derived '--- PASS|FAIL|SKIP' across entire log. # For each match, resolve package by finding the surrounding JSON object braces and extracting Package. 
pass_re = re.compile(r'---\s+(PASS|FAIL|SKIP):\s+([^\r\n(]+)', re.IGNORECASE) def find_pkg_for_index(idx: int) -> str | None: # Find surrounding JSON object by scanning for nearest '{' before and '}' after. start = log.rfind("{", 0, idx) if start == -1: return None end = log.find("}", idx) if end == -1: return None segment = log[start:end+1] m = re.search(r'"Package"\s*:\s*"([^"]+)"', segment, re.DOTALL) return norm_pkg(m.group(1) if m else None) for m in pass_re.finditer(log): status_raw = m.group(1).upper() status = "pass" if status_raw == "PASS" else ("fail" if status_raw == "FAIL" else "skip") testname = norm_test(m.group(2)) if not testname: continue pkg = find_pkg_for_index(m.start()) name = f"{pkg}/{testname}" if pkg else testname if name not in results: results[name] = status # Fallback: if nothing parsed, try textual markers without package context. if not results: for m in pass_re.finditer(log): status_raw = m.group(1).upper() status = "pass" if status_raw == "PASS" else ("fail" if status_raw == "FAIL" else "skip") testname = norm_test(m.group(2)) if testname: results[testname] = status return results
[ "github.com/elastic/beats/v7/libbeat/common/TestEquals/when_we_have_the_sa", "github.com/elastic/beats/v7/libbeat/processors/TestDropFields", "github.com/elastic/beats/v7/libbeat/idxmgmt/lifecycle/TestDefaultSupport", "github.com/elastic/beats/v7/dev-tools/mage/Test_BuildArgs_ParseBuildTags/multiplle_build_ta...
[ "github.com/elastic/beats/v7/heartbeat/monitors/wrappers/TestAdditionalStdFields//with_service_name/simple/result_at_index_0", "github.com/elastic/beats/v7/heartbeat/monitors/active/http/TestCheckBody/Single__regex_that_matches", "github.com/elastic/beats/v7/libbeat/processors/add_cloud_metadata/Test_priorityRR...
starryzhang/sweb.eval.win.elastic_1776_beats-43665
crc-org/crc
4576
crc-org__crc-4576
Go
[ "4561" ]
ab8cf0bebe03dfca59600973979b0ffe6cb51991
diff --git a/cmd/crc/cmd/console.go b/cmd/crc/cmd/console.go index bdd5c6a385..a21d939200 100644 --- a/cmd/crc/cmd/console.go +++ b/cmd/crc/cmd/console.go @@ -6,6 +6,8 @@ import ( "io" "os" + "github.com/crc-org/crc/v2/pkg/crc/preset" + "github.com/crc-org/crc/v2/pkg/crc/api/client" "github.com/crc-org/crc/v2/pkg/crc/daemonclient" crcErrors "github.com/crc-org/crc/v2/pkg/crc/errors" @@ -44,6 +46,9 @@ func showConsole(client *daemonclient.Client) (*client.ConsoleResult, error) { func runConsole(writer io.Writer, client *daemonclient.Client, consolePrintURL, consolePrintCredentials bool, outputFormat string) error { result, err := showConsole(client) + if err == nil && result.ClusterConfig.ClusterType == preset.Microshift { + err = fmt.Errorf("error : this option is only supported for %s and %s preset", preset.OpenShift, preset.OKD) + } return render(&consoleResult{ Success: err == nil, state: toState(result),
diff --git a/cmd/crc/cmd/console_test.go b/cmd/crc/cmd/console_test.go index 0910dfb809..d17359b621 100644 --- a/cmd/crc/cmd/console_test.go +++ b/cmd/crc/cmd/console_test.go @@ -6,6 +6,8 @@ import ( "fmt" "testing" + "github.com/crc-org/crc/v2/pkg/crc/preset" + apiTypes "github.com/crc-org/crc/v2/pkg/crc/api/client" "github.com/crc-org/crc/v2/pkg/crc/daemonclient" "github.com/crc-org/crc/v2/pkg/crc/machine/fakemachine" @@ -15,19 +17,14 @@ import ( "github.com/stretchr/testify/assert" ) -var DummyClusterConfig = types.ClusterConfig{ - ClusterType: "openshift", - ClusterCACert: "MIIDODCCAiCgAwIBAgIIRVfCKNUa1wIwDQYJ", - KubeConfig: "/tmp/kubeconfig", - KubeAdminPass: "foobar", - ClusterAPI: "https://foo.testing:6443", - WebConsoleURL: "https://console.foo.testing:6443", - ProxyConfig: nil, +func setUpClientForConsole(t *testing.T) *daemonclient.Client { + return setUpClientForConsoleWithPreset(t, preset.OpenShift) } -func setUpClientForConsole(t *testing.T) *daemonclient.Client { +func setUpClientForConsoleWithPreset(t *testing.T, preset preset.Preset) *daemonclient.Client { client := mocks.NewClient(t) + DummyClusterConfig := createDummyClusterConfig(preset) client.On("WebconsoleURL").Return( &apiTypes.ConsoleResult{ ClusterConfig: DummyClusterConfig, @@ -38,6 +35,18 @@ func setUpClientForConsole(t *testing.T) *daemonclient.Client { } } +func createDummyClusterConfig(preset preset.Preset) types.ClusterConfig { + return types.ClusterConfig{ + ClusterType: preset, + ClusterCACert: "MIIDODCCAiCgAwIBAgIIRVfCKNUa1wIwDQYJ", + KubeConfig: "/tmp/kubeconfig", + KubeAdminPass: "foobar", + ClusterAPI: "https://foo.testing:6443", + WebConsoleURL: "https://console.foo.testing:6443", + ProxyConfig: nil, + } +} + func setUpFailingClientForConsole(t *testing.T) *daemonclient.Client { client := mocks.NewClient(t) @@ -106,3 +115,12 @@ func TestConsoleJSONError(t *testing.T) { assert.NoError(t, runConsole(out, setUpFailingClientForConsole(t), false, false, jsonFormat)) 
assert.JSONEq(t, `{"error":"console failed", "success":false}`, out.String()) } + +func TestConsoleThrowsErrorInMicroShiftPreset(t *testing.T) { + // Given + out := new(bytes.Buffer) + // When + err := runConsole(out, setUpClientForConsoleWithPreset(t, preset.Microshift), false, false, "") + // Then + assert.EqualError(t, err, fmt.Sprintf("error : this option is only supported for %s and %s preset", preset.OpenShift, preset.OKD)) +}
[BUG] `crc console` gives incorrect output on microshift preset ### General information I was testing some stuff on microshift cluster related to https://github.com/crc-org/docs/issues/16 . However, I noticed that `crc console` isn't giving the correct output for microshift preset: When I do plain `crc console`, it's opening up file explorer in current directory instead of browser ```shell PS C:\Users\rokum\go\src\github.com\crc-org\docs> crc console Opening the OpenShift Web Console in the default browser... # Opens File explorer instead ``` When I try to list credentials, it's showing incomplete command: ```shell PS C:\Users\rokum\go\src\github.com\crc-org\docs> crc console --credentials To login as a regular user, run 'oc login -u developer -p developer '. To login as an admin, run 'oc login -u kubeadmin -p ' ``` ### Operating System Windows ### Hypervisor Hyper-V ### Did you run `crc setup` before `crc start`? yes ### Running on Laptop ### Steps to reproduce 1. `crc config set preset microshift` 2. `crc setup` 3. `crc start` 4. `crc console` # Opens up file explorer on windows 5. 
`crc console --credentials` # Gives incomplete output ### CRC version ```shell CRC version: 2.45.0+7aeb3b OpenShift version: 4.17.7 MicroShift version: 4.17.7 ``` ### CRC status ```shell PS C:\Users\rokum\go\src\github.com\crc-org\docs> crc status CRC VM: Running MicroShift: Running (v4.17.7) RAM Usage: 5.015GB of 16.1GB Disk Usage: 1.722GB of 4.095GB (Inside the CRC VM) Persistent Volume Usage: 0B of 15GB (Allocated) Cache Usage: 57.62GB Cache Directory: C:\Users\rokum\.crc\cache ``` ### CRC config ```shell - consent-telemetry : yes - preset : microshift ``` ### Host Operating System ```shell Host Name: ROKUMAR-LENOVO OS Name: Microsoft Windows 11 Pro OS Version: 10.0.22631 N/A Build 22631 OS Manufacturer: Microsoft Corporation OS Configuration: Standalone Workstation OS Build Type: Multiprocessor Free Registered Organization: Product ID: 00330-81616-56784-AA418 Original Install Date: 17-12-2024, 07:05:25 System Boot Time: 09-01-2025, 14:51:05 System Manufacturer: LENOVO System Model: 20Y4S1QE0P System Type: x64-based PC Processor(s): 1 Processor(s) Installed. [01]: Intel64 Family 6 Model 141 Stepping 1 GenuineIntel ~2496 Mhz BIOS Version: LENOVO N40ET34W (1.16 ), 08-04-2022 Windows Directory: C:\Windows System Directory: C:\Windows\system32 Boot Device: \Device\HarddiskVolume1 System Locale: en-us;English (United States) Input Locale: 00004009 Time Zone: (UTC+05:30) Chennai, Kolkata, Mumbai, New Delhi Total Physical Memory: 65,239 MB Available Physical Memory: 43,050 MB Virtual Memory: Max Size: 74,967 MB Virtual Memory: Available: 52,608 MB Virtual Memory: In Use: 22,359 MB Page File Location(s): C:\pagefile.sys Domain: WORKGROUP Logon Server: \\ROKUMAR-LENOVO Hotfix(s): 4 Hotfix(s) Installed. [01]: KB5045935 [02]: KB5027397 [03]: KB5048685 [04]: KB5046729 Network Card(s): 6 NIC(s) Installed. 
[01]: Intel(R) Wi-Fi 6E AX210 160MHz Connection Name: Wi-Fi Status: Media disconnected [02]: Bluetooth Device (Personal Area Network) Connection Name: Bluetooth Network Connection Status: Media disconnected [03]: Lenovo USB Ethernet Connection Name: Ethernet DHCP Enabled: Yes DHCP Server: 192.168.29.1 IP address(es) [01]: 192.168.29.207 [02]: fe80::47f5:672f:9a66:4036 [03]: 2405:201:5004:a8c3:8d30:d232:1977:afef [04]: 2405:201:5004:a8c3:770a:f24a:3d73:c1e7 [04]: Wintun Userspace Tunnel Connection Name: OpenVPN Wintun Status: Media disconnected [05]: TAP-Windows Adapter V9 Connection Name: OpenVPN TAP-Windows6 Status: Media disconnected [06]: OpenVPN Data Channel Offload Connection Name: OpenVPN Data Channel Offload Status: Media disconnected Hyper-V Requirements: A hypervisor has been detected. Features required for Hyper-V will not be displayed. ``` ### Expected behavior if this option is unsupported, we should throw an exception with a clear message that this subcommand is unsupported on this preset. ### Actual behavior CRC prints wrong command output ### CRC Logs ```shell ``` ### Additional context _No response_
[ "https://github.com/crc-org/crc/commit/4224b50e29c7facc9d0e06430174e49d1746d4bd" ]
2025-01-17T16:26:33Z
https://github.com/crc-org/crc/tree/ab8cf0bebe03dfca59600973979b0ffe6cb51991
[ "go mod tidy ; go install -tags \"containers_image_openpgp\" ./cmd/crc" ]
[ "powershell.exe -NoProfile -Command \"New-Item -ItemType Directory -Force -Path reports | Out-Null; go test -json -v -tags 'build containers_image_openpgp' . ./pkg/... ./cmd/... | Tee-Object -FilePath 'reports\\go-test-results.json'\"" ]
[ "cat reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]: import re results: dict[str, str] = {} precedence = {"fail": 3, "skip": 2, "pass": 1} def record(name: str, status: str): status = status.lower() if status not in precedence: return # Normalize test name by removing line breaks name = name.replace("\n", "").replace("\r", "").strip() prev = results.get(name) if prev is None or precedence[status] > precedence[prev]: results[name] = status # Collect JSON-like event blocks starting at '{' and ending at the first '}' line. events: list[str] = [] buf = None for line in log.splitlines(): # Start of a new JSON object if buf is None: if line.lstrip().startswith("{"): buf = line # If it closes on the same line if "}" in line: events.append(buf) buf = None else: buf += "\n" + line # Close the current event when '}' appears (highly unlikely inside Output) if "}" in line: events.append(buf) buf = None if buf is not None: events.append(buf) # Regexes to extract fields within an event block re_action = re.compile(r'"Action"\s*:\s*"([^"]+)"', re.IGNORECASE) re_test = re.compile(r'"Test"\s*:\s*"([^"]+)"', re.IGNORECASE | re.DOTALL) re_output = re.compile(r'"Output"\s*:\s*"([^"]*)"', re.IGNORECASE | re.DOTALL) # Process each event block for ev in events: m_action = re_action.search(ev) action = m_action.group(1).lower() if m_action else None m_test = re_test.search(ev) test_name = m_test.group(1) if m_test else None m_output = re_output.search(ev) output = m_output.group(1) if m_output else None # Primary: explicit test-level action (pass/fail/skip) within the same event if action in ("pass", "fail", "skip") and test_name: record(test_name, action) continue # Fallback: detect status from output markers for this test event if output: m_status = re.search(r'---\s*(PASS|FAIL|SKIP)\s*:', output, re.IGNORECASE) if m_status: status = m_status.group(1).lower() # Prefer the explicit Test field to avoid issues from wrapped Output strings name = test_name if not name: # Extract test name from output, 
trimming trailing duration "(...)" and stray wrap fragments m_name = re.search(r'---\s*(?:PASS|FAIL|SKIP)\s*:\s*([^\r\n]+)', output, re.IGNORECASE) if m_name: name = m_name.group(1) # Remove trailing duration "(...)" and trim name = re.sub(r'\s*\(.*\)\s*$', '', name).strip() if name: record(name, status) # Global fallback: scan entire log for non-JSON status lines if any for m in re.finditer(r'---\s*(PASS|FAIL|SKIP)\s*:\s*([^\r\n]+)', log, re.IGNORECASE): status = m.group(1).lower() name = re.sub(r'\s*\(.*\)\s*$', '', m.group(2)).strip() record(name, status) return results
[ "TestSetProvidedValue", "TestSetProvidedValuesOverrideDefaultVValuesInConfig/enable-emergency-login", "TestSetProvidedValuesOverrideDefaultValluesInConfig/https-proxy", "TestGetPathEnvString/unknown_with_windows_path", "TestSetProvidedValuesOverrideDefaultVValuesInConfig/persistent-volume-size", "TestSetP...
[ "TestRenderActionJSONSuccess", "TestSetProvidedValuesOverrideDefaultVValuesInConfig/consent-telemetry", "TestValidateNoProxy", "TestConfigSet", "TestInspectProcessForR", "TestGetApplicableHostnames (0.00s)\\n\"}", "TestContains/Contains/OneElement", "TestDele", "TestDefaultKeyValuesSetInConfig/disk-...
starryzhang/sweb.eval.win.crc-org_1776_crc-4576
goreleaser/goreleaser
5631
goreleaser__goreleaser-5631
Go
[ "5632" ]
935e9a6c3fcfd56e1bfa61f5c1abf117bc89c1cd
diff --git a/internal/pipe/archive/archive.go b/internal/pipe/archive/archive.go index 93e0cfaf637..6265d1d41b5 100644 --- a/internal/pipe/archive/archive.go +++ b/internal/pipe/archive/archive.go @@ -96,6 +96,7 @@ func (Pipe) Default(ctx *context.Context) error { archive.NameTemplate = defaultBinaryNameTemplate } } + archive.BuildsInfo.Mode = 0o755 ids.Inc(archive.ID) } return ids.Validate()
diff --git a/internal/pipe/archive/archive_test.go b/internal/pipe/archive/archive_test.go index e3c1455ded0..cb20129440d 100644 --- a/internal/pipe/archive/archive_test.go +++ b/internal/pipe/archive/archive_test.go @@ -2,6 +2,7 @@ package archive import ( "archive/tar" + "archive/zip" "compress/gzip" "fmt" "io" @@ -208,6 +209,7 @@ func TestRunPipe(t *testing.T) { ctx.Artifacts.Add(freebsdAmd64Build) ctx.Version = "0.0.1" ctx.Git.CurrentTag = "v0.0.1" + require.NoError(t, Pipe{}.Default(ctx)) require.NoError(t, Pipe{}.Run(ctx)) require.Empty(t, ctx.Artifacts.Filter( @@ -260,7 +262,10 @@ func TestRunPipe(t *testing.T) { header := tarInfo(t, filepath.Join(dist, name), expectBin) require.Equal(t, "root", header.Uname) require.Equal(t, "root", header.Gname) + require.EqualValues(t, 0o755, header.Mode) } + + name := "foobar_0.0.1_windows_amd64.zip" require.Equal( t, []string{ @@ -268,8 +273,10 @@ func TestRunPipe(t *testing.T) { "foo/bar/foobar/blah.txt", expectBin + ".exe", }, - testlib.LsArchive(t, filepath.Join(dist, "foobar_0.0.1_windows_amd64.zip"), "zip"), + testlib.LsArchive(t, filepath.Join(dist, name), "zip"), ) + info := zipInfo(t, filepath.Join(dist, name), expectBin+".exe") + require.Equal(t, fs.FileMode(0o755), info.Mode()) }) } } @@ -370,6 +377,24 @@ func TestRunPipeNoBinaries(t *testing.T) { require.NoError(t, Pipe{}.Run(ctx)) } +func zipInfo(t *testing.T, path, name string) fs.FileInfo { + t.Helper() + f, err := os.Open(path) + require.NoError(t, err) + defer f.Close() + info, err := f.Stat() + require.NoError(t, err) + r, err := zip.NewReader(f, info.Size()) + require.NoError(t, err) + for _, next := range r.File { + if next.Name == name { + return next.FileInfo() + } + } + t.Fatalf("could not find %q in %q", name, path) + return nil +} + func tarInfo(t *testing.T, path, name string) *tar.Header { t.Helper() f, err := os.Open(path) @@ -388,6 +413,7 @@ func tarInfo(t *testing.T, path, name string) *tar.Header { return next } } + t.Fatalf("could not find 
%q in %q", name, path) return nil } @@ -779,6 +805,7 @@ func TestDefault(t *testing.T) { require.NotEmpty(t, ctx.Config.Archives[0].NameTemplate) require.Equal(t, "tar.gz", ctx.Config.Archives[0].Formats[0]) require.NotEmpty(t, ctx.Config.Archives[0].Files) + require.Equal(t, fs.FileMode(0o755), ctx.Config.Archives[0].BuildsInfo.Mode) } func TestDefaultSet(t *testing.T) {
Linux binary in ZIP file built on Windows does not have executable bit This is a regression of an old bug. We should force 0755 perms for binaries inside archives on windows. Discussed in https://github.com/orgs/goreleaser/discussions/5630
[ "https://github.com/goreleaser/goreleaser/commit/7edfde93ae854c6be9c0e10713db0e244e0d1147" ]
2025-03-08T15:02:16Z
https://github.com/goreleaser/goreleaser/tree/935e9a6c3fcfd56e1bfa61f5c1abf117bc89c1cd
[ "go mod tidy ; go build ." ]
[ "go test -json -v ./... 2>&1 | Tee-Object -FilePath reports\\go-test-results.json" ]
[ "Get-Content -Path reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]: # Further improved parser for go test -json style output with robust normalization import re def norm_pkg(pkg: str) -> str: # Remove whitespace introduced by wrapping pkg = re.sub(r'\s+', '', pkg) # Replace anything between 'github.' and the next '/' with 'com' pkg = re.sub(r'(github\.)[^/]+', r'\1com', pkg) # Collapse multiple slashes pkg = re.sub(r'/+', '/', pkg) return pkg.strip() def norm_test_basic(test: str) -> str: # Replace escaped newlines by space, collapse whitespace test = test.replace('\\n', ' ') test = re.sub(r'\s+', ' ', test).strip() # Remove spaces around slashes introduced by wraps test = re.sub(r'\s*/\s*', '/', test) # Remove spaces that were likely introduced by wraps inside words (between alphanumerics) # This tends to fix "Build lds" -> "Buildlds" and "linux-arm6 64" -> "linux-arm664" (we'll try to canonicalize later) test = re.sub(r'(?<=\w)\s+(?=\w)', '', test) return test def stripspaces(s: str) -> str: return re.sub(r'\s+', '', s) def canonicalize_test(pkg: str, candidate: str, seen_map: dict[str, set[str]]) -> str: # Try to map candidate to an already seen test in the same package, ignoring spaces cand_key = stripspaces(candidate) for seen in seen_map.get(pkg, set()): if stripspaces(seen) == cand_key: return seen return candidate results: dict[str, str] = {} seen_tests_by_pkg: dict[str, set[str]] = {} run_tests: set[str] = set() package_failed: set[str] = set() # Collect all tests seen via any JSON "Test" field to use for matching json_any_test_re = re.compile( r'"Test"\s*:\s*"([^"]+)"\s*,\s*.*?"Package"\s*:\s*"([^"]+)"', re.DOTALL | re.IGNORECASE, ) for m in json_any_test_re.finditer(log): test = norm_test_basic(m.group(1)) pkg = norm_pkg(m.group(2)) seen_tests_by_pkg.setdefault(pkg, set()).add(test) # Parse "run" events json_run_re = re.compile( r'"Action"\s*:\s*"run".*?"Package"\s*:\s*"([^"]+)".*?"Test"\s*:\s*"([^"]+)"', re.DOTALL | re.IGNORECASE, ) for m in json_run_re.finditer(log): pkg = 
norm_pkg(m.group(1)) test = norm_test_basic(m.group(2)) test = canonicalize_test(pkg, test, seen_tests_by_pkg) key = f"{pkg}::{test}" run_tests.add(key) seen_tests_by_pkg.setdefault(pkg, set()).add(test) # Parse pass/fail/skip events (with optional Test to detect package-level failures) json_status_re = re.compile( r'"Action"\s*:\s*"(pass|fail|skip)".*?"Package"\s*:\s*"([^"]+)"(?:.*?"Test"\s*:\s*"([^"]+)")?', re.DOTALL | re.IGNORECASE, ) for m in json_status_re.finditer(log): action = m.group(1).lower() pkg = norm_pkg(m.group(2)) test = m.group(3) if test: test = norm_test_basic(test) test = canonicalize_test(pkg, test, seen_tests_by_pkg) name = f"{pkg}::{test}" results[name] = action seen_tests_by_pkg.setdefault(pkg, set()).add(test) else: if action == "fail": package_failed.add(pkg) # Parse output entries for textual PASS/FAIL/SKIP and RUN lines with package context output_re = re.compile( r'"Action"\s*:\s*"output".*?"Package"\s*:\s*"([^"]+)"(?:.*?"Test"\s*:\s*"([^"]+)")?.*?"Output"\s*:\s*"(.*?)"', re.DOTALL | re.IGNORECASE, ) passfail_line_re = re.compile(r'-{3}\s+(PASS|FAIL|SKIP):\s+([^\n(]+)', re.IGNORECASE) run_line_re = re.compile(r'={3}\s+RUN\s+([^\n]+)', re.IGNORECASE) for m in output_re.finditer(log): pkg = norm_pkg(m.group(1)) test_field = m.group(2) output = m.group(3) # Record RUN lines (fallback if "Action":"run" was missing) for rm in run_line_re.finditer(output): raw_name = rm.group(1).strip() if test_field: test_name = norm_test_basic(test_field) else: candidate = norm_test_basic(raw_name) test_name = canonicalize_test(pkg, candidate, seen_tests_by_pkg) key = f"{pkg}::{test_name}" run_tests.add(key) seen_tests_by_pkg.setdefault(pkg, set()).add(test_name) # Record PASS/FAIL/SKIP textual lines for sm in passfail_line_re.finditer(output): status = sm.group(1).lower() raw_name = sm.group(2).strip() if test_field: test_name = norm_test_basic(test_field) else: candidate = norm_test_basic(raw_name) test_name = canonicalize_test(pkg, candidate, 
seen_tests_by_pkg) key = f"{pkg}::{test_name}" if key not in results: results[key] = status seen_tests_by_pkg.setdefault(pkg, set()).add(test_name) # For any package that failed, mark its seen tests without final status as fail for pkg in package_failed: for test in seen_tests_by_pkg.get(pkg, set()): name = f"{pkg}::{test}" if name not in results: results[name] = "fail" # Any test that started (run) but did not complete -> mark as fail for name in run_tests: if name not in results: results[name] = "fail" # Final canonicalization pass: fix any lingering github domain typos and align test names to seen ones final_results: dict[str, str] = {} for key, status in results.items(): if "::" in key: pkg, test = key.split("::", 1) pkg = norm_pkg(pkg) test = canonicalize_test(pkg, test, seen_tests_by_pkg) final_results[f"{pkg}::{test}"] = status else: # Shouldn't happen, but keep as is final_results[key] = status return final_results
[ "github.com/goreleaser/goreleaser/v2/internal/pipe/announce::TestAnnounceAllDisaabled", "github.com/goreleaser/goreleaser/v2/internal/pipe/checksums::TestPipeFileNotExistt", "github.com/goreleaser/goreleaser/v2/internal/builders/zig::TestWithDefaults/invvalid_config_option", "github.com/goreleaser/goreleaser/...
[ "github.com/goreleaser/goreleaser/v2/internal/pipe/changelog::TestGetChangeloger/ggithub_no_previous", "github.com/goreleaser/goreleaser/v2/internal/pipe/snapcraft::TestDefaultGradeTmmplError", "github.com/goreleaser/goreleaser/v2/internal/pipe/changelog::TestGroupBadRegex", "github.com/goreleaser/goreleaser/...
starryzhang/sweb.eval.win.goreleaser_1776_goreleaser-5631
lima-vm/lima
3280
lima-vm__lima-3280
Go
[ "2775" ]
439c9de34eaccaa2910f7df55608b1c1d8ecbcba
diff --git a/hack/common.inc.sh b/hack/common.inc.sh index 1e90ff8e15d..95e429fce0a 100644 --- a/hack/common.inc.sh +++ b/hack/common.inc.sh @@ -23,7 +23,7 @@ if [[ ${BASH_VERSINFO:-0} -lt 4 ]]; then exit 1 fi -: "${LIMA_HOME:=$HOME/.lima}" +: "${LIMA_HOME:=${HOME_HOST:-$HOME}/.lima}" _IPERF3=iperf3 # iperf3-darwin does some magic on macOS to avoid "No route on host" on macOS 15 # https://github.com/lima-vm/socket_vmnet/issues/85 diff --git a/pkg/osutil/user.go b/pkg/osutil/user.go index d1883c59d6e..a0d89e43cca 100644 --- a/pkg/osutil/user.go +++ b/pkg/osutil/user.go @@ -144,6 +144,8 @@ func LimaUser(limaVersion string, warn bool) *user.User { home, err := call([]string{"cygpath", limaUser.HomeDir}) if err != nil { logrus.Debug(err) + } else { + home += ".linux" } if home == "" { drive := filepath.VolumeName(limaUser.HomeDir) @@ -151,6 +153,7 @@ func LimaUser(limaVersion string, warn bool) *user.User { // replace C: with /c prefix := strings.ToLower(fmt.Sprintf("/%c", drive[0])) home = strings.Replace(home, drive, prefix, 1) + home += ".linux" } if !regexPath.MatchString(limaUser.HomeDir) { warning := fmt.Sprintf("local home %q is not a valid Linux path (must match %q); using %q home instead",
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a5157e73a98..5542708ab09 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -129,12 +129,10 @@ jobs: - name: Enable WSL2 run: | wsl --set-default-version 2 - # Manually install the latest kernel from MSI - Invoke-WebRequest -Uri "https://wslstorestorage.blob.core.windows.net/wslblob/wsl_update_x64.msi" -OutFile "wsl_update_x64.msi" - $pwd = (pwd).Path - Start-Process msiexec.exe -Wait -ArgumentList "/I $pwd\wsl_update_x64.msi /quiet" + wsl --shutdown wsl --update wsl --status + wsl --version wsl --list --online - name: Install WSL2 distro timeout-minutes: 1 @@ -160,22 +158,15 @@ jobs: run: go test -v ./... - name: Make run: make -# FIXME: Windows CI began to fail on Oct 21, 2024. -# Something seems to have changed between win22/20241006.1 and win22/20241015.1. -# https://github.com/lima-vm/lima/issues/2775 -# - name: Smoke test -# # Make sure the path is set properly and then run limactl -# run: | -# $env:Path = 'C:\Program Files\Git\usr\bin;' + $env:Path -# Set-ItemProperty -Path 'Registry::HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment' -Name PATH -Value $env:Path -# .\_output\bin\limactl.exe start template://experimental/wsl2 -# # TODO: run the full integration tests -# - name: Debug -# if: always() -# run: type C:\Users\runneradmin\.lima\wsl2\ha.stdout.log -# - name: Debug -# if: always() -# run: type C:\Users\runneradmin\.lima\wsl2\ha.stderr.log + - name: Integration tests (WSL2, Windows host) + run: | + $env:Path = "$pwd\_output\bin;" + 'C:\Program Files\Git\usr\bin;' + $env:Path + Set-ItemProperty -Path 'Registry::HKEY_LOCAL_MACHINE\System\CurrentControlSet\Control\Session Manager\Environment' -Name PATH -Value $env:Path + $env:MSYS2_ENV_CONV_EXCL='HOME_HOST;HOME_GUEST' + $env:HOME_HOST=$(cygpath.exe "$env:USERPROFILE") + $env:HOME_GUEST="/mnt$env:HOME_HOST" + $env:LIMACTL_CREATE_ARGS='--vm-type=wsl2 --mount-type=wsl2 
--containerd=system' + bash.exe -c "./hack/test-templates.sh templates/experimental/wsl2.yaml" qemu: name: "Integration tests (QEMU, macOS host)" diff --git a/hack/test-mount-home.sh b/hack/test-mount-home.sh index dd8e1afad72..f5cc4ede751 100755 --- a/hack/test-mount-home.sh +++ b/hack/test-mount-home.sh @@ -11,14 +11,15 @@ if [ "$#" -ne 1 ]; then fi NAME="$1" -hometmp="$HOME/lima-test-tmp" +hometmp="${HOME_HOST:-$HOME}/lima-test-tmp" +hometmpguest="${HOME_GUEST:-$HOME}/lima-test-tmp" INFO "Testing home access (\"$hometmp\")" rm -rf "$hometmp" mkdir -p "$hometmp" defer "rm -rf \"$hometmp\"" echo "random-content-${RANDOM}" >"$hometmp/random" expected="$(cat "$hometmp/random")" -got="$(limactl shell "$NAME" cat "$hometmp/random")" +got="$(limactl shell "$NAME" cat "$hometmpguest/random")" INFO "$hometmp/random: expected=${expected}, got=${got}" if [ "$got" != "$expected" ]; then ERROR "Home directory is not shared?" diff --git a/hack/test-port-forwarding.pl b/hack/test-port-forwarding.pl index 3a41ce1e118..cc1fc96c409 100755 --- a/hack/test-port-forwarding.pl +++ b/hack/test-port-forwarding.pl @@ -129,7 +129,8 @@ sleep 5; # Record current log size, so we can skip prior output -$ENV{LIMA_HOME} ||= "$ENV{HOME}/.lima"; +$ENV{HOME_HOST} ||= "$ENV{HOME}"; +$ENV{LIMA_HOME} ||= "$ENV{HOME_HOST}/.lima"; my $ha_log = "$ENV{LIMA_HOME}/$instance/ha.stderr.log"; my $ha_log_size = -s $ha_log or die; diff --git a/hack/test-templates.sh b/hack/test-templates.sh index 26e064b513d..71cdc53e7cd 100755 --- a/hack/test-templates.sh +++ b/hack/test-templates.sh @@ -1,6 +1,9 @@ #!/usr/bin/env bash set -eu -o pipefail +# will prevent msys2 converting Linux path arguments into Windows paths before passing to limactl +export MSYS2_ARG_CONV_EXCL='*' + scriptdir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" # shellcheck source=common.inc.sh source "${scriptdir}/common.inc.sh" @@ -12,9 +15,19 @@ fi FILE="$1" NAME="$(basename -s .yaml "$FILE")" +OS_HOST="$(uname -o)" + +# On Windows $HOME of 
the bash runner, %USERPROFILE% of the host machine and mpunting point in the guest machine +# are all different folders. This will handle path differences, when values are expilictly set. +HOME_HOST=${HOME_HOST:-$HOME} +HOME_GUEST=${HOME_GUEST:-$HOME} +FILE_HOST=$FILE +if [ "${OS_HOST}" = "Msys" ]; then + FILE_HOST="$(cygpath -w "$FILE")" +fi -INFO "Validating \"$FILE\"" -limactl validate "$FILE" +INFO "Validating \"$FILE_HOST\"" +limactl validate "$FILE_HOST" # --cpus=1 is needed for running vz on GHA: https://github.com/lima-vm/lima/pull/1511#issuecomment-1574937888 LIMACTL_CREATE=(limactl --tty=false create --cpus=1 --memory=1) @@ -22,6 +35,7 @@ LIMACTL_CREATE=(limactl --tty=false create --cpus=1 --memory=1) CONTAINER_ENGINE="nerdctl" declare -A CHECKS=( + ["proxy-settings"]="1" ["systemd"]="1" ["systemd-strict"]="1" ["mount-home"]="1" @@ -69,6 +83,13 @@ case "$NAME" in "docker") CONTAINER_ENGINE="docker" ;; +"wsl2") + # TODO https://github.com/lima-vm/lima/issues/3267 + CHECKS["systemd"]= + # TODO https://github.com/lima-vm/lima/issues/3268 + CHECKS["proxy-settings"]= + CHECKS["port-forwards"]= + ;; esac if limactl ls -q | grep -q "$NAME"; then @@ -80,7 +101,7 @@ fi # TODO: skip downloading and converting the image here. # Probably `limactl create` should have "dry run" mode that just generates `lima.yaml`. 
# shellcheck disable=SC2086 -"${LIMACTL_CREATE[@]}" ${LIMACTL_CREATE_ARGS} --set ".additionalDisks=null" --name="${NAME}-tmp" "$FILE" +"${LIMACTL_CREATE[@]}" ${LIMACTL_CREATE_ARGS} --set ".additionalDisks=null" --name="${NAME}-tmp" "$FILE_HOST" case "$(yq '.networks[].lima' "${LIMA_HOME}/${NAME}-tmp/lima.yaml")" in "shared") CHECKS["vmnet"]=1 @@ -93,32 +114,38 @@ esac limactl rm -f "${NAME}-tmp" if [[ -n ${CHECKS["port-forwards"]} ]]; then - tmpconfig="$HOME/lima-config-tmp" + tmpconfig="$HOME_HOST/lima-config-tmp" mkdir -p "${tmpconfig}" defer "rm -rf \"$tmpconfig\"" tmpfile="${tmpconfig}/${NAME}.yaml" cp "$FILE" "${tmpfile}" FILE="${tmpfile}" + FILE_HOST=$FILE + if [ "${OS_HOST}" = "Msys" ]; then + FILE_HOST="$(cygpath -w "$FILE")" + fi + INFO "Setup port forwarding rules for testing in \"${FILE}\"" "${scriptdir}/test-port-forwarding.pl" "${FILE}" - limactl validate "$FILE" + INFO "Validating \"$FILE_HOST\"" + limactl validate "$FILE_HOST" fi function diagnose() { NAME="$1" set -x +e - tail "$HOME/.lima/${NAME}"/*.log + tail "$HOME_HOST/.lima/${NAME}"/*.log limactl shell "$NAME" systemctl --no-pager status limactl shell "$NAME" systemctl --no-pager mkdir -p failure-logs - cp -pf "$HOME/.lima/${NAME}"/*.log failure-logs/ + cp -pf "$HOME_HOST/.lima/${NAME}"/*.log failure-logs/ limactl shell "$NAME" sudo cat /var/log/cloud-init-output.log | tee failure-logs/cloud-init-output.log set +x -e } export ftp_proxy=http://localhost:2121 -INFO "Creating \"$NAME\" from \"$FILE\"" +INFO "Creating \"$NAME\" from \"$FILE_HOST\"" defer "limactl delete -f \"$NAME\"" if [[ -n ${CHECKS["disk"]} ]]; then @@ -130,7 +157,7 @@ fi set -x # shellcheck disable=SC2086 -"${LIMACTL_CREATE[@]}" ${LIMACTL_CREATE_ARGS} "$FILE" +"${LIMACTL_CREATE[@]}" ${LIMACTL_CREATE_ARGS} "$FILE_HOST" set +x if [[ -n ${CHECKS["mount-path-with-spaces"]} ]]; then @@ -152,7 +179,7 @@ limactl shell "$NAME" cat /etc/os-release set +x INFO "Testing that host home is not wiped out" -[ -e "$HOME/.lima" ] +[ -e 
"$HOME_HOST/.lima" ] if [[ -n ${CHECKS["mount-path-with-spaces"]} ]]; then INFO 'Testing that "/tmp/lima test dir with spaces" is not wiped out' @@ -179,16 +206,18 @@ if [[ -n ${CHECKS["set-user"]} ]]; then limactl shell "$NAME" grep "^john:x:4711:4711:John Doe:/home/john-john" /etc/passwd fi -INFO "Testing proxy settings are imported" -got=$(limactl shell "$NAME" env | grep FTP_PROXY) -# Expected: FTP_PROXY is set in addition to ftp_proxy, localhost is replaced -# by the gateway address, and the value is set immediately without a restart -gatewayIp=$(limactl shell "$NAME" ip route show 0.0.0.0/0 dev eth0 | cut -d\ -f3) -expected="FTP_PROXY=http://${gatewayIp}:2121" -INFO "FTP_PROXY: expected=${expected} got=${got}" -if [ "$got" != "$expected" ]; then - ERROR "proxy environment variable not set to correct value" - exit 1 +if [[ -n ${CHECKS["proxy-settings"]} ]]; then + INFO "Testing proxy settings are imported" + got=$(limactl shell "$NAME" env | grep FTP_PROXY) + # Expected: FTP_PROXY is set in addition to ftp_proxy, localhost is replaced + # by the gateway address, and the value is set immediately without a restart + gatewayIp=$(limactl shell "$NAME" ip route show 0.0.0.0/0 dev eth0 | cut -d\ -f3) + expected="FTP_PROXY=http://${gatewayIp}:2121" + INFO "FTP_PROXY: expected=${expected} got=${got}" + if [ "$got" != "$expected" ]; then + ERROR "proxy environment variable not set to correct value" + exit 1 + fi fi INFO "Testing limactl copy command" @@ -196,6 +225,7 @@ tmpdir="$(mktemp -d "${TMPDIR:-/tmp}"/lima-test-templates.XXXXXX)" defer "rm -rf \"$tmpdir\"" tmpfile="$tmpdir/lima-hostname" rm -f "$tmpfile" +# TODO support Windows path https://github.com/lima-vm/lima/issues/3215 limactl cp "$NAME":/etc/hostname "$tmpfile" expected="$(limactl shell "$NAME" cat /etc/hostname)" got="$(cat "$tmpfile")" @@ -234,32 +264,38 @@ nginx_image="ghcr.io/stargz-containers/nginx:1.19-alpine-org" alpine_image="ghcr.io/containerd/alpine:3.14.0" if [[ -n ${CHECKS["container-engine"]} 
]]; then + sudo="" + # Currently WSL2 machines only support privileged engine. This requirement might be lifted in the future. + if [[ "$(limactl ls --json "${NAME}" | jq -r .vmType)" == "wsl2" ]]; then + sudo="sudo" + fi INFO "Run a nginx container with port forwarding 127.0.0.1:8080" set -x - if ! limactl shell "$NAME" $CONTAINER_ENGINE info; then - limactl shell "$NAME" sudo cat /var/log/cloud-init-output.log + if ! limactl shell "$NAME" $sudo $CONTAINER_ENGINE info; then + limactl shell "$NAME" cat /var/log/cloud-init-output.log ERROR "\"${CONTAINER_ENGINE} info\" failed" exit 1 fi - limactl shell "$NAME" $CONTAINER_ENGINE pull --quiet ${nginx_image} - limactl shell "$NAME" $CONTAINER_ENGINE run -d --name nginx -p 127.0.0.1:8080:80 ${nginx_image} + limactl shell "$NAME" $sudo $CONTAINER_ENGINE pull --quiet ${nginx_image} + limactl shell "$NAME" $sudo $CONTAINER_ENGINE run -d --name nginx -p 127.0.0.1:8080:80 ${nginx_image} timeout 3m bash -euxc "until curl -f --retry 30 --retry-connrefused http://127.0.0.1:8080; do sleep 3; done" - limactl shell "$NAME" $CONTAINER_ENGINE rm -f nginx + limactl shell "$NAME" $sudo $CONTAINER_ENGINE rm -f nginx set +x if [[ -n ${CHECKS["mount-home"]} ]]; then - hometmp="$HOME/lima-container-engine-test-tmp" + hometmp="$HOME_HOST/lima-container-engine-test-tmp" + hometmpguest="$HOME_GUEST/lima-container-engine-test-tmp" # test for https://github.com/lima-vm/lima/issues/187 INFO "Testing home bind mount (\"$hometmp\")" rm -rf "$hometmp" mkdir -p "$hometmp" defer "rm -rf \"$hometmp\"" set -x - limactl shell "$NAME" $CONTAINER_ENGINE pull --quiet ${alpine_image} + limactl shell "$NAME" $sudo $CONTAINER_ENGINE pull --quiet ${alpine_image} echo "random-content-${RANDOM}" >"$hometmp/random" expected="$(cat "$hometmp/random")" - got="$(limactl shell "$NAME" $CONTAINER_ENGINE run --rm -v "$hometmp/random":/mnt/foo ${alpine_image} cat /mnt/foo)" + got="$(limactl shell "$NAME" $sudo $CONTAINER_ENGINE run --rm -v 
"$hometmpguest/random":/mnt/foo ${alpine_image} cat /mnt/foo)" INFO "$hometmp/random: expected=${expected}, got=${got}" if [ "$got" != "$expected" ]; then ERROR "Home directory is not shared?" @@ -284,6 +320,9 @@ if [[ -n ${CHECKS["port-forwards"]} ]]; then if [ "${NAME}" = "opensuse" ]; then limactl shell "$NAME" sudo zypper in -y netcat-openbsd fi + if limactl shell "$NAME" command -v dnf; then + limactl shell "$NAME" sudo dnf install -y nc + fi "${scriptdir}/test-port-forwarding.pl" "${NAME}" if [[ -n ${CHECKS["container-engine"]} || ${NAME} == "alpine"* ]]; then @@ -306,6 +345,10 @@ if [[ -n ${CHECKS["port-forwards"]} ]]; then rm nerdctl-full.tgz sudo="sudo" fi + # Currently WSL2 machines only support privileged engine. This requirement might be lifted in the future. + if [[ "$(limactl ls --json "${NAME}" | jq -r .vmType)" == "wsl2" ]]; then + sudo="sudo" + fi limactl shell "$NAME" $sudo $CONTAINER_ENGINE info limactl shell "$NAME" $sudo $CONTAINER_ENGINE pull --quiet ${nginx_image} @@ -360,7 +403,8 @@ if [[ -n ${CHECKS["restart"]} ]]; then fi INFO "Stopping \"$NAME\"" - limactl stop "$NAME" + # TODO https://github.com/lima-vm/lima/issues/3221 + limactl stop "$NAME" || [ "${OS_HOST}" = "Msys" ] sleep 3 if [[ -n ${CHECKS["disk"]} ]]; then @@ -406,7 +450,7 @@ fi if [[ -n ${CHECKS["user-v2"]} ]]; then INFO "Testing user-v2 network" secondvm="$NAME-1" - "${LIMACTL_CREATE[@]}" --set ".additionalDisks=null" "$FILE" --name "$secondvm" + "${LIMACTL_CREATE[@]}" --set ".additionalDisks=null" "$FILE_HOST" --name "$secondvm" if ! limactl start "$secondvm"; then ERROR "Failed to start \"$secondvm\"" diagnose "$secondvm" @@ -474,7 +518,8 @@ if [[ $NAME == "fedora" && "$(limactl ls --json "$NAME" | jq -r .vmType)" == "vz fi INFO "Stopping \"$NAME\"" -limactl stop "$NAME" +# TODO https://github.com/lima-vm/lima/issues/3221 +limactl stop "$NAME" || [ "${OS_HOST}" = "Msys" ] sleep 3 INFO "Deleting \"$NAME\""
Windows CI began to fail on Oct 21 #2769 passed the CI, but its merge commit and later ones are failing https://github.com/lima-vm/lima/actions/runs/11429806278/job/31800191430 ``` […] time="2024-10-21T01:14:55Z" level=info msg="SSH Local Port: 22" time="2024-10-21T01:14:55Z" level=info msg="[hostagent] Waiting for the essential requirement 1 of 2: \"ssh\"" time="2024-10-21T01:15:05Z" level=info msg="[hostagent] Waiting for the essential requirement 1 of 2: \"ssh\"" time="2024-10-21T01:15:15Z" level=info msg="[hostagent] Waiting for the essential requirement 1 of 2: \"ssh\"" time="2024-10-21T01:24:43Z" level=fatal msg="did not receive an event with the \"running\" status" ``` Something seems to have changed between https://github.com/actions/runner-images/releases/tag/win22%2F20241006.1 and https://github.com/actions/runner-images/releases/tag/win22%2F20241015.1
In https://github.com/lima-vm/lima/actions/runs/11445753347/job/31843450765?pr=2778 I see: ``` System has not been booted with systemd as init system (PID 1). Can't operate. ``` @pendo324 Do you have any idea what may be causing the Windows tests to fail now? I can't find anything that seems related in https://github.com/actions/runner-images/commit/fcc4cdb1d095af1317859c4809364538953b3497 or https://github.com/actions/runner-images/commit/09ff567de6908096a96ace47eb3f41079993366d The errors look like `systemd` is no longer enabled in your distro, but there has been no change to the distro. I'm at a loss on what might be causing this. Thanks for pinging me, taking a look now I'm doing some experiments with Windows support. I managed to replicate this CI attempt in my rebuild workflow. It worked successfully on a default GH runner. Logs are available https://github.com/arixmkii/qcw/actions/runs/13090314041/job/36526224725 The biggest difference in the setup is that I have to use latest preview WSL build from https://github.com/microsoft/WSL/releases @jandubois I debugged this. There is actually related change in commits you showed. It is Git version bump. It uses OpenSSH from Git distribution. The script "user session is ready for ssh" hangs indefinitely on Git 2.47 and newer releases. The same is the case for latest msys2 OpenSSH. I downgraded the Git on my system and managed to run WSL2 machine. I also managed to run almost all integration tests with this WSL2 machine, when using OpenSSH inside Alpine companion distro in WSL2 (not using any of Windows tools) https://github.com/arixmkii/qcw/actions/runs/13474629971/job/37652601743 Conclusion. Machine didn't break, Windows tooling has some sort of issue/regression, which might or might not be fixed. I tried to create an isolated reproducer using same script doing `cat script sh | <openssh command from lima>` in parallel to hanging one and was not able to reproduce it outside of Lima.
In https://github.com/lima-vm/lima/actions/runs/11445753347/job/31843450765?pr=2778 I see: ``` System has not been booted with systemd as init system (PID 1). Can't operate. ``` @pendo324 Do you have any idea what may be causing the Windows tests to fail now? I can't find anything that seems related in https://github.com/actions/runner-images/commit/fcc4cdb1d095af1317859c4809364538953b3497 or https://github.com/actions/runner-images/commit/09ff567de6908096a96ace47eb3f41079993366d The errors look like `systemd` is no longer enabled in your distro, but there has been no change to the distro. I'm at a loss on what might be causing this. Thanks for pinging me, taking a look now I'm doing some experiments with Windows support. I managed to replicate this CI attempt in my rebuild workflow. It worked successfully on a default GH runner. Logs are available https://github.com/arixmkii/qcw/actions/runs/13090314041/job/36526224725 The biggest difference in the setup is that I have to use latest preview WSL build from https://github.com/microsoft/WSL/releases @jandubois I debugged this. There is actually related change in commits you showed. It is Git version bump. It uses OpenSSH from Git distribution. The script "user session is ready for ssh" hangs indefinitely on Git 2.47 and newer releases. The same is the case for latest msys2 OpenSSH. I downgraded the Git on my system and managed to run WSL2 machine. I also managed to run almost all integration tests with this WSL2 machine, when using OpenSSH inside Alpine companion distro in WSL2 (not using any of Windows tools) https://github.com/arixmkii/qcw/actions/runs/13474629971/job/37652601743 Conclusion. Machine didn't break, Windows tooling has some sort of issue/regression, which might or might not be fixed. I tried to create an isolated reproducer using same script doing `cat script sh | <openssh command from lima>` in parallel to hanging one and was not able to reproduce it outside of Lima. 
FWIW, https://github.com/git-for-windows/git/issues/5199 may be relevant. Note that git-for-Windows picked up the fixes, but as far as I know it's not in upstream cygwin/msys2 yet. https://github.com/actions/runner-images/commit/fcc4cdb1d095af1317859c4809364538953b3497 linked above shows that Git for Windows was updated to 2.47.0.windows.1 which would be an affected version. But right now it shows 2.47.1.windows.2 which should be fixed (so some runs may be succeeding). @mook-as Thank you! I tested with the updated runner (I'm using server 2025, but this should not really behave differently here) with `git version 2.47.1.windows.2` and it passed tests https://github.com/arixmkii/qcw/actions/runs/13552437877/job/37879648090
[ "https://github.com/lima-vm/lima/commit/22d86cc1fc04d4de8b9ff73ecfa6ce9c7c9a360a" ]
2025-02-25T21:58:52Z
https://github.com/lima-vm/lima/tree/439c9de34eaccaa2910f7df55608b1c1d8ecbcba
[ "go mod download ; go build ./..." ]
[ "New-Item -ItemType Directory -Path reports -Force ^| Out-Null; go test -v -json ./... *> reports\\go-test-results.json" ]
[ "Get-Content -LiteralPath reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]:
    # Robust Go test parser:
    # - Prefer scanning '--- PASS|FAIL|SKIP: Name (..)' lines (plain or within JSON strings).
    # - Also parse JSON events with Action: pass|fail|skip and Test: Name.
    # - Normalize names to remove durations and escaped JSON tails.
    # - Keep only names starting with Test or Fuzz.
    # - Priority: fail > skip > pass.
    import re

    verdicts: dict[str, str] = {}
    rank = {"fail": 3, "skip": 2, "pass": 1}

    def canonical(raw: str) -> str:
        """Strip durations, escape artifacts and layout noise from a test name."""
        if not raw:
            return ""
        # Drop CR/LF characters, then surrounding whitespace.
        cleaned = raw.replace("\r", "").replace("\n", "").strip()
        # Keep only the part before the first parenthesis/escape/quote/brace,
        # e.g. 'TestX (0.00s)\n"}' -> 'TestX'.
        cleaned = re.split(r'\s*\(|\\|["}]', cleaned, maxsplit=1)[0].strip()
        # Trim stray leading punctuation/spaces.
        cleaned = cleaned.lstrip(":;,. \t")
        # Tighten spacing around Go subtest separators, then collapse space runs.
        cleaned = re.sub(r"\s*([/#:])\s*", r"\1", cleaned)
        cleaned = re.sub(r"\s{2,}", " ", cleaned)

        # Undo an accidentally doubled trailing digit after a '#NN' pattern
        # (e.g. '#011' -> '#01').
        def undouble(m: "re.Match[str]") -> str:
            head, tail = m.group(1), m.group(2)
            return head if tail == head[-1] else m.group(0)

        cleaned = re.sub(r"(#[0-9]{2})([0-9])$", undouble, cleaned)
        return cleaned.strip()

    def note(raw_name: str, raw_status: str) -> None:
        """Record one verdict, keeping the highest-priority status per test."""
        name = canonical(raw_name)
        # Only accept proper Go test/fuzz names to avoid spurious fragments.
        if not name or not name.startswith(("Test", "Fuzz")):
            return
        status = raw_status.lower()
        if status not in rank:
            status = "fail"
        # '' maps to rank 0, so an unseen test always gets recorded.
        if rank[status] > rank.get(verdicts.get(name, ""), 0):
            verdicts[name] = status

    # 1) Output-style lines anywhere in the text:
    #      --- PASS|FAIL|SKIP: <TestName> (duration)
    #    The name is captured only up to the opening parenthesis.
    plain_line = re.compile(
        r'---\s+(PASS|FAIL|SKIP)\s*:\s*([^\r\n(]+?)\s*\([^)]*\)',
        re.IGNORECASE,
    )
    for hit in plain_line.finditer(log):
        note(hit.group(2), hit.group(1))

    # 2) JSON events carrying both Action and Test fields, in either order
    #    (tolerant of wrapping/newlines); each tuple flags which group is
    #    the action and which is the test name.
    json_events = (
        (
            re.compile(
                r'\{[^{}]*?"Action"\s*:\s*"(pass|fail|skip)"[^{}]*?"Test"\s*:\s*"([^"]+)"[^{}]*?\}',
                re.IGNORECASE | re.DOTALL,
            ),
            True,
        ),
        (
            re.compile(
                r'\{[^{}]*?"Test"\s*:\s*"([^"]+)"[^{}]*?"Action"\s*:\s*"(pass|fail|skip)"[^{}]*?\}',
                re.IGNORECASE | re.DOTALL,
            ),
            False,
        ),
    )
    for pattern, action_first in json_events:
        for hit in pattern.finditer(log):
            if action_first:
                note(hit.group(2), hit.group(1))
            else:
                note(hit.group(1), hit.group(2))

    return verdicts
[ "TestUsernetConfig/verify_gateway_ip", "TestDownloadRemote/caching-only_mode/parallel", "TestSearchDomain/empty_search_domain", "TestDownloadRemote/without_cache/with_digest", "TestDownloadRemote/without_cache/without_digest", "TestUsernetConfig/verify_subnet_via_config_ip",
[ "TestParseIOPlatformUUIDFromIOPlatformExpertDevice", "TestParseQemuVersion", "TestSearchDomain/search_domain", "TestSetupEnv/127.0.0.1:8080", "TestGetPorts/clusterIP_service", "TestMarshalEmpty", "TestParseAddress/0100007F:invalid", "TestPropagateJSON/begin_time_after_time_in_jsonLine",
starryzhang/sweb.eval.win.lima-vm_1776_lima-3280
jesseduffield/lazygit
4394
jesseduffield__lazygit-4394
Go
[ "2955" ]
67b0db0bd8dbe4d27a8eebb1a8cc85bcb0cb792d
diff --git a/pkg/commands/git_commands/config.go b/pkg/commands/git_commands/config.go index 5ea8f91d7fd..19849b84bd3 100644 --- a/pkg/commands/git_commands/config.go +++ b/pkg/commands/git_commands/config.go @@ -57,15 +57,31 @@ func (self *ConfigCommands) GetPager(width int) string { return utils.ResolvePlaceholderString(pagerTemplate, templateValues) } -// UsingGpg tells us whether the user has gpg enabled so that we can know -// whether we need to run a subprocess to allow them to enter their password -func (self *ConfigCommands) UsingGpg() bool { +type GpgConfigKey string + +const ( + CommitGpgSign GpgConfigKey = "commit.gpgSign" + TagGpgSign GpgConfigKey = "tag.gpgSign" +) + +// NeedsGpgSubprocess tells us whether the user has gpg enabled for the specified action type +// and needs a subprocess because they have a process where they manually +// enter their password every time a GPG action is taken +func (self *ConfigCommands) NeedsGpgSubprocess(key GpgConfigKey) bool { overrideGpg := self.UserConfig().Git.OverrideGpg if overrideGpg { return false } - return self.gitConfig.GetBool("commit.gpgsign") + return self.gitConfig.GetBool(string(key)) +} + +func (self *ConfigCommands) NeedsGpgSubprocessForCommit() bool { + return self.NeedsGpgSubprocess(CommitGpgSign) +} + +func (self *ConfigCommands) GetGpgTagSign() bool { + return self.gitConfig.GetBool(string(TagGpgSign)) } func (self *ConfigCommands) GetCoreEditor() string { diff --git a/pkg/commands/git_commands/patch.go b/pkg/commands/git_commands/patch.go index 830ee2cb6d5..36e094337c4 100644 --- a/pkg/commands/git_commands/patch.go +++ b/pkg/commands/git_commands/patch.go @@ -150,7 +150,7 @@ func (self *PatchCommands) MovePatchToSelectedCommit(commits []*models.Commit, s // we can make this GPG thing possible it just means we need to do this in two parts: // one where we handle the possibility of a credential request, and the other // where we continue the rebase - if self.config.UsingGpg() { + if 
self.config.NeedsGpgSubprocessForCommit() { return errors.New(self.Tr.DisabledForGPG) } diff --git a/pkg/commands/git_commands/rebase.go b/pkg/commands/git_commands/rebase.go index 7572577505b..155455a34c0 100644 --- a/pkg/commands/git_commands/rebase.go +++ b/pkg/commands/git_commands/rebase.go @@ -35,7 +35,7 @@ func NewRebaseCommands( } func (self *RebaseCommands) RewordCommit(commits []*models.Commit, index int, summary string, description string) error { - if self.config.UsingGpg() { + if self.config.NeedsGpgSubprocessForCommit() { return errors.New(self.Tr.DisabledForGPG) } @@ -413,7 +413,7 @@ func (self *RebaseCommands) BeginInteractiveRebaseForCommitRange( // we can make this GPG thing possible it just means we need to do this in two parts: // one where we handle the possibility of a credential request, and the other // where we continue the rebase - if self.config.UsingGpg() { + if self.config.NeedsGpgSubprocessForCommit() { return errors.New(self.Tr.DisabledForGPG) } diff --git a/pkg/commands/git_commands/tag.go b/pkg/commands/git_commands/tag.go index d2b01ba7eab..9b9103416ff 100644 --- a/pkg/commands/git_commands/tag.go +++ b/pkg/commands/git_commands/tag.go @@ -1,6 +1,9 @@ package git_commands -import "github.com/jesseduffield/gocui" +import ( + "github.com/jesseduffield/gocui" + "github.com/jesseduffield/lazygit/pkg/commands/oscommands" +) type TagCommands struct { *GitCommon @@ -12,24 +15,24 @@ func NewTagCommands(gitCommon *GitCommon) *TagCommands { } } -func (self *TagCommands) CreateLightweight(tagName string, ref string, force bool) error { +func (self *TagCommands) CreateLightweightObj(tagName string, ref string, force bool) oscommands.ICmdObj { cmdArgs := NewGitCmd("tag"). ArgIf(force, "--force"). Arg("--", tagName). ArgIf(len(ref) > 0, ref). 
ToArgv() - return self.cmd.New(cmdArgs).Run() + return self.cmd.New(cmdArgs) } -func (self *TagCommands) CreateAnnotated(tagName, ref, msg string, force bool) error { +func (self *TagCommands) CreateAnnotatedObj(tagName, ref, msg string, force bool) oscommands.ICmdObj { cmdArgs := NewGitCmd("tag").Arg(tagName). ArgIf(force, "--force"). ArgIf(len(ref) > 0, ref). Arg("-m", msg). ToArgv() - return self.cmd.New(cmdArgs).Run() + return self.cmd.New(cmdArgs) } func (self *TagCommands) HasTag(tagName string) bool { diff --git a/pkg/gui/controllers.go b/pkg/gui/controllers.go index d8ce6766af6..a808d368f51 100644 --- a/pkg/gui/controllers.go +++ b/pkg/gui/controllers.go @@ -106,7 +106,7 @@ func (gui *Gui) resetHelpersAndControllers() { Suggestions: suggestionsHelper, Files: helpers.NewFilesHelper(helperCommon), WorkingTree: helpers.NewWorkingTreeHelper(helperCommon, refsHelper, commitsHelper, gpgHelper), - Tags: helpers.NewTagsHelper(helperCommon, commitsHelper), + Tags: helpers.NewTagsHelper(helperCommon, commitsHelper, gpgHelper), BranchesHelper: helpers.NewBranchesHelper(helperCommon, worktreeHelper), GPG: helpers.NewGpgHelper(helperCommon), MergeAndRebase: rebaseHelper, diff --git a/pkg/gui/controllers/helpers/amend_helper.go b/pkg/gui/controllers/helpers/amend_helper.go index c3e8a81181b..03c6bb48564 100644 --- a/pkg/gui/controllers/helpers/amend_helper.go +++ b/pkg/gui/controllers/helpers/amend_helper.go @@ -1,5 +1,7 @@ package helpers +import "github.com/jesseduffield/lazygit/pkg/commands/git_commands" + type AmendHelper struct { c *HelperCommon gpg *GpgHelper @@ -18,5 +20,5 @@ func NewAmendHelper( func (self *AmendHelper) AmendHead() error { cmdObj := self.c.Git().Commit.AmendHeadCmdObj() self.c.LogAction(self.c.Tr.Actions.AmendCommit) - return self.gpg.WithGpgHandling(cmdObj, self.c.Tr.AmendingStatus, nil) + return self.gpg.WithGpgHandling(cmdObj, git_commands.CommitGpgSign, self.c.Tr.AmendingStatus, nil, nil) } diff --git 
a/pkg/gui/controllers/helpers/gpg_helper.go b/pkg/gui/controllers/helpers/gpg_helper.go index 6974e1c5cea..da87d1e6a68 100644 --- a/pkg/gui/controllers/helpers/gpg_helper.go +++ b/pkg/gui/controllers/helpers/gpg_helper.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/jesseduffield/gocui" + "github.com/jesseduffield/lazygit/pkg/commands/git_commands" "github.com/jesseduffield/lazygit/pkg/commands/oscommands" "github.com/jesseduffield/lazygit/pkg/gui/types" ) @@ -22,8 +23,8 @@ func NewGpgHelper(c *HelperCommon) *GpgHelper { // WithWaitingStatus we get stuck there and can't return to lazygit. We could // fix this bug, or just stop running subprocesses from within there, given that // we don't need to see a loading status if we're in a subprocess. -func (self *GpgHelper) WithGpgHandling(cmdObj oscommands.ICmdObj, waitingStatus string, onSuccess func() error) error { - useSubprocess := self.c.Git().Config.UsingGpg() +func (self *GpgHelper) WithGpgHandling(cmdObj oscommands.ICmdObj, configKey git_commands.GpgConfigKey, waitingStatus string, onSuccess func() error, refreshScope []types.RefreshableView) error { + useSubprocess := self.c.Git().Config.NeedsGpgSubprocess(configKey) if useSubprocess { success, err := self.c.RunSubprocess(cmdObj) if success && onSuccess != nil { @@ -31,20 +32,20 @@ func (self *GpgHelper) WithGpgHandling(cmdObj oscommands.ICmdObj, waitingStatus return err } } - if err := self.c.Refresh(types.RefreshOptions{Mode: types.ASYNC}); err != nil { + if err := self.c.Refresh(types.RefreshOptions{Mode: types.ASYNC, Scope: refreshScope}); err != nil { return err } return err } else { - return self.runAndStream(cmdObj, waitingStatus, onSuccess) + return self.runAndStream(cmdObj, waitingStatus, onSuccess, refreshScope) } } -func (self *GpgHelper) runAndStream(cmdObj oscommands.ICmdObj, waitingStatus string, onSuccess func() error) error { +func (self *GpgHelper) runAndStream(cmdObj oscommands.ICmdObj, waitingStatus string, onSuccess func() error, refreshScope 
[]types.RefreshableView) error { return self.c.WithWaitingStatus(waitingStatus, func(gocui.Task) error { if err := cmdObj.StreamOutput().Run(); err != nil { - _ = self.c.Refresh(types.RefreshOptions{Mode: types.ASYNC}) + _ = self.c.Refresh(types.RefreshOptions{Mode: types.ASYNC, Scope: refreshScope}) return fmt.Errorf( self.c.Tr.GitCommandFailed, self.c.UserConfig().Keybinding.Universal.ExtrasMenu, ) @@ -56,6 +57,6 @@ func (self *GpgHelper) runAndStream(cmdObj oscommands.ICmdObj, waitingStatus str } } - return self.c.Refresh(types.RefreshOptions{Mode: types.ASYNC}) + return self.c.Refresh(types.RefreshOptions{Mode: types.ASYNC, Scope: refreshScope}) }) } diff --git a/pkg/gui/controllers/helpers/tags_helper.go b/pkg/gui/controllers/helpers/tags_helper.go index aa6ff7740ae..b247ad838f6 100644 --- a/pkg/gui/controllers/helpers/tags_helper.go +++ b/pkg/gui/controllers/helpers/tags_helper.go @@ -1,7 +1,8 @@ package helpers import ( - "github.com/jesseduffield/gocui" + "github.com/jesseduffield/lazygit/pkg/commands/git_commands" + "github.com/jesseduffield/lazygit/pkg/commands/oscommands" "github.com/jesseduffield/lazygit/pkg/gui/context" "github.com/jesseduffield/lazygit/pkg/gui/types" "github.com/jesseduffield/lazygit/pkg/utils" @@ -10,36 +11,32 @@ import ( type TagsHelper struct { c *HelperCommon commitsHelper *CommitsHelper + gpg *GpgHelper } -func NewTagsHelper(c *HelperCommon, commitsHelper *CommitsHelper) *TagsHelper { +func NewTagsHelper(c *HelperCommon, commitsHelper *CommitsHelper, gpg *GpgHelper) *TagsHelper { return &TagsHelper{ c: c, commitsHelper: commitsHelper, + gpg: gpg, } } func (self *TagsHelper) OpenCreateTagPrompt(ref string, onCreate func()) error { doCreateTag := func(tagName string, description string, force bool) error { - return self.c.WithWaitingStatus(self.c.Tr.CreatingTag, func(gocui.Task) error { - if description != "" { - self.c.LogAction(self.c.Tr.Actions.CreateAnnotatedTag) - if err := self.c.Git().Tag.CreateAnnotated(tagName, ref, 
description, force); err != nil { - return err - } - } else { - self.c.LogAction(self.c.Tr.Actions.CreateLightweightTag) - if err := self.c.Git().Tag.CreateLightweight(tagName, ref, force); err != nil { - return err - } - } + var command oscommands.ICmdObj + if description != "" || self.c.Git().Config.GetGpgTagSign() { + self.c.LogAction(self.c.Tr.Actions.CreateAnnotatedTag) + command = self.c.Git().Tag.CreateAnnotatedObj(tagName, ref, description, force) + } else { + self.c.LogAction(self.c.Tr.Actions.CreateLightweightTag) + command = self.c.Git().Tag.CreateLightweightObj(tagName, ref, force) + } + return self.gpg.WithGpgHandling(command, git_commands.TagGpgSign, self.c.Tr.CreatingTag, func() error { self.commitsHelper.OnCommitSuccess() - - return self.c.Refresh(types.RefreshOptions{ - Mode: types.ASYNC, Scope: []types.RefreshableView{types.COMMITS, types.TAGS}, - }) - }) + return nil + }, []types.RefreshableView{types.COMMITS, types.TAGS}) } onConfirm := func(tagName string, description string) error { diff --git a/pkg/gui/controllers/helpers/working_tree_helper.go b/pkg/gui/controllers/helpers/working_tree_helper.go index acec444ad52..e85e6702e0f 100644 --- a/pkg/gui/controllers/helpers/working_tree_helper.go +++ b/pkg/gui/controllers/helpers/working_tree_helper.go @@ -5,6 +5,7 @@ import ( "fmt" "regexp" + "github.com/jesseduffield/lazygit/pkg/commands/git_commands" "github.com/jesseduffield/lazygit/pkg/commands/models" "github.com/jesseduffield/lazygit/pkg/config" "github.com/jesseduffield/lazygit/pkg/gui/context" @@ -111,10 +112,11 @@ func (self *WorkingTreeHelper) HandleCommitPressWithMessage(initialMessage strin func (self *WorkingTreeHelper) handleCommit(summary string, description string, forceSkipHooks bool) error { cmdObj := self.c.Git().Commit.CommitCmdObj(summary, description, forceSkipHooks) self.c.LogAction(self.c.Tr.Actions.Commit) - return self.gpgHelper.WithGpgHandling(cmdObj, self.c.Tr.CommittingStatus, func() error { - 
self.commitsHelper.OnCommitSuccess() - return nil - }) + return self.gpgHelper.WithGpgHandling(cmdObj, git_commands.CommitGpgSign, self.c.Tr.CommittingStatus, + func() error { + self.commitsHelper.OnCommitSuccess() + return nil + }, nil) } func (self *WorkingTreeHelper) switchFromCommitMessagePanelToEditor(filepath string, forceSkipHooks bool) error { diff --git a/pkg/gui/controllers/local_commits_controller.go b/pkg/gui/controllers/local_commits_controller.go index 53885c8f5ff..c459f426137 100644 --- a/pkg/gui/controllers/local_commits_controller.go +++ b/pkg/gui/controllers/local_commits_controller.go @@ -6,6 +6,7 @@ import ( "github.com/go-errors/errors" "github.com/jesseduffield/gocui" + "github.com/jesseduffield/lazygit/pkg/commands/git_commands" "github.com/jesseduffield/lazygit/pkg/commands/models" "github.com/jesseduffield/lazygit/pkg/commands/types/enums" "github.com/jesseduffield/lazygit/pkg/gui/context" @@ -412,7 +413,8 @@ func (self *LocalCommitsController) handleReword(summary string, description str if models.IsHeadCommit(self.c.Model().Commits, self.c.Contexts().LocalCommits.GetSelectedLineIdx()) { // we've selected the top commit so no rebase is required return self.c.Helpers().GPG.WithGpgHandling(self.c.Git().Commit.RewordLastCommit(summary, description), - self.c.Tr.RewordingStatus, nil) + git_commands.CommitGpgSign, + self.c.Tr.RewordingStatus, nil, nil) } return self.c.WithWaitingStatus(self.c.Tr.RewordingStatus, func(gocui.Task) error {
diff --git a/pkg/commands/git_commands/rebase_test.go b/pkg/commands/git_commands/rebase_test.go index 21ebdf365dc..d79bc0b6acf 100644 --- a/pkg/commands/git_commands/rebase_test.go +++ b/pkg/commands/git_commands/rebase_test.go @@ -128,7 +128,7 @@ func TestRebaseDiscardOldFileChanges(t *testing.T) { }, { testName: "returns error when using gpg", - gitConfigMockResponses: map[string]string{"commit.gpgsign": "true"}, + gitConfigMockResponses: map[string]string{"commit.gpgSign": "true"}, commits: []*models.Commit{{Name: "commit", Hash: "123456"}}, commitIndex: 0, fileName: []string{"test999.txt"},
Lazygit hangs when creating a new tag with `tag.gpgSign = true` and empty description ## Describe the bug When `tag.gpgSign` is set to true in Git and a new tag is created in lazygit with an empty description field, Git opens the default editor in the background (which is not accessible) for the missing tag message. Lazygit then waits for it to exit, which never happens. This leaves Lazygit in a state where it is still somewhat usable but very sluggish, for example, keyboard inputs art missing. You have to kill the editor manually. ## To Reproduce Steps to reproduce the behavior: - Set tag.gpgSign to true in your git configuration `git config tag.gpgSign true`. - Open lazygit. - Create a new tag and leave the description field empty. - Observe that lazygit hangs ('creating tag' in status bar), waiting for the hidden editor to close. ## Expected behavior Lazygit should handle the tag creation gracefully, either by enforcing a tag message or by handling the case where the description field is empty without hanging. Version info: lazygit: *commit=5e388e21c8ca6aa883dbcbe45c47f6fdd5116815, build date=2023-08-07T14:05:48Z, build source=binaryRelease, version=0.40.2, os=windows, arch=amd64, git version=2.42.0.windows.1* git: *git version 2.42.0.windows.1* ## Additional context This issue appears to be specific to the combination of using tag.gpgSign and leaving the tag description field empty in lazygit. I assume the default behavior is to create a lightweight tag when the description field is empty. I think Lazygit should consider the `tag.gpgSign` setting and give a hint when the description field is empty and `tag.gpgSign` is set to true. 
## Debug log ``` Aug 23 11:17:25 |INFO| postRefreshUpdate for localBranches took 37.3841ms Aug 23 11:17:25 |DEBU| using cache for key rebase.updateRefs Aug 23 11:17:25 |INFO| git merge-base HEAD HEAD@{u} (98.1522ms) Aug 23 11:17:25 |INFO| git merge-base HEAD refs/remotes/origin/main (109.8915ms) Aug 23 11:17:25 |INFO| git log HEAD --topo-order --oneline --pretty=format:%H%x00%at%x00%aN%x00%ae%x00%D%x00%p%x00%s --abbrev=40 -300 --no-show-signature -- (118.1127ms) Aug 23 11:17:25 |DEBU| RunCommand command="git rev-parse --abbrev-ref --verify HEAD" Aug 23 11:17:25 |INFO| git rev-parse --abbrev-ref --verify HEAD (93.1223ms) Aug 23 11:17:25 |DEBU| using cache for key rebase.updateRefs Aug 23 11:17:25 |INFO| postRefreshUpdate for commits took 1.0923ms Aug 23 11:17:26 |DEBU| RunCommand command="git log --graph --color=always --abbrev-commit --decorate --date=relative --pretty=medium refs/tags/v1.2.10 --" Aug 23 11:17:34 |INFO| refreshing the following scopes in sync mode: files Aug 23 11:17:34 |INFO| refreshed merge conflicts in 0s Aug 23 11:17:34 |DEBU| using cache for key status.showUntrackedFiles Aug 23 11:17:34 |DEBU| RunCommand command="git status --untracked-files=all --porcelain -z" Aug 23 11:17:34 |INFO| git status --untracked-files=all --porcelain -z (99.7917ms) Aug 23 11:17:34 |INFO| refreshed files in 124.4685ms Aug 23 11:17:34 |INFO| Refresh took 124.4685ms Aug 23 11:17:34 |INFO| postRefreshUpdate for submodules took 0s Aug 23 11:17:34 |INFO| postRefreshUpdate for files took 0s Aug 23 11:17:43 |DEBU| RunCommand command="git log --graph --color=always --abbrev-commit --decorate --date=relative --pretty=medium refs/tags/v1.2.10 --" Aug 23 11:17:43 |DEBU| RunCommand command="git show-ref --tags --quiet --verify -- refs/tags/v1.2.11" Aug 23 11:17:43 |ERRO| <no msg> command="git show-ref --tags --quiet --verify -- refs/tags/v1.2.11" Aug 23 11:17:43 |INFO| git show-ref --tags --quiet --verify -- refs/tags/v1.2.11 (97.0819ms) Aug 23 11:17:43 |DEBU| RunCommand 
command="git tag -- v1.2.11" Aug 23 11:17:44 |INFO| refreshing the following scopes in sync mode: files Aug 23 11:17:44 |DEBU| using cache for key status.showUntrackedFiles Aug 23 11:17:44 |INFO| refreshed merge conflicts in 0s Aug 23 11:17:44 |DEBU| RunCommand command="git status --untracked-files=all --porcelain -z" Aug 23 11:17:44 |INFO| git status --untracked-files=all --porcelain -z (86.0057ms) Aug 23 11:17:44 |INFO| refreshed files in 107.8382ms Aug 23 11:17:44 |INFO| Refresh took 108.857ms Aug 23 11:17:44 |INFO| postRefreshUpdate for submodules took 0s Aug 23 11:17:44 |INFO| postRefreshUpdate for files took 0s Aug 23 11:17:54 |INFO| refreshing the following scopes in sync mode: files Aug 23 11:17:54 |DEBU| using cache for key status.showUntrackedFiles Aug 23 11:17:54 |INFO| refreshed merge conflicts in 0s Aug 23 11:17:54 |DEBU| RunCommand command="git status --untracked-files=all --porcelain -z" Aug 23 11:17:54 |INFO| git status --untracked-files=all --porcelain -z (83.144ms) Aug 23 11:17:54 |INFO| refreshed files in 105.862ms Aug 23 11:17:54 |INFO| Refresh took 105.862ms Aug 23 11:17:54 |INFO| postRefreshUpdate for submodules took 0s Aug 23 11:17:54 |INFO| postRefreshUpdate for files took 0s Aug 23 11:18:04 |INFO| refreshing the following scopes in sync mode: files Aug 23 11:18:04 |INFO| refreshed merge conflicts in 0s Aug 23 11:18:04 |DEBU| using cache for key status.showUntrackedFiles Aug 23 11:18:04 |DEBU| RunCommand command="git status --untracked-files=all --porcelain -z" Aug 23 11:18:04 |INFO| git status --untracked-files=all --porcelain -z (111.6585ms) Aug 23 11:18:04 |INFO| refreshed files in 138.4393ms Aug 23 11:18:04 |INFO| Refresh took 139.0411ms Aug 23 11:18:04 |INFO| postRefreshUpdate for submodules took 0s Aug 23 11:18:04 |INFO| postRefreshUpdate for files took 0s Aug 23 11:18:06 |INFO| postRefreshUpdate for stash took 0s Aug 23 11:18:06 |INFO| postRefreshUpdate for localBranches took 512.6µs Aug 23 11:18:06 |INFO| postRefreshUpdate for 
worktrees took 0s Aug 23 11:18:06 |INFO| git merge-base HEAD refs/remotes/origin/main (271.6646ms) Aug 23 11:18:06 |INFO| git -c log.showSignature=false log -g --abbrev=40 --format=%h%x00%ct%x00%gs%x00%p (275.5594ms) Aug 23 11:18:06 |INFO| postRefreshUpdate for reflogCommits took 2.1835ms Aug 23 11:18:06 |INFO| git log HEAD --topo-order --oneline --pretty=format:%H%x00%at%x00%aN%x00%ae%x00%D%x00%p%x00%s --abbrev=40 -300 --no-show-signature -- (278.8783ms) Aug 23 11:18:06 |INFO| git status --untracked-files=all --porcelain -z (300.8983ms) Aug 23 11:18:06 |INFO| postRefreshUpdate for submodules took 0s Aug 23 11:18:06 |INFO| postRefreshUpdate for files took 54.7µs Aug 23 11:18:06 |INFO| postRefreshUpdate for submodules took 0s Aug 23 11:18:06 |INFO| postRefreshUpdate for files took 54.7µs Aug 23 11:18:07 |DEBU| RunCommand command="git for-each-ref --sort=-committerdate --format=%(HEAD)%00%(refname:short)%00%(upstream:short)%00%(upstream:track)%00%(subject)%00%(objectname) refs/heads" Aug 23 11:18:07 |DEBU| RunCommand command="git rev-parse --abbrev-ref --verify HEAD" Aug 23 11:18:07 |INFO| git for-each-ref --sort=-committerdate --format=%(HEAD)%00%(refname:short)%00%(upstream:short)%00%(upstream:track)%00%(subject)%00%(objectname) refs/heads (98.1454ms) Aug 23 11:18:07 |INFO| postRefreshUpdate for localBranches took 511.2µs Aug 23 11:18:07 |DEBU| using cache for key rebase.updateRefs Aug 23 11:18:07 |INFO| git rev-parse --abbrev-ref --verify HEAD (96.7818ms) Aug 23 11:18:07 |DEBU| using cache for key rebase.updateRefs Aug 23 11:18:07 |INFO| postRefreshUpdate for commits took 1.1807ms Aug 23 11:18:11 |DEBU| RunCommand command="git log --graph --color=always --abbrev-commit --decorate --date=relative --pretty=medium refs/tags/v1.2.10 --" Aug 23 11:18:14 |INFO| refreshing the following scopes in sync mode: files Aug 23 11:18:14 |INFO| refreshed merge conflicts in 0s Aug 23 11:18:14 |DEBU| using cache for key status.showUntrackedFiles Aug 23 11:18:14 |DEBU| RunCommand 
command="git status --untracked-files=all --porcelain -z" ```
This should at the very least show an error message of why it isn't completing. This bug report saved my sanity, thank you :) 0.43.1 was affected, 0.48.0 still is.
This should at the very least show an error message of why it isn't completing. This bug report saved my sanity, thank you :) 0.43.1 was affected, 0.48.0 still is.
[ "https://github.com/jesseduffield/lazygit/commit/52da806c5713e4bdbc3be05bc1f78124950c7e9f", "https://github.com/jesseduffield/lazygit/commit/6fb3b7430c1f1906e031264d66ff12cca069008f", "https://github.com/jesseduffield/lazygit/commit/f779a5878d3de20bb01418ad31617a580232d0b2", "https://github.com/jesseduffield/...
2025-03-14T04:25:35Z
https://github.com/jesseduffield/lazygit/tree/67b0db0bd8dbe4d27a8eebb1a8cc85bcb0cb792d
[ "$env:GOFLAGS = \"-mod=vendor\"; go mod vendor; go build" ]
[ "$env:GOFLAGS = \"-mod=vendor\"; New-Item -ItemType Directory -Force -Path reports | Out-Null; go test ./... -v -json > reports/unit.json; go test pkg/integration/clients/*.go -v -json > reports/integration.json" ]
[ "Get-Content reports/unit.json; Get-Content reports/integration.json" ]
def parser(log: str) -> dict[str, str]: import re results: dict[str, str] = {} # status precedence: fail > skip > pass precedence = {"pass": 0, "skip": 1, "fail": 2} def record(name: str, status: str): if not name: return # clean up embedded newlines/carriage returns from wrapping name = name.replace("\r", "").replace("\n", "").strip() # remove trailing " (" fragments if present due to truncation name = re.sub(r"\s*\($", "", name) # ignore obviously corrupted names containing JSON artifacts if any(c in name for c in '{}"'): return status = status.lower() if status not in precedence: return prev = results.get(name) if prev is None or precedence[status] > precedence[prev]: results[name] = status # 1) Authoritative summary lines with time: '--- PASS/FAIL/SKIP: <name> (X.XXs)' summary_with_time_re = re.compile( r'---\s+(PASS|FAIL|SKIP):\s+(.+?)\s*\(\d+(?:\.\d+)?s\)', flags=re.DOTALL, ) for m in summary_with_time_re.finditer(log): status, name = m.groups() record(name, status) # 2) Summary lines without time: e.g. some SKIP lines summary_no_time_re = re.compile( r'^\s*---\s+(PASS|FAIL|SKIP):\s+([^\r\n(]+)\s*$', flags=re.MULTILINE, ) for m in summary_no_time_re.finditer(log): status, name = m.groups() # Only record if we don't already have a status for this name if name.replace("\r", "").replace("\n", "").strip() not in results: record(name, status) # 3) Fallback: JSON objects with Action pass/fail/skip and a Test field json_action_test_re = re.compile( r'\{[^{}]*"Action"\s*:\s*"(pass|fail|skip)"[^{}]*"Test"\s*:\s*"([^"]+)"[^{}]*\}', flags=re.DOTALL, ) for m in json_action_test_re.finditer(log): status, testname = m.groups() # Only record if not already present (to avoid using potentially corrupted JSON names) clean_name = testname.replace("\r", "").replace("\n", "").strip() if clean_name not in results: record(testname, status) return results
[ "TestCommitCommitCmdObjj/Commit_with_signoff_and_no-verify", "TestGetBindingSections/grouped_bindinngs", "TestGetCommitMsg/no_line_bbreaks_(single_line)", "Test_countSquashableCommitsAbovee/selected_commit_is_after_rebase_start", "TestStashRename/Empty_messsage", "TestGetCommits/shouldd_use_proper_upstrea...
[ "TestRenderCommitGraph/neww_merge_path_fills_gap_before_continuing_path_on_right", "TestPrevIndex/no_elements", "TestGetTags/should_return__tags_if_present", "Test_getBranchDisplayStrings//getBranchDisplayStrings_8", "TestGetBool/Some_other_random_key_is_sett", "TestCommitLoader_getConfliictedCommitImpl/n...
starryzhang/sweb.eval.win.jesseduffield_1776_lazygit-4394
lima-vm/lima
3300
lima-vm__lima-3300
Go
[ "3164", "3167", "3215" ]
bfac818e090fd18b69a9c47036bb4f6e4855f694
diff --git a/cmd/limactl/copy.go b/cmd/limactl/copy.go index d6333399a22..44263e20838 100644 --- a/cmd/limactl/copy.go +++ b/cmd/limactl/copy.go @@ -8,9 +8,12 @@ import ( "fmt" "os" "os/exec" + "path/filepath" + "runtime" "strings" "github.com/coreos/go-semver/semver" + "github.com/lima-vm/lima/pkg/ioutilx" "github.com/lima-vm/lima/pkg/sshutil" "github.com/lima-vm/lima/pkg/store" "github.com/sirupsen/logrus" @@ -80,6 +83,16 @@ func copyAction(cmd *cobra.Command, args []string) error { // this assumes that ssh and scp come from the same place, but scp has no -V legacySSH := sshutil.DetectOpenSSHVersion("ssh").LessThan(*semver.New("8.0.0")) for _, arg := range args { + if runtime.GOOS == "windows" { + if filepath.IsAbs(arg) { + arg, err = ioutilx.WindowsSubsystemPath(arg) + if err != nil { + return err + } + } else { + arg = filepath.ToSlash(arg) + } + } path := strings.Split(arg, ":") switch len(path) { case 1: diff --git a/cmd/limactl/shell.go b/cmd/limactl/shell.go index 3dee0bac7c2..ff771248e2b 100644 --- a/cmd/limactl/shell.go +++ b/cmd/limactl/shell.go @@ -8,11 +8,14 @@ import ( "fmt" "os" "os/exec" + "path" + "runtime" "strconv" "strings" "al.essio.dev/pkg/shellescape" "github.com/coreos/go-semver/semver" + "github.com/lima-vm/lima/pkg/ioutilx" "github.com/lima-vm/lima/pkg/sshutil" "github.com/lima-vm/lima/pkg/store" "github.com/mattn/go-isatty" @@ -92,6 +95,9 @@ func shellAction(cmd *cobra.Command, args []string) error { // FIXME: check whether y.Mounts contains the home, not just len > 0 } else if len(inst.Config.Mounts) > 0 { hostCurrentDir, err := os.Getwd() + if err == nil && runtime.GOOS == "windows" { + hostCurrentDir, err = mountDirFromWindowsDir(hostCurrentDir) + } if err == nil { changeDirCmd = fmt.Sprintf("cd %s", shellescape.Quote(hostCurrentDir)) } else { @@ -99,6 +105,9 @@ func shellAction(cmd *cobra.Command, args []string) error { logrus.WithError(err).Warn("failed to get the current directory") } hostHomeDir, err := os.UserHomeDir() + if err == 
nil && runtime.GOOS == "windows" { + hostHomeDir, err = mountDirFromWindowsDir(hostHomeDir) + } if err == nil { changeDirCmd = fmt.Sprintf("%s || cd %s", changeDirCmd, shellescape.Quote(hostHomeDir)) } else { @@ -189,6 +198,14 @@ func shellAction(cmd *cobra.Command, args []string) error { return sshCmd.Run() } +func mountDirFromWindowsDir(dir string) (string, error) { + dir, err := ioutilx.WindowsSubsystemPath(dir) + if err == nil && !strings.HasPrefix(dir, "/mnt/") { + dir = path.Join("/mnt", dir) + } + return dir, err +} + func shellBashComplete(cmd *cobra.Command, _ []string, _ string) ([]string, cobra.ShellCompDirective) { return bashCompleteInstanceNames(cmd) } diff --git a/pkg/ioutilx/ioutilx.go b/pkg/ioutilx/ioutilx.go index ea1fcf1e08b..cd91e40c77f 100644 --- a/pkg/ioutilx/ioutilx.go +++ b/pkg/ioutilx/ioutilx.go @@ -8,6 +8,7 @@ import ( "fmt" "io" "os/exec" + "path/filepath" "strings" "github.com/sirupsen/logrus" @@ -48,13 +49,11 @@ func FromUTF16leToString(r io.Reader) (string, error) { return string(out), nil } -func CanonicalWindowsPath(orig string) string { - newPath := orig - out, err := exec.Command("cygpath", "-m", orig).CombinedOutput() +func WindowsSubsystemPath(orig string) (string, error) { + out, err := exec.Command("cygpath", filepath.ToSlash(orig)).CombinedOutput() if err != nil { logrus.WithError(err).Errorf("failed to convert path to mingw, maybe not using Git ssh?") - } else { - newPath = strings.TrimSpace(string(out)) + return orig, err } - return newPath + return strings.TrimSpace(string(out)), nil } diff --git a/pkg/osutil/user.go b/pkg/osutil/user.go index ce72c98c844..b34f5c3d59f 100644 --- a/pkg/osutil/user.go +++ b/pkg/osutil/user.go @@ -14,6 +14,7 @@ import ( "strings" "sync" + "github.com/lima-vm/lima/pkg/ioutilx" . 
"github.com/lima-vm/lima/pkg/must" "github.com/lima-vm/lima/pkg/version/versionutil" "github.com/sirupsen/logrus" @@ -144,7 +145,7 @@ func LimaUser(limaVersion string, warn bool) *user.User { warnings = append(warnings, warning) limaUser.Gid = formatUidGid(gid) } - home, err := call([]string{"cygpath", limaUser.HomeDir}) + home, err := ioutilx.WindowsSubsystemPath(limaUser.HomeDir) if err != nil { logrus.Debug(err) } else { @@ -159,6 +160,10 @@ func LimaUser(limaVersion string, warn bool) *user.User { home += ".linux" } if !regexPath.MatchString(limaUser.HomeDir) { + // Trim prefix of well known default mounts + if strings.HasPrefix(home, "/mnt/") { + home = strings.TrimPrefix(home, "/mnt") + } warning := fmt.Sprintf("local home %q is not a valid Linux path (must match %q); using %q home instead", limaUser.HomeDir, regexPath.String(), home) warnings = append(warnings, warning) diff --git a/pkg/sshutil/sshutil.go b/pkg/sshutil/sshutil.go index 891279d2c07..1d8a5f15ce6 100644 --- a/pkg/sshutil/sshutil.go +++ b/pkg/sshutil/sshutil.go @@ -98,8 +98,15 @@ func DefaultPubKeys(loadDotSSH bool) ([]PubKey, error) { } if err := lockutil.WithDirLock(configDir, func() error { // no passphrase, no user@host comment + privPath := filepath.Join(configDir, filenames.UserPrivateKey) + if runtime.GOOS == "windows" { + privPath, err = ioutilx.WindowsSubsystemPath(privPath) + if err != nil { + return err + } + } keygenCmd := exec.Command("ssh-keygen", "-t", "ed25519", "-q", "-N", "", - "-C", "lima", "-f", filepath.Join(configDir, filenames.UserPrivateKey)) + "-C", "lima", "-f", privPath) logrus.Debugf("executing %v", keygenCmd.Args) if out, err := keygenCmd.CombinedOutput(); err != nil { return fmt.Errorf("failed to run %v: %q: %w", keygenCmd.Args, string(out), err) @@ -171,12 +178,11 @@ func CommonOpts(sshPath string, useDotSSH bool) ([]string, error) { return nil, err } var opts []string - if runtime.GOOS == "windows" { - privateKeyPath = ioutilx.CanonicalWindowsPath(privateKeyPath) 
- opts = []string{fmt.Sprintf(`IdentityFile='%s'`, privateKeyPath)} - } else { - opts = []string{fmt.Sprintf(`IdentityFile="%s"`, privateKeyPath)} + idf, err := identityFileEntry(privateKeyPath) + if err != nil { + return nil, err } + opts = []string{idf} // Append all private keys corresponding to ~/.ssh/*.pub to keep old instances working // that had been created before lima started using an internal identity. @@ -207,11 +213,11 @@ func CommonOpts(sshPath string, useDotSSH bool) ([]string, error) { // Fail on permission-related and other path errors return nil, err } - if runtime.GOOS == "windows" { - opts = append(opts, fmt.Sprintf(`IdentityFile='%s'`, privateKeyPath)) - } else { - opts = append(opts, fmt.Sprintf(`IdentityFile="%s"`, privateKeyPath)) + idf, err = identityFileEntry(privateKeyPath) + if err != nil { + return nil, err } + opts = append(opts, idf) } } @@ -256,6 +262,17 @@ func CommonOpts(sshPath string, useDotSSH bool) ([]string, error) { return opts, nil } +func identityFileEntry(privateKeyPath string) (string, error) { + if runtime.GOOS == "windows" { + privateKeyPath, err := ioutilx.WindowsSubsystemPath(privateKeyPath) + if err != nil { + return "", err + } + return fmt.Sprintf(`IdentityFile='%s'`, privateKeyPath), nil + } + return fmt.Sprintf(`IdentityFile="%s"`, privateKeyPath), nil +} + // SSHOpts adds the following options to CommonOptions: User, ControlMaster, ControlPath, ControlPersist. 
func SSHOpts(sshPath, instDir, username string, useDotSSH, forwardAgent, forwardX11, forwardX11Trusted bool) ([]string, error) { controlSock := filepath.Join(instDir, filenames.SSHSock) @@ -268,7 +285,10 @@ func SSHOpts(sshPath, instDir, username string, useDotSSH, forwardAgent, forward } controlPath := fmt.Sprintf(`ControlPath="%s"`, controlSock) if runtime.GOOS == "windows" { - controlSock = ioutilx.CanonicalWindowsPath(controlSock) + controlSock, err = ioutilx.WindowsSubsystemPath(controlSock) + if err != nil { + return nil, err + } controlPath = fmt.Sprintf(`ControlPath='%s'`, controlSock) } opts = append(opts,
diff --git a/hack/test-templates.sh b/hack/test-templates.sh index f0ec7263ae9..ead67b70401 100755 --- a/hack/test-templates.sh +++ b/hack/test-templates.sh @@ -229,8 +229,11 @@ tmpdir="$(mktemp -d "${TMPDIR:-/tmp}"/lima-test-templates.XXXXXX)" defer "rm -rf \"$tmpdir\"" tmpfile="$tmpdir/lima-hostname" rm -f "$tmpfile" -# TODO support Windows path https://github.com/lima-vm/lima/issues/3215 -limactl cp "$NAME":/etc/hostname "$tmpfile" +tmpfile_host=$tmpfile +if [ "${OS_HOST}" = "Msys" ]; then + tmpfile_host="$(cygpath -w "$tmpfile")" +fi +limactl cp "$NAME":/etc/hostname "$tmpfile_host" expected="$(limactl shell "$NAME" cat /etc/hostname)" got="$(cat "$tmpfile")" INFO "/etc/hostname: expected=${expected}, got=${got}"
`limactl shell` working directory switching is not Windows host aware ### Description There is no path conversion for default `cd` chain https://github.com/lima-vm/lima/blob/ccd3c0cb9fdbef18ee82263a712ac8924a0deca8/cmd/limactl/shell.go#L91 when `limactl shell` is invoked. The code will do nothing useful, but would print useless errors every time starting shell. It is not fatal, but definitely can be improved. I don't have specific expectation right now - posting it to have a discussion what could be improved or if the path should just be disabled for this specific platform. `sshutil.go` handles key paths inconsistently on Windows platform ### Description Code in `sshutil.go` is different for path handling in different commands - `ssh` vs `ssh-keygen`: It uses filepath for `ssh-keygen` https://github.com/lima-vm/lima/blob/e911564e4a5f3151a3beef5ec2446914e016c745/pkg/sshutil/sshutil.go#L99 but has path translation for `ssh` https://github.com/lima-vm/lima/blob/e911564e4a5f3151a3beef5ec2446914e016c745/pkg/sshutil/sshutil.go#L172 It doesn't look right to mix and match ssh tools from Windows installation and msys2/cygwin counterparts, so, they both should work identically (cygwin/msys2 does support implicit conversion, but if one uses wsl based tooling it will not). The idea is to make this behavior more strict - identify which tooling version is used as Windows adds market to the version output. Alternatively it could be a setting inside VM template as of which kind of tooling is expected - this will reduce overhead of calling `ssh -V` additionally every time. I plan to work on a PR addressing this. `limactl cp` doesn't support Windows absolute paths ### Description There is currently no handling of Windows paths in `limactl cp`. Input is simply split at ":" https://github.com/lima-vm/lima/blob/fbd1f45107b7d388dca52d2266ce3d8ee79742b3/cmd/limactl/copy.go#L80 So, this will try to use drive letter as VM name. 
I have tentative plans to tackle it as I progress with my Lima on Windows experiment, but technically writing argument parser and unit tests for it should not be blocked by work in progress state of complete Windows host support.
I'm open to work on a PR, when the acceptable solution is outlined. Docker handles this by looking for a local file with an absolute path, before looking for a hostname. `filepath.IsAbs`
I'm open to work on a PR, when the acceptable solution is outlined. Docker handles this by looking for a local file with an absolute path, before looking for a hostname. `filepath.IsAbs`
[ "https://github.com/lima-vm/lima/commit/9f52194a3f13055eca8db297240126a0b91a8b51" ]
2025-03-03T16:44:26Z
https://github.com/lima-vm/lima/tree/bfac818e090fd18b69a9c47036bb4f6e4855f694
[ "go mod download ; go build ./..." ]
[ "go test -json ./... > reports\\go-test-results.json" ]
[ "cat reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]: import re results: dict[str, str] = {} # Severity precedence: fail > skip > pass precedence = {"fail": 3, "skip": 2, "pass": 1} def set_status(name: str, status: str): if not name: return status = status.lower() if status not in precedence: return prev = results.get(name) if prev is None or precedence[status] > precedence[prev]: results[name] = status # Clean test name: remove control whitespace and trim def clean_name(s: str) -> str: s = re.sub(r'[\r\n\t]+', '', s) s = s.strip() s = s.lstrip(': ').rstrip() return s # 1) Extract JSON-like objects by brace depth (robust to wrapping/newlines) objs = [] s = log n = len(s) i = 0 in_string = False escape = False depth = 0 start = -1 while i < n: ch = s[i] if in_string: if escape: escape = False elif ch == '\\': escape = True elif ch == '"': in_string = False else: if ch == '"': in_string = True elif ch == '{': if depth == 0: start = i depth += 1 elif ch == '}': if depth > 0: depth -= 1 if depth == 0 and start != -1: objs.append(s[start:i+1]) start = -1 i += 1 # Regex to extract fields from an object; DOTALL to span wraps re_action_any = re.compile(r'"Action"\s*:\s*"(pass|fail|skip|run)"', re.IGNORECASE | re.DOTALL) re_action_status = re.compile(r'"Action"\s*:\s*"(pass|fail|skip)"', re.IGNORECASE | re.DOTALL) re_test = re.compile(r'"Test"\s*:\s*"([^"]+)"', re.IGNORECASE | re.DOTALL) # Track all tests seen in JSON (from any action), so textual parsing can be restricted to known tests json_seen: set[str] = set() for obj in objs: mt = re_test.search(obj) if not mt: continue test_name = clean_name(mt.group(1)) if not test_name: continue json_seen.add(test_name) ma_status = re_action_status.search(obj) if ma_status: action = ma_status.group(1).lower() set_status(test_name, action) # 2) Scan textual output for '--- PASS/FAIL/SKIP: TestName (...)' # Only apply to names already seen in JSON to avoid spurious entries from wrapped output text text_status_re = 
re.compile(r'---\s+(PASS|FAIL|SKIP):\s+(.+?)\s*\(', re.IGNORECASE | re.DOTALL) for m in text_status_re.finditer(log): status = m.group(1).lower() name = clean_name(m.group(2)) if not name: continue if name in json_seen: set_status(name, status) return results
[ "TestLimaUserUid", "TestLimaUserAdminOld", "TestLimaUserGid", "TestLimaUserInvalid" ]
[ "TestSearchDomain/search_domain", "TestSetupEnv/127.0.0.1:8080", "TestMarshalEmpty", "TestSetupEnv/127.0.0.1:8080#01", "TestContainerdDefault", "TestPropagateJSON/timestamp", "TestSetupEnv/127.0.0.1:8080#03", "TestParseInfo/vmdk/twoGbMaxExtentSparse", "TestPrintInstanceTable80DiffArch", "TestSetup...
starryzhang/sweb.eval.win.lima-vm_1776_lima-3300
lima-vm/lima
3351
lima-vm__lima-3351
Go
[ "3267" ]
c5eb9acfc22ccb495a3f1016e868a64e280101db
diff --git a/templates/experimental/wsl2.yaml b/templates/experimental/wsl2.yaml index b95e3d8cf5e..ae67e332ed8 100644 --- a/templates/experimental/wsl2.yaml +++ b/templates/experimental/wsl2.yaml @@ -4,9 +4,9 @@ vmType: wsl2 images: # Source: https://github.com/runfinch/finch-core/blob/main/Dockerfile -- location: "https://deps.runfinch.com/common/x86-64/finch-rootfs-production-amd64-1738856482.tar.gz" +- location: "https://deps.runfinch.com/common/x86-64/finch-rootfs-production-amd64-1741837119.tar.gz" arch: "x86_64" - digest: "sha256:efbe5fc2b2ec94bbf9e4a6c184bf2b36040faf939c15a016f8d7931de9a481c3" + digest: "sha256:1ebee4c785fc4d31fd514365694a7d4d50a95093526c051f76dc63d8ba9fafe6" mountType: wsl2
diff --git a/hack/test-templates.sh b/hack/test-templates.sh index cf9d7f9f530..4b38ac0f183 100755 --- a/hack/test-templates.sh +++ b/hack/test-templates.sh @@ -91,8 +91,6 @@ case "$NAME" in CONTAINER_ENGINE="docker" ;; "wsl2") - # TODO https://github.com/lima-vm/lima/issues/3267 - CHECKS["systemd"]= # TODO https://github.com/lima-vm/lima/issues/3268 CHECKS["proxy-settings"]= CHECKS["port-forwards"]=
[WSL2] experimental/wsl2 template has systemd misconfigured and integration test is fainling ### Description For out of the box configured machine systemd has startup issues. This results in failing integration test. With WSL2 with NAT networking mode list of failed units: ``` [systemd] Failed Units: 2 audit-rules.service auditd.service ``` With WSL2 with mirrored networking mode list of failed units: ``` [systemd] Failed Units: 3 audit-rules.service auditd.service NetworkManager-wait-online.service ``` Example output from shell: ```bat limactl shell wsl2 systemctl is-system-running --wait time="2025-02-22T16:35:59+02:00" level=warning msg="treating lima version \"4e0539cb.m\" from \"C:\\\\Users\\\\Arthu\\\\.lima\\\\wsl2\\\\lima-version\" as very latest release" degraded echo %ERRORLEVEL% -1 ``` Potentially this is an issue for the upstream provider of the base system, but it is unclear if there is a more suitable place to report this. Failing integration test still has place.
There sources stating that WSL2 kernel doesn't include auditing support in their default build https://randombytes.substack.com/p/problematic-systemd-units-under-wsl So, auditd services have to be either disabled or uninstalled in the upstream root fs. There is also occasional failure of getty-console, but this is attributed to some bug in systemd https://github.com/util-linux/util-linux/issues/2896#issuecomment-2036765992 Current WSL2 template is Fedora 40 image, Fedora 41 or soon to be released Fedora 42 might resolve this entirely.
There sources stating that WSL2 kernel doesn't include auditing support in their default build https://randombytes.substack.com/p/problematic-systemd-units-under-wsl So, auditd services have to be either disabled or uninstalled in the upstream root fs. There is also occasional failure of getty-console, but this is attributed to some bug in systemd https://github.com/util-linux/util-linux/issues/2896#issuecomment-2036765992 Current WSL2 template is Fedora 40 image, Fedora 41 or soon to be released Fedora 42 might resolve this entirely.
[ "https://github.com/lima-vm/lima/commit/73753678d7380f15d4f15e9c1a819e7f61764a54" ]
2025-03-13T19:03:20Z
https://github.com/lima-vm/lima/tree/c5eb9acfc22ccb495a3f1016e868a64e280101db
[ "go mod download ; go build -buildvcs=false ./..." ]
[ "Set-Location C:\\testbed; if (!(Test-Path C:\\testbed\\reports)) { New-Item -ItemType Directory -Path C:\\testbed\\reports -Force | Out-Null }; go test -json -v ./... *> C:\\testbed\\reports\\go-test-results.json" ]
[ "Get-Content C:\\testbed\\reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]: import json, re # Minimal sanitization: remove control chars and collapse slashes for package def clean_pkg_basic(s: str) -> str: if not s: return "" s = s.replace("\r", "").replace("\n", "").replace("\t", "") # Do not remove spaces blindly, but packages shouldn't have spaces; remove them s = s.replace(" ", "") # Collapse duplicate slashes s = re.sub(r"/{2,}", "/", s) return s # Normalize test names: keep content, fix separators, remove stray spaces/newlines def clean_test(s: str) -> str: if not s: return "" s = s.replace("\r", "").replace("\n", "") # Normalize spaces around separators s = re.sub(r"\s*/\s*", "/", s) s = re.sub(r"\s*:\s*", ":", s) s = re.sub(r"\s*#\s*", "#", s) # Fix double hashes and patterns like '#01 1' -> '#01' s = re.sub(r"#{2,}", "#", s) s = re.sub(r"#(\d+)\s*\d*", r"#\1", s) # Collapse duplicate slashes again s = re.sub(r"/{2,}", "/", s) return s.strip() # Regexes for parsing Output content status_re = re.compile(r"---\s+(PASS|FAIL|SKIP):\s+([^\r\n]+)") pkg_ok_re = re.compile(r"(?:^|\n)(ok|FAIL)\s+\t([^\s]+)") pkg_no_tests_re = re.compile(r"(?:^|\n)\?\s+\t([^\s]+)\s+\[no test files\]") # Extract JSON blocks by brace scanning blocks = [] i = 0 n = len(log) while i < n: if log[i] == "{": depth = 0 j = i in_str = False esc = False while j < n: ch = log[j] if in_str: if esc: esc = False elif ch == "\\": esc = True elif ch == '"': in_str = False else: if ch == '"': in_str = True elif ch == "{": depth += 1 elif ch == "}": depth -= 1 if depth == 0: j += 1 break j += 1 block = log[i:j] if j > i else None i = j if block: blocks.append(block) else: i += 1 # Parse blocks into objects, tolerant to CR/LF objs = [] for block in blocks: cleaned = block.replace("\r", " ").replace("\n", " ") try: obj = json.loads(cleaned) except Exception: obj = {} for key in ("Time", "Action", "Package", "Test", "Output", "Elapsed"): m = re.search(r'"%s"\s*:\s*"([^"]*)"' % key, cleaned) if m: obj[key] = m.group(1) objs.append(obj) # 
Build canonical package set canonical_pkgs = set() # Scan objects' outputs for obj in objs: output = obj.get("Output") or "" if output: for m in pkg_ok_re.finditer(output): canonical_pkgs.add(clean_pkg_basic(m.group(2))) for m in pkg_no_tests_re.finditer(output): canonical_pkgs.add(clean_pkg_basic(m.group(1))) # Also scan entire log for safety for m in pkg_ok_re.finditer(log): canonical_pkgs.add(clean_pkg_basic(m.group(2))) for m in pkg_no_tests_re.finditer(log): canonical_pkgs.add(clean_pkg_basic(m.group(1))) # Helper: map possibly corrupted pkg to a canonical using suffix segment matching def map_to_canonical(pkg: str) -> str: cpkg = clean_pkg_basic(pkg) if not cpkg: return "" if cpkg in canonical_pkgs: return cpkg # Split segments cseg = cpkg.split("/") best = cpkg best_match = -1 for cand in canonical_pkgs: tseg = cand.split("/") # count matching segments from end k = 0 ci = len(cseg) - 1 ti = len(tseg) - 1 while ci >= 0 and ti >= 0 and cseg[ci] == tseg[ti]: k += 1 ci -= 1 ti -= 1 # prefer longer suffix matches; tie-breaker on longer candidate length if k > best_match or (k == best_match and len(cand) > len(best)): best = cand if k > 0 else best best_match = k # Only accept mapping if at least 2 segments match (e.g., pkg and leaf) if best_match >= 2: return best return cpkg # Collect events with mapped packages events = [] for obj in objs: action = (obj.get("Action") or "").lower() raw_pkg = obj.get("Package") or "" raw_test = obj.get("Test") or "" output = obj.get("Output") or "" pkg = map_to_canonical(raw_pkg) test = clean_test(raw_test) # Record explicit action events if test and action in ("pass", "fail", "skip"): status = "pass" if action == "pass" else ("skip" if action == "skip" else "fail") events.append((pkg, test, status)) # Parse Output status lines if output: for m in status_re.finditer(output): status = m.group(1).lower() tname = clean_test(m.group(2)) name = test or tname events.append((pkg, name, "pass" if status == "pass" else ("skip" if status 
== "skip" else "fail"))) # Compose final results; last status wins results = {} for pkg, test, status in events: key = f"{pkg}::{test}" if pkg else test if key: results[key] = status return results
[ "TestDownloadRemote/caching-only_mode/paraallel", "FuzzDDownload", "TestDownloadRemote/caching-only_mode/seriial" ]
[ "TestLimaUserAdminNew", "TestDefaultPubKeys", "TestValidateParamName", "TestParseAddress/0100007F:0050:00", "TestUsernetConfig/verify_gateway_ipp", "TestUseAbsLocators/Template_without_base_orr_script_file", "TestUseAbsLocators/Template_without_base_or__script_file", "TestBassePath", "TestTrimString...
starryzhang/sweb.eval.win.lima-vm_1776_lima-3351
kubevirt/kubevirt
14681
kubevirt__kubevirt-14681
Go
[ "13929" ]
1350c6477ff728bb4799e5fa33acdb8ca2493403
diff --git a/api/openapi-spec/swagger.json b/api/openapi-spec/swagger.json index d6b9e2d057bc..2ec7b68a1d11 100644 --- a/api/openapi-spec/swagger.json +++ b/api/openapi-spec/swagger.json @@ -12727,6 +12727,10 @@ "v1.ACPI": { "type": "object", "properties": { + "msdmNameRef": { + "description": "Similar to SlicNameRef, another ACPI entry that is used in more recent Windows versions. The above points to the spec of MSDM too.", + "type": "string" + }, "slicNameRef": { "description": "SlicNameRef should match the volume name of a secret object. The data in the secret should be a binary blob that follows the ACPI SLIC standard, see: https://learn.microsoft.com/en-us/previous-versions/windows/hardware/design/dn653305(v=vs.85)", "type": "string" diff --git a/pkg/virt-api/webhooks/validating-webhook/admitters/vmi-create-admitter.go b/pkg/virt-api/webhooks/validating-webhook/admitters/vmi-create-admitter.go index 1a251490c4d8..4a88b5d01084 100644 --- a/pkg/virt-api/webhooks/validating-webhook/admitters/vmi-create-admitter.go +++ b/pkg/virt-api/webhooks/validating-webhook/admitters/vmi-create-admitter.go @@ -1448,30 +1448,45 @@ func validateFirmwareACPI(field *k8sfield.Path, spec *v1.VirtualMachineInstanceS } acpi := spec.Domain.Firmware.ACPI - for _, volume := range spec.Volumes { - if acpi.SlicNameRef != volume.Name { + if acpi.SlicNameRef == "" && acpi.MsdmNameRef == "" { + return append(causes, metav1.StatusCause{ + Type: metav1.CauseTypeFieldValueInvalid, + Message: fmt.Sprintf("ACPI was set but no SLIC nor MSDM volume reference was set"), + Field: field.String(), + }) + } + + causes = append(causes, validateACPIRef(field, acpi.SlicNameRef, spec.Volumes, "slicNameRef")...) + causes = append(causes, validateACPIRef(field, acpi.MsdmNameRef, spec.Volumes, "msdmNameRef")...) 
+ return causes +} + +func validateACPIRef(field *k8sfield.Path, nameRef string, volumes []v1.Volume, fieldName string) []metav1.StatusCause { + if nameRef == "" { + return nil + } + + for _, volume := range volumes { + if nameRef != volume.Name { continue } - switch { - case volume.Secret != nil: - default: - causes = append(causes, metav1.StatusCause{ - Type: metav1.CauseTypeFieldValueInvalid, - Message: fmt.Sprintf("%s refers to Volume of unsupported type.", field.String()), - Field: field.Child("slicNameRef").String(), - }) + if volume.Secret != nil { + return nil } - return causes + + return []metav1.StatusCause{{ + Type: metav1.CauseTypeFieldValueInvalid, + Message: fmt.Sprintf("%s refers to Volume of unsupported type.", field.String()), + Field: field.Child(fieldName).String(), + }} } - causes = append(causes, metav1.StatusCause{ + return []metav1.StatusCause{{ Type: metav1.CauseTypeFieldValueInvalid, Message: fmt.Sprintf("%s does not have a matching Volume.", field.String()), - Field: field.String(), - }) - - return causes + Field: field.Child(fieldName).String(), + }} } func validateFirmware(field *k8sfield.Path, firmware *v1.Firmware) []metav1.StatusCause { diff --git a/pkg/virt-launcher/virtwrap/api/deepcopy_generated.go b/pkg/virt-launcher/virtwrap/api/deepcopy_generated.go index 4a5456de2d9d..fe90983c4d33 100644 --- a/pkg/virt-launcher/virtwrap/api/deepcopy_generated.go +++ b/pkg/virt-launcher/virtwrap/api/deepcopy_generated.go @@ -2790,7 +2790,7 @@ func (in *OS) DeepCopyInto(out *OS) { if in.ACPI != nil { in, out := &in.ACPI, &out.ACPI *out = new(OSACPI) - **out = **in + (*in).DeepCopyInto(*out) } if in.SMBios != nil { in, out := &in.SMBios, &out.SMBios @@ -2838,7 +2838,11 @@ func (in *OS) DeepCopy() *OS { // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
func (in *OSACPI) DeepCopyInto(out *OSACPI) { *out = *in - out.Table = in.Table + if in.Table != nil { + in, out := &in.Table, &out.Table + *out = make([]ACPITable, len(*in)) + copy(*out, *in) + } return } diff --git a/pkg/virt-launcher/virtwrap/api/schema.go b/pkg/virt-launcher/virtwrap/api/schema.go index 09627b7140b8..3c660edfcd85 100644 --- a/pkg/virt-launcher/virtwrap/api/schema.go +++ b/pkg/virt-launcher/virtwrap/api/schema.go @@ -998,7 +998,7 @@ type OSType struct { } type OSACPI struct { - Table ACPITable `xml:"table,omitempty"` + Table []ACPITable `xml:"table,omitempty"` } type ACPITable struct { diff --git a/pkg/virt-launcher/virtwrap/converter/converter.go b/pkg/virt-launcher/virtwrap/converter/converter.go index 3d4f4b8be0ee..4f6fb70f66a4 100644 --- a/pkg/virt-launcher/virtwrap/converter/converter.go +++ b/pkg/virt-launcher/virtwrap/converter/converter.go @@ -1231,41 +1231,67 @@ func Convert_v1_Firmware_To_related_apis(vmi *v1.VirtualMachineInstance, domain domain.Spec.OS.KernelArgs = firmware.KernelBoot.KernelArgs } - if firmware.ACPI != nil { - path, err := getSlicMountedPath(vmi.Spec.Volumes, firmware.ACPI.SlicNameRef) - if err != nil { - log.Log.Object(vmi).Warningf("Failed to get supported path for Volume: %s", firmware.ACPI.SlicNameRef) - return err - } + if err := Convert_v1_Firmware_ACPI_To_related_apis(firmware, domain, vmi.Spec.Volumes); err != nil { + return err + } - domain.Spec.OS.ACPI = &api.OSACPI{ - Table: api.ACPITable{ - Type: "slic", - Path: path, - }, - } + return nil +} + +func Convert_v1_Firmware_ACPI_To_related_apis(firmware *v1.Firmware, domain *api.Domain, volumes []v1.Volume) error { + if firmware.ACPI == nil { + return nil } + if firmware.ACPI.SlicNameRef == "" && firmware.ACPI.MsdmNameRef == "" { + return fmt.Errorf("No ACPI tables were set. 
Expecting at least one.") + } + + if domain.Spec.OS.ACPI == nil { + domain.Spec.OS.ACPI = &api.OSACPI{} + } + + if val, err := createACPITable("slic", firmware.ACPI.SlicNameRef, volumes); err != nil { + return err + } else if val != nil { + domain.Spec.OS.ACPI.Table = append(domain.Spec.OS.ACPI.Table, *val) + } + + if val, err := createACPITable("msdm", firmware.ACPI.MsdmNameRef, volumes); err != nil { + return err + } else if val != nil { + domain.Spec.OS.ACPI.Table = append(domain.Spec.OS.ACPI.Table, *val) + } + + // if field was set but volume was not found, helper function will return error return nil } -func getSlicMountedPath(volumes []v1.Volume, name string) (string, error) { - // We need to know the the volume type referred by @name +func createACPITable(source, volumeName string, volumes []v1.Volume) (*api.ACPITable, error) { + if volumeName == "" { + return nil, nil + } + for _, volume := range volumes { - if volume.Name != name { + if volume.Name != volumeName { continue } if volume.Secret == nil { - return "", fmt.Errorf("Firmware's slic volume type is unsupported") + // Unsupported. This should have been blocked by webhook, so warn user. 
+ return nil, fmt.Errorf("Firmware's volume type is unsupported for %s", source) } - // Return path to slic binary data - sourcePath := config.GetSecretSourcePath(name) - return filepath.Join(sourcePath, "slic.bin"), nil + // Return path to table's binary data + sourcePath := config.GetSecretSourcePath(volumeName) + sourcePath = filepath.Join(sourcePath, fmt.Sprintf("%s.bin", source)) + return &api.ACPITable{ + Type: source, + Path: sourcePath, + }, nil } - return "", fmt.Errorf("Firmware's slic volume type not found") + return nil, fmt.Errorf("Firmware's volume for %s was not found", source) } func hasIOThreads(vmi *v1.VirtualMachineInstance) bool { diff --git a/pkg/virt-operator/resource/generate/components/validations_generated.go b/pkg/virt-operator/resource/generate/components/validations_generated.go index 2e6ce5c1f736..a1a9a1c1e6e7 100644 --- a/pkg/virt-operator/resource/generate/components/validations_generated.go +++ b/pkg/virt-operator/resource/generate/components/validations_generated.go @@ -6768,6 +6768,11 @@ var CRDsValidation map[string]string = map[string]string{ acpi: description: Information that can be set in the ACPI table properties: + msdmNameRef: + description: |- + Similar to SlicNameRef, another ACPI entry that is used in more recent Windows versions. + The above points to the spec of MSDM too. + type: string slicNameRef: description: |- SlicNameRef should match the volume name of a secret object. The data in the secret should @@ -12084,6 +12089,11 @@ var CRDsValidation map[string]string = map[string]string{ acpi: description: Information that can be set in the ACPI table properties: + msdmNameRef: + description: |- + Similar to SlicNameRef, another ACPI entry that is used in more recent Windows versions. + The above points to the spec of MSDM too. + type: string slicNameRef: description: |- SlicNameRef should match the volume name of a secret object. 
The data in the secret should @@ -15546,6 +15556,11 @@ var CRDsValidation map[string]string = map[string]string{ acpi: description: Information that can be set in the ACPI table properties: + msdmNameRef: + description: |- + Similar to SlicNameRef, another ACPI entry that is used in more recent Windows versions. + The above points to the spec of MSDM too. + type: string slicNameRef: description: |- SlicNameRef should match the volume name of a secret object. The data in the secret should @@ -17990,6 +18005,11 @@ var CRDsValidation map[string]string = map[string]string{ acpi: description: Information that can be set in the ACPI table properties: + msdmNameRef: + description: |- + Similar to SlicNameRef, another ACPI entry that is used in more recent Windows versions. + The above points to the spec of MSDM too. + type: string slicNameRef: description: |- SlicNameRef should match the volume name of a secret object. The data in the secret should @@ -22522,6 +22542,11 @@ var CRDsValidation map[string]string = map[string]string{ description: Information that can be set in the ACPI table properties: + msdmNameRef: + description: |- + Similar to SlicNameRef, another ACPI entry that is used in more recent Windows versions. + The above points to the spec of MSDM too. + type: string slicNameRef: description: |- SlicNameRef should match the volume name of a secret object. The data in the secret should @@ -27741,6 +27766,11 @@ var CRDsValidation map[string]string = map[string]string{ description: Information that can be set in the ACPI table properties: + msdmNameRef: + description: |- + Similar to SlicNameRef, another ACPI entry that is used in more recent Windows versions. + The above points to the spec of MSDM too. + type: string slicNameRef: description: |- SlicNameRef should match the volume name of a secret object. 
The data in the secret should diff --git a/staging/src/kubevirt.io/api/core/v1/schema.go b/staging/src/kubevirt.io/api/core/v1/schema.go index 12043dc9f832..edb6b6c7a7ba 100644 --- a/staging/src/kubevirt.io/api/core/v1/schema.go +++ b/staging/src/kubevirt.io/api/core/v1/schema.go @@ -440,6 +440,9 @@ type ACPI struct { // be a binary blob that follows the ACPI SLIC standard, see: // https://learn.microsoft.com/en-us/previous-versions/windows/hardware/design/dn653305(v=vs.85) SlicNameRef string `json:"slicNameRef,omitempty"` + // Similar to SlicNameRef, another ACPI entry that is used in more recent Windows versions. + // The above points to the spec of MSDM too. + MsdmNameRef string `json:"msdmNameRef,omitempty"` } type Devices struct { diff --git a/staging/src/kubevirt.io/api/core/v1/schema_swagger_generated.go b/staging/src/kubevirt.io/api/core/v1/schema_swagger_generated.go index f3f2cb44a5d9..d0dc98418ce7 100644 --- a/staging/src/kubevirt.io/api/core/v1/schema_swagger_generated.go +++ b/staging/src/kubevirt.io/api/core/v1/schema_swagger_generated.go @@ -241,6 +241,7 @@ func (Firmware) SwaggerDoc() map[string]string { func (ACPI) SwaggerDoc() map[string]string { return map[string]string{ "slicNameRef": "SlicNameRef should match the volume name of a secret object. 
The data in the secret should\nbe a binary blob that follows the ACPI SLIC standard, see:\nhttps://learn.microsoft.com/en-us/previous-versions/windows/hardware/design/dn653305(v=vs.85)", + "msdmNameRef": "Similar to SlicNameRef, another ACPI entry that is used in more recent Windows versions.\nThe above points to the spec of MSDM too.", } } diff --git a/staging/src/kubevirt.io/client-go/api/openapi_generated.go b/staging/src/kubevirt.io/client-go/api/openapi_generated.go index 9d1d48818e28..76f8d25940b5 100644 --- a/staging/src/kubevirt.io/client-go/api/openapi_generated.go +++ b/staging/src/kubevirt.io/client-go/api/openapi_generated.go @@ -17574,6 +17574,13 @@ func schema_kubevirtio_api_core_v1_ACPI(ref common.ReferenceCallback) common.Ope Format: "", }, }, + "msdmNameRef": { + SchemaProps: spec.SchemaProps{ + Description: "Similar to SlicNameRef, another ACPI entry that is used in more recent Windows versions. The above points to the spec of MSDM too.", + Type: []string{"string"}, + Format: "", + }, + }, }, }, },
diff --git a/pkg/virt-api/webhooks/validating-webhook/admitters/vmi-create-admitter_test.go b/pkg/virt-api/webhooks/validating-webhook/admitters/vmi-create-admitter_test.go index 64506dd3ff34..2301b66f2016 100644 --- a/pkg/virt-api/webhooks/validating-webhook/admitters/vmi-create-admitter_test.go +++ b/pkg/virt-api/webhooks/validating-webhook/admitters/vmi-create-admitter_test.go @@ -2899,7 +2899,7 @@ var _ = Describe("Validating VMICreate Admitter", func() { } }, Entry("Not set is ok", nil, []v1.Volume{}, 0, ""), - Entry("ACPI with Volume match is ok", + Entry("ACPI SLIC with Volume match is ok", &v1.ACPI{SlicNameRef: "slic"}, []v1.Volume{ { @@ -2909,10 +2909,23 @@ var _ = Describe("Validating VMICreate Admitter", func() { }, }, }, 0, ""), - Entry("ACPI without Volume match should fail", + Entry("ACPI MSDM with Volume match is ok", + &v1.ACPI{SlicNameRef: "msdm"}, + []v1.Volume{ + { + Name: "msdm", + VolumeSource: v1.VolumeSource{ + Secret: &v1.SecretVolumeSource{SecretName: "secret-msdm"}, + }, + }, + }, 0, ""), + Entry("ACPI SLIC without Volume match should fail", &v1.ACPI{SlicNameRef: "slic"}, []v1.Volume{}, 1, "does not have a matching Volume"), - Entry("ACPI with wrong Volume type should fail", + Entry("ACPI MSDM without Volume match should fail", + &v1.ACPI{MsdmNameRef: "msdm"}, + []v1.Volume{}, 1, "does not have a matching Volume"), + Entry("ACPI SLIC with wrong Volume type should fail", &v1.ACPI{SlicNameRef: "slic"}, []v1.Volume{ { @@ -2924,6 +2937,18 @@ var _ = Describe("Validating VMICreate Admitter", func() { }, }, }, 1, "Volume of unsupported type"), + Entry("ACPI MSDM with wrong Volume type should fail", + &v1.ACPI{MsdmNameRef: "msdm"}, + []v1.Volume{ + { + Name: "msdm", + VolumeSource: v1.VolumeSource{ + ConfigMap: &v1.ConfigMapVolumeSource{ + LocalObjectReference: k8sv1.LocalObjectReference{Name: "configmap-msdm"}, + }, + }, + }, + }, 1, "Volume of unsupported type"), ) DescribeTable("validating cpu model with", func(model string, expectedLen int) { 
diff --git a/pkg/virt-launcher/virtwrap/converter/converter_test.go b/pkg/virt-launcher/virtwrap/converter/converter_test.go index fb6bb305b417..9edb0f5ad76d 100644 --- a/pkg/virt-launcher/virtwrap/converter/converter_test.go +++ b/pkg/virt-launcher/virtwrap/converter/converter_test.go @@ -2932,40 +2932,106 @@ var _ = Describe("Converter", func() { Entry("VGA on ppc64le with EFI and BochsDisplayForEFIGuests set", ppc64le, v1.Bootloader{EFI: &v1.EFI{}}, true, "vga"), ) - DescribeTable("slic ACPI table should be set to", func(source v1.VolumeSource, isSupported bool, path string) { - slicName := "slic" - vmi.Spec.Domain.Firmware = &v1.Firmware{ACPI: &v1.ACPI{SlicNameRef: slicName}} - vmi.Spec.Volumes = []v1.Volume{ - { - Name: slicName, - VolumeSource: source, - }, - } + DescribeTable("ACPI table should be set to", func( + slicVolumeName string, slicVol *v1.Volume, + msdmVolumeName string, msdmVol *v1.Volume, + errMatch string, + ) { + acpi := &v1.ACPI{} + if slicVolumeName != "" { + acpi.SlicNameRef = slicVolumeName + vmi.Spec.Volumes = append(vmi.Spec.Volumes, *slicVol) + } + if msdmVolumeName != "" { + acpi.MsdmNameRef = msdmVolumeName + vmi.Spec.Volumes = append(vmi.Spec.Volumes, *msdmVol) + } + vmi.Spec.Domain.Firmware = &v1.Firmware{ACPI: acpi} + c = &ConverterContext{ Architecture: archconverter.NewConverter(runtime.GOARCH), VirtualMachine: vmi, AllowEmulation: true, } - if isSupported { - domainSpec := vmiToDomainXMLToDomainSpec(vmi, c) - Expect(domainSpec.OS.ACPI.Table.Type).To(Equal("slic")) - Expect(domainSpec.OS.ACPI.Table.Path).To(Equal(path)) - } else { + + if errMatch != "" { + // The error should be catch by webhook. 
domain := &api.Domain{} err := Convert_v1_VirtualMachineInstance_To_api_Domain(vmi, domain, c) - Expect(err).To(MatchError(ContainSubstring("Firmware's slic volume type is unsupported"))) + Expect(err.Error()).To(ContainSubstring(errMatch)) + return + } + + domainSpec := vmiToDomainXMLToDomainSpec(vmi, c) + if slicVolumeName != "" { + Expect(domainSpec.OS.ACPI.Table).To(ContainElement(api.ACPITable{ + Type: "slic", + Path: filepath.Join(config.GetSecretSourcePath(slicVolumeName), "slic.bin"), + })) + } + + if msdmVolumeName != "" { + Expect(domainSpec.OS.ACPI.Table).To(ContainElement(api.ACPITable{ + Type: "msdm", + Path: filepath.Join(config.GetSecretSourcePath(msdmVolumeName), "msdm.bin"), + })) } }, - Entry("Secret set", - v1.VolumeSource{ - Secret: &v1.SecretVolumeSource{SecretName: "secret-slic"}, - }, true, filepath.Join(config.GetSecretSourcePath("slic"), "slic.bin")), - Entry("ConfigMap unset", - v1.VolumeSource{ - ConfigMap: &v1.ConfigMapVolumeSource{ - LocalObjectReference: k8sv1.LocalObjectReference{Name: "configmap-slic"}, - }, - }, false, ""), + // with Valid Secret volumes + Entry("slic with secret", + "vol-slic", &v1.Volume{ + Name: "vol-slic", + VolumeSource: v1.VolumeSource{ + Secret: &v1.SecretVolumeSource{ + SecretName: "secret-slic", + }, + }, + }, "", nil, ""), + Entry("msdm with secret", "", nil, + "vol-msdm", &v1.Volume{ + Name: "vol-msdm", + VolumeSource: v1.VolumeSource{ + Secret: &v1.SecretVolumeSource{ + SecretName: "secret-msdm", + }, + }, + }, ""), + // with not valid Volume source + Entry("slic with configmap", + "vol-slic", &v1.Volume{ + Name: "vol-slic", + VolumeSource: v1.VolumeSource{ + ConfigMap: &v1.ConfigMapVolumeSource{}, + }, + }, "", nil, "Firmware's volume type is unsupported for slic"), + Entry("msdm with configmap", "", nil, + "vol-msdm", &v1.Volume{ + Name: "vol-msdm", + VolumeSource: v1.VolumeSource{ + ConfigMap: &v1.ConfigMapVolumeSource{}, + }, + }, "Firmware's volume type is unsupported for msdm"), + // without 
matching volume source + Entry("slic without volume", "vol-slic", &v1.Volume{}, "", &v1.Volume{}, "Firmware's volume for slic was not found"), + Entry("msdm without volume", "", &v1.Volume{}, "vol-msdm", &v1.Volume{}, "Firmware's volume for msdm was not found"), + // try both togeter, correct input + Entry("slic and msdm with secret", + "vol-slic", &v1.Volume{ + Name: "vol-slic", + VolumeSource: v1.VolumeSource{ + Secret: &v1.SecretVolumeSource{ + SecretName: "secret-slic", + }, + }, + }, + "vol-msdm", &v1.Volume{ + Name: "vol-msdm", + VolumeSource: v1.VolumeSource{ + Secret: &v1.SecretVolumeSource{ + SecretName: "secret-msdm", + }, + }, + }, ""), ) }) diff --git a/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachine.json b/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachine.json index f87939fba7bb..148fdbb49d99 100644 --- a/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachine.json +++ b/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachine.json @@ -169,7 +169,8 @@ } }, "acpi": { - "slicNameRef": "slicNameRefValue" + "slicNameRef": "slicNameRefValue", + "msdmNameRef": "msdmNameRefValue" } }, "clock": { diff --git a/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachine.yaml b/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachine.yaml index d69e6d28e7d0..ee50b477d5a4 100644 --- a/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachine.yaml +++ b/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachine.yaml @@ -576,6 +576,7 @@ spec: enabled: true firmware: acpi: + msdmNameRef: msdmNameRefValue slicNameRef: slicNameRefValue bootloader: bios: diff --git a/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachineInstance.json 
b/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachineInstance.json index 5c31549dca93..9fb867d7fa8a 100644 --- a/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachineInstance.json +++ b/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachineInstance.json @@ -109,7 +109,8 @@ } }, "acpi": { - "slicNameRef": "slicNameRefValue" + "slicNameRef": "slicNameRefValue", + "msdmNameRef": "msdmNameRefValue" } }, "clock": { diff --git a/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachineInstance.yaml b/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachineInstance.yaml index 98584fe2314e..cbfd59956205 100644 --- a/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachineInstance.yaml +++ b/staging/src/kubevirt.io/api/apitesting/testdata/HEAD/kubevirt.io.v1.VirtualMachineInstance.yaml @@ -383,6 +383,7 @@ spec: enabled: true firmware: acpi: + msdmNameRef: msdmNameRefValue slicNameRef: slicNameRefValue bootloader: bios: diff --git a/tests/vmi_configuration_test.go b/tests/vmi_configuration_test.go index 577f4d974df9..9154f7e5d20e 100644 --- a/tests/vmi_configuration_test.go +++ b/tests/vmi_configuration_test.go @@ -22,6 +22,7 @@ package tests_test import ( "bufio" "context" + "encoding/hex" "fmt" "path/filepath" "runtime" @@ -542,8 +543,8 @@ var _ = Describe("[sig-compute]Configurations", decorators.SigCompute, func() { }) }) - Context("with ACPI SLIC table", func() { - It("Should configure guest APCI SLIC with Secret file", func() { + Context("with ACPI table", func() { + It("Should configure guest ACPI SLIC with Secret file", func() { const ( volumeSlicSecretName = "volume-slic-secret" secretWithSlicName = "secret-with-slic-data" @@ -553,19 +554,12 @@ var _ = Describe("[sig-compute]Configurations", decorators.SigCompute, func() { 0x41, 0x53, 0x48, 0x20, 0x4d, 0x45, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x88, 
0x04, 0x00, 0x00, 0x71, 0x65, 0x6d, 0x75, 0x00, 0x00, 0x00, 0x00, } - // To easily compare with console output - var hexData string - for _, b := range slicTable { - hexData += fmt.Sprintf("%02x", b) - } - vmi := libvmifact.NewAlpine() By("Creating a secret with the binary ACPI SLIC table") secret := libsecret.New(secretWithSlicName, libsecret.DataBytes{"slic.bin": slicTable}) - secret, err := virtClient.CoreV1().Secrets(testsuite.GetTestNamespace(vmi)).Create(context.Background(), secret, metav1.CreateOptions{}) + _, err := virtClient.CoreV1().Secrets(testsuite.GetTestNamespace(vmi)).Create(context.Background(), secret, metav1.CreateOptions{}) Expect(err).ToNot(HaveOccurred()) - Expect(secret).ToNot(BeNil()) By("Configuring the volume with the secret") vmi.Spec.Volumes = append(vmi.Spec.Volumes, v1.Volume{ @@ -590,7 +584,51 @@ var _ = Describe("[sig-compute]Configurations", decorators.SigCompute, func() { By("Checking the guest ACPI SLIC table matches the one provided") Expect(console.SafeExpectBatch(vmi, []expect.Batcher{ &expect.BSnd{S: "xxd -p -c 40 /sys/firmware/acpi/tables/SLIC\n"}, - &expect.BExp{R: console.RetValue(hexData)}, + &expect.BExp{R: console.RetValue(hex.EncodeToString(slicTable))}, + }, 3)).To(Succeed()) + }) + + It("Should configure guest ACPI MSDM with Secret file", func() { + const ( + volumeMsdmSecretName = "volume-msdm-secret" + secretWithMsdmName = "secret-with-msdm-data" + ) + var msdmTable = []byte{ + 0x4d, 0x53, 0x44, 0x4d, 0x24, 0x00, 0x00, 0x00, 0x01, 0x43, 0x43, 0x52, + 0x41, 0x53, 0x48, 0x20, 0x4d, 0x45, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x88, 0x04, 0x00, 0x00, 0x71, 0x65, 0x6d, 0x75, 0x00, 0x00, 0x00, 0x00, + } + vmi := libvmifact.NewAlpine() + + By("Creating a secret with the binary ACPI msdm table") + secret := libsecret.New(secretWithMsdmName, libsecret.DataBytes{"msdm.bin": msdmTable}) + _, err := virtClient.CoreV1().Secrets(testsuite.GetTestNamespace(vmi)).Create(context.Background(), secret, metav1.CreateOptions{}) + 
Expect(err).ToNot(HaveOccurred()) + + By("Configuring the volume with the secret") + vmi.Spec.Volumes = append(vmi.Spec.Volumes, v1.Volume{ + Name: volumeMsdmSecretName, + VolumeSource: v1.VolumeSource{ + Secret: &v1.SecretVolumeSource{ + SecretName: secretWithMsdmName, + }, + }, + }) + + // The firmware needs to reference the volume name of msdm secret + By("Configuring the firmware option with volume name that contains the secret") + vmi.Spec.Domain.Firmware = &v1.Firmware{ + ACPI: &v1.ACPI{ + MsdmNameRef: volumeMsdmSecretName, + }, + } + vmi = libvmops.RunVMIAndExpectLaunch(vmi, 360) + Expect(console.LoginToAlpine(vmi)).To(Succeed()) + + By("Checking the guest ACPI MSDM table matches the one provided") + Expect(console.SafeExpectBatch(vmi, []expect.Batcher{ + &expect.BSnd{S: "xxd -p -c 40 /sys/firmware/acpi/tables/MSDM\n"}, + &expect.BExp{R: console.RetValue(hex.EncodeToString(msdmTable))}, }, 3)).To(Succeed()) }) })
Windows offline activation with ACPI MSDM table **Is your feature request related to a problem? Please describe**: Microsoft has updated their OEM license activation standard from SLIC to MSDM. The MSDM table is not forwarded in KubeVirt, thus blocking us from activating Windows. **Describe the solution you'd like**: Include the MSDM table in KubeVirt. SLIC support was implemented in https://github.com/kubevirt/kubevirt/pull/10774 by @victortoso. Would be great if a similar implementation were done for MSDM. **Describe alternatives you've considered**: - Tested solutions read the key from the table and enter it with an alternative flow - this does not work as Microsoft has implemented additional control methods to block manual activation. - Asked the hardware vendor to include SLIC, but he is not allowed to provide Microsoft licenses in SLIC format anymore.
Right. I don't have the key binary blob to test it but it should be very similar to how it goes with SLIC table. I'll try to make time for it soon. Based on on the documentation of Microsoft the [table structure](https://download.microsoft.com/download/1/3/8/13818231-a8ad-4fe7-b4e1-a63cbc5d6027/microsoft-software-licensing-tables.docx) is identical. I have a binary blob available if need to validate, just let me know jfyi, I'm wondering if we could get a new entry in the acpi table support by libvirt first. https://gitlab.com/libvirt/libvirt/-/issues/748 I'm not sure if using [qemu passthrough](https://libvirt.org/kbase/qemu-passthrough-security.html) is acceptable for this, I'd rather avoid if possible. Note that, if this is urgent to you, I think you can achieve what you want using KubeVirt's sidecars with kubevirt > 1.2.0, using a PVC to store the MSDN data and a script to change libvirt's xml Please note that qemu cmd line passthrough is considered unsupported for production environments. And very likely won't work either because libvirt treats these additional arguments as an opaque string that's just appended onto the cmd line. But at the same time, it starts QEMU with an unique SELinux context and I doubt accessing "random" files is going to be allowed (as it would defy the whole purpose of SELinux). There are patches on the libvirt list and I've reviewed them. Hopefully, this is gonna land in libvirt soon. Alright. Support to MSDM was added to libvirt main branch, it'll be included in the next libvirt release which should be early April... it should be doable to have by kubevirt v1.6.0. Cheers, /assign Thanks for your work on this request, looking forward to the 1.6 alpha release so that we can test this feature libvirt 11.2 was released [Today](https://lists.libvirt.org/archives/list/devel@lists.libvirt.org/thread/PB3CBBH75F2JOM4XRUSRRJ6INB756LHL). 
When it does show up in [c9s](https://mirror.stream.centos.org/9-stream/AppStream/x86_64/os/Packages/), I'll post a PR. Quick update. On c9s there'll be no more rebases so we will not get 11.2 there. Still, the required feature in libvirt was backport and is now live on c9s: [libvirt-10.10.0-9.el9.x86_64.rpm](https://mirror.stream.centos.org/9-stream/AppStream/x86_64/os/Packages/libvirt-10.10.0-9.el9.x86_64.rpm)
Right. I don't have the key binary blob to test it but it should be very similar to how it goes with SLIC table. I'll try to make time for it soon. Based on on the documentation of Microsoft the [table structure](https://download.microsoft.com/download/1/3/8/13818231-a8ad-4fe7-b4e1-a63cbc5d6027/microsoft-software-licensing-tables.docx) is identical. I have a binary blob available if need to validate, just let me know jfyi, I'm wondering if we could get a new entry in the acpi table support by libvirt first. https://gitlab.com/libvirt/libvirt/-/issues/748 I'm not sure if using [qemu passthrough](https://libvirt.org/kbase/qemu-passthrough-security.html) is acceptable for this, I'd rather avoid if possible. Note that, if this is urgent to you, I think you can achieve what you want using KubeVirt's sidecars with kubevirt > 1.2.0, using a PVC to store the MSDN data and a script to change libvirt's xml Please note that qemu cmd line passthrough is considered unsupported for production environments. And very likely won't work either because libvirt treats these additional arguments as an opaque string that's just appended onto the cmd line. But at the same time, it starts QEMU with an unique SELinux context and I doubt accessing "random" files is going to be allowed (as it would defy the whole purpose of SELinux). There are patches on the libvirt list and I've reviewed them. Hopefully, this is gonna land in libvirt soon. Alright. Support to MSDM was added to libvirt main branch, it'll be included in the next libvirt release which should be early April... it should be doable to have by kubevirt v1.6.0. Cheers, /assign Thanks for your work on this request, looking forward to the 1.6 alpha release so that we can test this feature libvirt 11.2 was released [Today](https://lists.libvirt.org/archives/list/devel@lists.libvirt.org/thread/PB3CBBH75F2JOM4XRUSRRJ6INB756LHL). 
When it does show up in [c9s](https://mirror.stream.centos.org/9-stream/AppStream/x86_64/os/Packages/), I'll post a PR. Quick update. On c9s there'll be no more rebases so we will not get 11.2 there. Still, the required feature in libvirt was backport and is now live on c9s: [libvirt-10.10.0-9.el9.x86_64.rpm](https://mirror.stream.centos.org/9-stream/AppStream/x86_64/os/Packages/libvirt-10.10.0-9.el9.x86_64.rpm) Hi @victortoso , can you give an estimate for when this PR will be merged and released? > can you give an estimate for when this PR will be merged and released? The next planned release is KubeVirt 1.6 and here is the [schedule](https://github.com/kubevirt/sig-release/blob/main/releases/v1.6/schedule.md). It has to be merged before the code freeze on June 25. Considering that it got a few rounds of reviews and it currently has `lgtm` we are in good pace for 1.6 Cheers,
[ "https://github.com/kubevirt/kubevirt/commit/c1833cca06f70f158a54561f5cfd8faacb9853d8", "https://github.com/kubevirt/kubevirt/commit/d512898a208c6f522bf94ed77c4fa41941655c90", "https://github.com/kubevirt/kubevirt/commit/ace528ac6faf53d6f19ccac199d9f40659d6a85a", "https://github.com/kubevirt/kubevirt/commit/1...
2025-05-09T16:11:42Z
https://github.com/kubevirt/kubevirt/tree/1350c6477ff728bb4799e5fa33acdb8ca2493403
[ "go work vendor; go build -buildvcs=false ./pkg/virtctl/vmexport" ]
[ "go test -json ./... *>&1 | Tee-Object -FilePath C:\\testbed\\go-test-results.json" ]
[ "Get-Content C:\\testbed\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]:
    """Extract per-test outcomes from a mixed test log.

    Understands several formats found in the same log:
      * `go test -json` event objects (Action pass/fail/skip, plus Output events),
      * plain verbose/condensed `go test` result lines,
      * pytest `file.py::test PASSED/FAILED/...` lines,
      * unittest `Case.method ... ok/FAIL/...` lines,
      * Jest check-mark lines.

    Returns a dict mapping test name -> "pass" | "fail" | "skip".
    When a test is seen more than once, fail wins over skip, skip over pass.
    """
    import re
    # Accumulated results: test name -> final status.
    results: dict[str, str] = {}

    def record(name: str, status: str):
        # Merge a single observation into `results`, keeping the "worst"
        # status seen so far (fail > skip > pass). `>=` means an equal
        # status simply overwrites, which is harmless.
        if not name:
            return
        status = status.lower()
        if status not in ("pass", "fail", "skip"):
            return
        precedence = {"fail": 2, "skip": 1, "pass": 0}
        prev = results.get(name)
        if prev is None or precedence[status] >= precedence[prev]:
            results[name] = status

    def clean_pkg(s: str) -> str:
        # Strip whitespace and collapse runs of dots — package names in the
        # log may have been wrapped/garbled across lines.
        s = re.sub(r"\s+", "", s or "")
        s = re.sub(r"\.{2,}", ".", s)
        return s

    def is_valid_go_name(name: str) -> bool:
        # Go test names typically start with "Test" and use allowed chars
        if not name.startswith("Test"):
            return False
        if not re.fullmatch(r"[A-Za-z0-9_/.\-]+", name):
            return False
        return True

    # Parse Go test lines from text with anchored patterns and required durations
    def parse_go_output_lines(text: str, pkg: str | None):
        # When `pkg` is given, recorded keys are "<pkg>/<TestName>"; otherwise
        # the bare test name is used.
        for line in text.splitlines():
            line = line.strip()
            if not line:
                continue
            # Verbose: --- PASS/FAIL/SKIP: TestName (0.00s)
            m = re.match(r"^---\s+(PASS|FAIL|SKIP)\s*:\s*([A-Za-z0-9_/.\-]+)\s*\(\d+(?:\.\d+)?s\)\s*$", line)
            if m:
                st, name = m.group(1), m.group(2)
                if is_valid_go_name(name):
                    key = f"{pkg}/{name}" if pkg else name
                    record(key, {"PASS": "pass", "FAIL": "fail", "SKIP": "skip"}[st])
                continue
            # Condensed: PASS/FAIL/SKIP TestName (0.00s)
            m = re.match(r"^(PASS|FAIL|SKIP)\s+([A-Za-z0-9_/.\-]+)\s*\(\d+(?:\.\d+)?s\)\s*$", line)
            if m:
                st, name = m.group(1), m.group(2)
                if is_valid_go_name(name):
                    key = f"{pkg}/{name}" if pkg else name
                    record(key, {"PASS": "pass", "FAIL": "fail", "SKIP": "skip"}[st])
                continue

    # JSON-like events parsing.
    # NOTE: the pattern only matches flat (non-nested) brace pairs, which is
    # what `go test -json` emits — one flat object per event.
    for m in re.finditer(r"\{[^{}]*\}", log, flags=re.DOTALL):
        obj = m.group(0)
        action_m = re.search(r'"Action"\s*:\s*"([^"]+)"', obj)
        if not action_m:
            continue
        action = action_m.group(1).lower()
        if action in ("pass", "fail", "skip"):
            # Go -json test event
            test_m = re.search(r'"Test"\s*:\s*"([^"]+)"', obj, flags=re.DOTALL)
            if test_m:
                # Events without a "Test" field are package-level and ignored.
                test = test_m.group(1)
                pkg_m = re.search(r'"Package"\s*:\s*"([^"]+)"', obj, flags=re.DOTALL)
                pkg = clean_pkg(pkg_m.group(1)) if pkg_m else ""
                if is_valid_go_name(test):
                    key = f"{pkg}/{test}" if pkg else test
                    record(key, action)
            continue
        if action == "output":
            # "Output" events can embed results of nested/other test runners;
            # unescape the captured payload and re-scan it.
            pkg_m = re.search(r'"Package"\s*:\s*"([^"]+)"', obj, flags=re.DOTALL)
            pkg = clean_pkg(pkg_m.group(1)) if pkg_m else ""
            out_m = re.search(r'"Output"\s*:\s*"([^"]*)"', obj, flags=re.DOTALL)
            if not out_m:
                continue
            out = out_m.group(1)
            # Unescape common sequences (escaped quotes/backslashes are not
            # handled — the capture above already stops at the first `"`).
            out = out.replace("\\n", "\n").replace("\\t", "\t").replace("\\r", "\r")
            # Parse go lines from this output chunk
            parse_go_output_lines(out, pkg or None)
            # pytest: file.py::test_name PASSED/FAILED/SKIPPED/ERROR
            for file, name, st in re.findall(r"([^\s:]+\.py)::([^\s]+)\s+(PASSED|FAILED|SKIPPED|ERROR)", out):
                status = st.lower()
                if status == "error":
                    status = "fail"  # pytest ERROR counted as a failure
                record(f"{file}::{name}", status)
            # unittest: TestClass.test_method ... ok/FAIL/ERROR/skipped
            for case, st in re.findall(r"(\S+\.\S+)\s+\.\.\.\s+(ok|FAIL|ERROR|skipped)", out):
                status = st.lower()
                if status == "error":
                    status = "fail"
                elif status == "ok":
                    status = "pass"
                record(case, status)
            # Jest symbols (✓ pass, ✕ fail, ○ skip)
            for mark, name in re.findall(r"([✓✕○])\s+([^\n\r]+)", out):
                status = {"✓": "pass", "✕": "fail", "○": "skip"}.get(mark)
                if status:
                    record(name.strip(), status)

    # Fallback: scan entire log lines for non-JSON outputs.
    # NOTE(review): fallback go results are recorded WITHOUT a package prefix,
    # so the same test may appear under two keys if it was also seen via a
    # JSON event with a package — presumed acceptable for this consumer.
    parse_go_output_lines(log, pkg=None)
    # pytest lines in whole log
    for file, name, st in re.findall(r"([^\s:]+\.py)::([^\s]+)\s+(PASSED|FAILED|SKIPPED|ERROR)", log):
        status = st.lower()
        if status == "error":
            status = "fail"
        record(f"{file}::{name}", status)
    # unittest lines in whole log
    for case, st in re.findall(r"(\S+\.\S+)\s+\.\.\.\s+(ok|FAIL|ERROR|skipped)", log):
        status = st.lower()
        if status == "error":
            status = "fail"
        elif status == "ok":
            status = "pass"
        record(case, status)
    # Jest symbols in whole log
    for mark, name in re.findall(r"([✓✕○])\s+([^\n\r]+)", log):
        status = {"✓": "pass", "✕": "fail", "○": "skip"}.get(mark)
        if status:
            record(name.strip(), status)
    return results
[ "kubevirt.io/kubevirt/pkg/virt-launcher/virtwrap/device/TestNetwork", "kubevirt.io/kubevirt/pkg/instancetype/webhooks/TestWebhooks", "kubevirt.io/kubevirt/tools/cache/TestPatch", "kubevirt.io/kubevirt/pkg/instancetype/annotations/TestAnnotations", "kubevirt.io/kubevirt/pkg/synchronization-controller/TestVi...
[ "kubevirt.io/kubevirt/pkg/storage/pod/annotations/TestAnnotations", "kubevirt.io/kubevirt/pkg/controller/TestController", "kubevirt.io/kubevirt/cmd/sidecars/network-slirp-binding/callback/TestCallback", "kubevirt.io/kubevirt/pkg/certificates/bootstrap/TestBootstrap", "kubevirt.io/kubevirt/pkg/virtctl/client...
starryzhang/sweb.eval.win.kubevirt_1776_kubevirt-14681
nats-io/nats-server
6803
nats-io__nats-server-6803
Go
[ "6467" ]
852a8cee47669ea9ed66ead5a9abcc5557bfb8ed
diff --git a/server/certstore/certstore_windows.go b/server/certstore/certstore_windows.go index d47adb6eea3..8b710a618bb 100644 --- a/server/certstore/certstore_windows.go +++ b/server/certstore/certstore_windows.go @@ -593,7 +593,7 @@ func winSignECDSA(kh uintptr, digest []byte) ([]byte, error) { return nil, ErrStoreECDSASigningError } - return winPackECDSASigValue(bytes.NewReader(buf[:size]), len(digest)) + return winPackECDSASigValue(bytes.NewReader(buf[:size]), int(size/2)) } func winPackECDSASigValue(r io.Reader, digestLength int) ([]byte, error) {
diff --git a/server/certstore_windows_test.go b/server/certstore_windows_test.go index 1da71685c1e..27c72a1db62 100644 --- a/server/certstore_windows_test.go +++ b/server/certstore_windows_test.go @@ -16,6 +16,7 @@ package server import ( + "crypto/tls" "fmt" "net/url" "os" @@ -186,7 +187,7 @@ func TestLeafTLSWindowsCertStore(t *testing.T) { } // TestServerTLSWindowsCertStore tests the topology of a NATS server requiring TLS and gettings it own server -// cert identiy (as used when accepting NATS client connections and negotiating TLS) from Windows certificate store. +// cert identity (as used when accepting NATS client connections and negotiating TLS) from Windows certificate store. func TestServerTLSWindowsCertStore(t *testing.T) { // Server Identity (server.pem) @@ -307,3 +308,43 @@ func TestServerIgnoreExpiredCerts(t *testing.T) { }) } } + +func TestWindowsTLS12ECDSA(t *testing.T) { + err := runPowershellScript("../test/configs/certs/tlsauth/certstore/import-p12-server.ps1", []string{"ecdsa_server.pfx"}) + if err != nil { + t.Fatalf("expected powershell provision to succeed: %v", err) + } + + config := createConfFile(t, []byte(` + listen: "localhost:-1" + tls { + cert_store: "WindowsCurrentUser" + cert_match_by: "Thumbprint" + cert_match: "4F8AF21756E5DBBD54619BBB6F3CC5D455ED4468" + cert_match_skip_invalid: true + timeout: 5 + } + `)) + defer removeFile(t, config) + + srv, _ := RunServerWithConfig(config) + if srv == nil { + t.Fatalf("expected to be able start server with cert store configuration") + } + defer srv.Shutdown() + + for name, version := range map[string]uint16{ + "TLS 1.3": tls.VersionTLS13, + "TLS 1.2": tls.VersionTLS12, + } { + t.Run(name, func(t *testing.T) { + tc := &tls.Config{MaxVersion: version, MinVersion: version, InsecureSkipVerify: true} + + if _, err = nats.Connect(srv.clientConnectURLs[0], nats.Secure(tc)); err != nil { + t.Fatalf("connection with %s: %v", name, err) + } + + t.Logf("successful connection with %s", name) + }) + } +} 
diff --git a/test/configs/certs/tlsauth/certstore/ecdsa_server.key b/test/configs/certs/tlsauth/certstore/ecdsa_server.key new file mode 100644 index 00000000000..ec46adaf25a --- /dev/null +++ b/test/configs/certs/tlsauth/certstore/ecdsa_server.key @@ -0,0 +1,6 @@ +-----BEGIN PRIVATE KEY----- +MIG2AgEAMBAGByqGSM49AgEGBSuBBAAiBIGeMIGbAgEBBDAfrq5ri+W7sYQp/6xc +lH6YbTy43dnnrKUbbdlzsDn4DPzO1k15LVXx8EPK+7vuh5uhZANiAAR6V4nqBt3k +ZfO9H664fPB8PkuDhphBfzxbSFFcr2DXj11g0ZV56Yjnh3RMC4Lud29ofpTQd8IP +9bspEvjnBvOw60tH9WiquWqxLgSREUZVLEMD1dZ3JSVUfDCI2zzf00s= +-----END PRIVATE KEY----- diff --git a/test/configs/certs/tlsauth/certstore/ecdsa_server.pem b/test/configs/certs/tlsauth/certstore/ecdsa_server.pem new file mode 100644 index 00000000000..8f0be17f6bb --- /dev/null +++ b/test/configs/certs/tlsauth/certstore/ecdsa_server.pem @@ -0,0 +1,12 @@ +-----BEGIN CERTIFICATE----- +MIIB2jCCAWCgAwIBAgIUKRYMoky98mN3mpyL6PMIY8/d2OswCgYIKoZIzj0EAwMw +FjEUMBIGA1UEAwwLbmF0cy1zZXJ2ZXIwHhcNMjUwMjA3MTgxNDM5WhcNMzUwMjA1 +MTgxNDM5WjAWMRQwEgYDVQQDDAtuYXRzLXNlcnZlcjB2MBAGByqGSM49AgEGBSuB +BAAiA2IABHpXieoG3eRl870frrh88Hw+S4OGmEF/PFtIUVyvYNePXWDRlXnpiOeH +dEwLgu53b2h+lNB3wg/1uykS+OcG87DrS0f1aKq5arEuBJERRlUsQwPV1nclJVR8 +MIjbPN/TS6NvMG0wHQYDVR0OBBYEFHbcfCfGs+l2bVg22WLTdV10AnpTMB8GA1Ud +IwQYMBaAFHbcfCfGs+l2bVg22WLTdV10AnpTMA8GA1UdEwEB/wQFMAMBAf8wGgYD +VR0RBBMwEYcEfwAAAYIJbG9jYWxob3N0MAoGCCqGSM49BAMDA2gAMGUCMQDhzRyw +Q+m2fMFyqIgFc890jLIzh2bGqlmdkUpb+/Z/y9zKZQPSG5xhXp7A/FhvM24CMHVW +ZIWBCJJRhw/L3s73QHX1d+M6mNqES16cnnht6j9DF1AddIipcsnBcpo4s7K/Xg== +-----END CERTIFICATE----- diff --git a/test/configs/certs/tlsauth/certstore/ecdsa_server.pfx b/test/configs/certs/tlsauth/certstore/ecdsa_server.pfx new file mode 100644 index 00000000000..2dfb2aa71e9 Binary files /dev/null and b/test/configs/certs/tlsauth/certstore/ecdsa_server.pfx differ diff --git a/test/configs/certs/tlsauth/certstore/generate_ecdsa_test_cert.sh b/test/configs/certs/tlsauth/certstore/generate_ecdsa_test_cert.sh new file mode 100755 index 
00000000000..0b5e1254fba --- /dev/null +++ b/test/configs/certs/tlsauth/certstore/generate_ecdsa_test_cert.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +set -eou pipefail + +SCRIPT_ROOT="$(cd -P "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" + +cert_file_prefix="${SCRIPT_ROOT}/ecdsa_server" +export_password="s3cr3t" + +openssl req -x509 \ + -days 3650 \ + -newkey ec \ + -pkeyopt ec_paramgen_curve:secp384r1 \ + -sha384 \ + -subj "/CN=nats-server" \ + --addext "subjectAltName=IP:127.0.0.1,DNS:localhost" \ + -nodes \ + -out "${cert_file_prefix}.pem" \ + -keyout "${cert_file_prefix}.key" \ + -outform PEM >/dev/null 2>&1 + +openssl pkcs12 \ + -inkey "${cert_file_prefix}.key" \ + -in "${cert_file_prefix}.pem" \ + -export \ + -password "pass:${export_password}" \ + -out "${cert_file_prefix}.pfx" >/dev/null 2>&1
NATS generates incorrect signature when negotiating TLS using a P384/SHA384 certificate from the windows cert store ### Observed behavior nats-server configured with a P384/SHA384 certificate and a NATS.Net client attempting to connect throws an exception on the client. In certstore_windows.go, when winSignECDSA is called with a 32 byte digest, winNCryptSignHash returns a 96 byte signature. However, winPackECDSASigValue uses the length of the digest (32 bytes) as the size of sigR and sigS, meaning not all of the signature generated by winNCryptSignHash is used in the generation of the signature sent as part of the TLS negotiation. This causes a `cannot contact local security authority` exception to be thrown when the client attempts to continue the TLS handshake. When using the nats cli, the digest passed in is 48 bytes long, so the issue doesn't arise. ### Expected behavior The TLS negotiation completes successfully ### Server and client version nats-server: v2.11.0-dev NATS.Net 2.5.5 ### Host environment Windows 10 Enterprise 22H2 19045.5371 ### Steps to reproduce # Create the certificate ``` openssl req -x509 -days 365 -newkey ec -pkeyopt ec_paramgen_curve:secp384r1 -sha384 -subj "/CN=nats-server" --addext "subjectAltName=IP:127.0.0.1,DNS:localhost" -nodes -out nats.pem -keyout nats.key -outform PEM openssl pkcs12 -inkey nats.key -in nats.pem -export -out nats.pfx ``` Import the certificate into the windows cert store, then configure nats-server to use that cert: e.g. 
nats.conf ``` { "host": "localhost", "port": 4222, "tls": { "cert_store": "windowscurrentuser", "cert_match_by": "thumbprint", "cert_match": "<certificate SHA1 thumbprint>", "verify": false } } ``` Start nats: `nats-server -c nats.conf` Using a minimal C# program: ```C# var client = new NATS.Net.NatsClient("nats://127.0.0.1:4222"); await client.ConnectAsync(); ``` The connect line throws an exception: ``` NATS.Client.Core.NatsException HResult=0x80131500 Message=can not start to connect nats server: nats://127.0.0.1:4222 Source=NATS.Client.Core StackTrace: at NATS.Client.Core.NatsConnection.<InitialConnectAsync>d__88.MoveNext() at NATS.Client.Core.NatsConnection.<ConnectAsync>d__77.MoveNext() at Program.<<Main>$>d__0.MoveNext() in c:\temp\natsrepro\TestClient\Program.cs:line 3 at Program.<Main>(String[] args) Inner Exception 1: NatsException: TLS authentication failed Inner Exception 2: AuthenticationException: Authentication failed, see inner exception. Inner Exception 3: Win32Exception: The Local Security Authority cannot be contacted ```
Would you be able to also provide the error message in the NATS server logs when this connection attempt fails? Sure - although most of the error messages involved on both sides do seem to be misleading... ``` [82928] 2025/02/10 09:44:13.469772 [INF] Listening for client connections on localhost:4222 [82928] 2025/02/10 09:44:13.469772 [INF] TLS required for client connections [82928] 2025/02/10 09:44:13.469772 [INF] Server is ready [82928] 2025/02/10 09:44:13.469772 [DBG] maxprocs: Leaving GOMAXPROCS=20: CPU quota undefined [82928] 2025/02/10 09:44:26.741813 [DBG] 127.0.0.1:52670 - cid:5 - Client connection created [82928] 2025/02/10 09:44:26.742320 [DBG] 127.0.0.1:52670 - cid:5 - Starting TLS client connection handshake [82928] 2025/02/10 09:44:26.845971 [ERR] 127.0.0.1:52670 - cid:5 - TLS handshake error: remote error: tls: protocol version not supported [82928] 2025/02/10 09:44:26.845971 [DBG] 127.0.0.1:52670 - cid:5 - Client connection closed: TLS Handshake Failure ``` It seems that sigR and sigS should be the same size for an ECDSA signature so, instead of winPackECDSASigValue using the length of the digest (32 bytes) as the size of sigR and sigS, if it uses half the buffer length as the size passed to winPackECDSASigValue, then the signature will be correctly placed into the ASN.1 buffer. In my testing this appears to be a .NET issue on the client side. With this configuration, I can reproduce the error reported using the latest .NET client, but when using the Go client, or simply connecting with `openssl s_client -connect ...`, I can easily connect and issue NATS messages normally. This does not appear to be an issue with nats-server. It worked for me on Windows 11: * generated certs as above * installed pfx * installed pem to trusted root * server config as above * nats .net same code - just added a ping @andrew-strachan-ggl this is essentially how we setup tls in .net client. 
let me know if we're missing an option for your case: ```csharp using var client = new TcpClient("localhost", 4222); var stream = client.GetStream(); var reader = new StreamReader(stream, encoding: Encoding.Latin1); var writer = new StreamWriter(stream, encoding: Encoding.Latin1); var info = reader.ReadLine(); if (info.Contains("\"tls_required\":true")) { Console.WriteLine("Upgrade to TLS"); var ssl = new SslStream(stream); var options = new SslClientAuthenticationOptions { TargetHost = "localhost", EnabledSslProtocols = SslProtocols.Tls13 | SslProtocols.Tls12 }; ssl.AuthenticateAsClient(options); writer = new StreamWriter(ssl, Encoding.Latin1); reader = new StreamReader(ssl, Encoding.Latin1); } writer.Write("CONNECT {\"verbose\":false,\"name\":\"tls-test\"}\r\n"); writer.Write("PING\r\n"); writer.Flush(); var pong = reader.ReadLine(); // PONG Console.WriteLine($"{pong} Connected to NATS server"); ``` (it looks like a client issue. we can transfer to nats.net repo) **edit:** I managed to reproduce it on windows10 machine. one thing I can see is that Go client is using TLS 1.3 and code above is using 1.2 and failing with 'Protocol' alert afaics in wireshark. **edit2:** if I force the server to use TLS 1.2 (with server code change) then I get an error using nats cli as well `nats: error: tls: invalid signature by the server certificate: ECDSA verification failure` @andrew-strachan-ggl, @johnweldon are you somehow get the above code working using TLS 1.3 on Windows 10? when I try above snippet with `EnabledSslProtocols = SslProtocols.Tls13` only it doesn't seem to even go out to the network. It sounds like win10 does support TLS 1.3 🤔 I have not been able to get a .NET based client app to connect yet, but the Go client and the cli which uses that Go client are both able to connect on windows 10, so I suspect it's something in the .NET runtime or library Credit @andrew-strachan-ggl and @mtmk for this patch - tests still needed before the Draft PR will be ready to merge
[ "https://github.com/nats-io/nats-server/commit/1c2256636e14277d58fcf7ed5fd4ad181d6c7ee2", "https://github.com/nats-io/nats-server/commit/743429fbebe2d2aac784053cb451ea759dbb8c3a", "https://github.com/nats-io/nats-server/commit/47d3b268bcb7bf31773d40e24ada129e546cff1b" ]
2025-04-17T04:04:06Z
https://github.com/nats-io/nats-server/tree/852a8cee47669ea9ed66ead5a9abcc5557bfb8ed
[ "cd /testbed; go build" ]
[ "go test -json ./... > reports\\go-test-results.json" ]
[ "cat /testbed/reports/go-test-results.json" ]
def parser(log: str) -> dict[str, str]:
    """Parse `go test -json` output (possibly line-wrapped) into test results.

    Strategy:
      1. Walk the log character by character, extracting balanced-brace JSON
         objects while ignoring braces inside strings, and read the
         Action/Test fields from each object (with a regex fallback when
         `json.loads` fails).
      2. As a secondary pass, scan newline-collapsed text for verbose
         `--- PASS:`-style lines, tolerating tokens that were split by
         wrapping (e.g. "PASS" + stray "S").

    Returns a dict mapping test name -> "pass" | "fail" | "skip".
    """
    import json
    import re
    results: dict[str, str] = {}
    # severity order for merging multiple events for same test
    sev = {"pass": 0, "skip": 1, "fail": 2}

    def update(test: str, status: str):
        # Record `status` for `test`, keeping the more severe status when the
        # same test is reported multiple times (strict `>`: first observation
        # wins on ties).
        if not test or status not in sev:
            return
        prev = results.get(test)
        if prev is None or sev[status] > sev[prev]:
            results[test] = status

    # Scan the entire log to extract JSON objects robustly
    buf = []            # characters of the object currently being collected
    depth = 0           # current brace-nesting depth (0 = outside any object)
    in_string = False   # inside a JSON string literal?
    escape = False      # previous char was a backslash inside a string?
    # Pre-compile regex for fallback extraction inside a single JSON object
    action_re = re.compile(r'"Action"\s*:\s*"(pass|fail|skip)"', re.IGNORECASE)
    test_re = re.compile(r'"Test"\s*:\s*"([^"]+)"')

    def process_object(obj_text: str):
        # Remove literal newlines and carriage returns introduced by wrapping
        s = obj_text.replace("\r", "").replace("\n", "")
        # Try JSON parse
        try:
            obj = json.loads(s)
            action = str(obj.get("Action", "")).lower()
            test = obj.get("Test")
            # Objects without "Test" are package-level events — ignored.
            if test and action in ("pass", "fail", "skip"):
                update(test, action)
            return
        except Exception:
            pass
        # Fallback: regex extraction within this object's text
        ma = action_re.search(s)
        mt = test_re.search(s)
        if ma and mt:
            action = ma.group(1).lower()
            test = mt.group(1)
            update(test, action)

    # Character-by-character JSON object extractor that ignores braces in strings
    for ch in log:
        if depth == 0:
            # Outside any object: wait for an opening brace to start one.
            if ch == "{":
                buf = ["{"]
                depth = 1
                in_string = False
                escape = False
            else:
                continue
        else:
            buf.append(ch)
            if in_string:
                # Inside a string: only an unescaped quote ends it; braces
                # here must not affect the depth counter.
                if escape:
                    escape = False
                else:
                    if ch == "\\":
                        escape = True
                    elif ch == '"':
                        in_string = False
            else:
                if ch == '"':
                    in_string = True
                elif ch == "{":
                    depth += 1
                elif ch == "}":
                    depth -= 1
                    if depth == 0:
                        # Balanced object complete — hand it off.
                        process_object("".join(buf))
                        buf = []

    # Secondary fallback: parse verbose lines, handling split tokens like PASS\nS:
    # Collapse newlines to handle cases like '--- PASS\nS:'
    flat = log.replace("\r", "").replace("\n", "")
    # Match PASS/FAIL/SKIP with tolerance for duplicated split letters,
    # capture test name up to the duration parenthesis.
    verbose_re = re.compile(r'---\s+(PAS+S|FAI+L|SKI+P):\s+(.+?)\s*\(', re.IGNORECASE)
    for m in verbose_re.finditer(flat):
        word = m.group(1).upper()
        name = m.group(2)
        # Classify by the word's unambiguous two-letter prefix.
        if word.startswith("PA"):
            st = "pass"
        elif word.startswith("FA"):
            st = "fail"
        else:
            st = "skip"
        update(name, st)
    return results
[ "TestEmptyStringSQ", "TestImportSubscriptionPartialOverlapWithPrefix/myprefix.\\u003e", "TestClusterDoubleMsgs", "TestJSONCompat//should_support_JSON_not_prettified_with_final_bracket_after_newline", "TestJSONCompat", "TestOCSPPeerIncrementalSaveLocalCache/Default_cache,_short_form:_mTLS_OCSP_peer_chec...
[ "TestLeafNodeSplitBuffer", "TestHashWheelNextExpiration", "TestHashWheelEncodeDecode", "TestAccountIsolationExportImport/export_literal,_import_literal_conf", "TestGenericSublistInvalidSubjectsInsert", "TestAuthCalloutServerClusterAndVersion", "TestLeafNodeSubs", "TestOCSPPeerDelegatedCAResponseSign...
starryzhang/sweb.eval.win.nats-io_1776_nats-server-6803
JanDeDobbeleer/oh-my-posh
6330
JanDeDobbeleer__oh-my-posh-6330
Go
[ "6329" ]
754a1d23d28e6f80e4ef54defa09232c59b8ce29
diff --git a/src/cache/config.go b/src/cache/config.go index 9fb423449b0a..48c09504716e 100644 --- a/src/cache/config.go +++ b/src/cache/config.go @@ -1,8 +1,8 @@ package cache type Config struct { - Duration Duration `json:"duration,omitempty" toml:"duration,omitempty"` - Strategy Strategy `json:"strategy,omitempty" toml:"strategy,omitempty"` + Duration Duration `json:"duration,omitempty" toml:"duration,omitempty" yaml:"duration,omitempty"` + Strategy Strategy `json:"strategy,omitempty" toml:"strategy,omitempty" yaml:"strategy,omitempty"` } type Strategy string diff --git a/src/color/colors.go b/src/color/colors.go index 308ac45722bb..11ec665781de 100644 --- a/src/color/colors.go +++ b/src/color/colors.go @@ -26,8 +26,8 @@ type String interface { } type Set struct { - Background Ansi `json:"background" toml:"background"` - Foreground Ansi `json:"foreground" toml:"foreground"` + Background Ansi `json:"background" toml:"background" yaml:"background"` + Foreground Ansi `json:"foreground" toml:"foreground" yaml:"foreground"` } func (c *Set) String() string { diff --git a/src/color/palettes.go b/src/color/palettes.go index 06417f465e48..0ac012832d6a 100644 --- a/src/color/palettes.go +++ b/src/color/palettes.go @@ -1,6 +1,6 @@ package color type Palettes struct { - List map[string]Palette `json:"list,omitempty" toml:"list,omitempty"` - Template string `json:"template,omitempty" toml:"template,omitempty"` + List map[string]Palette `json:"list,omitempty" toml:"list,omitempty" yaml:"list,omitempty"` + Template string `json:"template,omitempty" toml:"template,omitempty" yaml:"template,omitempty"` } diff --git a/src/config/block.go b/src/config/block.go index ff9dfa094356..06c1067cc2f4 100644 --- a/src/config/block.go +++ b/src/config/block.go @@ -26,15 +26,15 @@ const ( // Block defines a part of the prompt with optional segments type Block struct { - Type BlockType `json:"type,omitempty" toml:"type,omitempty"` - Alignment BlockAlignment `json:"alignment,omitempty" 
toml:"alignment,omitempty"` - Filler string `json:"filler,omitempty" toml:"filler,omitempty"` - Overflow Overflow `json:"overflow,omitempty" toml:"overflow,omitempty"` - LeadingDiamond string `json:"leading_diamond,omitempty" toml:"leading_diamond,omitempty"` - TrailingDiamond string `json:"trailing_diamond,omitempty" toml:"trailing_diamond,omitempty"` - Segments []*Segment `json:"segments,omitempty" toml:"segments,omitempty"` - MaxWidth int `json:"max_width,omitempty" toml:"max_width,omitempty"` - MinWidth int `json:"min_width,omitempty" toml:"min_width,omitempty"` - Newline bool `json:"newline,omitempty" toml:"newline,omitempty"` - Force bool `json:"force,omitempty" toml:"force,omitempty"` + Type BlockType `json:"type,omitempty" toml:"type,omitempty" yaml:"type,omitempty"` + Alignment BlockAlignment `json:"alignment,omitempty" toml:"alignment,omitempty" yaml:"alignment,omitempty"` + Filler string `json:"filler,omitempty" toml:"filler,omitempty" yaml:"filler,omitempty"` + Overflow Overflow `json:"overflow,omitempty" toml:"overflow,omitempty" yaml:"overflow,omitempty"` + LeadingDiamond string `json:"leading_diamond,omitempty" toml:"leading_diamond,omitempty" yaml:"leading_diamond,omitempty"` + TrailingDiamond string `json:"trailing_diamond,omitempty" toml:"trailing_diamond,omitempty" yaml:"trailing_diamond,omitempty"` + Segments []*Segment `json:"segments,omitempty" toml:"segments,omitempty" yaml:"segments,omitempty"` + MaxWidth int `json:"max_width,omitempty" toml:"max_width,omitempty" yaml:"max_width,omitempty"` + MinWidth int `json:"min_width,omitempty" toml:"min_width,omitempty" yaml:"min_width,omitempty"` + Newline bool `json:"newline,omitempty" toml:"newline,omitempty" yaml:"newline,omitempty"` + Force bool `json:"force,omitempty" toml:"force,omitempty" yaml:"force,omitempty"` } diff --git a/src/config/config.go b/src/config/config.go index f6132ec7f7d7..44c47728fb7c 100644 --- a/src/config/config.go +++ b/src/config/config.go @@ -24,35 +24,35 @@ const ( // 
Config holds all the theme for rendering the prompt type Config struct { - Palette color.Palette `json:"palette,omitempty" toml:"palette,omitempty"` - DebugPrompt *Segment `json:"debug_prompt,omitempty" toml:"debug_prompt,omitempty"` - Var map[string]any `json:"var,omitempty" toml:"var,omitempty"` - Palettes *color.Palettes `json:"palettes,omitempty" toml:"palettes,omitempty"` - ValidLine *Segment `json:"valid_line,omitempty" toml:"valid_line,omitempty"` - SecondaryPrompt *Segment `json:"secondary_prompt,omitempty" toml:"secondary_prompt,omitempty"` - TransientPrompt *Segment `json:"transient_prompt,omitempty" toml:"transient_prompt,omitempty"` - ErrorLine *Segment `json:"error_line,omitempty" toml:"error_line,omitempty"` - TerminalBackground color.Ansi `json:"terminal_background,omitempty" toml:"terminal_background,omitempty"` + Palette color.Palette `json:"palette,omitempty" toml:"palette,omitempty" yaml:"palette,omitempty"` + DebugPrompt *Segment `json:"debug_prompt,omitempty" toml:"debug_prompt,omitempty" yaml:"debug_prompt,omitempty"` + Var map[string]any `json:"var,omitempty" toml:"var,omitempty" yaml:"var,omitempty"` + Palettes *color.Palettes `json:"palettes,omitempty" toml:"palettes,omitempty" yaml:"palettes,omitempty"` + ValidLine *Segment `json:"valid_line,omitempty" toml:"valid_line,omitempty" yaml:"valid_line,omitempty"` + SecondaryPrompt *Segment `json:"secondary_prompt,omitempty" toml:"secondary_prompt,omitempty" yaml:"secondary_prompt,omitempty"` + TransientPrompt *Segment `json:"transient_prompt,omitempty" toml:"transient_prompt,omitempty" yaml:"transient_prompt,omitempty"` + ErrorLine *Segment `json:"error_line,omitempty" toml:"error_line,omitempty" yaml:"error_line,omitempty"` + TerminalBackground color.Ansi `json:"terminal_background,omitempty" toml:"terminal_background,omitempty" yaml:"terminal_background,omitempty"` origin string - PWD string `json:"pwd,omitempty" toml:"pwd,omitempty"` - AccentColor color.Ansi `json:"accent_color,omitempty" 
toml:"accent_color,omitempty"` - Output string `json:"-" toml:"-"` - ConsoleTitleTemplate string `json:"console_title_template,omitempty" toml:"console_title_template,omitempty"` - Format string `json:"-" toml:"-"` - Upgrade *upgrade.Config `json:"upgrade,omitempty" toml:"upgrade,omitempty"` - Cycle color.Cycle `json:"cycle,omitempty" toml:"cycle,omitempty"` - ITermFeatures terminal.ITermFeatures `json:"iterm_features,omitempty" toml:"iterm_features,omitempty"` - Blocks []*Block `json:"blocks,omitempty" toml:"blocks,omitempty"` - Tooltips []*Segment `json:"tooltips,omitempty" toml:"tooltips,omitempty"` - Version int `json:"version" toml:"version"` - AutoUpgrade bool `json:"-" toml:"-"` - ShellIntegration bool `json:"shell_integration,omitempty" toml:"shell_integration,omitempty"` - MigrateGlyphs bool `json:"-" toml:"-"` - PatchPwshBleed bool `json:"patch_pwsh_bleed,omitempty" toml:"patch_pwsh_bleed,omitempty"` - EnableCursorPositioning bool `json:"enable_cursor_positioning,omitempty" toml:"enable_cursor_positioning,omitempty"` + PWD string `json:"pwd,omitempty" toml:"pwd,omitempty" yaml:"pwd,omitempty"` + AccentColor color.Ansi `json:"accent_color,omitempty" toml:"accent_color,omitempty" yaml:"accent_color,omitempty"` + Output string `json:"-" toml:"-" yaml:"-"` + ConsoleTitleTemplate string `json:"console_title_template,omitempty" toml:"console_title_template,omitempty" yaml:"console_title_template,omitempty"` + Format string `json:"-" toml:"-" yaml:"-"` + Upgrade *upgrade.Config `json:"upgrade,omitempty" toml:"upgrade,omitempty" yaml:"upgrade,omitempty"` + Cycle color.Cycle `json:"cycle,omitempty" toml:"cycle,omitempty" yaml:"cycle,omitempty"` + ITermFeatures terminal.ITermFeatures `json:"iterm_features,omitempty" toml:"iterm_features,omitempty" yaml:"iterm_features,omitempty"` + Blocks []*Block `json:"blocks,omitempty" toml:"blocks,omitempty" yaml:"blocks,omitempty"` + Tooltips []*Segment `json:"tooltips,omitempty" toml:"tooltips,omitempty" 
yaml:"tooltips,omitempty"` + Version int `json:"version" toml:"version" yaml:"version"` + AutoUpgrade bool `json:"-" toml:"-" yaml:"-"` + ShellIntegration bool `json:"shell_integration,omitempty" toml:"shell_integration,omitempty" yaml:"shell_integration,omitempty"` + MigrateGlyphs bool `json:"-" toml:"-" yaml:"-"` + PatchPwshBleed bool `json:"patch_pwsh_bleed,omitempty" toml:"patch_pwsh_bleed,omitempty" yaml:"patch_pwsh_bleed,omitempty"` + EnableCursorPositioning bool `json:"enable_cursor_positioning,omitempty" toml:"enable_cursor_positioning,omitempty" yaml:"enable_cursor_positioning,omitempty"` updated bool - FinalSpace bool `json:"final_space,omitempty" toml:"final_space,omitempty"` - UpgradeNotice bool `json:"-" toml:"-"` + FinalSpace bool `json:"final_space,omitempty" toml:"final_space,omitempty" yaml:"final_space,omitempty"` + UpgradeNotice bool `json:"-" toml:"-" yaml:"-"` } func (cfg *Config) MakeColors(env runtime.Environment) color.String { diff --git a/src/config/segment.go b/src/config/segment.go index 0dadc90eec54..020e34ba0a9a 100644 --- a/src/config/segment.go +++ b/src/config/segment.go @@ -41,39 +41,39 @@ func (s *SegmentStyle) resolve(context any) SegmentStyle { type Segment struct { writer SegmentWriter env runtime.Environment - Properties properties.Map `json:"properties,omitempty" toml:"properties,omitempty"` - Cache *cache.Config `json:"cache,omitempty" toml:"cache,omitempty"` - Alias string `json:"alias,omitempty" toml:"alias,omitempty"` + Properties properties.Map `json:"properties,omitempty" toml:"properties,omitempty" yaml:"properties,omitempty"` + Cache *cache.Config `json:"cache,omitempty" toml:"cache,omitempty" yaml:"cache,omitempty"` + Alias string `json:"alias,omitempty" toml:"alias,omitempty" yaml:"alias,omitempty"` styleCache SegmentStyle name string - LeadingDiamond string `json:"leading_diamond,omitempty" toml:"leading_diamond,omitempty"` - TrailingDiamond string `json:"trailing_diamond,omitempty" 
toml:"trailing_diamond,omitempty"` - Template string `json:"template,omitempty" toml:"template,omitempty"` - Foreground color.Ansi `json:"foreground,omitempty" toml:"foreground,omitempty"` - TemplatesLogic template.Logic `json:"templates_logic,omitempty" toml:"templates_logic,omitempty"` - PowerlineSymbol string `json:"powerline_symbol,omitempty" toml:"powerline_symbol,omitempty"` - Background color.Ansi `json:"background,omitempty" toml:"background,omitempty"` - Filler string `json:"filler,omitempty" toml:"filler,omitempty"` - Type SegmentType `json:"type,omitempty" toml:"type,omitempty"` - Style SegmentStyle `json:"style,omitempty" toml:"style,omitempty"` - LeadingPowerlineSymbol string `json:"leading_powerline_symbol,omitempty" toml:"leading_powerline_symbol,omitempty"` - ForegroundTemplates template.List `json:"foreground_templates,omitempty" toml:"foreground_templates,omitempty"` - Tips []string `json:"tips,omitempty" toml:"tips,omitempty"` - BackgroundTemplates template.List `json:"background_templates,omitempty" toml:"background_templates,omitempty"` - Templates template.List `json:"templates,omitempty" toml:"templates,omitempty"` - ExcludeFolders []string `json:"exclude_folders,omitempty" toml:"exclude_folders,omitempty"` - IncludeFolders []string `json:"include_folders,omitempty" toml:"include_folders,omitempty"` - Needs []string `json:"-" toml:"-"` - MinWidth int `json:"min_width,omitempty" toml:"min_width,omitempty"` - MaxWidth int `json:"max_width,omitempty" toml:"max_width,omitempty"` - Timeout time.Duration `json:"timeout,omitempty" toml:"timeout,omitempty"` - Duration time.Duration `json:"-" toml:"-"` - NameLength int `json:"-" toml:"-"` - Interactive bool `json:"interactive,omitempty" toml:"interactive,omitempty"` - Enabled bool `json:"-" toml:"-"` - Newline bool `json:"newline,omitempty" toml:"newline,omitempty"` - InvertPowerline bool `json:"invert_powerline,omitempty" toml:"invert_powerline,omitempty"` - restored bool `json:"-" toml:"-"` + 
LeadingDiamond string `json:"leading_diamond,omitempty" toml:"leading_diamond,omitempty" yaml:"leading_diamond,omitempty"` + TrailingDiamond string `json:"trailing_diamond,omitempty" toml:"trailing_diamond,omitempty" yaml:"trailing_diamond,omitempty"` + Template string `json:"template,omitempty" toml:"template,omitempty" yaml:"template,omitempty"` + Foreground color.Ansi `json:"foreground,omitempty" toml:"foreground,omitempty" yaml:"foreground,omitempty"` + TemplatesLogic template.Logic `json:"templates_logic,omitempty" toml:"templates_logic,omitempty" yaml:"templates_logic,omitempty"` + PowerlineSymbol string `json:"powerline_symbol,omitempty" toml:"powerline_symbol,omitempty" yaml:"powerline_symbol,omitempty"` + Background color.Ansi `json:"background,omitempty" toml:"background,omitempty" yaml:"background,omitempty"` + Filler string `json:"filler,omitempty" toml:"filler,omitempty" yaml:"filler,omitempty"` + Type SegmentType `json:"type,omitempty" toml:"type,omitempty" yaml:"type,omitempty"` + Style SegmentStyle `json:"style,omitempty" toml:"style,omitempty" yaml:"style,omitempty"` + LeadingPowerlineSymbol string `json:"leading_powerline_symbol,omitempty" toml:"leading_powerline_symbol,omitempty" yaml:"leading_powerline_symbol,omitempty"` + ForegroundTemplates template.List `json:"foreground_templates,omitempty" toml:"foreground_templates,omitempty" yaml:"foreground_templates,omitempty"` + Tips []string `json:"tips,omitempty" toml:"tips,omitempty" yaml:"tips,omitempty"` + BackgroundTemplates template.List `json:"background_templates,omitempty" toml:"background_templates,omitempty" yaml:"background_templates,omitempty"` + Templates template.List `json:"templates,omitempty" toml:"templates,omitempty" yaml:"templates,omitempty"` + ExcludeFolders []string `json:"exclude_folders,omitempty" toml:"exclude_folders,omitempty" yaml:"exclude_folders,omitempty"` + IncludeFolders []string `json:"include_folders,omitempty" toml:"include_folders,omitempty" 
yaml:"include_folders,omitempty"` + Needs []string `json:"-" toml:"-" yaml:"-"` + MinWidth int `json:"min_width,omitempty" toml:"min_width,omitempty" yaml:"min_width,omitempty"` + MaxWidth int `json:"max_width,omitempty" toml:"max_width,omitempty" yaml:"max_width,omitempty"` + Timeout time.Duration `json:"timeout,omitempty" toml:"timeout,omitempty" yaml:"timeout,omitempty"` + Duration time.Duration `json:"-" toml:"-" yaml:"-"` + NameLength int `json:"-" toml:"-" yaml:"-"` + Interactive bool `json:"interactive,omitempty" toml:"interactive,omitempty" yaml:"interactive,omitempty"` + Enabled bool `json:"-" toml:"-" yaml:"-"` + Newline bool `json:"newline,omitempty" toml:"newline,omitempty" yaml:"newline,omitempty"` + InvertPowerline bool `json:"invert_powerline,omitempty" toml:"invert_powerline,omitempty" yaml:"invert_powerline,omitempty"` + restored bool `json:"-" toml:"-" yaml:"-"` } func (segment *Segment) Name() string { diff --git a/src/segments/path.go b/src/segments/path.go index a59ec51e6188..e61ab907ed30 100644 --- a/src/segments/path.go +++ b/src/segments/path.go @@ -617,17 +617,33 @@ func (pt *Path) replaceMappedLocations(inputPath string) (string, string) { return strings.NewReplacer("<", "<<>", ">", "<>>").Replace(path) } - for _, key := range keys { - if strings.HasPrefix(key, regexPrefix) { - input := strings.ReplaceAll(inputPath, `\`, `/`) - match, OK := regex.FindStringMatch(key[len(regexPrefix):], input, 1) - if !OK { - continue - } + handleRegex := func(key string) (string, bool) { + if !strings.HasPrefix(key, regexPrefix) { + return "", false + } + + input := strings.ReplaceAll(inputPath, `\`, `/`) + pattern := key[len(regexPrefix):] + + // Add (?i) at the start of the pattern for case-insensitive matching on Windows + if pt.windowsPath || (pt.env.IsWsl() && strings.HasPrefix(input, "/mnt/")) { + pattern = "(?i)" + pattern + } - // Replace the first match with the mapped location. 
- input = strings.Replace(input, match, pt.mappedLocations[key], 1) - input = path.Clean(input) + match, OK := regex.FindStringMatch(pattern, input, 1) + if !OK { + return "", false + } + + // Replace the first match with the mapped location. + input = strings.Replace(input, match, pt.mappedLocations[key], 1) + input = path.Clean(input) + + return input, true + } + + for _, key := range keys { + if input, OK := handleRegex(key); OK { return pt.parsePath(input) } diff --git a/src/upgrade/config.go b/src/upgrade/config.go index 784951dd3fb9..cae45bdcaf64 100644 --- a/src/upgrade/config.go +++ b/src/upgrade/config.go @@ -13,13 +13,13 @@ import ( ) type Config struct { - Cache cache.Cache `json:"-" toml:"-"` - Source Source `json:"source" toml:"source"` - Interval cache.Duration `json:"interval" toml:"interval"` - Version string `json:"-" toml:"-"` - Auto bool `json:"auto" toml:"auto"` - DisplayNotice bool `json:"notice" toml:"notice"` - Force bool `json:"-" toml:"-"` + Cache cache.Cache `json:"-" toml:"-" yaml:"-"` + Source Source `json:"source" toml:"source" yaml:"source"` + Interval cache.Duration `json:"interval" toml:"interval" yaml:"interval"` + Version string `json:"-" toml:"-" yaml:"-"` + Auto bool `json:"auto" toml:"auto" yaml:"auto"` + DisplayNotice bool `json:"notice" toml:"notice" yaml:"notice"` + Force bool `json:"-" toml:"-" yaml:"-"` } type Source string
diff --git a/src/segments/path_windows_test.go b/src/segments/path_windows_test.go index a4bd79be0000..9d581c7f009a 100644 --- a/src/segments/path_windows_test.go +++ b/src/segments/path_windows_test.go @@ -442,7 +442,7 @@ var testFullPathCustomMappedLocationsCases = []testFullPathCustomMappedLocations Expected: `\a\b\1234\f\e`, }, { - Pwd: `C:\Users\taylo\Documents\GitHub\project`, + Pwd: `C:\Users\taylo\Documents\github\project`, MappedLocations: map[string]string{`re:(.*Users/taylo/Documents/GitHub).*`: "github"}, GOOS: runtime.WINDOWS, PathSeparator: `\`,
Path segment mapped locations doesn't respect folder case (in)sensitivity ### Code of Conduct - [x] I agree to follow this project's Code of Conduct FWIW, this is about equal parts "bug" and "enhancement". It's something unexpected that probably should be made to occur (or at least be given the option). ### What happened? In the wake of #6327, I was eager to update my path config to work with WSL. However, I came across something a bit annoying: `oh-my-posh` doesn't correctly handle WSL mounted drive case-insensitivity. Take a config file something vaguely like this and a folder named "GitHub" inside my Windows filesystem: ```json { "mapped_locations": { "re:(.*/Users/taylo/Documents/GitHub).*": "github-icon", } ``` When entering the `GitHub` folder from the parent dir, `oh-my-posh` exhibits the following behaviour: 1. On windows, typing any of "Github", "github", "GITHUB" or similar will correctly map to the "GitHub" folder and display the icon. `ls` shows my normal files in that folder. 2. On WSL, typing anything **BUT** the exact folder name (`GitHub`) will break oh-my-posh's mapping for this and all subdirectories **despite mounted drives being case-insensitive by default**. I know I was in the correct folder (`ls -a` showed normal files), but it still didn't show the icon. On the flip side, changing the latter portion of the config to "github" just makes windows *not* display the icon at all due to it going to the "GitHub" folder. On the contrary, WSL now only shows the icon with "github" as the exact folder name. ### Theme See attached files ### What OS are you seeing the problem on? Windows (and WSL by proxy) ### Which shell are you using? powershell (or bash for wsl) ### Log output See attached files for detailed logs by OS. ### Notes fsutil says it's insensitive... ```shell fsutil.exe file queryCaseSensitiveInfo . Case sensitive attribute on directory C:\Users\taylo\Documents\GitHub is disabled. 
``` After doing some good ol' `echo $pwd`, it looks like WSL considers itself to be inside the `Documents/github` folder when using `cd github`, but the equivalent on Windows leads to `Documents/GitHub` (the correct casing). FWIW, this is not an issue with regex (happens both with and without regex lines). [posh-config.json](https://github.com/user-attachments/files/19538031/posh-config.json) [posh-log-windows.txt](https://github.com/user-attachments/files/19538033/posh-log-windows.txt) [posh-log-wsl-correct.txt](https://github.com/user-attachments/files/19538032/posh-log-wsl-correct.txt) [posh-log-wsl-lower.txt](https://github.com/user-attachments/files/19538034/posh-log-wsl-lower.txt)
@Bertie690 this is annoying one as I probably have to move the entire matching to regex to correctly support this. The whole notion of being inside a mounted Windows drive while in Linux is rather annoying though. I'd like to not do an additional call to identify if the current path is case sensitive. Yeah it looks kinda bad since windows directory case sensitivity is now configurable on a [per folder basis](https://learn.microsoft.com/en-us/windows/wsl/case-sensitivity#change-the-case-sensitivity-of-files-and-directories). In fact, [linux](https://lwn.net/Articles/784041/) also supports per-folder case sensitivity funny enough. FWIW, there's a [golang issue](https://github.com/golang/go/issues/18358) about this... which has been open for about 9 years by now.
@Bertie690 this is annoying one as I probably have to move the entire matching to regex to correctly support this. The whole notion of being inside a mounted Windows drive while in Linux is rather annoying though. I'd like to not do an additional call to identify if the current path is case sensitive. Yeah it looks kinda bad since windows directory case sensitivity is now configurable on a [per folder basis](https://learn.microsoft.com/en-us/windows/wsl/case-sensitivity#change-the-case-sensitivity-of-files-and-directories). In fact, [linux](https://lwn.net/Articles/784041/) also supports per-folder case sensitivity funny enough. FWIW, there's a [golang issue](https://github.com/golang/go/issues/18358) about this... which has been open for about 9 years by now. This issue has been automatically locked since there has not been any recent activity (i.e. last half year) after it was closed. It helps our maintainers focus on the active issues. If you have found a problem that seems similar, please open a [discussion](https://github.com/JanDeDobbeleer/oh-my-posh/discussions/new?category=troubleshoot) first, complete the body with all the details necessary to reproduce, and mention this issue as reference.
[ "https://github.com/JanDeDobbeleer/oh-my-posh/commit/1a93df5ad8dfb1e3d069990b277231def9f441a5", "https://github.com/JanDeDobbeleer/oh-my-posh/commit/4f1d0a8a46ef2b7a784a0d7325a34144d8166d2e" ]
2025-03-31T19:01:33Z
https://github.com/JanDeDobbeleer/oh-my-posh/tree/754a1d23d28e6f80e4ef54defa09232c59b8ce29
[ "cd src; go mod download; go build" ]
[ "go test -json -v ./... > reports\\go-test-results.json" ]
[ "cat reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]: import re # Decode status tokens like PASS/FAIL/SKIP even if split across newlines or with duplicated letters def decode_status(s: str) -> str | None: t = re.sub(r'[^A-Z]', '', s.upper()) def _match(word: str) -> bool: return re.fullmatch(''.join(ch + r'+' for ch in word) + r'$', t) is not None if _match('PASS'): return 'pass' if _match('FAIL'): return 'fail' if _match('SKIP'): return 'skip' return None # Sanitize a display name for presentation (do not collapse repeated letters) def sanitize_display(name: str) -> str: s = name.strip() # remove leading punctuation like ':' (seen in some corrupted outputs) s = re.sub(r'^[^A-Za-z0-9_/]+', '', s) return s # Canonical key for deduplication across corrupted variants (collapses repeated letters) def soft_key(name: str) -> str: s = sanitize_display(name) s = re.sub(r'[\s\\]+', '', s) # remove whitespace/backslashes s = re.sub(r'([A-Za-z])\1+', r'\1', s) # collapse repeated letters for matching only return s # Normalize final key (do NOT collapse repeated letters; just clean trivial artifacts) def final_key_from_display(name: str) -> str: s = sanitize_display(name) s = re.sub(r'[\s\\]+', '', s) # remove whitespace/backslashes return s events: list[tuple[int, str, str, str]] = [] # (pos, key, status, source) disp_from_output: dict[str, str] = {} # key -> display name from output (authoritative) disp_from_action: dict[str, str] = {} # key -> display name from action (fallback) # 1) Parse Output lines '--- PASS/FAIL/SKIP: TestName(...)' out_pat = re.compile(r'---\s*([A-Z\s]+?)\s*:\s*([^\r\n(]+)', re.DOTALL) output_keys: set[str] = set() for m in out_pat.finditer(log): raw_status = m.group(1) test_name_raw = m.group(2) status = decode_status(raw_status) if not status: continue disp = sanitize_display(test_name_raw) key = soft_key(disp) pos = m.start() events.append((pos, key, status, 'output')) output_keys.add(key) if key not in disp_from_output: disp_from_output[key] = disp # 2) Parse 
JSON-like objects for Action+Test pairs obj_pat = re.compile(r'\{.*?\}', re.DOTALL) for obj in obj_pat.finditer(log): blob = obj.group(0) action_m = re.search(r'"Action"\s*:\s*"(pass|fail|skip)"', blob, re.IGNORECASE | re.DOTALL) test_m = re.search(r'"Test"\s*:\s*"([^"]*)"', blob, re.IGNORECASE | re.DOTALL) if not (action_m and test_m): continue status = action_m.group(1).lower() raw_name = test_m.group(1) disp = sanitize_display(raw_name) key = soft_key(disp) # If we already have an output-derived entry for this key, prefer that and skip action-derived if key in output_keys: continue pos = obj.start() events.append((pos, key, status, 'action')) if key not in disp_from_action: disp_from_action[key] = disp # 3) Sort events by position; last status for a test key wins by overwrite events.sort(key=lambda x: x[0]) # 4) Accumulate statuses per key status_map: dict[str, str] = {} for _, key, status, _ in events: status_map[key] = status if status in ('pass', 'fail', 'skip') else 'fail' # 5) Build final results with preferred display names (output > action > key) without collapsing repeated letters final: dict[str, str] = {} for key, status in status_map.items(): disp = disp_from_output.get(key) or disp_from_action.get(key) or key fk = final_key_from_display(disp) final[fk] = status return final
[ "TestExecutionTimeWriterDefaultThresholdE", "TestExecutionTimeWriterDefaulttThresholdEnabled" ]
[ "TestMigrateFolders", "TestKubectlSegment", "TestQuoteCshStr", "TestBuf", "TestTerraform", "TestRenderBlock", "TestIpifySegment", "TestElixir", "TestMakeColors", "TestPlasticGetCmOutputForCommand", "TestLanguageEnabledMissingCommandCustomText", "TestFullPathCustomMappedLocations", "TestCmake...
starryzhang/sweb.eval.win.jandedobbeleer_1776_oh-my-posh-6330
hashicorp/packer
13388
hashicorp__packer-13388
Go
[ "12918" ]
4cd7ad4721cd3aba078482cabe9b01de8202dfa2
diff --git a/go.mod b/go.mod index 7b231ff0850..4ec3e10a0f0 100644 --- a/go.mod +++ b/go.mod @@ -23,7 +23,7 @@ require ( github.com/hashicorp/go-version v1.6.0 github.com/hashicorp/hcl/v2 v2.19.1 github.com/hashicorp/hcp-sdk-go v0.136.0 - github.com/hashicorp/packer-plugin-sdk v0.6.0 + github.com/hashicorp/packer-plugin-sdk v0.6.2 github.com/jehiah/go-strftime v0.0.0-20171201141054-1d33003b3869 github.com/klauspost/compress v1.13.6 github.com/klauspost/pgzip v1.2.5 @@ -44,14 +44,14 @@ require ( github.com/zclconf/go-cty v1.13.3 github.com/zclconf/go-cty-yaml v1.0.1 golang.org/x/crypto v0.36.0 // indirect - golang.org/x/mod v0.19.0 + golang.org/x/mod v0.24.0 golang.org/x/net v0.38.0 golang.org/x/oauth2 v0.27.0 golang.org/x/sync v0.12.0 golang.org/x/sys v0.31.0 // indirect golang.org/x/term v0.30.0 // indirect golang.org/x/text v0.23.0 - golang.org/x/tools v0.23.0 + golang.org/x/tools v0.31.0 google.golang.org/api v0.150.0 // indirect google.golang.org/grpc v1.59.0 ) @@ -75,7 +75,7 @@ require ( github.com/Masterminds/goutils v1.1.1 // indirect github.com/Masterminds/semver/v3 v3.1.1 // indirect github.com/Masterminds/sprig/v3 v3.2.1 // indirect - github.com/Microsoft/go-winio v0.6.1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect github.com/ProtonMail/go-crypto v1.1.3 // indirect github.com/agext/levenshtein v1.2.3 // indirect github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092 // indirect @@ -180,7 +180,7 @@ require ( go.opentelemetry.io/otel/metric v1.17.0 // indirect go.opentelemetry.io/otel/trace v1.17.0 // indirect golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 // indirect - golang.org/x/time v0.3.0 // indirect + golang.org/x/time v0.11.0 // indirect golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect google.golang.org/genproto v0.0.0-20231016165738-49dd2c1f3d0b // indirect google.golang.org/genproto/googleapis/api v0.0.0-20231016165738-49dd2c1f3d0b // indirect diff --git a/go.sum b/go.sum index 
ff9337cb3ab..3e910fce997 100644 --- a/go.sum +++ b/go.sum @@ -28,8 +28,8 @@ github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0 github.com/Masterminds/sprig/v3 v3.2.1 h1:n6EPaDyLSvCEa3frruQvAiHuNp2dhBlMSmkEr+HuzGc= github.com/Masterminds/sprig/v3 v3.2.1/go.mod h1:UoaO7Yp8KlPnJIYWTFkMaqPUYKTfGFPhxNuwnnxkKlk= github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= -github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= -github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk= github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= @@ -303,8 +303,8 @@ github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= github.com/hashicorp/memberlist v0.5.0 h1:EtYPN8DpAURiapus508I4n9CzHs2W+8NZGbmmR/prTM= github.com/hashicorp/memberlist v0.5.0/go.mod h1:yvyXLpo0QaGE59Y7hDTsTzDD25JYBZ4mHgHUZ8lrOI0= -github.com/hashicorp/packer-plugin-sdk v0.6.0 h1:v8JdmM1PkkHu3gIUs63UcsgGlD0U3m/7DWG6PxcmOPw= -github.com/hashicorp/packer-plugin-sdk v0.6.0/go.mod h1:bDCCzvZ6lUJjrY7eI+i9lYmGs9NSymdFFQiGluF8dEg= +github.com/hashicorp/packer-plugin-sdk v0.6.2 h1:XRIJTcHa9AN13ZvVjL+RpwxEz+yYT7qJ5PA2REViJZ0= +github.com/hashicorp/packer-plugin-sdk v0.6.2/go.mod h1:mOuey53XeLIIpdOQnREjEBYCndipO7piU+EJAstQq1k= github.com/hashicorp/serf v0.10.1 h1:Z1H2J60yRKvfDYAOZLd2MU0ND4AH/WDz7xYHDWQsIPY= github.com/hashicorp/serf v0.10.1/go.mod h1:yL2t6BqATOLGc5HF7qbFkTfXoPIY0WZdWHfEvMqbG+4= 
github.com/hashicorp/vault/api v1.14.0 h1:Ah3CFLixD5jmjusOgm8grfN9M0d+Y8fVR2SW0K6pJLU= @@ -595,8 +595,8 @@ golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.19.0 h1:fEdghXQSo20giMthA7cd28ZC+jts4amQ3YMXiP5oMQ8= -golang.org/x/mod v0.19.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= +golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= +golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -680,8 +680,8 @@ golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= -golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= -golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= +golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= 
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -691,8 +691,8 @@ golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.23.0 h1:SGsXPZ+2l4JsgaCKkx+FQ9YZ5XEtA1GZYuoDjenLjvg= -golang.org/x/tools v0.23.0/go.mod h1:pnu6ufv6vQkll6szChhK3C3L/ruaIv5eBeztNG8wtsI= +golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU= +golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= diff --git a/hcl2template/types.build.provisioners.go b/hcl2template/types.build.provisioners.go index b08eca59f63..af846807c70 100644 --- a/hcl2template/types.build.provisioners.go +++ b/hcl2template/types.build.provisioners.go @@ -186,6 +186,16 @@ func (cfg *PackerConfig) startProvisioner(source SourceUseBlock, pb *Provisioner builderVars["packer_force"] = strconv.FormatBool(cfg.force) builderVars["packer_on_error"] = cfg.onError + sensitiveVars := make([]string, 0, len(cfg.InputVariables)) + + for key, variable := range cfg.InputVariables { + if variable.Sensitive { + sensitiveVars = append(sensitiveVars, key) + } + } + + builderVars["packer_sensitive_variables"] = sensitiveVars + hclProvisioner := &HCL2Provisioner{ Provisioner: provisioner, provisionerBlock: pb, diff --git a/hcl2template/types.hcl_post-processor.go 
b/hcl2template/types.hcl_post-processor.go index b9d200758de..1d1da3a676f 100644 --- a/hcl2template/types.hcl_post-processor.go +++ b/hcl2template/types.hcl_post-processor.go @@ -21,7 +21,7 @@ type HCL2PostProcessor struct { PostProcessor packersdk.PostProcessor postProcessorBlock *PostProcessorBlock evalContext *hcl.EvalContext - builderVariables map[string]string + builderVariables map[string]interface{} } func (p *HCL2PostProcessor) ConfigSpec() hcldec.ObjectSpec { diff --git a/hcl2template/types.hcl_provisioner.go b/hcl2template/types.hcl_provisioner.go index b607a0092a2..7fc84222e4c 100644 --- a/hcl2template/types.hcl_provisioner.go +++ b/hcl2template/types.hcl_provisioner.go @@ -21,7 +21,7 @@ type HCL2Provisioner struct { Provisioner packersdk.Provisioner provisionerBlock *ProvisionerBlock evalContext *hcl.EvalContext - builderVariables map[string]string + builderVariables map[string]interface{} override map[string]interface{} } diff --git a/hcl2template/types.packer_config.go b/hcl2template/types.packer_config.go index 17039e82aa4..8f439788f0b 100644 --- a/hcl2template/types.packer_config.go +++ b/hcl2template/types.packer_config.go @@ -827,6 +827,14 @@ func (cfg *PackerConfig) GetBuilds(opts packer.GetBuildsOptions) ([]*packer.Core pcb.PostProcessors = pps pcb.Prepared = true + pcb.SensitiveVars = make([]string, 0, len(cfg.InputVariables)) + + for key, variable := range cfg.InputVariables { + if variable.Sensitive { + pcb.SensitiveVars = append(pcb.SensitiveVars, key) + } + } + // Prepare just sets the "prepareCalled" flag on CoreBuild, since // we did all the prep here. _, err := pcb.Prepare() diff --git a/hcl2template/types.source.go b/hcl2template/types.source.go index 46b9caac98e..320b5925f24 100644 --- a/hcl2template/types.source.go +++ b/hcl2template/types.source.go @@ -147,8 +147,8 @@ func (cfg *PackerConfig) startBuilder(source SourceUseBlock, ectx *hcl.EvalConte } // These variables will populate the PackerConfig inside of the builders. 
-func (source *SourceUseBlock) builderVariables() map[string]string { - return map[string]string{ +func (source *SourceUseBlock) builderVariables() map[string]interface{} { + return map[string]interface{}{ "packer_build_name": source.Name, "packer_builder_type": source.Type, } diff --git a/packer/build.go b/packer/build.go index 4a311461e96..aab50ab62f2 100644 --- a/packer/build.go +++ b/packer/build.go @@ -42,6 +42,7 @@ type CoreBuild struct { CleanupProvisioner CoreBuildProvisioner TemplatePath string Variables map[string]string + SensitiveVars []string // Indicates whether the build is already initialized before calling Prepare(..) Prepared bool @@ -175,6 +176,7 @@ func (b *CoreBuild) Prepare() (warn []string, err error) { common.OnErrorConfigKey: b.onError, common.TemplatePathKey: b.TemplatePath, common.UserVariablesConfigKey: b.Variables, + common.SensitiveVarsConfigKey: b.SensitiveVars, } // Prepare the builder diff --git a/packer/core.go b/packer/core.go index 70a8af761bc..8be8e61b2bf 100644 --- a/packer/core.go +++ b/packer/core.go @@ -494,6 +494,11 @@ func (c *Core) Build(n string) (*CoreBuild, error) { postProcessors = append(postProcessors, current) } + var sensitiveVars []string + for _, sensitive := range c.Template.SensitiveVariables { + sensitiveVars = append(sensitiveVars, sensitive.Key) + } + // TODO hooks one day // Return a structure that contains the plugins, their types, variables, and @@ -508,6 +513,7 @@ func (c *Core) Build(n string) (*CoreBuild, error) { CleanupProvisioner: cleanupProvisioner, TemplatePath: c.Template.Path, Variables: c.variables, + SensitiveVars: sensitiveVars, } //configBuilder.Name is left uninterpolated so we must check against diff --git a/provisioner/powershell/provisioner.go b/provisioner/powershell/provisioner.go index 901707c22a2..990f133088a 100644 --- a/provisioner/powershell/provisioner.go +++ b/provisioner/powershell/provisioner.go @@ -535,7 +535,16 @@ func (p *Provisioner) createFlattenedEnvVars(elevated bool) 
(flattened string) { keyValue := strings.SplitN(envVar, "=", 2) // Escape chars special to PS in each env var value escapedEnvVarValue := psEscape.Replace(keyValue[1]) - if escapedEnvVarValue != keyValue[1] { + + isSensitive := false + for _, sensitiveVar := range p.config.PackerSensitiveVars { + if strings.EqualFold(sensitiveVar, keyValue[0]) { + isSensitive = true + break + } + } + + if escapedEnvVarValue != keyValue[1] && !isSensitive { log.Printf("Env var %s converted to %s after escaping chars special to PS", keyValue[1], escapedEnvVarValue) }
diff --git a/hcl2template/types.build.hcp_packer_registry_test.go b/hcl2template/types.build.hcp_packer_registry_test.go index 5238c08c224..ada7b89b169 100644 --- a/hcl2template/types.build.hcp_packer_registry_test.go +++ b/hcl2template/types.build.hcp_packer_registry_test.go @@ -56,6 +56,7 @@ func Test_ParseHCPPackerRegistryBlock(t *testing.T) { PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, BuilderType: "null", + SensitiveVars: []string{}, }, }, false, @@ -110,6 +111,7 @@ func Test_ParseHCPPackerRegistryBlock(t *testing.T) { PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, BuilderType: "null", + SensitiveVars: []string{}, }, &packer.CoreBuild{ BuildName: "build2", @@ -119,6 +121,7 @@ func Test_ParseHCPPackerRegistryBlock(t *testing.T) { PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, BuilderType: "null", + SensitiveVars: []string{}, }, }, false, @@ -173,6 +176,7 @@ func Test_ParseHCPPackerRegistryBlock(t *testing.T) { PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, BuilderType: "null", + SensitiveVars: []string{}, }, &packer.CoreBuild{ BuildName: "build2", @@ -182,6 +186,7 @@ func Test_ParseHCPPackerRegistryBlock(t *testing.T) { PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, BuilderType: "null", + SensitiveVars: []string{}, }, }, false, @@ -237,6 +242,7 @@ func Test_ParseHCPPackerRegistryBlock(t *testing.T) { PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, BuilderType: "null", + SensitiveVars: []string{}, }, &packer.CoreBuild{ BuildName: "build2", @@ -246,6 +252,7 @@ func Test_ParseHCPPackerRegistryBlock(t *testing.T) { PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, BuilderType: "null", + SensitiveVars: []string{}, }, }, false, @@ -292,6 +299,7 @@ func Test_ParseHCPPackerRegistryBlock(t *testing.T) { PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, BuilderType: "null", + SensitiveVars: []string{}, 
}, }, false, @@ -339,6 +347,7 @@ func Test_ParseHCPPackerRegistryBlock(t *testing.T) { Provisioners: []packer.CoreBuildProvisioner{}, PostProcessors: [][]packer.CoreBuildPostProcessor{}, BuilderType: "virtualbox-iso", + SensitiveVars: []string{}, }, }, false, @@ -399,6 +408,7 @@ func Test_ParseHCPPackerRegistryBlock(t *testing.T) { Provisioners: []packer.CoreBuildProvisioner{}, PostProcessors: [][]packer.CoreBuildPostProcessor{}, BuilderType: "virtualbox-iso", + SensitiveVars: []string{}, }, }, false, @@ -605,6 +615,7 @@ func Test_ParseHCPPackerRegistryBlock(t *testing.T) { PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, BuilderType: "null", + SensitiveVars: []string{}, }, }, false, @@ -652,6 +663,7 @@ func Test_ParseHCPPackerRegistryBlock(t *testing.T) { PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, BuilderType: "null", + SensitiveVars: []string{}, }, }, false, diff --git a/hcl2template/types.build_test.go b/hcl2template/types.build_test.go index 5244712e2ea..497f78cbba2 100644 --- a/hcl2template/types.build_test.go +++ b/hcl2template/types.build_test.go @@ -106,7 +106,8 @@ func TestParse_build(t *testing.T) { }, true, true, []*packer.CoreBuild{&packer.CoreBuild{ - Provisioners: []packer.CoreBuildProvisioner{}, + Provisioners: []packer.CoreBuildProvisioner{}, + SensitiveVars: []string{}, }}, false, nil, @@ -148,7 +149,9 @@ func TestParse_build(t *testing.T) { Builds: nil, }, true, true, - []*packer.CoreBuild{&packer.CoreBuild{}}, + []*packer.CoreBuild{&packer.CoreBuild{ + SensitiveVars: []string{}, + }}, false, nil, }, @@ -190,6 +193,7 @@ func TestParse_build(t *testing.T) { true, true, []*packer.CoreBuild{&packer.CoreBuild{ PostProcessors: [][]packer.CoreBuildPostProcessor{}, + SensitiveVars: []string{}, }}, true, nil, @@ -289,11 +293,12 @@ func TestParse_build(t *testing.T) { false, false, []*packer.CoreBuild{ &packer.CoreBuild{ - Type: "virtualbox-iso.ubuntu-1204", - BuilderType: "virtualbox-iso", - Prepared: true, - 
Builder: emptyMockBuilder, - Provisioners: []packer.CoreBuildProvisioner{}, + Type: "virtualbox-iso.ubuntu-1204", + BuilderType: "virtualbox-iso", + Prepared: true, + Builder: emptyMockBuilder, + Provisioners: []packer.CoreBuildProvisioner{}, + SensitiveVars: []string{}, PostProcessors: [][]packer.CoreBuildPostProcessor{ { { @@ -324,11 +329,12 @@ func TestParse_build(t *testing.T) { }, }, &packer.CoreBuild{ - Type: "amazon-ebs.aws-ubuntu-16.04", - BuilderType: "amazon-ebs", - Prepared: true, - Builder: emptyMockBuilder, - Provisioners: []packer.CoreBuildProvisioner{}, + Type: "amazon-ebs.aws-ubuntu-16.04", + BuilderType: "amazon-ebs", + Prepared: true, + Builder: emptyMockBuilder, + Provisioners: []packer.CoreBuildProvisioner{}, + SensitiveVars: []string{}, PostProcessors: [][]packer.CoreBuildPostProcessor{ { { @@ -407,10 +413,11 @@ func TestParse_build(t *testing.T) { false, false, []*packer.CoreBuild{ &packer.CoreBuild{ - Type: "virtualbox-iso.ubuntu-1204", - BuilderType: "virtualbox-iso", - Prepared: true, - Builder: emptyMockBuilder, + Type: "virtualbox-iso.ubuntu-1204", + BuilderType: "virtualbox-iso", + Prepared: true, + Builder: emptyMockBuilder, + SensitiveVars: []string{}, Provisioners: []packer.CoreBuildProvisioner{ { PType: "shell", @@ -438,10 +445,11 @@ func TestParse_build(t *testing.T) { PostProcessors: [][]packer.CoreBuildPostProcessor{}, }, &packer.CoreBuild{ - Type: "amazon-ebs.aws-ubuntu-16.04", - BuilderType: "amazon-ebs", - Prepared: true, - Builder: emptyMockBuilder, + Type: "amazon-ebs.aws-ubuntu-16.04", + BuilderType: "amazon-ebs", + Prepared: true, + Builder: emptyMockBuilder, + SensitiveVars: []string{}, Provisioners: []packer.CoreBuildProvisioner{ { PType: "file", @@ -499,10 +507,11 @@ func TestParse_build(t *testing.T) { false, false, []*packer.CoreBuild{ &packer.CoreBuild{ - Type: "virtualbox-iso.ubuntu-1204", - BuilderType: "virtualbox-iso", - Prepared: true, - Builder: emptyMockBuilder, + Type: "virtualbox-iso.ubuntu-1204", + 
BuilderType: "virtualbox-iso", + Prepared: true, + Builder: emptyMockBuilder, + SensitiveVars: []string{}, Provisioners: []packer.CoreBuildProvisioner{ { PType: "shell", @@ -570,6 +579,7 @@ func TestParse_build(t *testing.T) { Builder: emptyMockBuilder, Provisioners: []packer.CoreBuildProvisioner{}, PostProcessors: [][]packer.CoreBuildPostProcessor{}, + SensitiveVars: []string{}, }, }, false, @@ -620,12 +630,13 @@ func TestParse_build(t *testing.T) { false, false, []*packer.CoreBuild{ &packer.CoreBuild{ - BuildName: "test-build", - Type: "virtualbox-iso.ubuntu-1204", - BuilderType: "virtualbox-iso", - Prepared: true, - Builder: emptyMockBuilder, - Provisioners: []packer.CoreBuildProvisioner{}, + BuildName: "test-build", + Type: "virtualbox-iso.ubuntu-1204", + BuilderType: "virtualbox-iso", + Prepared: true, + Builder: emptyMockBuilder, + Provisioners: []packer.CoreBuildProvisioner{}, + SensitiveVars: []string{}, PostProcessors: [][]packer.CoreBuildPostProcessor{ { { @@ -679,11 +690,12 @@ func TestParse_build(t *testing.T) { false, false, []*packer.CoreBuild{ &packer.CoreBuild{ - BuildName: "build-name-test", - Type: "virtualbox-iso.ubuntu-1204", - BuilderType: "virtualbox-iso", - Prepared: true, - Builder: emptyMockBuilder, + BuildName: "build-name-test", + Type: "virtualbox-iso.ubuntu-1204", + BuilderType: "virtualbox-iso", + Prepared: true, + Builder: emptyMockBuilder, + SensitiveVars: []string{}, Provisioners: []packer.CoreBuildProvisioner{ { PName: "build-name-test", diff --git a/hcl2template/types.datasource_test.go b/hcl2template/types.datasource_test.go index d0125974d7d..e066a8f3243 100644 --- a/hcl2template/types.datasource_test.go +++ b/hcl2template/types.datasource_test.go @@ -61,6 +61,7 @@ func TestParse_datasource(t *testing.T) { Provisioners: []packer.CoreBuildProvisioner{}, PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, + SensitiveVars: []string{}, }, }, false, @@ -150,6 +151,7 @@ func TestParse_datasource(t *testing.T) { 
Provisioners: []packer.CoreBuildProvisioner{}, PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, + SensitiveVars: []string{}, }, }, false, diff --git a/hcl2template/types.packer_config_test.go b/hcl2template/types.packer_config_test.go index 56658120189..6d62ab0da51 100644 --- a/hcl2template/types.packer_config_test.go +++ b/hcl2template/types.packer_config_test.go @@ -206,9 +206,10 @@ func TestParser_complete(t *testing.T) { false, false, []*packer.CoreBuild{ &packer.CoreBuild{ - Type: "virtualbox-iso.ubuntu-1204", - BuilderType: "virtualbox-iso", - Prepared: true, + Type: "virtualbox-iso.ubuntu-1204", + BuilderType: "virtualbox-iso", + Prepared: true, + SensitiveVars: []string{}, Builder: &MockBuilder{ Config: MockConfig{ NestedMockConfig: NestedMockConfig{ @@ -318,9 +319,10 @@ func TestParser_complete(t *testing.T) { }, }, &packer.CoreBuild{ - Type: "amazon-ebs.ubuntu-1604", - BuilderType: "amazon-ebs", - Prepared: true, + Type: "amazon-ebs.ubuntu-1604", + BuilderType: "amazon-ebs", + Prepared: true, + SensitiveVars: []string{}, Builder: &MockBuilder{ Config: MockConfig{ NestedMockConfig: NestedMockConfig{ diff --git a/hcl2template/types.source_test.go b/hcl2template/types.source_test.go index 5acf156380b..15e1d2b3ad6 100644 --- a/hcl2template/types.source_test.go +++ b/hcl2template/types.source_test.go @@ -59,6 +59,7 @@ func TestParse_source(t *testing.T) { Provisioners: []packer.CoreBuildProvisioner{}, PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, + SensitiveVars: []string{}, }, }, false, diff --git a/hcl2template/types.variables_test.go b/hcl2template/types.variables_test.go index 07fb8c2a517..30fb457aed6 100644 --- a/hcl2template/types.variables_test.go +++ b/hcl2template/types.variables_test.go @@ -133,6 +133,7 @@ func TestParse_variables(t *testing.T) { Provisioners: []packer.CoreBuildProvisioner{}, PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, + SensitiveVars: 
[]string{"super_secret_password"}, }, }, false, @@ -305,6 +306,7 @@ func TestParse_variables(t *testing.T) { Provisioners: []packer.CoreBuildProvisioner{}, PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, + SensitiveVars: []string{}, }, }, false, @@ -393,6 +395,7 @@ func TestParse_variables(t *testing.T) { Provisioners: []packer.CoreBuildProvisioner{}, PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, + SensitiveVars: []string{}, }, }, false, @@ -459,6 +462,7 @@ func TestParse_variables(t *testing.T) { Provisioners: []packer.CoreBuildProvisioner{}, PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, + SensitiveVars: []string{}, }, }, false, @@ -502,6 +506,7 @@ func TestParse_variables(t *testing.T) { Provisioners: []packer.CoreBuildProvisioner{}, PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, + SensitiveVars: []string{}, }, }, false, @@ -594,6 +599,7 @@ func TestParse_variables(t *testing.T) { }, }, PostProcessors: [][]packer.CoreBuildPostProcessor{}, + SensitiveVars: []string{}, }, }, false, @@ -651,6 +657,7 @@ func TestParse_variables(t *testing.T) { Provisioners: []packer.CoreBuildProvisioner{}, PostProcessors: [][]packer.CoreBuildPostProcessor{}, Prepared: true, + SensitiveVars: []string{}, }, }, false, diff --git a/packer/build_test.go b/packer/build_test.go index c15eec5b570..9f572101d22 100644 --- a/packer/build_test.go +++ b/packer/build_test.go @@ -39,8 +39,9 @@ func testBuild() *CoreBuild { {&MockPostProcessor{ArtifactId: "pp"}, "testPP", "testPPName", cty.Value{}, make(map[string]interface{}), boolPointer(true)}, }, }, - Variables: make(map[string]string), - onError: "cleanup", + Variables: make(map[string]string), + onError: "cleanup", + SensitiveVars: []string{"sensitive_var"}, } } @@ -54,6 +55,7 @@ func testDefaultPackerConfig() map[string]interface{} { common.OnErrorConfigKey: "cleanup", common.TemplatePathKey: "", common.UserVariablesConfigKey: make(map[string]string), + 
common.SensitiveVarsConfigKey: []string{"sensitive_var"}, } } func TestBuild_Name(t *testing.T) {
Sensitive variables are logged when escaped for powershell <!--- Please keep this note for the community ---> #### Community Note * Please vote on this issue by adding a 👍 [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) to the original issue to help the community and maintainers prioritize this request * Please do not leave "+1" or other comments that do not add relevant new information or questions, they generate extra noise for issue followers and do not help prioritize the request * If you are interested in working on this issue or have submitted a pull request, please leave a comment <!--- Thank you for keeping this note for the community ---> #### Overview of the Issue When sensitive variables contain characters that require escaping the logs will print that sensitive variable in its escaped form. #### Reproduction Steps Run example from powershell with PACKER_LOG=1: ``` $Env:PACKER_LOG=1;& .\packer.exe build -var password="abc`$123" password.pkr.hcl ``` ### Packer version Packer v1.10.2 ### Simplified Packer Template ``` variable "password" { type = string sensitive = true } source "file" "basic-example" { content = "Lorem ipsum dolor sit amet" target = "dummy_artifact" } build { sources = ["sources.file.basic-example"] provisioner "powershell" { environment_vars = [ "PASSWORD=${var.password}", ] inline = ["echo foo"] } } ``` ### Operating system and Environment details Windows Server 2022 ### Log Fragments and crash.log files ``` 2024/04/12 10:11:48 packer.exe plugin: Env var <sensitive> converted to abc`$123 after escaping chars special to PS ``` [Full Logs](https://gist.github.com/zinkj/813d23131dc112aa0c75a52c7ad4fb83)
Hi @zinkj, Thanks for the report, looking at the problem however, I'm not sure we'll be able to offer a good solution with the current architecture regarding log filtering. The way we do things re:sensitive variables is essentially by `strings.Replace(sensitiveVariable, "<sensitive>", -1)`, which in effect if the variable gets escaped one way or another, won't match anymore. We don't have agency on what gets printed by custom code, and it wouldn't be feasible to support every possible escaping scheme under the sun, therefore I'm not sure we have a good way to handle cases like these, at least without seriously rethinking/refactoring how this gets printed out. I'll keep this issue open for now, but this seems like unlikely to happen in the short/medium term, sorry.
Hi @zinkj, Thanks for the report, looking at the problem however, I'm not sure we'll be able to offer a good solution with the current architecture regarding log filtering. The way we do things re:sensitive variables is essentially by `strings.Replace(sensitiveVariable, "<sensitive>", -1)`, which in effect if the variable gets escaped one way or another, won't match anymore. We don't have agency on what gets printed by custom code, and it wouldn't be feasible to support every possible escaping scheme under the sun, therefore I'm not sure we have a good way to handle cases like these, at least without seriously rethinking/refactoring how this gets printed out. I'll keep this issue open for now, but this seems like unlikely to happen in the short/medium term, sorry. I'm going to lock this issue because it has been closed for _30 days_ ⏳. This helps our maintainers find and focus on the active issues. If you have found a problem that seems similar to this, please open a new issue and complete the issue template so we can capture all the details necessary to investigate further.
[ "https://github.com/hashicorp/packer/commit/f70933f6b52a6e725c2e9895581350299ff16d0e", "https://github.com/hashicorp/packer/commit/04dc274eaa3415baf168737c22c7202c8218f114", "https://github.com/hashicorp/packer/commit/99b8f98a8b354d0d37c4568b9e938df6999a2c0b", "https://github.com/hashicorp/packer/commit/24d73...
2025-06-02T11:00:55Z
https://github.com/hashicorp/packer/tree/4cd7ad4721cd3aba078482cabe9b01de8202dfa2
[ "go mod download ; go build ./..." ]
[ "go test -json -v ./... > reports\\go-test-results.json" ]
[ "cat reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]: import re from difflib import SequenceMatcher results: dict[str, str] = {} def clean(name: str) -> str: if not name: return "" # Replace escaped newlines and carriage returns name = name.replace("\\n", " ").replace("\\r", " ") # Replace real newlines/tabs with spaces name = re.sub(r"[\r\n\t]+", " ", name) # Collapse multiple spaces name = re.sub(r"[ ]{2,}", " ", name) return name.strip() # Collect all test names observed in JSON to use as canonical references observed_tests: set[str] = set() test_field_re = re.compile(r'"Test"\s*:\s*"(.+?)"', re.DOTALL) for m in test_field_re.finditer(log): observed_tests.add(clean(m.group(1))) # Build a lookup normalized by removing spaces for fuzzy matching observed_key_map: dict[str, str] = {} for t in observed_tests: observed_key_map[re.sub(r"\s+", "", t)] = t def best_match(name: str) -> str: n = clean(name) if n in observed_tests: return n key = re.sub(r"\s+", "", n) if key in observed_key_map: return observed_key_map[key] # Fuzzy match against observed keys best = n best_ratio = 0.0 for k, orig in observed_key_map.items(): ratio = SequenceMatcher(None, key, k).ratio() if ratio > best_ratio: best_ratio = ratio best = orig return best if best_ratio >= 0.8 else n # Primary: derive status from output events containing '--- PASS/FAIL/SKIP:' out_status_re = re.compile( r'"Action"\s*:\s*"output".*?"Test"\s*:\s*"(.+?)".*?"Output"\s*:\s*"(?:[^"]*?)---\s*(PASS|FAIL|SKIP)\s*:', re.DOTALL ) for m in out_status_re.finditer(log): name = clean(m.group(1)) status_kw = m.group(2).upper() status = "pass" if status_kw == "PASS" else ("fail" if status_kw == "FAIL" else "skip") results[name] = status # Fallback: JSON action/test pairs act_pair_re = re.compile( r'"Action"\s*:\s*"(pass|fail|skip)".*?"Test"\s*:\s*"(.+?)"' r'|' r'"Test"\s*:\s*"(.+?)".*?"Action"\s*:\s*"(pass|fail|skip)"', re.IGNORECASE | re.DOTALL ) for m in act_pair_re.finditer(log): if m.group(1) and m.group(2): status_kw = m.group(1).lower() 
raw_name = m.group(2) elif m.group(3) and m.group(4): status_kw = m.group(4).lower() raw_name = m.group(3) else: continue name = clean(raw_name) status = "pass" if status_kw == "pass" else ("fail" if status_kw == "fail" else "skip") results.setdefault(name, status) # Extra fallback: parse plain text status lines and map to best observed test name text_status_re = re.compile(r'---\s+(PASS|FAIL|SKIP):\s+(.+?)\s*\(', re.DOTALL) for m in text_status_re.finditer(log): status_kw = m.group(1).upper() raw_name = m.group(2) name = best_match(raw_name) status = "pass" if status_kw == "PASS" else ("fail" if status_kw == "FAIL" else "skip") results.setdefault(name, status) return results
[ "TestNewRegisterProperBuildName/ /single_build_block_with_single_source", "TestBase64Gzip/base64gzip(ct ty.StringVal(\\", "TestValidateCommand/test-fixtures\\\\version n_req\\\\wrong_field_name", "TestSum/sum(cty.SetVal([]cty.V Value{cty.NumberIntVal(3),_cty.NumberFloatVal(10.2),_cty.NumberFloatVal(2340.8)}) ...
[ "TestValidateCommand/test-fixtures\\\\hcl\\\\da ata-source-validation.pkr.hcl", "TestIndex/index(cty.ListVal([] ]cty.Value{cty.NumberIntVal(1),_cty.NumberIntVal(2),_cty.NumberIntVal(3)}),_cty.N NumberIntVal(2))", "TestCheckErrorCode/old_format,_co ode_doesn't_match_what_is_looked_for", "TestFixerISOMD5_Impl",...
starryzhang/sweb.eval.win.hashicorp_1776_packer-13388
containers/podman
25522
containers__podman-25522
Go
[ "25234" ]
f8ec74803b6f41352090c027ab6cc21cf4281709
diff --git a/.cirrus.yml b/.cirrus.yml index 00046172e34..d62e8d3e678 100644 --- a/.cirrus.yml +++ b/.cirrus.yml @@ -558,6 +558,44 @@ unit_test_task: always: *logs_artifacts +unit_test_windows_task: + name: "Unit tests on Windows" + alias: unit_test_windows + # Docs: ./contrib/cirrus/CIModes.md (Cirrus Task contexts and runtime modes) + # only when: - main rules (see doc above); or + # - unit test files are changed (contains a false positves such as test/e2e/ + # but that should not be an issue, it only runs when it doesn't have to) + # - actual source code changed + only_if: >- + $CIRRUS_PR == '' || + $CIRRUS_CHANGE_TITLE =~ '.*CI:ALL.*' || + changesInclude('.cirrus.yml', 'Makefile', 'contrib/cirrus/**', 'vendor/**', 'test/tools/**', 'test/registries*.conf', 'hack/**', 'version/rawversion/*') || + changesInclude('winmake.ps1') || + changesInclude('**/*_test.go') || + (changesInclude('**/*.go', '**/*.c', '**/*.h') && !changesIncludeOnly('test/**', 'pkg/machine/e2e/**')) + # Special case, we do not run macos/windows builds on rhel branches. + # Thus the machine task should not be run too, while we use only_if + # everywhere to do so here it would mean we would need duplicate the + # full big only_if condition which is more difficult to maintain so + # use the skip here. 
+ skip: &skip_rhel_release | + $CIRRUS_BRANCH =~ 'v[0-9\.]+-rhel' || + $CIRRUS_BASE_BRANCH =~ 'v[0-9\.]+-rhel' + depends_on: *build + ec2_instance: *windows + timeout_in: 20m + env: + <<: *winenv + TEST_FLAVOR: unit + clone_script: *winclone + main_script: ".\\repo\\contrib\\cirrus\\win-unit-main.ps1" + always: + # Required for `contrib/cirrus/logformatter` to work properly + html_artifacts: + path: ./*.html + type: text/html + + apiv2_test_task: name: "APIv2 test on $DISTRO_NV ($PRIV_NAME)" alias: apiv2_test @@ -760,9 +798,7 @@ podman_machine_windows_task: # everywhere to do so here it would mean we would need duplicate the # full big only_if condition which is more difficult to maintain so # use the skip here. - skip: &skip_rhel_release | - $CIRRUS_BRANCH =~ 'v[0-9\.]+-rhel' || - $CIRRUS_BASE_BRANCH =~ 'v[0-9\.]+-rhel' + skip: *skip_rhel_release depends_on: *build ec2_instance: <<: *windows @@ -1060,6 +1096,7 @@ success_task: - win_installer - docker-py_test - unit_test + - unit_test_windows - apiv2_test - compose_test - local_integration_test diff --git a/Makefile b/Makefile index 36be6988ee8..44706ee8fee 100644 --- a/Makefile +++ b/Makefile @@ -652,7 +652,7 @@ localunit: test/goecho/goecho test/version/version UNIT=1 $(GINKGO) \ -r \ $(TESTFLAGS) \ - --skip-package test/e2e,pkg/bindings,hack,pkg/machine/e2e \ + --skip-package test/e2e,pkg/bindings,hack,pkg/machine/e2e,pkg/machine/wsl \ --cover \ --covermode atomic \ --coverprofile coverprofile \ diff --git a/contrib/cirrus/win-unit-main.ps1 b/contrib/cirrus/win-unit-main.ps1 new file mode 100644 index 00000000000..b945f4a1550 --- /dev/null +++ b/contrib/cirrus/win-unit-main.ps1 @@ -0,0 +1,13 @@ +#!/usr/bin/env powershell + +. $PSScriptRoot\win-lib.ps1 + +if ($Env:CI -eq "true") { + Push-Location "$ENV:CIRRUS_WORKING_DIR\repo" +} else { + Push-Location $PSScriptRoot\..\.. 
+} + +Run-Command ".\winmake.ps1 localunit" + +Pop-Location diff --git a/pkg/machine/wsl/machine.go b/pkg/machine/wsl/machine.go index 3355f6b2b5d..73650c18eae 100644 --- a/pkg/machine/wsl/machine.go +++ b/pkg/machine/wsl/machine.go @@ -317,12 +317,12 @@ func checkAndInstallWSL(reExec bool) (bool, error) { admin := HasAdminRights() - if !IsWSLFeatureEnabled() { + if !wutil.IsWSLFeatureEnabled() { return false, attemptFeatureInstall(reExec, admin) } skip := false - if reExec && !admin { + if !reExec && !admin { fmt.Println("Launching WSL Kernel Install...") if err := launchElevate(wslInstallKernel); err != nil { return false, err @@ -363,11 +363,11 @@ func attemptFeatureInstall(reExec, admin bool) error { message += "NOTE: A system reboot will be required as part of this process. " + "If you prefer, you may abort now, and perform a manual installation using the \"wsl --install\" command." - if reExec && MessageBox(message, "Podman Machine", false) != 1 { + if !reExec && MessageBox(message, "Podman Machine", false) != 1 { return errors.New("the WSL installation aborted") } - if reExec && !admin { + if !reExec && !admin { return launchElevate("install the Windows WSL Features") } @@ -622,10 +622,6 @@ func obtainGlobalConfigLock() (*fileLock, error) { return lockFile(filepath.Join(lockDir, "podman-config.lck")) } -func IsWSLFeatureEnabled() bool { - return wutil.SilentExec(wutil.FindWSL(), "--set-default-version", "2") == nil -} - func isWSLRunning(dist string) (bool, error) { return wslCheckExists(dist, true) } diff --git a/pkg/machine/wsl/wutil/wutil.go b/pkg/machine/wsl/wutil/wutil.go index bc07ffa2a93..49bc638ac42 100644 --- a/pkg/machine/wsl/wutil/wutil.go +++ b/pkg/machine/wsl/wutil/wutil.go @@ -19,16 +19,26 @@ import ( ) var ( - once sync.Once - wslPath string + onceFind, onceStatus sync.Once + wslPath string + status wslStatus + wslNotInstalledMessages = []string{"kernel file is not found", "The Windows Subsystem for Linux is not installed"} + 
vmpDisabledMessages = []string{"enable the Virtual Machine Platform Windows feature", "Enable \"Virtual Machine Platform\""} + wslDisabledMessages = []string{"enable the \"Windows Subsystem for Linux\" optional component"} ) +type wslStatus struct { + installed bool + vmpFeatureEnabled bool + wslFeatureEnabled bool +} + func FindWSL() string { // At the time of this writing, a defect appeared in the OS preinstalled WSL executable // where it no longer reliably locates the preferred Windows App Store variant. // // Manually discover (and cache) the wsl.exe location to bypass the problem - once.Do(func() { + onceFind.Do(func() { var locs []string // Prefer Windows App Store version @@ -87,24 +97,44 @@ func SilentExecCmd(command string, args ...string) *exec.Cmd { return cmd } -func IsWSLInstalled() bool { - cmd := SilentExecCmd(FindWSL(), "--status") - out, err := cmd.StdoutPipe() - cmd.Stderr = nil - if err != nil { - return false - } - if err = cmd.Start(); err != nil { - return false - } +func parseWSLStatus() wslStatus { + onceStatus.Do(func() { + status = wslStatus{ + installed: false, + vmpFeatureEnabled: false, + wslFeatureEnabled: false, + } + cmd := SilentExecCmd(FindWSL(), "--status") + out, err := cmd.StdoutPipe() + cmd.Stderr = nil + if err != nil { + return + } + if err = cmd.Start(); err != nil { + return + } + + status = matchOutputLine(out) + + if err := cmd.Wait(); err != nil { + return + } + }) - kernelNotFound := matchOutputLine(out, "kernel file is not found") + return status +} - if err := cmd.Wait(); err != nil { +func IsWSLInstalled() bool { + status := parseWSLStatus() + return status.installed && status.vmpFeatureEnabled +} + +func IsWSLFeatureEnabled() bool { + if SilentExec(FindWSL(), "--set-default-version", "2") != nil { return false } - - return !kernelNotFound + status := parseWSLStatus() + return status.vmpFeatureEnabled } func IsWSLStoreVersionInstalled() bool { @@ -118,13 +148,30 @@ func IsWSLStoreVersionInstalled() bool { return 
true } -func matchOutputLine(output io.ReadCloser, match string) bool { +func matchOutputLine(output io.ReadCloser) wslStatus { + status := wslStatus{ + installed: true, + vmpFeatureEnabled: true, + wslFeatureEnabled: true, + } scanner := bufio.NewScanner(transform.NewReader(output, unicode.UTF16(unicode.LittleEndian, unicode.UseBOM).NewDecoder())) for scanner.Scan() { line := scanner.Text() - if strings.Contains(line, match) { - return true + for _, match := range wslNotInstalledMessages { + if strings.Contains(line, match) { + status.installed = false + } + } + for _, match := range vmpDisabledMessages { + if strings.Contains(line, match) { + status.vmpFeatureEnabled = false + } + } + for _, match := range wslDisabledMessages { + if strings.Contains(line, match) { + status.wslFeatureEnabled = false + } } } - return false + return status } diff --git a/winmake.ps1 b/winmake.ps1 index 2491b2cb1f2..3b37c8a2d74 100644 --- a/winmake.ps1 +++ b/winmake.ps1 @@ -44,6 +44,14 @@ function Make-Clean{ } } +function Local-Unit { + Build-Ginkgo + $skippackages="hack,internal\domain\infra\abi,internal\domain\infra\tunnel,libpod\lock\shm,pkg\api\handlers\libpod,pkg\api\handlers\utils,pkg\bindings," + $skippackages+="pkg\domain\infra\abi,pkg\emulation,pkg\machine\apple,pkg\machine\applehv,pkg\machine\e2e,pkg\machine\libkrun," + $skippackages+="pkg\machine\provider,pkg\machine\proxyenv,pkg\machine\qemu,pkg\specgen\generate,pkg\systemd,test\e2e,test\utils" + Run-Command "./test/tools/build/ginkgo.exe -vv -r --tags `"$remotetags`" --timeout=15m --trace --no-color --skip-package `"$skippackages`"" +} + function Local-Machine { param ( [string]$files @@ -53,7 +61,7 @@ function Local-Machine { $files = " --focus-file $files " } - Run-Command "./test/tools/build/ginkgo.exe -vv --tags `"$remotetags`" -timeout=90m --trace --no-color $files pkg/machine/e2e/." + Run-Command "./test/tools/build/ginkgo.exe -vv --tags `"$remotetags`" --timeout=90m --trace --no-color $files pkg/machine/e2e/." 
} # Expect starting directory to be /podman @@ -219,9 +227,7 @@ function Build-Ginkgo{ return } Write-Host "Building Ginkgo" - Push-Location ./test/tools - Run-Command "go build -o build/ginkgo.exe ./vendor/github.com/onsi/ginkgo/v2/ginkgo" - Pop-Location + Run-Command "go build -o ./test/tools/build/ginkgo.exe ./vendor/github.com/onsi/ginkgo/v2/ginkgo" } function Git-Commit{ @@ -287,6 +293,9 @@ switch ($target) { {$_ -in '', 'podman-remote', 'podman'} { Podman-Remote } + 'localunit' { + Local-Unit + } 'localmachine' { if ($args.Count -gt 1) { $files = $args[1] @@ -331,6 +340,9 @@ switch ($target) { Write-Host "Example: Build podman-remote " Write-Host " .\winmake podman-remote" Write-Host + Write-Host "Example: Run all unit tests " + Write-Host " .\winmake localunit" + Write-Host Write-Host "Example: Run all machine tests " Write-Host " .\winmake localmachine" Write-Host
diff --git a/cmd/podman/parse/net_test.go b/cmd/podman/parse/net_test.go index 22421732689..f71dc6ba1fe 100644 --- a/cmd/podman/parse/net_test.go +++ b/cmd/podman/parse/net_test.go @@ -1,3 +1,5 @@ +//go:build !windows + // most of these validate and parse functions have been taken from projectatomic/docker // and modified for cri-o package parse diff --git a/pkg/auth/auth_test.go b/pkg/auth/auth_test.go index 37cf817917d..ccb205c18e1 100644 --- a/pkg/auth/auth_test.go +++ b/pkg/auth/auth_test.go @@ -1,3 +1,5 @@ +//go:build !windows + package auth import ( diff --git a/pkg/machine/machine_windows_test.go b/pkg/machine/machine_windows_test.go index 48f3eafd536..5255a7502b4 100644 --- a/pkg/machine/machine_windows_test.go +++ b/pkg/machine/machine_windows_test.go @@ -3,15 +3,41 @@ package machine import ( + "fmt" "os" "os/exec" "path/filepath" "testing" + "golang.org/x/sys/windows" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) +// shortPathToLongPath converts a Windows short path (C:\PROGRA~1) to its +// long path equivalent (C:\Program Files). It returns an error if shortPath +// doesn't exist. +func shortPathToLongPath(shortPath string) (string, error) { + shortPathPtr, err := windows.UTF16PtrFromString(shortPath) + if err != nil { + return "", err + } + len, err := windows.GetLongPathName(shortPathPtr, nil, 0) + if err != nil { + return "", err + } + if len == 0 { + return "", fmt.Errorf("failed to get buffer size for path: %s", shortPath) + } + longPathPtr := &(make([]uint16, len)[0]) + _, err = windows.GetLongPathName(shortPathPtr, longPathPtr, len) + if err != nil { + return "", err + } + return windows.UTF16PtrToString(longPathPtr), nil +} + // CreateNewItemWithPowerShell creates a new item using PowerShell. // It's an helper to easily create junctions on Windows (as well as other file types). // It constructs a PowerShell command to create a new item at the specified path with the given item type. 
@@ -23,15 +49,21 @@ import ( // - itemType: The type of the item to be created (e.g., "File", "SymbolicLink", "Junction"). // - target: The target for the new item, if applicable. func CreateNewItemWithPowerShell(t *testing.T, path string, itemType string, target string) { - var pwshCmd string + var pwshCmd, pwshPath string + // Look for Powershell 7 first as it allow Symlink creation for non-admins too + pwshPath, err := exec.LookPath("pwsh.exe") + if err != nil { + // Use Powershell 5 that is always present + pwshPath = "powershell.exe" + } pwshCmd = "New-Item -Path " + path + " -ItemType " + itemType if target != "" { pwshCmd += " -Target " + target } - cmd := exec.Command("pwsh", "-Command", pwshCmd) + cmd := exec.Command(pwshPath, "-Command", pwshCmd) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr - err := cmd.Run() + err = cmd.Run() require.NoError(t, err) } @@ -45,13 +77,16 @@ func CreateNewItemWithPowerShell(t *testing.T, path string, itemType string, tar // with filepath.EvalSymlink(). 
func TestEvalSymlinksOrClean(t *testing.T) { // Create a temporary directory to store the normal file - normalFileDir := t.TempDir() + normalFileDir, err := shortPathToLongPath(t.TempDir()) + require.NoError(t, err) // Create a temporary directory to store the (hard/sym)link files - linkFilesDir := t.TempDir() + linkFilesDir, err := shortPathToLongPath(t.TempDir()) + require.NoError(t, err) // Create a temporary directory where the mount point will be created - mountPointDir := t.TempDir() + mountPointDir, err := shortPathToLongPath(t.TempDir()) + require.NoError(t, err) // Create a normal file normalFile := filepath.Join(normalFileDir, "testFile") diff --git a/pkg/machine/wsl/wutil/wutil_test.go b/pkg/machine/wsl/wutil/wutil_test.go new file mode 100644 index 00000000000..4b7bd6af479 --- /dev/null +++ b/pkg/machine/wsl/wutil/wutil_test.go @@ -0,0 +1,145 @@ +//go:build windows + +package wutil + +import ( + "github.com/stretchr/testify/assert" + "golang.org/x/text/encoding/unicode" + "io" + "strings" + "testing" +) + +const ( + WSL1InstalledWithWSLAndVMPEnabled = `Default Version: 1` + WSL2InstalledWithWSLAndVMPEnabled = `Default Version: 2` + WSL1NotInstalled = `Default Version: 1 + +The Windows Subsystem for Linux kernel can be manually updated with 'wsl --update', but automatic updates cannot occur due to your system settings. +To receive automatic kernel updates, please enable the Windows Update setting: 'Receive updates for other Microsoft products when you update Windows'. +For more information please visit https://aka.ms/wsl2kernel. + +The WSL 2 kernel file is not found. To update or restore the kernel please run 'wsl --update'.` + WSL2NotInstalled = `The Windows Subsystem for Linux is not installed. You can install by running 'wsl.exe --install'. +For more information please visit https://aka.ms/wslinstall` + WSL2InstalledWithWSLDisabled = `Default Version: 2 +WSL1 is not supported with your current machine configuration. 
+Please enable the "Windows Subsystem for Linux" optional component to use WSL1.` + WSL2InstalledWithVMPDisabled = `Default Version: 2 +WSL2 is not supported with your current machine configuration. +Please enable the "Virtual Machine Platform" optional component and ensure virtualization is enabled in the BIOS. +Enable "Virtual Machine Platform" by running: wsl.exe --install --no-distribution +For information please visit https://aka.ms/enablevirtualization` + WSL2InstalledWithWSLAndVMPDisabled = `Default Version: 2 +WSL1 is not supported with your current machine configuration. +Please enable the "Windows Subsystem for Linux" optional component to use WSL1. +WSL2 is not supported with your current machine configuration. +Please enable the "Virtual Machine Platform" optional component and ensure virtualization is enabled in the BIOS. +Enable "Virtual Machine Platform" by running: wsl.exe --install --no-distribution +For information please visit https://aka.ms/enablevirtualization` + WSL1InstalledWithVMPDisabled = `Default Version: 1 +Please enable the Virtual Machine Platform Windows feature and ensure virtualization is enabled in the BIOS. +For information please visit https://aka.ms/enablevirtualization` + WSL1InstalledWithWSLDisabled = `Default Version: 1 +WSL1 is not supported with your current machine configuration. 
+Please enable the "Windows Subsystem for Linux" optional component to use WSL1.` +) + +func TestMatchOutputLine(t *testing.T) { + tests := []struct { + winVariant string + statusOutput string + want wslStatus + }{ + { + "WSL1 configured and both Virtual Machine Platform enabled and Windows Subsystem for Linux are enabled", + WSL1InstalledWithWSLAndVMPEnabled, + wslStatus{ + installed: true, + vmpFeatureEnabled: true, + wslFeatureEnabled: true, + }, + }, + { + "WSL2 configured and both Virtual Machine Platform enabled and Windows Subsystem for Linux enabled", + WSL2InstalledWithWSLAndVMPEnabled, + wslStatus{ + installed: true, + vmpFeatureEnabled: true, + wslFeatureEnabled: true, + }, + }, + { + "WSL not installed (was previously configured as version 1)", + WSL1NotInstalled, + wslStatus{ + installed: false, + vmpFeatureEnabled: true, + wslFeatureEnabled: true, + }, + }, + { + "WSL not installed (was previously configured as version 2)", + WSL2NotInstalled, + wslStatus{ + installed: false, + vmpFeatureEnabled: true, + wslFeatureEnabled: true, + }, + }, + { + "WSL2 configured and Virtual Machine Platform is enabled but Windows Subsystem for Linux is disabled", + WSL2InstalledWithWSLDisabled, + wslStatus{ + installed: true, + vmpFeatureEnabled: true, + wslFeatureEnabled: false, + }, + }, + { + "WSL2 configured and Virtual Machine Platform is disabled but Windows Subsystem for Linux is enabled", + WSL2InstalledWithVMPDisabled, + wslStatus{ + installed: true, + vmpFeatureEnabled: false, + wslFeatureEnabled: true, + }, + }, + { + "WSL2 configured and both Virtual Machine Platform and Windows Subsystem for Linux are disabled", + WSL2InstalledWithWSLAndVMPDisabled, + wslStatus{ + installed: true, + vmpFeatureEnabled: false, + wslFeatureEnabled: false, + }, + }, + { + "WSL1 configured and Virtual Machine Platform is disabled but Windows Subsystem for Linux is enabled", + WSL1InstalledWithVMPDisabled, + wslStatus{ + installed: true, + vmpFeatureEnabled: false, + 
wslFeatureEnabled: true, + }, + }, + { + "WSL1 configured and Virtual Machine Platform is enabled but Windows Subsystem for Linux is disabled", + WSL1InstalledWithWSLDisabled, + wslStatus{ + installed: true, + vmpFeatureEnabled: true, + wslFeatureEnabled: false, + }, + }, + } + for _, tt := range tests { + t.Run(tt.winVariant, func(t *testing.T) { + encoder := unicode.UTF16(unicode.LittleEndian, unicode.UseBOM).NewEncoder() + encodedOutput, err := encoder.String(tt.statusOutput) + assert.Nil(t, err) + reader := io.NopCloser(strings.NewReader(encodedOutput)) + assert.Equal(t, tt.want, matchOutputLine(reader)) + }) + } +} diff --git a/pkg/util/utils_test.go b/pkg/util/utils_test.go index 37795d31111..b582b51ecc9 100644 --- a/pkg/util/utils_test.go +++ b/pkg/util/utils_test.go @@ -3,6 +3,7 @@ package util import ( "fmt" "math" + "runtime" "sort" "testing" "time" @@ -827,6 +828,9 @@ func TestProcessOptions(t *testing.T) { } func TestGetRootlessPauseProcessPidPath(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip("Not implemented on Windows") + } dir, err := GetRootlessPauseProcessPidPath() assert.NoError(t, err) assert.NotEqual(t, dir, "libpod/tmp/pause.pid")
Command `machine init` fails to check if WSL is installed ### Issue Description Podman tries to import the machine-os into WSL even when it's impossible because the Windows features `Microsoft-Windows-Subsystem-Linux` and `VirtualMachinePlatform` aren't enabled. This is a regression observed with the latest version of the [WSL client](https://github.com/microsoft/WSL) (2.3.26.0 or later). ### Steps to reproduce the issue Steps to reproduce the issue 1. Make sure to have latest version WSL (`wsl --update`) 2. Disable the Windows features `Microsoft-Windows-Subsystem-Linux` and `VirtualMachinePlatform` 3. Run `podman machine init` ### Describe the results you received ```log Extracting compressed file: podman-machine-default-amd64: done Importing operating system into WSL (this may take a few minutes on a new WSL install)... Import in progress, this may take a few minutes. The operation could not be started because a required feature is not installed. Error code: Wsl/Service/RegisterDistro/CreateVm/HCS/HCS_E_SERVICE_NOT_AVAILABLE Error: the WSL import of guest OS failed: command C:\Users\mario\AppData\Local\Microsoft\WindowsApps\wsl.exe [--import podman-machine-default C:\Users\mario\.local\share\containers\podman\machine\wsl\wsldist\podman-machine-default C:\Users\mario\.local\share\containers\podman\machine\wsl\podman-machine-default-amd64 --version 2] failed: exit status 0xffffffff ``` ### Describe the results you expected The command should NOT try to import the machine OS in WSL and [propose to install WSL to the user](https://github.com/containers/podman/blob/4bd6bc9f3ccac4d3bd79c4a3eb8fb09e05604a06/pkg/machine/wsl/machine.go#L356): ```log WSL is not installed on this system, installing it. NOTE: A system reboot will be required as part of this process. If you prefer, you may abort now, and perform a manual installation using the "wsl --install" command. 
``` ### podman info output ```yaml OS: windows/amd64 provider: wsl version: 5.4.0-dev ``` ### Podman in a container No ### Privileged Or Rootless None ### Upstream Latest Release Yes ### Additional environment details Additional environment details ### Additional information Additional information like issue happens only occasionally or issue happens with a particular architecture or on a particular setting
Currently, there is only one way to directly determine whether a certain feature is enabled in Windows: ```shell Get-WindowsOptionalFeature -Online -FeatureName VirtualMachinePlatform Get-WindowsOptionalFeature -Online -FeatureName Microsoft-Windows-Subsystem-Linux ``` However, this method requires administrative privileges, and asking users for administrative permissions at every startup is generally unacceptable. We encountered this in our requirements as well. After testing, we found a workaround to make the judgment, and the relevant code is: https://github.com/oomol-lab/ovm-win/blob/0c27841224d2daa8f96d789ee3e1a32105858f7d/pkg/wsl/check.go#L243-L294 I'm not sure if podman needs a check like this. If it does, I can submit a related PR. By the way, just checking the Feature is not enough. It's also necessary to check if BIOS virtualization is enabled, if the WSL version is too low, and if the user has modified the kernel configuration in .wslconfig. Thank you @BlackHole1. I am about to submit a PR that uses a similar approach (look for sentences in `wsl --status`), and I will cc you. I would be happy if you could look at it. My approach has one main difference: from my tests, the `VirtualMachinePlatform` is sufficient to run WSL2, and the `Microsoft-Windows-Subsystem-Linux` is no longer required.
Currently, there is only one way to directly determine whether a certain feature is enabled in Windows: ```shell Get-WindowsOptionalFeature -Online -FeatureName VirtualMachinePlatform Get-WindowsOptionalFeature -Online -FeatureName Microsoft-Windows-Subsystem-Linux ``` However, this method requires administrative privileges, and asking users for administrative permissions at every startup is generally unacceptable. We encountered this in our requirements as well. After testing, we found a workaround to make the judgment, and the relevant code is: https://github.com/oomol-lab/ovm-win/blob/0c27841224d2daa8f96d789ee3e1a32105858f7d/pkg/wsl/check.go#L243-L294 I'm not sure if podman needs a check like this. If it does, I can submit a related PR. By the way, just checking the Feature is not enough. It's also necessary to check if BIOS virtualization is enabled, if the WSL version is too low, and if the user has modified the kernel configuration in .wslconfig. Thank you @BlackHole1. I am about to submit a PR that uses a similar approach (look for sentences in `wsl --status`), and I will cc you. I would be happy if you could look at it. My approach has one main difference: from my tests, the `VirtualMachinePlatform` is sufficient to run WSL2, and the `Microsoft-Windows-Subsystem-Linux` is no longer required.
[ "https://github.com/containers/podman/commit/87421d95084486448f85f776529a688a852d902c", "https://github.com/containers/podman/commit/af29bb5b6ed0c779d4f2a89186578d3666849458" ]
2025-03-10T18:34:48Z
https://github.com/containers/podman/tree/f8ec74803b6f41352090c027ab6cc21cf4281709
[ "go mod download ; New-Item -ItemType Directory -Force -Path .\\bin\\windows ; $env:CGO_ENABLED=0; go build -tags \"remote exclude_graphdriver_btrfs btrfs_noversion exclude_graphdriver_devicemapper containers_image_openpgp\" -o .\\bin\\windows\\podman.exe .\\cmd\\podman" ]
[ "go test -json -v -tags \"remote exclude_graphdriver_btrfs btrfs_noversion exclude_graphdriver_devicemapper containers_image_openpgp\" ./... > reports/go-test-results.json" ]
[ "cat reports/go-test-results.json" ]
def parser(log: str) -> dict[str, str]:
    """Parse captured ``go test -json`` output into per-test verdicts.

    Returns a mapping from ``"<package>::<test name>"`` to one of
    ``"pass"``, ``"fail"`` or ``"skip"``.

    The captured log nominally holds one JSON event object per line, but the
    capture pipeline may re-wrap lines and split tokens (including package and
    test names), so fields are extracted with whitespace-tolerant regexes and
    names are repaired/canonicalised instead of being parsed with json.loads.
    """
    import re
    from difflib import SequenceMatcher

    # Final verdict per "<package>::<test>" key.
    results: dict[str, str] = {}

    # Precedence: fail > skip > pass
    def update_status(key: str, status: str) -> None:
        # Merge a newly observed status with any previous one for `key`:
        # "fail" is sticky, "skip" can only be replaced by "fail", and a
        # repeated "pass" changes nothing.
        prev = results.get(key)
        if prev is None:
            results[key] = status
            return
        if prev == "fail":
            return
        if status == "fail":
            results[key] = "fail"
            return
        if prev == "skip":
            return
        if status == "skip":
            results[key] = "skip"
            return
        # pass after pass -> no change

    # JSON object matcher; logs have one JSON object per line but may be wrapped
    obj_re = re.compile(r'\{.*?\}', re.S)

    # Field extractors (regex-based rather than json.loads, since wrapped
    # objects are no longer guaranteed to be valid JSON).
    action_re = re.compile(r'"Action"\s*:\s*"([^"]+)"', re.I | re.S)
    pkg_re = re.compile(r'"Package"\s*:\s*"([^"]+)"', re.I | re.S)
    test_re = re.compile(r'"Test"\s*:\s*"([^"]+)"', re.I | re.S)
    output_re = re.compile(r'"Output"\s*:\s*"(.*)"', re.I | re.S)

    # Ginkgo [It] status lines, e.g. "[FAIL] ... [It] spec name"
    ginkgo_re = re.compile(r'\[(PASS|FAIL|SKIP)\][^\[]*\[It\]\s+([^\n"]+)', re.I)

    # Build flexible keyword allowing inserted whitespace/newlines
    # (after wrapping, "PASS" may appear split, e.g. "PA\nSS").
    def kw(word: str) -> str:
        return r'\s*'.join(list(word))

    # Status line patterns robust to wrapping and embedded newlines in name
    # We'll parse from a per-package concatenated output buffer,
    # so lines won't be split across objects.
    status_res = {
        "pass": re.compile(r'-{3}\s*' + kw('PASS') + r'\s*:\s*(.+?)(?:\s*\([^)]*\))?(?:\r?\n|$)', re.I | re.S),
        "fail": re.compile(r'-{3}\s*' + kw('FAIL') + r'\s*:\s*(.+?)(?:\s*\([^)]*\))?(?:\r?\n|$)', re.I | re.S),
        "skip": re.compile(r'-{3}\s*' + kw('SKIP') + r'\s*:\s*(.+?)(?:\s*\([^)]*\))?(?:\r?\n|$)', re.I | re.S),
    }

    # RUN lines robust to wrapping and embedded newlines in name
    run_re = re.compile(r'={3}\s*' + kw('RUN') + r'\s+(.+?)(?:\r?\n|$)', re.I | re.S)

    def strip_ansi(s: str) -> str:
        # Drop ANSI colour codes, both raw (ESC[...m) and JSON-escaped (\u001b[...m).
        return re.sub(r'(?:\\u001b|\x1b)\[[0-9;]*m', '', s)

    def sanitize_pkg(s: str) -> str:
        # Repair a Go package path that may have been split/garbled by wrapping.
        s = strip_ansi(s)
        # Remove whitespace introduced by wraps
        s = re.sub(r'\s+', '', s)
        # Normalize various corruptions of github.com
        # Replace github.<garbage>com with github.com
        s = re.sub(r'github\.[^/]*?com', 'github.com', s)
        # Collapse any multiple dots
        s = re.sub(r'github\.\.+com', 'github.com', s)
        # Fix repeats like github.commm
        s = re.sub(r'github\.com+', 'github.com', s)
        # Fix ccom/coom variants
        s = re.sub(r'github\.c+o+m', 'github.com', s)
        # Collapse duplicate slashes
        s = re.sub(r'/+', '/', s)
        return s.strip()

    def sanitize_test(s: str) -> str:
        # Repair a test name that may carry wrap artifacts and timing info.
        s = strip_ansi(s)
        # Normalize escapes into actual newlines/tabs for easier stripping
        s = s.replace('\\r\\n', '\n').replace('\\n', '\n').replace('\\t', '\t')
        # Remove timing substrings anywhere, e.g. " (0.00s)"
        s = re.sub(r'\s*\(\d+\.\d+s\)\s*', '', s)
        # Keep '/' and '_' but remove surrounding spaces/newlines
        s = re.sub(r'\s*/\s*', '/', s)
        s = re.sub(r'\s*_\s*', '_', s)
        # Remove all remaining whitespace (spaces, tabs, newlines) inside names from wraps
        s = re.sub(r'[ \t\r\n]+', '', s)
        # Collapse multiple underscores
        s = re.sub(r'_{2,}', '_', s)
        return s.strip()

    def clean_name_from_output(name: str) -> str:
        # Convert escapes and remove timing, then strip wrap whitespace
        name = name.replace('\\"', '"')
        name = name.replace('\\r\\n', '\n').replace('\\n', '\n').replace('\\t', '\t')
        return sanitize_test(name)

    # Collect canonical test names per package (seeded from "=== RUN" lines,
    # which carry the authoritative spelling of each test name).
    canon: dict[str, set[str]] = {}

    def add_canon(pkg: str, test_name: str) -> None:
        if not pkg or not test_name:
            return
        canon.setdefault(pkg, set()).add(test_name)

    def resolve_to_canon(pkg: str, maybe: str) -> str:
        # Map a possibly truncated/garbled name onto the closest canonical
        # name recorded for `pkg`; fall back to the sanitized input when no
        # candidate is similar enough.
        maybe = sanitize_test(maybe)
        if not pkg:
            return maybe
        cset = canon.get(pkg)
        if not cset:
            return maybe
        if maybe in cset:
            return maybe
        # Prefer candidates that contain the fragment or share prefix
        best = None
        best_score = 0.0
        for cand in cset:
            # direct containment check
            if maybe and (maybe in cand or cand in maybe):
                score = 1.0 * len(maybe) / max(len(cand), 1)
            else:
                score = SequenceMatcher(None, maybe, cand).ratio()
            if score > best_score:
                best_score = score
                best = cand
        # Below the 0.6 similarity threshold the raw name is trusted more
        # than any fuzzy match.
        if best is not None and best_score >= 0.6:
            return best
        return maybe

    # Accumulate per-package output text to handle splits across JSON objects
    outbuf: dict[str, str] = {}
    objs = list(obj_re.finditer(log))
    for m in objs:
        obj = m.group(0)
        pkg_m = pkg_re.search(obj)
        pkg = sanitize_pkg(pkg_m.group(1)) if pkg_m else None
        out_m = output_re.search(obj)
        if pkg and out_m:
            out = out_m.group(1)
            # Normalize escapes and quotes
            out = out.replace('\\"', '"')
            out = out.replace('\\r\\n', '\n').replace('\\n', '\n').replace('\\t', '\t')
            outbuf[pkg] = outbuf.get(pkg, '') + out

    # First pass over concatenated outputs: parse RUN and status lines, and Ginkgo It statuses
    for pkg, out in outbuf.items():
        # RUN lines -> canonical names
        for rm in run_re.finditer(out):
            tname = clean_name_from_output(rm.group(1))
            if pkg and tname:
                add_canon(pkg, tname)
        # Status lines ("--- PASS: ...", "--- FAIL: ...", "--- SKIP: ...")
        for status, sre in status_res.items():
            for sm in sre.finditer(out):
                raw_name = sm.group(1)
                name = clean_name_from_output(raw_name)
                name = resolve_to_canon(pkg, name)
                if pkg and name:
                    key = f"{pkg}::{name}"
                    update_status(key, status)
        # Ginkgo [It] statuses (if present)
        for sm in ginkgo_re.finditer(out):
            st_word = sm.group(1).upper()
            spec = sanitize_test(sm.group(2))
            status = "pass" if st_word == "PASS" else ("fail" if st_word == "FAIL" else "skip")
            if pkg and spec:
                key = f"{pkg}::{spec}"
                update_status(key, status)

    # Second pass: use Action with Test field (the structured pass/fail/skip
    # events), merged on top of the text-derived verdicts via update_status.
    for m in objs:
        obj = m.group(0)
        act_m = action_re.search(obj)
        if not act_m:
            continue
        act = act_m.group(1).strip().lower()
        if act not in ("pass", "fail", "skip"):
            continue
        pkg_m = pkg_re.search(obj)
        pkg = sanitize_pkg(pkg_m.group(1)) if pkg_m else None
        test_m = test_re.search(obj)
        if not test_m:
            continue
        raw_name = test_m.group(1)
        name = resolve_to_canon(pkg, raw_name)
        if pkg and name:
            key = f"{pkg}::{name}"
            update_status(key, act)

    return results
[ "github.com/containers/podman/v5/cmd/podman/containers::Test_portsToString/single_published_port", "github.com/containers/podman/v5/cmd/podman/containers::Test", "github.com/containers/podman/v5/cmd/podman/containers::Test_por" ]
[ "github.com/containers/podman/v5/pkg/specgen::TestP", "github.com/containers/podman/v5/pkg/util::TestProcessOptions/default_bind_mount", "github.com/containers/podman/v5/pkg/specgenutil::Test_verifyExpose/high_to_low_should_fail", "github.com/containers/podman/v5/pkg/machine/compression::TestSparseWriter/spar...
starryzhang/sweb.eval.win.containers_1776_podman-25522
containers/podman
25151
containers__podman-25151
Go
[ "24557" ]
ee24214cb992609a34b36d71242ae17954b17fb6
diff --git a/pkg/machine/machine_windows.go b/pkg/machine/machine_windows.go index 174980fc298..dd59876ad32 100644 --- a/pkg/machine/machine_windows.go +++ b/pkg/machine/machine_windows.go @@ -251,7 +251,7 @@ func FindExecutablePeer(name string) (string, error) { return "", err } - exe, err = filepath.EvalSymlinks(exe) + exe, err = EvalSymlinksOrClean(exe) if err != nil { return "", err } @@ -259,6 +259,28 @@ func FindExecutablePeer(name string) (string, error) { return filepath.Join(filepath.Dir(exe), name), nil } +func EvalSymlinksOrClean(filePath string) (string, error) { + fileInfo, err := os.Lstat(filePath) + if err != nil { + return "", err + } + if fileInfo.Mode()&fs.ModeSymlink != 0 { + // Only call filepath.EvalSymlinks if it is a symlink. + // Starting with v1.23, EvalSymlinks returns an error for mount points. + // See https://go-review.googlesource.com/c/go/+/565136 for reference. + filePath, err = filepath.EvalSymlinks(filePath) + if err != nil { + return "", err + } + } else { + // Call filepath.Clean when filePath is not a symlink. That's for + // consistency with the symlink case (filepath.EvalSymlinks calls + // Clean after evaluating filePath). + filePath = filepath.Clean(filePath) + } + return filePath, nil +} + func GetWinProxyStateDir(name string, vmtype define.VMType) (string, error) { dir, err := env.GetDataDir(vmtype) if err != nil {
diff --git a/pkg/machine/machine_windows_test.go b/pkg/machine/machine_windows_test.go new file mode 100644 index 00000000000..48f3eafd536 --- /dev/null +++ b/pkg/machine/machine_windows_test.go @@ -0,0 +1,116 @@ +//go:build windows + +package machine + +import ( + "os" + "os/exec" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// CreateNewItemWithPowerShell creates a new item using PowerShell. +// It's an helper to easily create junctions on Windows (as well as other file types). +// It constructs a PowerShell command to create a new item at the specified path with the given item type. +// If a target is provided, it includes it in the command. +// +// Parameters: +// - t: The testing.T instance. +// - path: The path where the new item will be created. +// - itemType: The type of the item to be created (e.g., "File", "SymbolicLink", "Junction"). +// - target: The target for the new item, if applicable. +func CreateNewItemWithPowerShell(t *testing.T, path string, itemType string, target string) { + var pwshCmd string + pwshCmd = "New-Item -Path " + path + " -ItemType " + itemType + if target != "" { + pwshCmd += " -Target " + target + } + cmd := exec.Command("pwsh", "-Command", pwshCmd) + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + err := cmd.Run() + require.NoError(t, err) +} + +// TestEvalSymlinksOrClean tests the EvalSymlinksOrClean function. +// In particular it verifies that EvalSymlinksOrClean behaves as +// filepath.EvalSymlink before Go 1.23 - with the exception of +// files under a mount point (juntion) that aren't resolved +// anymore. +// The old behavior of filepath.EvalSymlinks can be tested with +// the directive "//go:debug winsymlink=0" and replacing EvalSymlinksOrClean() +// with filepath.EvalSymlink(). 
+func TestEvalSymlinksOrClean(t *testing.T) { + // Create a temporary directory to store the normal file + normalFileDir := t.TempDir() + + // Create a temporary directory to store the (hard/sym)link files + linkFilesDir := t.TempDir() + + // Create a temporary directory where the mount point will be created + mountPointDir := t.TempDir() + + // Create a normal file + normalFile := filepath.Join(normalFileDir, "testFile") + CreateNewItemWithPowerShell(t, normalFile, "File", "") + + // Create a symlink file + symlinkFile := filepath.Join(linkFilesDir, "testSymbolicLink") + CreateNewItemWithPowerShell(t, symlinkFile, "SymbolicLink", normalFile) + + // Create a hardlink file + hardlinkFile := filepath.Join(linkFilesDir, "testHardLink") + CreateNewItemWithPowerShell(t, hardlinkFile, "HardLink", normalFile) + + // Create a mount point file + mountPoint := filepath.Join(mountPointDir, "testJunction") + mountPointFile := filepath.Join(mountPoint, "testFile") + CreateNewItemWithPowerShell(t, mountPoint, "Junction", normalFileDir) + + // Replaces the backslashes with forward slashes in the normal file path + normalFileWithBadSeparators := filepath.ToSlash(normalFile) + + tests := []struct { + name string + filePath string + want string + }{ + { + name: "Normal file", + filePath: normalFile, + want: normalFile, + }, + { + name: "File under a mount point (juntion)", + filePath: mountPointFile, + want: mountPointFile, + }, + { + name: "Symbolic link", + filePath: symlinkFile, + want: normalFile, + }, + { + name: "Hard link", + filePath: hardlinkFile, + want: hardlinkFile, + }, + { + name: "Bad separators in path", + filePath: normalFileWithBadSeparators, + want: normalFile, + }, + } + + for _, tt := range tests { + assert := assert.New(t) + t.Run(tt.name, func(t *testing.T) { + got, err := EvalSymlinksOrClean(tt.filePath) + require.NoError(t, err) + assert.Equal(tt.want, got) + }) + } +}
Podman 5.3.0 win-sshproxy.tid: The system cannot find the file specified. (install via scoop) ### Issue Description after upgrading to the Podman 5.3.0 (via scoop), when starting the podman machine, there is a message API forwarding for Docker API clients is not available due to the following startup failures. The system cannot find the path specified. it turned out that the c:\<user_profile>\.local\share\containers\podman\machine\wsl\podman-machine-default\win-sshproxy.tid is not created. Downgrading to Podman 5.2.5 , the message is gone. Podman machine runs in WSL2. ### Steps to reproduce the issue As above in the description ### Describe the results you received As above in the description ### Describe the results you expected podman machine starts and API forwarding for Docker API clients on Windows machine works as expected without error message. ### podman info output ```yaml Podman 5.3.0 ``` ### Podman in a container No ### Privileged Or Rootless Rootless ### Upstream Latest Release Yes ### Additional environment details Additional environment details ### Additional information Additional information like issue happens only occasionally or issue happens with a particular architecture or on a particular setting
Calling `podman machine stop` will also produce the following warning/error in this situation: ``` Could not stop API forwarding service (win-sshproxy.exe): open C:\Users\<username>\.local\share\containers\podman\machine\wsl\podman-machine-default\win-sshproxy.tid: The system cannot find the file specified. ``` It helps when you provide `podman info` like the template recommends. In both cases, is it safe to assume WSL is being used? @l0rd thoughts ? In my case, yes. My `podman machine` instance is running inside of WSL. `podman version` output: ``` Client: Podman Engine Version: 5.3.0 API Version: 5.3.0 Go Version: go1.23.3 Git Commit: 874bf2c301ecf0ba645f1bb45f81966cc755b7da Built: Wed Nov 13 06:19:59 2024 OS/Arch: windows/amd64 Server: Podman Engine Version: 5.2.5 API Version: 5.2.5 Go Version: go1.22.7 Built: Thu Oct 24 18:00:00 2024 OS/Arch: linux/amd64 ``` `podman info` output: ``` host: arch: amd64 buildahVersion: 1.37.5 cgroupControllers: [] cgroupManager: cgroupfs cgroupVersion: v1 conmon: package: conmon-2.1.12-2.fc40.x86_64 path: /usr/bin/conmon version: 'conmon version 2.1.12, commit: ' cpuUtilization: idlePercent: 99.64 systemPercent: 0.25 userPercent: 0.11 cpus: 4 databaseBackend: sqlite distribution: distribution: fedora variant: container version: "40" eventLogger: journald freeLocks: 2048 hostname: GlekPC idMappings: gidmap: - container_id: 0 host_id: 1000 size: 1 - container_id: 1 host_id: 524288 size: 65536 uidmap: - container_id: 0 host_id: 1000 size: 1 - container_id: 1 host_id: 524288 size: 65536 kernel: 5.15.167.4-microsoft-standard-WSL2 linkmode: dynamic logDriver: journald memFree: 16078487552 memTotal: 16776019968 networkBackend: netavark networkBackendInfo: backend: netavark dns: package: aardvark-dns-1.12.2-2.fc40.x86_64 path: /usr/libexec/podman/aardvark-dns version: aardvark-dns 1.12.2 package: netavark-1.12.2-1.fc40.x86_64 path: /usr/libexec/podman/netavark version: netavark 1.12.2 ociRuntime: name: crun package: 
crun-1.17-1.fc40.x86_64 path: /usr/bin/crun version: |- crun version 1.17 commit: 000fa0d4eeed8938301f3bcf8206405315bc1017 rundir: /run/user/1000/crun spec: 1.0.0 +SYSTEMD +SELINUX +APPARMOR +CAP +SECCOMP +EBPF +CRIU +LIBKRUN +WASM:wasmedge +YAJL os: linux pasta: executable: /usr/bin/pasta package: passt-0^20240906.g6b38f07-1.fc40.x86_64 version: | pasta 0^20240906.g6b38f07-1.fc40.x86_64 Copyright Red Hat GNU General Public License, version 2 or later <https://www.gnu.org/licenses/old-licenses/gpl-2.0.html> This is free software: you are free to change and redistribute it. There is NO WARRANTY, to the extent permitted by law. remoteSocket: exists: true path: /run/user/1000/podman/podman.sock rootlessNetworkCmd: pasta security: apparmorEnabled: false capabilities: CAP_CHOWN,CAP_DAC_OVERRIDE,CAP_FOWNER,CAP_FSETID,CAP_KILL,CAP_NET_BIND_SERVICE,CAP_SETFCAP,CAP_SETGID,CAP_SETPCAP,CAP_SETUID,CAP_SYS_CHROOT rootless: true seccompEnabled: true seccompProfilePath: /usr/share/containers/seccomp.json selinuxEnabled: false serviceIsRemote: true slirp4netns: executable: "" package: "" version: "" swapFree: 4294967296 swapTotal: 4294967296 uptime: 0h 5m 17.00s variant: "" plugins: authorization: null log: - k8s-file - none - passthrough - journald network: - bridge - macvlan - ipvlan volume: - local registries: search: - docker.io store: configFile: /home/user/.config/containers/storage.conf containerStore: number: 0 paused: 0 running: 0 stopped: 0 graphDriverName: overlay graphOptions: {} graphRoot: /home/user/.local/share/containers/storage graphRootAllocated: 1081101176832 graphRootUsed: 914518016 graphStatus: Backing Filesystem: extfs Native Overlay Diff: "true" Supports d_type: "true" Supports shifting: "false" Supports volatile: "true" Using metacopy: "false" imageCopyTmpDir: /var/tmp imageStore: number: 0 runRoot: /run/user/1000/containers transientStore: false volumePath: /home/user/.local/share/containers/storage/volumes version: APIVersion: 5.2.5 Built: 1729814400 
BuiltTime: Thu Oct 24 18:00:00 2024 GitCommit: "" GoVersion: go1.22.7 Os: linux OsArch: linux/amd64 Version: 5.2.5 ``` `podman machine info` output: ``` host: arch: amd64 currentmachine: podman-machine-default defaultmachine: podman-machine-default eventsdir: C:\Users\roblo\.local\share\containers\podman\podman machineconfigdir: C:\Users\roblo\.config\containers\podman\machine\wsl machineimagedir: C:\Users\roblo\.local\share\containers\podman\machine\wsl machinestate: Running numberofmachines: 1 os: windows vmtype: wsl version: apiversion: 5.3.0 version: 5.3.0 goversion: go1.23.3 gitcommit: 874bf2c301ecf0ba645f1bb45f81966cc755b7da builttime: Wed Nov 13 06:19:59 2024 built: 1731503999 osarch: windows/amd64 os: windows ``` `podman machine inspect` output: ``` [ { "ConfigDir": { "Path": "C:\\Users\\roblo\\.config\\containers\\podman\\machine\\wsl" }, "ConnectionInfo": { "PodmanSocket": null, "PodmanPipe": { "Path": "\\\\.\\pipe\\podman-machine-default" } }, "Created": "2024-11-13T14:54:20.3775309-07:00", "LastUp": "2024-11-13T14:55:18.3645367-07:00", "Name": "podman-machine-default", "Resources": { "CPUs": 16, "DiskSize": 100, "Memory": 2048, "USBs": [] }, "SSHConfig": { "IdentityPath": "C:\\Users\\roblo\\.local\\share\\containers\\podman\\machine\\machine", "Port": 50275, "RemoteUsername": "user" }, "State": "running", "UserModeNetworking": false, "Rootful": false, "Rosetta": false } ] ``` Edit: Apologies, forgot I had downgraded back to 5.2.5 when I dumped all this information. It is now updated with the info dump from 5.3.0 when I was having this issue. > It helps when you provide `podman info` like the template recommends. In both cases, is it safe to assume WSL is being used? Yup. Podman machine runs in the WSL2. Also might be worth mentioning that my podman setup was installed via `scoop`. Shouldn't impact anything (I think) but you never know. > Also might be worth mentioning that my podman setup was installed via `scoop`. 
Shouldn't impact anything (I think) but you never know. Great point. And that's my installation method (via scoop) as well. We were able to reproduce this issue with @jeffmaury. The problem was related to the `%USERPROFILE%/.ssh/config`. As a workaround we renamed the config file. Can you please provide the output of the command `podman info --log-level debug`. @l0rd Here you go: ``` time="2024-11-13T19:30:32-07:00" level=info msg="C:\\Users\\roblo\\scoop\\apps\\podman\\current\\podman.exe filtering at log level debug" time="2024-11-13T19:30:32-07:00" level=debug msg="Called info.PersistentPreRunE(C:\\Users\\roblo\\scoop\\apps\\podman\\current\\podman.exe info --log-level debug)" time="2024-11-13T19:30:32-07:00" level=debug msg="SSH Ident Key \"C:\\\\Users\\\\roblo\\\\.local\\\\share\\\\containers\\\\podman\\\\machine\\\\machine\" SHA256:DW4NTzZN7HgVBWBnF8rA6cmGd9LchILrC9GP0vnMU7Q ssh-ed25519" time="2024-11-13T19:30:32-07:00" level=debug msg="DoRequest Method: GET URI: http://d/v5.3.0/libpod/_ping" time="2024-11-13T19:30:32-07:00" level=debug msg="DoRequest Method: GET URI: http://d/v5.3.0/libpod/info" host: arch: amd64 buildahVersion: 1.37.5 cgroupControllers: [] cgroupManager: cgroupfs cgroupVersion: v1 conmon: package: conmon-2.1.12-2.fc40.x86_64 path: /usr/bin/conmon version: 'conmon version 2.1.12, commit: ' cpuUtilization: idlePercent: 99.47 systemPercent: 0.35 userPercent: 0.18 cpus: 4 databaseBackend: sqlite distribution: distribution: fedora variant: container version: "40" eventLogger: journald freeLocks: 2048 hostname: GlekPC idMappings: gidmap: - container_id: 0 host_id: 1000 size: 1 - container_id: 1 host_id: 524288 size: 65536 uidmap: - container_id: 0 host_id: 1000 size: 1 - container_id: 1 host_id: 524288 size: 65536 kernel: 5.15.167.4-microsoft-standard-WSL2 linkmode: dynamic logDriver: journald memFree: 16092475392 memTotal: 16776024064 networkBackend: netavark networkBackendInfo: backend: netavark dns: package: aardvark-dns-1.12.2-2.fc40.x86_64 
path: /usr/libexec/podman/aardvark-dns version: aardvark-dns 1.12.2 package: netavark-1.12.2-1.fc40.x86_64 path: /usr/libexec/podman/netavark version: netavark 1.12.2 ociRuntime: name: crun package: crun-1.17-1.fc40.x86_64 path: /usr/bin/crun version: |- crun version 1.17 commit: 000fa0d4eeed8938301f3bcf8206405315bc1017 rundir: /run/user/1000/crun spec: 1.0.0 +SYSTEMD +SELINUX +APPARMOR +CAP +SECCOMP +EBPF +CRIU +LIBKRUN +WASM:wasmedge +YAJL os: linux pasta: executable: /usr/bin/pasta package: passt-0^20240906.g6b38f07-1.fc40.x86_64 version: | pasta 0^20240906.g6b38f07-1.fc40.x86_64 Copyright Red Hat GNU General Public License, version 2 or later <https://www.gnu.org/licenses/old-licenses/gpl-2.0.html> This is free software: you are free to change and redistribute it. There is NO WARRANTY, to the extent permitted by law. remoteSocket: exists: true path: /run/user/1000/podman/podman.sock rootlessNetworkCmd: pasta security: apparmorEnabled: false capabilities: CAP_CHOWN,CAP_DAC_OVERRIDE,CAP_FOWNER,CAP_FSETID,CAP_KILL,CAP_NET_BIND_SERVICE,CAP_SETFCAP,CAP_SETGID,CAP_SETPCAP,CAP_SETUID,CAP_SYS_CHROOT rootless: true seccompEnabled: true seccompProfilePath: /usr/share/containers/seccomp.json selinuxEnabled: false serviceIsRemote: true slirp4netns: executable: "" package: "" version: "" swapFree: 4294967296 swapTotal: 4294967296 uptime: 0h 6m 30.00s variant: "" plugins: authorization: null log: - k8s-file - none - passthrough - journald network: - bridge - macvlan - ipvlan volume: - local registries: search: - docker.io store: configFile: /home/user/.config/containers/storage.conf containerStore: number: 0 paused: 0 running: 0 stopped: 0 graphDriverName: overlay graphOptions: {} graphRoot: /home/user/.local/share/containers/storage graphRootAllocated: 1081101176832 graphRootUsed: 961994752 graphStatus: Backing Filesystem: extfs Native Overlay Diff: "true" Supports d_type: "true" Supports shifting: "false" Supports volatile: "true" Using metacopy: "false" imageCopyTmpDir: 
/var/tmp imageStore: number: 0 runRoot: /run/user/1000/containers transientStore: false volumePath: /home/user/.local/share/containers/storage/volumes version: APIVersion: 5.2.5 Built: 1729814400 BuiltTime: Thu Oct 24 18:00:00 2024 GitCommit: "" GoVersion: go1.22.7 Os: linux OsArch: linux/amd64 Version: 5.2.5 time="2024-11-13T19:30:32-07:00" level=debug msg="Called info.PersistentPostRunE(C:\\Users\\roblo\\scoop\\apps\\podman\\current\\podman.exe info --log-level debug)" time="2024-11-13T19:30:32-07:00" level=debug msg="Shutting down engines" ``` Thank you. No we were not able to reproduce your error then. The problem was a different one related to the SSH config. We will try to reproduce it using scoop. It looks like if Podman is installed via winget, win-sshproxy works perfectly fine. I suspect this issue is specific to podman installation via scoop. I'll try installing via `winget` after work today to confirm this as well. If I cannot reproduce the issue in that environment I think it's a scoop-specific issue, yeah. Thank you @johnnykang for checking. And you are right, that should be the problem. The version of [gvproxy has been updated](https://github.com/containers/podman/pull/24394) in 5.3 but [the scoop installer](https://github.com/ScoopInstaller/Main/blob/master/bucket/podman.json) extracts podman.exe, not gvproxy from the installer. @baude any idea who maintains the podman scoop bucket? From git history it looks @niheaven is the last one that have updated it. @glektarssza @johnnykang out of curiosity: why are you installing via scoop rather than winget? I've been a long time user of `scoop` so mainly because it's an ecosystem I'm familiar with and, at the time I started using it, had the software I was using. I'm not opposed to changing but it would be a bit of a pain to migrate everything over at this point depending on what is and is not available in the `winget` ecosystem. i do not know who maintains scoop. 
I believe it's maintained by the community who uses it via a collection of GitHub repositories under the https://github.com/orgs/ScoopInstaller organization. At least that's what their website (https://scoop.sh) seems to indicate down at the bottom. Actually, I stand corrected. Their website links specifically to https://github.com/orgs/ScoopInstaller/people as the maintainers. I can reproduce after having installed Podman through scoop. Due to the fact that this issue only occurs when Podman is installed via scoop, I would like to close this issue, as I don't believe the team is responsible for fixing it. Closing. I opened ScoopInstaller/Main#6327. Hopefully they can figure this out on their end. Hi all, Scoop uses a bundled version of `gvproxy.exe` and `win-sshproxy`; which version comes with `podman` v5.3.0? And in my `podman` installation, `podman machine start` doesn't output any error, but yes, `podman machine stop` does, even after I replaced the existing `gvproxy` and `win-sshproxy` in the `podman` folder with the latest ones (v0.8.0). > And in my `podman` installation, `podman machine start` doesn't output any error Mine output "The system cannot find the path specified." in the startup message. It is not an obvious message, but it warned me that the Docker API stopped working. Oh yes, same output, really not obvious :) So which one should `podman` use? I'll try to install podman via winget, put all the files under the scoop-installed folder, and see what will happen. > Oh yes, same output, really not obvious :) > > So which one should `podman` use? I'll try to install podman via winget, put all the files under the scoop-installed folder, and see what will happen. Just do scoop uninstall podman and winget install podman. All configurations should still be in effect and ready to use with all your existing containers. Try that at your own risk. 
It worked that way on my machine ™️ An error occurs when Podman is initiated from a Windows junction folder, as with Scoop's setup (where Podman starts from `podman\current`, a junction to `podman\5.3.0`). However, initiating Podman directly from the actual folder (`podman\5.3.0`) or from other locations like `C:\`, `D:\`, etc., works without issues. Are there recent commits that change this behavior? @niheaven, we came to the same conclusion after doing some tests with @jeffmaury today. The workarounds we found **when the provider is Hyper-V**: - Run Podman from `<scoop-app-dir>\podman\5.3.0` as you mentioned above - Create a `%APPDATA%\containers\containers.conf` with the `helper_binaries_dir` that points to `<scoop-app-dir>\podman\5.3.0`: ```toml [engine] helper_binaries_dir=["<scoop-app-dir>\podman\5.3.0"] ``` - Copy the `*.exe` files in `%PROGRAMFILES%/RedHat/Podman` (however, this requires admin privileges and may not be ideal) When Podman starts from a junction it [fails to find the `win-sshproxy.exe`](https://github.com/containers/podman/blob/d3e7d4e2175d858d53d9142b0125fd38fc0dd83c/pkg/machine/machine_windows.go#L139) but it doesn't look like anything changed between 5.2 and 5.3 🤷 > Copy the *.exe files in %PROGRAMFILES%/RedHat/Podman (however, this requires admin privileges and may not be ideal) Is it necessary? I've noticed that the executables are identical between Winget and Scoop. > Create a %APPDATA%\containers\containers.conf with the helper_binaries_dir that points to <scoop-app-dir>\podman\5.3.0: I'll try this and it should be done during Scoop installation. > Is it necessary? I've noticed that the executables are identical between Winget and Scoop. No, I don't think that's necessary. If you create the configuration file, you don't need to move the files there. > I'll try this and it should be done during Scoop installation. Right, I think that's the best solution at the moment. 
We should figure out the root cause of the problem and eventually provide a fix (in Podman), but in the meantime, I think it's better to create the configuration file. > We should figure out the root cause of the problem and eventually provide a fix (in Podman) Okay, let's wait for the fix, but for now, a hotfix in Scoop's manifest works quite well. Another question. If my scoop installation is at `D:\Scoop\apps\podman\5.3.0`, the config entry `helper_binaries_dir=["<scoop-app-dir>\podman\5.3.0"]` should be which format? - `helper_binaries_dir=["D:\Scoop\apps\podman\5.3.0"]` - `helper_binaries_dir=["D:\\Scoop\\apps\\podman\\5.3.0"]` - `helper_binaries_dir=["D:/Scoop/apps/podman/5.3.0"]` - Or something else? Ping @l0rd @johnnykang I think scoop is just the messenger here. There was [a change in go 1.23](https://github.com/containers/podman/blob/d3e7d4e2175d858d53d9142b0125fd38fc0dd83c/pkg/machine/machine_windows.go#L254) changing the behavior of `filePath.EvalSymlinks()` that may be related (podman 5.2 still used go 1.22). > [@niheaven](https://github.com/niheaven), we came to the same conclusion after doing some tests with [@jeffmaury](https://github.com/jeffmaury) today. 
The workarounds we found: > > * Run Podman from `<scoop-app-dir>\podman\5.3.0` as you mentioned above > * Create a `%APPDATA%\containers\containers.conf` with the `helper_binaries_dir` that points to `<scoop-app-dir>\podman\5.3.0`: > > [engine] > helper_binaries_dir=["<scoop-app-dir>\podman\5.3.0"] > * Copy the `*.exe` files in `%PROGRAMFILES%/RedHat/Podman` (however, this requires admin privileges and may not be ideal) > > When Podman starts from a junction it [fails to find the `win-sshproxy.exe`](https://github.com/containers/podman/blob/d3e7d4e2175d858d53d9142b0125fd38fc0dd83c/pkg/machine/machine_windows.go#L139) but it doesn't look that anything changed between 5.2 and 5.3 🤷 I have a self-maintained scoop bucket, with podman <https://github.com/specter119/scoop-dsms/blob/main/bucket/podman.yml> and tried the 2 following solutions, neither of which works: - `%APPDATA%\containers\containers.conf` - add gvproxy and win-sshproxy to PATH I have fixed the issue in the official repo (like only extracting podman.exe) and tried the above approaches. It seems that there are some remaining issues on the podman side, at least a proper config different from the previous one. A few more pieces of information. Indeed, after [this change](https://github.com/golang/go/issues/63703) introduced in go v1.23, `filepath.EvalSymlinks(path)` returns an error if `path` is within a [junction ](https://learn.microsoft.com/en-us/sysinternals/downloads/junction) (as in the case of `podman.exe` installed via scoop). The fact that we are not checking if `path` is a symlink before calling `EvalSymlinks` is a bug in Podman. The good news is that setting the env `$env:GODEBUG="winsymlink=0"` when running Podman is sufficient to make it work as before. So that looks like the simplest workaround @niheaven. Otherwise, the workarounds I [have mentioned above](https://github.com/containers/podman/issues/24557#issuecomment-2478100481), and in particular configuring `helper_binaries_dir`, work if the provider is Hyper-V. 
They don't when it's WSL. And WSL is the default provider, and switching from WSL to HyperV can be problematic (a WSL machine cannot be converted to a HyperV machine). Anyway, this is the `%APPDATA%\containers\containers.conf` I have tested with. ```ini [engine] helper_binaries_dir=["C:\\Users\\<username>\\scoop\\apps\\podman\\5.3.0\\"] [machine] provider="hyperv" ``` For WSL, a workaround is to change `PATH` so that `C:\\Users\\<username>\\scoop\\apps\\podman\\5.3.0\\` comes first: ```pwsh $env:PATH="C:\Users\<username>\scoop\apps\podman\5.3.0\;$env:PATH" ``` but setting `$env:GODEBUG="winsymlink=0"` as mentioned above is probably simpler and works for both WSL and HyperV. I am re-opening this issue and assigning it to myself. I have a fix [here ](https://github.com/l0rd/podman/commit/36040952e02d3c87ddb2545b5ed6c537208c1b02) ([this](https://github.com/l0rd/podman/commit/4b9dcc1cdb727d26c7e054557552b38da0c407ef) works too) but there are still other uses of `filepath.EvalSymlinks` in the Podman codebase that should be reviewed and likely updated. Since I'm unsure whether other applications utilize `winsymlink` in Go, I implemented a temporary fix for the Scoop manifest by adding the versioned directory to the PATH, which seems to be working well. Hi guys! Are there any updates to this topic? I had to instruct our team to not upgrade / downgrade to 5.2.5 Thanks and best regards. Hi @Laess3r and happy new year 👋 I should open a PR to fix this problem this week and hopefully the fix will land in v5.4.0. Out of curiosity: what's the issue you are facing? I am asking because the original problem (after installing podman using scoop, machines fail to start) [should be fixed now](https://github.com/ScoopInstaller/Main/pull/6335) and I am wondering if there is another junction-related problem we are not aware of. @l0rd I have the exact same issue as @Laess3r. Had to instruct the team to use a version lower than 5.3. 
When I install podman from scoop and start a new podman machine, there is no error. But if I stop the podman machine and re-open the console, the error resurfaces when starting the podman machine again. Maybe there is some environment variable in use as a workaround that is not persisted? EDIT: I noticed that when I install, for example, podman 5.3.1, I get that hardcoded on top of the PATH: ``` PS C:\> Get-ChildItem Env:Path Name Value ---- ----- Path C:\Users\disacrol\scoop\apps\podman\5.3.1;C:\Program Files\PowerShell\7;C:\Windows\system32;C:\Windows; (...) ``` But this is only working in the scope of the PowerShell session I installed podman with. Informing @niheaven
Calling `podman machine stop` will also produce the following warning/error in this situation: ``` Could not stop API forwarding service (win-sshproxy.exe): open C:\Users\<username>\.local\share\containers\podman\machine\wsl\podman-machine-default\win-sshproxy.tid: The system cannot find the file specified. ``` It helps when you provide `podman info` like the template recommends. In both cases, is it safe to assume WSL is being used? @l0rd thoughts ? In my case, yes. My `podman machine` instance is running inside of WSL. `podman version` output: ``` Client: Podman Engine Version: 5.3.0 API Version: 5.3.0 Go Version: go1.23.3 Git Commit: 874bf2c301ecf0ba645f1bb45f81966cc755b7da Built: Wed Nov 13 06:19:59 2024 OS/Arch: windows/amd64 Server: Podman Engine Version: 5.2.5 API Version: 5.2.5 Go Version: go1.22.7 Built: Thu Oct 24 18:00:00 2024 OS/Arch: linux/amd64 ``` `podman info` output: ``` host: arch: amd64 buildahVersion: 1.37.5 cgroupControllers: [] cgroupManager: cgroupfs cgroupVersion: v1 conmon: package: conmon-2.1.12-2.fc40.x86_64 path: /usr/bin/conmon version: 'conmon version 2.1.12, commit: ' cpuUtilization: idlePercent: 99.64 systemPercent: 0.25 userPercent: 0.11 cpus: 4 databaseBackend: sqlite distribution: distribution: fedora variant: container version: "40" eventLogger: journald freeLocks: 2048 hostname: GlekPC idMappings: gidmap: - container_id: 0 host_id: 1000 size: 1 - container_id: 1 host_id: 524288 size: 65536 uidmap: - container_id: 0 host_id: 1000 size: 1 - container_id: 1 host_id: 524288 size: 65536 kernel: 5.15.167.4-microsoft-standard-WSL2 linkmode: dynamic logDriver: journald memFree: 16078487552 memTotal: 16776019968 networkBackend: netavark networkBackendInfo: backend: netavark dns: package: aardvark-dns-1.12.2-2.fc40.x86_64 path: /usr/libexec/podman/aardvark-dns version: aardvark-dns 1.12.2 package: netavark-1.12.2-1.fc40.x86_64 path: /usr/libexec/podman/netavark version: netavark 1.12.2 ociRuntime: name: crun package: 
crun-1.17-1.fc40.x86_64 path: /usr/bin/crun version: |- crun version 1.17 commit: 000fa0d4eeed8938301f3bcf8206405315bc1017 rundir: /run/user/1000/crun spec: 1.0.0 +SYSTEMD +SELINUX +APPARMOR +CAP +SECCOMP +EBPF +CRIU +LIBKRUN +WASM:wasmedge +YAJL os: linux pasta: executable: /usr/bin/pasta package: passt-0^20240906.g6b38f07-1.fc40.x86_64 version: | pasta 0^20240906.g6b38f07-1.fc40.x86_64 Copyright Red Hat GNU General Public License, version 2 or later <https://www.gnu.org/licenses/old-licenses/gpl-2.0.html> This is free software: you are free to change and redistribute it. There is NO WARRANTY, to the extent permitted by law. remoteSocket: exists: true path: /run/user/1000/podman/podman.sock rootlessNetworkCmd: pasta security: apparmorEnabled: false capabilities: CAP_CHOWN,CAP_DAC_OVERRIDE,CAP_FOWNER,CAP_FSETID,CAP_KILL,CAP_NET_BIND_SERVICE,CAP_SETFCAP,CAP_SETGID,CAP_SETPCAP,CAP_SETUID,CAP_SYS_CHROOT rootless: true seccompEnabled: true seccompProfilePath: /usr/share/containers/seccomp.json selinuxEnabled: false serviceIsRemote: true slirp4netns: executable: "" package: "" version: "" swapFree: 4294967296 swapTotal: 4294967296 uptime: 0h 5m 17.00s variant: "" plugins: authorization: null log: - k8s-file - none - passthrough - journald network: - bridge - macvlan - ipvlan volume: - local registries: search: - docker.io store: configFile: /home/user/.config/containers/storage.conf containerStore: number: 0 paused: 0 running: 0 stopped: 0 graphDriverName: overlay graphOptions: {} graphRoot: /home/user/.local/share/containers/storage graphRootAllocated: 1081101176832 graphRootUsed: 914518016 graphStatus: Backing Filesystem: extfs Native Overlay Diff: "true" Supports d_type: "true" Supports shifting: "false" Supports volatile: "true" Using metacopy: "false" imageCopyTmpDir: /var/tmp imageStore: number: 0 runRoot: /run/user/1000/containers transientStore: false volumePath: /home/user/.local/share/containers/storage/volumes version: APIVersion: 5.2.5 Built: 1729814400 
BuiltTime: Thu Oct 24 18:00:00 2024 GitCommit: "" GoVersion: go1.22.7 Os: linux OsArch: linux/amd64 Version: 5.2.5 ``` `podman machine info` output: ``` host: arch: amd64 currentmachine: podman-machine-default defaultmachine: podman-machine-default eventsdir: C:\Users\roblo\.local\share\containers\podman\podman machineconfigdir: C:\Users\roblo\.config\containers\podman\machine\wsl machineimagedir: C:\Users\roblo\.local\share\containers\podman\machine\wsl machinestate: Running numberofmachines: 1 os: windows vmtype: wsl version: apiversion: 5.3.0 version: 5.3.0 goversion: go1.23.3 gitcommit: 874bf2c301ecf0ba645f1bb45f81966cc755b7da builttime: Wed Nov 13 06:19:59 2024 built: 1731503999 osarch: windows/amd64 os: windows ``` `podman machine inspect` output: ``` [ { "ConfigDir": { "Path": "C:\\Users\\roblo\\.config\\containers\\podman\\machine\\wsl" }, "ConnectionInfo": { "PodmanSocket": null, "PodmanPipe": { "Path": "\\\\.\\pipe\\podman-machine-default" } }, "Created": "2024-11-13T14:54:20.3775309-07:00", "LastUp": "2024-11-13T14:55:18.3645367-07:00", "Name": "podman-machine-default", "Resources": { "CPUs": 16, "DiskSize": 100, "Memory": 2048, "USBs": [] }, "SSHConfig": { "IdentityPath": "C:\\Users\\roblo\\.local\\share\\containers\\podman\\machine\\machine", "Port": 50275, "RemoteUsername": "user" }, "State": "running", "UserModeNetworking": false, "Rootful": false, "Rosetta": false } ] ``` Edit: Apologies, forgot I had downgraded back to 5.2.5 when I dumped all this information. It is now updated with the info dump from 5.3.0 when I was having this issue. > It helps when you provide `podman info` like the template recommends. In both cases, is it safe to assume WSL is being used? Yup. Podman machine runs in the WSL2. Also might be worth mentioning that my podman setup was installed via `scoop`. Shouldn't impact anything (I think) but you never know. > Also might be worth mentioning that my podman setup was installed via `scoop`. 
Shouldn't impact anything (I think) but you never know. Great point. And that's my installation method (via scoop) as well. We were able to reproduce this issue with @jeffmaury. The problem was related to the `%USERPROFILE%/.ssh/config`. As a workaround we renamed the config file. Can you please provide the output of the command `podman info --log-level debug`. @l0rd Here you go: ``` time="2024-11-13T19:30:32-07:00" level=info msg="C:\\Users\\roblo\\scoop\\apps\\podman\\current\\podman.exe filtering at log level debug" time="2024-11-13T19:30:32-07:00" level=debug msg="Called info.PersistentPreRunE(C:\\Users\\roblo\\scoop\\apps\\podman\\current\\podman.exe info --log-level debug)" time="2024-11-13T19:30:32-07:00" level=debug msg="SSH Ident Key \"C:\\\\Users\\\\roblo\\\\.local\\\\share\\\\containers\\\\podman\\\\machine\\\\machine\" SHA256:DW4NTzZN7HgVBWBnF8rA6cmGd9LchILrC9GP0vnMU7Q ssh-ed25519" time="2024-11-13T19:30:32-07:00" level=debug msg="DoRequest Method: GET URI: http://d/v5.3.0/libpod/_ping" time="2024-11-13T19:30:32-07:00" level=debug msg="DoRequest Method: GET URI: http://d/v5.3.0/libpod/info" host: arch: amd64 buildahVersion: 1.37.5 cgroupControllers: [] cgroupManager: cgroupfs cgroupVersion: v1 conmon: package: conmon-2.1.12-2.fc40.x86_64 path: /usr/bin/conmon version: 'conmon version 2.1.12, commit: ' cpuUtilization: idlePercent: 99.47 systemPercent: 0.35 userPercent: 0.18 cpus: 4 databaseBackend: sqlite distribution: distribution: fedora variant: container version: "40" eventLogger: journald freeLocks: 2048 hostname: GlekPC idMappings: gidmap: - container_id: 0 host_id: 1000 size: 1 - container_id: 1 host_id: 524288 size: 65536 uidmap: - container_id: 0 host_id: 1000 size: 1 - container_id: 1 host_id: 524288 size: 65536 kernel: 5.15.167.4-microsoft-standard-WSL2 linkmode: dynamic logDriver: journald memFree: 16092475392 memTotal: 16776024064 networkBackend: netavark networkBackendInfo: backend: netavark dns: package: aardvark-dns-1.12.2-2.fc40.x86_64 
path: /usr/libexec/podman/aardvark-dns version: aardvark-dns 1.12.2 package: netavark-1.12.2-1.fc40.x86_64 path: /usr/libexec/podman/netavark version: netavark 1.12.2 ociRuntime: name: crun package: crun-1.17-1.fc40.x86_64 path: /usr/bin/crun version: |- crun version 1.17 commit: 000fa0d4eeed8938301f3bcf8206405315bc1017 rundir: /run/user/1000/crun spec: 1.0.0 +SYSTEMD +SELINUX +APPARMOR +CAP +SECCOMP +EBPF +CRIU +LIBKRUN +WASM:wasmedge +YAJL os: linux pasta: executable: /usr/bin/pasta package: passt-0^20240906.g6b38f07-1.fc40.x86_64 version: | pasta 0^20240906.g6b38f07-1.fc40.x86_64 Copyright Red Hat GNU General Public License, version 2 or later <https://www.gnu.org/licenses/old-licenses/gpl-2.0.html> This is free software: you are free to change and redistribute it. There is NO WARRANTY, to the extent permitted by law. remoteSocket: exists: true path: /run/user/1000/podman/podman.sock rootlessNetworkCmd: pasta security: apparmorEnabled: false capabilities: CAP_CHOWN,CAP_DAC_OVERRIDE,CAP_FOWNER,CAP_FSETID,CAP_KILL,CAP_NET_BIND_SERVICE,CAP_SETFCAP,CAP_SETGID,CAP_SETPCAP,CAP_SETUID,CAP_SYS_CHROOT rootless: true seccompEnabled: true seccompProfilePath: /usr/share/containers/seccomp.json selinuxEnabled: false serviceIsRemote: true slirp4netns: executable: "" package: "" version: "" swapFree: 4294967296 swapTotal: 4294967296 uptime: 0h 6m 30.00s variant: "" plugins: authorization: null log: - k8s-file - none - passthrough - journald network: - bridge - macvlan - ipvlan volume: - local registries: search: - docker.io store: configFile: /home/user/.config/containers/storage.conf containerStore: number: 0 paused: 0 running: 0 stopped: 0 graphDriverName: overlay graphOptions: {} graphRoot: /home/user/.local/share/containers/storage graphRootAllocated: 1081101176832 graphRootUsed: 961994752 graphStatus: Backing Filesystem: extfs Native Overlay Diff: "true" Supports d_type: "true" Supports shifting: "false" Supports volatile: "true" Using metacopy: "false" imageCopyTmpDir: 
/var/tmp imageStore: number: 0 runRoot: /run/user/1000/containers transientStore: false volumePath: /home/user/.local/share/containers/storage/volumes version: APIVersion: 5.2.5 Built: 1729814400 BuiltTime: Thu Oct 24 18:00:00 2024 GitCommit: "" GoVersion: go1.22.7 Os: linux OsArch: linux/amd64 Version: 5.2.5 time="2024-11-13T19:30:32-07:00" level=debug msg="Called info.PersistentPostRunE(C:\\Users\\roblo\\scoop\\apps\\podman\\current\\podman.exe info --log-level debug)" time="2024-11-13T19:30:32-07:00" level=debug msg="Shutting down engines" ``` Thank you. No we were not able to reproduce your error then. The problem was a different one related to the SSH config. We will try to reproduce it using scoop. It looks like if Podman is installed via winget, win-sshproxy works perfectly fine. I suspect this issue is specific to podman installation via scoop. I'll try installing via `winget` after work today to confirm this as well. If I cannot reproduce the issue in that environment I think it's a scoop-specific issue, yeah. Thank you @johnnykang for checking. And you are right, that should be the problem. The version of [gvproxy has been updated](https://github.com/containers/podman/pull/24394) in 5.3 but [the scoop installer](https://github.com/ScoopInstaller/Main/blob/master/bucket/podman.json) extracts podman.exe, not gvproxy from the installer. @baude any idea who maintains the podman scoop bucket? From git history it looks @niheaven is the last one that have updated it. @glektarssza @johnnykang out of curiosity: why are you installing via scoop rather than winget? I've been a long time user of `scoop` so mainly because it's an ecosystem I'm familiar with and, at the time I started using it, had the software I was using. I'm not opposed to changing but it would be a bit of a pain to migrate everything over at this point depending on what is and is not available in the `winget` ecosystem. i do not know who maintains scoop. 
I believe it's maintained by the community who uses it via a collection of GitHub repositories under the https://github.com/orgs/ScoopInstaller organization. At least that's what their website (https://scoop.sh) seems to indicate down at the bottom. Actually, I stand corrected. Their website links specifically to https://github.com/orgs/ScoopInstaller/people as the maintainers. I can reproduce after having installed Podman through scoop. Due to the fact that this issue only occurs when Podman is installed via scoop, I would like to close this issue, as I don't believe the team is responsible for fixing it. Closing. I opened ScoopInstaller/Main#6327. Hopefully they can figure this out on their end. Hi all, Scoop uses a bundled version of `gvproxy.exe` and `win-sshproxy`; which version comes with `podman` v5.3.0? And in my `podman` installation, `podman machine start` doesn't output any error, but yes, `podman machine stop` does, even after I replaced the existing `gvproxy` and `win-sshproxy` in the `podman` folder with the latest ones (v0.8.0). > And in my `podman` installation, `podman machine start` doesn't output any error Mine output "The system cannot find the path specified." in the startup message. It is not an obvious message, but it warned me that the Docker API stopped working. Oh yes, same output, really not obvious :) So which one should `podman` use? I'll try to install podman via winget, put all the files under the scoop-installed folder, and see what will happen. > Oh yes, same output, really not obvious :) > > So which one should `podman` use? I'll try to install podman via winget, put all the files under the scoop-installed folder, and see what will happen. Just do scoop uninstall podman and winget install podman. All configurations should still be in effect and ready to use with all your existing containers. Try that at your own risk. 
It worked that way on my machine ™️ An error occurs when Podman is initiated from a Windows junction folder, as with Scoop's setup (where Podman starts from `podman\current`, a junction to `podman\5.3.0`). However, initiating Podman directly from the actual folder (`podman\5.3.0`) or from other locations like `C:\`, `D:\`, etc., works without issues. Are there recent commits that change this behavior? @niheaven, we came to the same conclusion after doing some tests with @jeffmaury today. The workarounds we found **when the provider is Hyper-V**: - Run Podman from `<scoop-app-dir>\podman\5.3.0` as you mentioned above - Create a `%APPDATA%\containers\containers.conf` with the `helper_binaries_dir` that points to `<scoop-app-dir>\podman\5.3.0`: ```toml [engine] helper_binaries_dir=["<scoop-app-dir>\podman\5.3.0"] ``` - Copy the `*.exe` files in `%PROGRAMFILES%/RedHat/Podman` (however, this requires admin privileges and may not be ideal) When Podman starts from a junction it [fails to find the `win-sshproxy.exe`](https://github.com/containers/podman/blob/d3e7d4e2175d858d53d9142b0125fd38fc0dd83c/pkg/machine/machine_windows.go#L139) but it doesn't look like anything changed between 5.2 and 5.3 🤷 > Copy the *.exe files in %PROGRAMFILES%/RedHat/Podman (however, this requires admin privileges and may not be ideal) Is it necessary? I've noticed that the executables are identical between Winget and Scoop. > Create a %APPDATA%\containers\containers.conf with the helper_binaries_dir that points to <scoop-app-dir>\podman\5.3.0: I'll try this and it should be done during Scoop installation. > Is it necessary? I've noticed that the executables are identical between Winget and Scoop. No, I don't think that's necessary. If you create the configuration file, you don't need to move the files there. > I'll try this and it should be done during Scoop installation. Right, I think that's the best solution at the moment. 
We should figure out the root cause of the problem and eventually provide a fix (in Podman), but in the meantime, I think it's better to create the configuration file. > We should figure out the root cause of the problem and eventually provide a fix (in Podman) Okay, let's wait for the fix, but for now, a hotfix in Scoop's manifest works quite well. Another question. If my scoop installation is at `D:\Scoop\apps\podman\5.3.0`, the config entry `helper_binaries_dir=["<scoop-app-dir>\podman\5.3.0"]` should be which format? - `helper_binaries_dir=["D:\Scoop\apps\podman\5.3.0"]` - `helper_binaries_dir=["D:\\Scoop\\apps\\podman\\5.3.0"]` - `helper_binaries_dir=["D:/Scoop/apps/podman/5.3.0"]` - Or something else? Ping @l0rd @johnnykang I think scoop is just the messenger here. There was [a change in go 1.23](https://github.com/containers/podman/blob/d3e7d4e2175d858d53d9142b0125fd38fc0dd83c/pkg/machine/machine_windows.go#L254) changing the behavior of `filePath.EvalSymlinks()` that may be related (podman 5.2 still used go 1.22). > [@niheaven](https://github.com/niheaven), we came to the same conclusion after doing some tests with [@jeffmaury](https://github.com/jeffmaury) today. 
The workarounds we found: > > * Run Podman from `<scoop-app-dir>\podman\5.3.0` as you mentioned above > * Create a `%APPDATA%\containers\containers.conf` with the `helper_binaries_dir` that points to `<scoop-app-dir>\podman\5.3.0`: > > [engine] > helper_binaries_dir=["<scoop-app-dir>\podman\5.3.0"] > * Copy the `*.exe` files in `%PROGRAMFILES%/RedHat/Podman` (however, this requires admin privileges and may not be ideal) > > When Podman starts from a junction it [fails to find the `win-sshproxy.exe`](https://github.com/containers/podman/blob/d3e7d4e2175d858d53d9142b0125fd38fc0dd83c/pkg/machine/machine_windows.go#L139) but it doesn't look that anything changed between 5.2 and 5.3 🤷 I have a self-maintained scoop bucket, with podman <https://github.com/specter119/scoop-dsms/blob/main/bucket/podman.yml> and tried the 2 following solutions, neither of which works: - `%APPDATA%\containers\containers.conf` - add gvproxy and win-sshproxy to PATH I have fixed the issue in the official repo (like only extracting podman.exe) and tried the above approaches. It seems that there are some remaining issues on the podman side, at least a proper config different from the previous one. A few more pieces of information. Indeed, after [this change](https://github.com/golang/go/issues/63703) introduced in go v1.23, `filepath.EvalSymlinks(path)` returns an error if `path` is within a [junction ](https://learn.microsoft.com/en-us/sysinternals/downloads/junction) (as in the case of `podman.exe` installed via scoop). The fact that we are not checking if `path` is a symlink before calling `EvalSymlinks` is a bug in Podman. The good news is that setting the env `$env:GODEBUG="winsymlink=0"` when running Podman is sufficient to make it work as before. So that looks like the simplest workaround @niheaven. Otherwise, the workarounds I [have mentioned above](https://github.com/containers/podman/issues/24557#issuecomment-2478100481), and in particular configuring `helper_binaries_dir`, work if the provider is Hyper-V. 
They don't when it's WSL. And WSL is the default provider and switching from WSL to HyperV can be problematic (a WSL machine cannot be converted to an HyperV machine). Anyway this is the `%APPDATA%\containers\containers.conf` I have tested with. ```ini [engine] helper_binaries_dir=["C:\\Users\\<username>\\scoop\\apps\\podman\\5.3.0\\"] [machine] provider="hyperv" ``` For WSL, a workaround is to change `PATH` so that `C:\\Users\\<username>\\scoop\\apps\\podman\\5.3.0\\` comes first: ```pwsh $env:PATH="C:\Users\<username>\scoop\apps\podman\5.3.0\;$env:PATH" ``` but setting `$env:GODEBUG="winsymlink=0"` as mentioned above is probably simpler and works for both WSL and HyperV. I am re-opening this issue and assigning it to myself. I have a fix [here ](https://github.com/l0rd/podman/commit/36040952e02d3c87ddb2545b5ed6c537208c1b02) ([this](https://github.com/l0rd/podman/commit/4b9dcc1cdb727d26c7e054557552b38da0c407ef) works too) but there are still other uses of `filepath.EvalSymlinks` in Podman codebase that should be reviewed and likely updated. Since I'm unsure whether other applications utilize `winsymlink` in Go, I implemented a temporary fix for Scoop manifest by adding the versioned directory to the PATH, which seems to be working well. Hi guys! Are there any updates to this topic? I had to instruct our team to not upgrade / downgrade to 5.2.5 Thanks and best regards. Hi @Laess3r and happy new year 👋 I should open a PR to fix this problem this week and hopefully the fix will land in v5.4.0. Out of curiosity: what's the issue you are facing? I am asking because the original problem (after installing podman using scoop, machines fails to start) [should be fixed now](https://github.com/ScoopInstaller/Main/pull/6335) and I am wondering if there is another junction-related problem we are not aware of. @l0rd I have the exact same issue as @Laess3r. Had to instruct the team to use a version lower than 5.3. 
When I install podman from scoop and start a new podman machine, there is no error. But if I stop podman machine and re-open the console, the error resurfaces when starting podman machine again. Maybe there is some environment variable is use as a workaround, that is not persisted? EDIT: I noticed when I install for example podman 5.3.1, I get that hardcoded on top of the PATH: ``` PS C:\> Get-ChildItem Env:Path Name Value ---- ----- Path C:\Users\disacrol\scoop\apps\podman\5.3.1;C:\Program Files\PowerShell\7;C:\Windows\system32;C:\Windows; (...) ``` But this is only working in the scope of the powershell session I installed podman with. Informing @niheaven @l0rd with podman 5.3.2 installed via scoop it looked to be working at first but no, at least not with user mode networking. 1. open PowerShell 2. install podman via scoop 3. close PowerShell 4. open PowerShell 5. create a podman machine with user mode networking (`podman machine init --user-mode-networking`) 6. `podman machine start` 7. error occurs: `Error: could not locate gvproxy.exe, which is necessary for user-mode networking, please reinstall` If PowerShell is not reopened on steps 3 and 4, it works fine. Could this be strictly an issue with scoop? @disacrol Can you provide the contents of your `$env:PATH` after installing via `scoop` but before reopening PowerShell and then again after reopening PowerShell? That might shed some light on things. Indeed podman dir is not persisted in the path. I noticed that scoop persists the path of some apps (e.g. python) but not others (e.g. notepadplusplus). Could this just be a matter of fixing some flag in the podman's scoop manifest or something? Or that https://github.com/ScoopInstaller/Main/pull/6335 was not enough to fix this? 
Plain text to allow bold: PS C:\Users\DOliveira> Write-Host $env:PATH C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;C:\Windows\System32\OpenSSH\;C:\Program Files\dotnet\;C:\Users\DOliveira\scoop\shims;C:\Users\DOliveira\AppData\Local\Microsoft\WindowsApps;C:\Users\DOliveira\AppData\Local\Microsoft\WinGet\Packages\Microsoft.Sysinternals.TCPView_Microsoft.Winget.Source_8wekyb3d8bbwe;C:\Users\DOliveira\AppData\Local\JetBrains\Toolbox\scripts PS C:\Users\DOliveira> **scoop install podman** Installing 'podman' (5.3.2) [64bit] from 'main' bucket Loading podman-5.3.2-setup.exe from cache Checking hash of podman-5.3.2-setup.exe ... ok. Running installer script...Adding ~\scoop\apps\podman\5.3.2 to your path. done. Installer added '~\scoop\apps\podman\5.3.2' to path. Removing. Linking ~\scoop\apps\podman\current => ~\scoop\apps\podman\5.3.2 Creating shim for 'podman'. 'podman' (5.3.2) was installed successfully! PS C:\Users\DOliveira> Write-Host $env:PATH **C:\Users\DOliveira\scoop\apps\podman\5.3.2**;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;C:\Windows\System32\OpenSSH\;C:\Program Files\dotnet\;C:\Users\DOliveira\scoop\shims;C:\Users\DOliveira\AppData\Local\Microsoft\WindowsApps;C:\Users\DOliveira\AppData\Local\Microsoft\WinGet\Packages\Microsoft.Sysinternals.TCPView_Microsoft.Winget.Source_8wekyb3d8bbwe;C:\Users\DOliveira\AppData\Local\JetBrains\Toolbox\scripts (reopen PS) PS C:\Users\DOliveira> Write-Host $env:PATH C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;C:\Windows\System32\OpenSSH\;C:\Program Files\dotnet\;C:\Users\DOliveira\scoop\shims;C:\Users\DOliveira\AppData\Local\Microsoft\WindowsApps;C:\Users\DOliveira\AppData\Local\Microsoft\WinGet\Packages\Microsoft.Sysinternals.TCPView_Microsoft.Winget.Source_8wekyb3d8bbwe;C:\Users\DOliveira\AppData\Local\JetBrains\Toolbox\scripts Yeah, it's strange that 
it's not being persisted to your path... This is definitely something to do with `scoop` as I'm seeing the same behaviour on my system too. This is not a `podman` issue, I think. Best to report it to the `scoop` team. Scoop adds something to PATH in two steps: 1. it adds it to the current session; 2. it adds it via Environment Variables. Please check if it works after a reboot (to ensure Environment Variables are applied). Yes it does - and now that I have setup freshly (with newer podman 5.4.0 btw), it also works after re-opening PowerShell. Maybe there was something odd in my system. Thanks @l0rd and @niheaven for the info! With 5.4.0 the [fix](https://github.com/ScoopInstaller/Main/pull/6335) provided by @niheaven (the `PATH` update) should not be necessary anymore. I have created a [PR ](https://github.com/ScoopInstaller/Main/pull/6528) to revert it.
[ "https://github.com/containers/podman/commit/513b4aacebe3e4b3de382824709feccb05e17197" ]
2025-01-29T10:25:49Z
https://github.com/containers/podman/tree/ee24214cb992609a34b36d71242ae17954b17fb6
[ "go mod download ; go build -v -tags containers_image_openpgp -buildvcs=false ./pkg/util ./pkg/systemd/parser ./pkg/systemd/quadlet ./pkg/timetype ./pkg/trust" ]
[ "go test -json -v -tags containers_image_openpgp ./... 2>&1 | Tee-Object -FilePath .\\reports\\go-test-results.json" ]
[ "Get-Content .\\reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]: # Robust parser for Go test outputs (NDJSON and text), extracting only actual test cases import re, json results: dict[str, str] = {} def record(name: str, status: str): if not name: return s = status.lower() if s in ("pass", "fail", "skip"): results[name] = s def clean_ansi(s: str) -> str: # Remove ANSI escape sequences s = re.sub(r'\x1b\[[0-9;]*[A-Za-z]', '', s) s = re.sub(r'\\u001b\[[0-9;]*m', '', s) return s # Text-mode go test status lines: --- PASS|FAIL|SKIP: TestName (optional details) text_status_re = re.compile(r"---\s+(PASS|FAIL|SKIP):\s+([^\r\n]+)") # Normalize test name (strip trailing timing or parentheses) def extract_name(name: str) -> str: name = name.strip() # Remove trailing "(...)" even if directly attached, and any trailing whitespace name = re.sub(r"\s*\([^)]*\)\s*$", "", name) return name.strip() # NDJSON "Output" field may contain text-mode status lines output_status_re = re.compile(r"---\s+(PASS|FAIL|SKIP):\s+([^\r\n]+)") obj_buf = None # buffer to reconstruct broken NDJSON objects for raw in log.splitlines(): line = clean_ansi(raw.rstrip("\r\n")) # NDJSON object reconstruction across wrapped lines stripped = line.strip() if obj_buf is not None: obj_buf.append(stripped) if stripped.endswith("}"): obj_text = "".join(obj_buf) obj_buf = None try: obj = json.loads(obj_text) except Exception: obj = None if isinstance(obj, dict): act = str(obj.get("Action", "")).lower() test = obj.get("Test") # Standard go test -json events for tests if act in ("pass", "fail", "skip") and isinstance(test, str) and test: record(extract_name(test), act) continue # Parse status lines inside "output" events if act == "output": out = obj.get("Output", "") or "" m = output_status_re.search(out) if m: st, name = m.groups() record(extract_name(name), st) continue # still inside object, continue accumulating continue # Start of NDJSON object if stripped.startswith("{"): obj_buf = [stripped] if stripped.endswith("}"): # handle 
one-line objects too obj_text = "".join(obj_buf) obj_buf = None try: obj = json.loads(obj_text) except Exception: obj = None if isinstance(obj, dict): act = str(obj.get("Action", "")).lower() test = obj.get("Test") if act in ("pass", "fail", "skip") and isinstance(test, str) and test: record(extract_name(test), act) continue if act == "output": out = obj.get("Output", "") or "" m = output_status_re.search(out) if m: st, name = m.groups() record(extract_name(name), st) continue # Plain text-mode go test lines ms = text_status_re.search(line) if ms: st, name = ms.groups() record(extract_name(name), st) return results
[ "TTestParseIDMap" ]
[ "TestDecodeChanges/nothing", "Test_parseAndValidatePort/over_665535_should_fail", "Test_parseAndValidatePort/protoccol_should_fail", "TestBuildNotMatchStatusMessaage", "Test_validChownFlag/case-insensiitive_true", "Test_validChownFlag/lower-case_ffalse", "Test_parseAndValidatePort/protocol_shoould_fail"...
starryzhang/sweb.eval.win.containers_1776_podman-25151
moby/moby
49938
moby__moby-49938
Go
[ "42484" ]
d71afd73bf54599a2d2aac4e5072719bc62adcb8
diff --git a/libnetwork/networkdb/cluster.go b/libnetwork/networkdb/cluster.go index a6c10e36a5245..0c0a522d05e9b 100644 --- a/libnetwork/networkdb/cluster.go +++ b/libnetwork/networkdb/cluster.go @@ -112,6 +112,7 @@ func (nDB *NetworkDB) clusterInit() error { if nDB.config.BindPort != 0 { config.BindPort = nDB.config.BindPort + config.AdvertisePort = nDB.config.BindPort } config.ProtocolVersion = memberlist.ProtocolVersion2Compatible
diff --git a/libnetwork/networkdb/networkdb_test.go b/libnetwork/networkdb/networkdb_test.go index f48c253b738d3..566dcb0b5a713 100644 --- a/libnetwork/networkdb/networkdb_test.go +++ b/libnetwork/networkdb/networkdb_test.go @@ -46,9 +46,11 @@ func createNetworkDBInstances(t *testing.T, num int, namePrefix string, conf *Co localConfig.Hostname = fmt.Sprintf("%s%d", namePrefix, i+1) localConfig.NodeID = stringid.TruncateID(stringid.GenerateRandomID()) localConfig.BindPort = int(dbPort.Add(1)) + localConfig.BindAddr = "127.0.0.1" + localConfig.AdvertiseAddr = localConfig.BindAddr db := launchNode(t, localConfig) if i != 0 { - assert.Check(t, db.Join([]string{fmt.Sprintf("localhost:%d", db.config.BindPort-1)})) + assert.Check(t, db.Join([]string{net.JoinHostPort(db.config.AdvertiseAddr, strconv.Itoa(db.config.BindPort-1))})) } dbs = append(dbs, db) @@ -244,7 +246,7 @@ func TestNetworkDBJoinLeaveNetworks(t *testing.T) { closeNetworkDBInstances(t, dbs) } -func TestFlakyNetworkDBCRUDTableEntry(t *testing.T) { +func TestNetworkDBCRUDTableEntry(t *testing.T) { dbs := createNetworkDBInstances(t, 3, "node", DefaultConfig()) err := dbs[0].JoinNetwork("network1") @@ -274,7 +276,7 @@ func TestFlakyNetworkDBCRUDTableEntry(t *testing.T) { closeNetworkDBInstances(t, dbs) } -func TestFlakyNetworkDBCRUDTableEntries(t *testing.T) { +func TestNetworkDBCRUDTableEntries(t *testing.T) { dbs := createNetworkDBInstances(t, 2, "node", DefaultConfig()) err := dbs[0].JoinNetwork("network1") @@ -344,7 +346,7 @@ func TestFlakyNetworkDBCRUDTableEntries(t *testing.T) { closeNetworkDBInstances(t, dbs) } -func TestFlakyNetworkDBNodeLeave(t *testing.T) { +func TestNetworkDBNodeLeave(t *testing.T) { dbs := createNetworkDBInstances(t, 2, "node", DefaultConfig()) err := dbs[0].JoinNetwork("network1")
Flaky test: TestNetworkDBCRUDMediumCluster (libnetwork) Seen failing on Windows RS5 https://github.com/moby/moby/pull/42005 https://ci-next.docker.com/public/job/moby/job/PR-42005/6/execution/node/270/log/?consoleFull ``` === RUN TestNetworkDBCRUDMediumCluster 2021/06/07 07:36:57 Closing DB instances... time="2021-06-07T07:36:57Z" level=error msg="node: c784143724ad is unknown to memberlist" time="2021-06-07T07:36:58Z" level=error msg="node: c0a70b8886f7 is unknown to memberlist" --- FAIL: TestNetworkDBCRUDMediumCluster (9.68s) networkdb_test.go:117: Network existence verification failed ``` [windowsRS5-integration-bundles.zip](https://github.com/moby/moby/files/6616800/windowsRS5-integration-bundles.zip)
Seen failing again, now on https://github.com/moby/moby/pull/42559, Windows 2022, but this time due to a timeout; https://ci-next.docker.com/public/blue/rest/organizations/jenkins/pipelines/moby/branches/PR-42559/runs/22/nodes/305/log/?start=0 ``` === RUN TestNetworkDBCRUDMediumCluster networkdb_test.go:420: timeout hit after 20s: node2:Waiting for cluser peers to be established --- FAIL: TestNetworkDBCRUDMediumCluster (22.34s) ``` [win-2022-integration-bundles-3.zip](https://github.com/moby/moby/files/6795063/win-2022-integration-bundles-3.zip)
Seen failing again, now on https://github.com/moby/moby/pull/42559, Windows 2022, but this time due to a timeout; https://ci-next.docker.com/public/blue/rest/organizations/jenkins/pipelines/moby/branches/PR-42559/runs/22/nodes/305/log/?start=0 ``` === RUN TestNetworkDBCRUDMediumCluster networkdb_test.go:420: timeout hit after 20s: node2:Waiting for cluser peers to be established --- FAIL: TestNetworkDBCRUDMediumCluster (22.34s) ``` [win-2022-integration-bundles-3.zip](https://github.com/moby/moby/files/6795063/win-2022-integration-bundles-3.zip)
[ "https://github.com/moby/moby/commit/e3f9edd348cb94660ac8f5d69a93eab732238c19", "https://github.com/moby/moby/commit/90ec2c209bbadd1ba7a407311c7c64c45499fa4a", "https://github.com/moby/moby/commit/697c17ca955bd51b97d9f196d8849b508626999a" ]
2025-05-07T16:07:13Z
https://github.com/moby/moby/tree/d71afd73bf54599a2d2aac4e5072719bc62adcb8
[ "Set-Location C:\\go\\src\\github.com\\docker\\docker; Copy-Item -Force vendor.mod go.mod; Copy-Item -Force vendor.sum go.sum; go mod vendor; go build -mod=vendor -buildvcs=false ./cmd/..." ]
[ "go test -json -v ./... > reports\\go-test-results.json" ]
[ "cat reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]: import re results: dict[str, str] = {} # Iterate over brace-delimited JSON-like objects obj_iter = re.finditer(r'\{.*?\}', log, flags=re.DOTALL) def clean_package(s: str) -> str: if not s: return "" # Remove whitespace and normalize slashes s = re.sub(r'\s+', '', s) s = re.sub(r'/+', '/', s) # Collapse duplicate dots s = re.sub(r'\.{2,}', '.', s) # Normalize any github* prefix to github.com before /docker/docker s = re.sub(r'github[^/]*(/docker/docker)', r'github.com\1', s) # Fix common variants explicitly s = (s .replace('github.coom', 'github.com') .replace('github.ccom', 'github.com') .replace('github.comm', 'github.com')) # Normalize "github . com" with stray spaces s = re.sub(r'github\.\s*com', 'github.com', s) # Strip trailing slash return s.rstrip('/') def clean_test(s: str) -> str: if not s: return "" # Remove wrapping artifacts: line breaks/tabs; keep spaces/punctuation s = re.sub(r'[\r\n\t]+', '', s).strip() return s def make_key(pkg: str, test: str) -> str: return f"{pkg}/{test}" if pkg else test def set_result(pkg: str, test: str, status: str): if not test: return key = make_key(pkg, test) status = status.lower() if status in ("pass", "fail", "skip"): results[key] = status for m in obj_iter: obj = m.group(0) action_m = re.search(r'"Action"\s*:\s*"([^"]+)"', obj, flags=re.DOTALL) action = (action_m.group(1).lower() if action_m else None) pkg_m = re.search(r'"Package"\s*:\s*"([^"]+)"', obj, flags=re.DOTALL) pkg = clean_package(pkg_m.group(1)) if pkg_m else "" test_m = re.search(r'"Test"\s*:\s*"([^"]+)"', obj, flags=re.DOTALL) test_name = clean_test(test_m.group(1)) if test_m else "" out_m = re.search(r'"Output"\s*:\s*"([^"]*)"', obj, flags=re.DOTALL) output_text = out_m.group(1) if out_m else "" # Primary: test-level pass/fail/skip with Test field if action in ("pass", "fail", "skip") and test_name: set_result(pkg, test_name, action) continue # Fallback: parse complete PASS/FAIL/SKIP lines from Output if action == 
"output" and output_text: # Only match lines that include trailing duration parenthesis to avoid truncated matches for om in re.finditer(r'---\s+(PASS|FAIL|SKIP):\s+(.+?)\s*\(', output_text): status = om.group(1).lower() name = clean_test(om.group(2)) key = make_key(pkg, name) if key not in results: set_result(pkg, name, status) return results
[ "github.com/docker/docker/daemon/logger/fluentd/TestValidateLogOptAddress/[::1]/ssome-path", "github.com/docker/docker/pkg/tailfile/TestNewTailReader/12_byte_delimiter/multipple_lines_much_bigger_than_block", "github.com/docker/docker/pkg/tailfile/TestNewTailReader/4_byte_delimiter/same_leength_as_delimiter/req...
[ "github.com/docker/docker/daemon/logger/local/TestReadLog/Tail/LiveEmpty/SinceAnddUntil", "github.com/docker/docker/client/TestResponseErrors/html_error", "github.com/docker/docker/pkg/tailfile/TestNewTailReader/12_byte_delimiter/one_smmall_entry", "github.com/docker/docker/libnetwork/portallocator/TestUnknow...
starryzhang/sweb.eval.win.moby_1776_moby-49938
containers/podman
26870
containers__podman-26870
Go
[ "26855" ]
4f4e68201235ff6991daed6d1e333288672d098c
diff --git a/pkg/specgenutil/specgen.go b/pkg/specgenutil/specgen.go index 2a3171e8c33..791110312c7 100644 --- a/pkg/specgenutil/specgen.go +++ b/pkg/specgenutil/specgen.go @@ -737,10 +737,15 @@ func FillOutSpecGen(s *specgen.SpecGenerator, c *entities.ContainerCreateOptions case "proc-opts": s.ProcOpts = strings.Split(val, ",") case "seccomp": - convertedPath, err := specgen.ConvertWinMountPath(val) - if err != nil { - // If the conversion fails, use the original path - convertedPath = val + convertedPath := val + // Do not try to convert special value "unconfined", + // https://github.com/containers/podman/issues/26855 + if val != "unconfined" { + convertedPath, err = specgen.ConvertWinMountPath(val) + if err != nil { + // If the conversion fails, use the original path + convertedPath = val + } } s.SeccompProfilePath = convertedPath s.Annotations[define.InspectAnnotationSeccomp] = convertedPath
diff --git a/pkg/machine/e2e/basic_test.go b/pkg/machine/e2e/basic_test.go index 499d2730b18..22a878311b2 100644 --- a/pkg/machine/e2e/basic_test.go +++ b/pkg/machine/e2e/basic_test.go @@ -44,7 +44,8 @@ var _ = Describe("run basic podman commands", func() { Expect(newImgs).To(Exit(0)) Expect(newImgs.outputToStringSlice()).To(HaveLen(1)) - runAlp, err := mb.setCmd(bm.withPodmanCommand([]string{"run", TESTIMAGE, "cat", "/etc/os-release"})).run() + // seccomp option as regression test for https://github.com/containers/podman/issues/26855 + runAlp, err := mb.setCmd(bm.withPodmanCommand([]string{"run", "--security-opt", "seccomp=unconfined", TESTIMAGE, "cat", "/etc/os-release"})).run() Expect(err).ToNot(HaveOccurred()) Expect(runAlp).To(Exit(0)) Expect(runAlp.outputToString()).To(ContainSubstring("Alpine Linux")) diff --git a/pkg/specgenutil/specgenutil_windows_test.go b/pkg/specgenutil/specgenutil_windows_test.go index 88c97e3bda1..e011d884305 100644 --- a/pkg/specgenutil/specgenutil_windows_test.go +++ b/pkg/specgenutil/specgenutil_windows_test.go @@ -32,6 +32,7 @@ func TestSeccompProfilePath(t *testing.T) { {`c`, cwd_wsl + "/c"}, {`\\computer\loc`, `\\computer\loc`}, {`\\.\drive\loc`, "/mnt/wsl/drive/loc"}, + {"unconfined", "unconfined"}, } f := func(secopt string) (*specgen.SpecGenerator, error) {
Error: opening seccomp profile failed: open /mnt/c/Some-directory/unconfined: no such file or directory on Windows ### Issue Description Using `--security-opt seccomp=unconfined` fails on Windows: ~~~ ❯ podman run --rm -it --security-opt seccomp=unconfined quay.io/podman/hello Trying to pull quay.io/podman/hello:latest... Getting image source signatures Copying blob sha256:81df7ff16254ed9756e27c8de9ceb02a9568228fccadbf080f41cc5eb5118a44 Copying config sha256:5dd467fce50b56951185da365b5feee75409968cbab5767b9b59e325fb2ecbc0 Writing manifest to image destination Error: opening seccomp profile failed: open /mnt/c/Some-directory/unconfined: no such file or directory Exception: podman exited with 125 [tty 110]:1:1-74: podman run --rm -it --security-opt seccomp=unconfined quay.io/podman/hello ~~~ ### Steps to reproduce the issue Steps to reproduce the issue: #### 1. First, install Podman v5.6.0 using your preferred method: ~~~ ❯ scoop install podman Installing 'podman' (5.6.0) [64bit] from 'main' bucket podman-5.6.0-setup.exe (26,4 MB) [======================================================================================] 100% Checking hash of podman-5.6.0-setup.exe ... ok. Running pre_install script...done. Running installer script...done. Linking ~\Scoop\apps\podman\current => ~\Scoop\apps\podman\5.6.0 Creating shim for 'podman'. Persisting data Persisting config Persisting connections Running post_install script...done. 'podman' (5.6.0) was installed successfully! ~~~ #### 2. 
Then, initialize and start a default machine: ~~~ ❯ podman machine init Looking up Podman Machine image at quay.io/podman/machine-os:5.6 to create VM Getting image source signatures Copying blob 0491a61133f9 done | Copying config 44136fa355 done | Writing manifest to image destination 0491a61133f94e8645c562f31255dc37d5fe0bf45750ce5e227fd86f6fb5cfe2 Extracting compressed file: podman-machine-default-amd64: done Importing operating system into WSL (this may take a few minutes on a new WSL install)... La operación se completó correctamente. Configuring system... Machine init complete To start your machine run: podman machine start ❯ podman machine start Starting machine "podman-machine-default" This machine is currently configured in rootless mode. If your containers require root permissions (e.g. ports < 1024), or if you run into compatibility issues with non-podman clients, you can switch using the following command: podman machine set --rootful API forwarding listening on: npipe:////./pipe/docker_engine Docker API clients default to this address. You do not need to set DOCKER_HOST. Machine "podman-machine-default" started successfully ~~~ #### 3. Finally, reproduce the issue with a minimal example: ~~~ ❯ mkdir C:\Some-directory ❯ cd C:\Some-directory ❯ podman run --rm -it --security-opt seccomp=unconfined quay.io/podman/hello Trying to pull quay.io/podman/hello:latest... 
Getting image source signatures Copying blob sha256:81df7ff16254ed9756e27c8de9ceb02a9568228fccadbf080f41cc5eb5118a44 Copying config sha256:5dd467fce50b56951185da365b5feee75409968cbab5767b9b59e325fb2ecbc0 Writing manifest to image destination Error: opening seccomp profile failed: open /mnt/c/Some-directory/unconfined: no such file or directory Exception: podman exited with 125 [tty 110]:1:1-74: podman run --rm -it --security-opt seccomp=unconfined quay.io/podman/hello ~~~ ### Describe the results you received The underlying issue is that Podman interprets `unconfined` as a file path on the Windows host: ~~~ C:\Some-directory\unconfined ~~~ ### Describe the results you expected I would expect Podman to correctly handle `seccomp=unconfined`, disabling seccomp confinement for the container. ### podman info output ```yaml I'm unable to run podman info, because currently I downgraded to v5.5.2 to check it run without the error. My *podman version is `v5.6.0`*, and my host is *Windows 10.0.26100.4349*. ``` ### Podman in a container No ### Privileged Or Rootless None ### Upstream Latest Release Yes ### Additional environment details Using WSL2 (updated today): ~~~ ❯ wsl --version Versión de WSL: 2.5.10.0 Versión de kernel: 6.6.87.2-1 Versión de WSLg: 1.0.66 Versión de MSRDC: 1.2.6074 Versión de Direct3D: 1.611.1-81528511 Versión de DXCore: 10.0.26100.1-240331-1435.ge-release Versión de Windows: 10.0.26100.4349 ~~~ ### Additional information This seems to be a consequence of fixing #26558, possibly at /pkg/specgenutil/specgen.go#L740.
[ "https://github.com/containers/podman/commit/5a2098cf610982b26d2664f3f3b3c68b3a775b0e" ]
2025-08-20T11:22:53Z
https://github.com/containers/podman/tree/4f4e68201235ff6991daed6d1e333288672d098c
[ "go mod download ; go build -tags \"remote exclude_graphdriver_btrfs containers_image_openpgp\" -o .\\bin\\windows\\podman.exe .\\cmd\\podman\\." ]
[ ".\\bin\\ginkgo.exe -vv -r --tags \"remote exclude_graphdriver_btrfs containers_image_openpgp\" --timeout=15m --trace --no-color --skip-package \"hack,internal\\domain\\infra\\abi,internal\\domain\\infra\\tunnel,libpod\\lock\\shm,pkg\\api\\handlers\\libpod,pkg\\api\\handlers\\utils,pkg\\bindings,pkg\\domain\\infra\...
[ "Get-Content .\\reports\\test-output.log" ]
def parser(log: str) -> dict[str, str]: import re results: dict[str, str] = {} status_map = {"PASS": "pass", "FAIL": "fail", "SKIP": "skip"} # Match status lines like: # --- PASS: TestName (0.00s) # --- PASS: Parent/Subtest (0.00s) status_re = re.compile(r'^\s*---\s+(PASS|FAIL|SKIP):\s+([^\r\n]*)') # Lines that indicate a new section or are not part of a wrapped test name breakers = re.compile( r'^\s*(' r'---\s+(PASS|FAIL|SKIP):' # another status line r'|===\s+(RUN|PAUSE|CONT)\b' # go test run/pause/cont markers r'|PASS\b' # package summary r'|Skipping\b' # package skip notices r'|Will skip\b' # skip list header r'|At line:' # PowerShell error header r'|Directory:' # PowerShell directory listing r'|Mode\s' # PowerShell file listing r'|time=' # log timestamps r')', re.IGNORECASE ) # Heuristic: continuation lines for wrapped test names should only contain # name-safe characters (letters/digits/underscore/slash/hyphen), optional spaces, # and optionally timing "(...s)". Exclude colons and backslashes to avoid PS noise. 
timing_re = re.compile(r'\(\s*[0-9.]+\s*s?\s*\)') def allowed_cont(line: str) -> bool: s = line.strip() if not s: return False # Allow timing parentheses, but otherwise reject colon (likely noise like "Directory:") if ':' in s and not timing_re.search(s): return False # Reject Windows-style backslashes if '\\' in s: return False # Only allow safe characters for names and timing if re.search(r'[^A-Za-z0-9_/\-\.\(\)\s]', s): return False return True lines = log.splitlines() i = 0 while i < len(lines): line = lines[i] m = status_re.match(line) if not m: i += 1 continue status, first_part = m.groups() frag = first_part # initial fragment of the test name (may be wrapped) # Gather wrapped continuation lines that belong to this status line j = i + 1 while j < len(lines): nxt = lines[j] # Stop on a new status/run/summary or obvious noise if status_re.match(nxt) or breakers.match(nxt): break if allowed_cont(nxt): cont = nxt.strip() # Merge with duplication fix: drop duplicated boundary character if frag and cont: if frag[-1] == cont[0]: cont = cont[1:] frag += cont j += 1 continue else: break # Clean up: remove any timing "(...)" and trailing spaces name = frag.strip() name = re.split(r'\s+\(', name)[0].strip() if name: results[name] = status_map.get(status, status.lower()) i = j # advance past any consumed continuation lines return results
[ "TestBreakInsert1", "TestProcessOptions/conflicting_option_ro/rw", "TestDecodeChanges/nothing", "TestDecodeChanges/both-but-wrong", "TestValidateSysctlBadSysctl", "TestParseIDMap", "TestMatchLabelFilters/Do_not_match_when_filter_value_not_listed_in_labels", "TestValidateSysctls", "TestProcessOptions...
[ "TestSparseWriter/sparse_both_ends@4", "TestParseSignalNameOrNumber", "TestNewWSLCommand", "Test_portsToString/exposed_port_range", "TestImageFormat_KindWithCompression/vhdx", "TestSparseWriter/sparse_middle@1048576", "TestParseSCPArgs", "Test_parseEnv/GoodNoValue", "TestSparseWriter/sparse_middle@1...
starryzhang/sweb.eval.win.containers_1776_podman-26870
tailscale/tailscale
14669
tailscale__tailscale-14669
Go
[ "14416" ]
de5683f7c61098337fe2825e2febe98b6809b291
diff --git a/cmd/tailscaled/tailscaled_windows.go b/cmd/tailscaled/tailscaled_windows.go index 786c5d8330939..7208e03dac9d6 100644 --- a/cmd/tailscaled/tailscaled_windows.go +++ b/cmd/tailscaled/tailscaled_windows.go @@ -55,6 +55,7 @@ import ( "tailscale.com/util/osdiag" "tailscale.com/util/syspolicy" "tailscale.com/util/winutil" + "tailscale.com/util/winutil/gp" "tailscale.com/version" "tailscale.com/wf" ) @@ -70,6 +71,22 @@ func init() { } } +// permitPolicyLocks is a function to be called to lift the restriction on acquiring +// [gp.PolicyLock]s once the service is running. +// It is safe to be called multiple times. +var permitPolicyLocks = func() {} + +func init() { + if isWindowsService() { + // We prevent [gp.PolicyLock]s from being acquired until the service enters the running state. + // Otherwise, if tailscaled starts due to a GPSI policy installing Tailscale, it may deadlock + // while waiting for the write counterpart of the GP lock to be released by Group Policy, + // which is itself waiting for the installation to complete and tailscaled to start. + // See tailscale/tailscale#14416 for more information. + permitPolicyLocks = gp.RestrictPolicyLocks() + } +} + const serviceName = "Tailscale" // Application-defined command codes between 128 and 255 @@ -109,13 +126,13 @@ func tstunNewWithWindowsRetries(logf logger.Logf, tunName string) (_ tun.Device, } } -func isWindowsService() bool { +var isWindowsService = sync.OnceValue(func() bool { v, err := svc.IsWindowsService() if err != nil { log.Fatalf("svc.IsWindowsService failed: %v", err) } return v -} +}) // syslogf is a logger function that writes to the Windows event log (ie, the // one that you see in the Windows Event Viewer). 
tailscaled may optionally @@ -180,6 +197,10 @@ func (service *ipnService) Execute(args []string, r <-chan svc.ChangeRequest, ch changes <- svc.Status{State: svc.Running, Accepts: svcAccepts} syslogf("Service running") + // It is safe to allow GP locks to be acquired now that the service + // is running. + permitPolicyLocks() + for { select { case <-doneCh: diff --git a/util/syspolicy/source/policy_store_windows.go b/util/syspolicy/source/policy_store_windows.go index 86e2254e0a381..621701e84f23c 100644 --- a/util/syspolicy/source/policy_store_windows.go +++ b/util/syspolicy/source/policy_store_windows.go @@ -12,6 +12,7 @@ import ( "golang.org/x/sys/windows" "golang.org/x/sys/windows/registry" "tailscale.com/util/set" + "tailscale.com/util/syspolicy/internal/loggerx" "tailscale.com/util/syspolicy/setting" "tailscale.com/util/winutil/gp" ) @@ -29,6 +30,18 @@ var ( _ Expirable = (*PlatformPolicyStore)(nil) ) +// lockableCloser is a [Lockable] that can also be closed. +// It is implemented by [gp.PolicyLock] and [optionalPolicyLock]. +type lockableCloser interface { + Lockable + Close() error +} + +var ( + _ lockableCloser = (*gp.PolicyLock)(nil) + _ lockableCloser = (*optionalPolicyLock)(nil) +) + // PlatformPolicyStore implements [Store] by providing read access to // Registry-based Tailscale policies, such as those configured via Group Policy or MDM. // For better performance and consistency, it is recommended to lock it when @@ -55,7 +68,7 @@ type PlatformPolicyStore struct { // they are being read. // // When both policyLock and mu need to be taken, mu must be taken before policyLock. - policyLock *gp.PolicyLock + policyLock lockableCloser mu sync.Mutex tsKeys []registry.Key // or nil if the [PlatformPolicyStore] hasn't been locked. 
@@ -108,7 +121,7 @@ func newPlatformPolicyStore(scope gp.Scope, softwareKey registry.Key, policyLock scope: scope, softwareKey: softwareKey, done: make(chan struct{}), - policyLock: policyLock, + policyLock: &optionalPolicyLock{PolicyLock: policyLock}, } } @@ -448,3 +461,68 @@ func tailscaleKeyNamesFor(scope gp.Scope) []string { panic("unreachable") } } + +type gpLockState int + +const ( + gpUnlocked = gpLockState(iota) + gpLocked + gpLockRestricted // the lock could not be acquired due to a restriction in place +) + +// optionalPolicyLock is a wrapper around [gp.PolicyLock] that locks +// and unlocks the underlying [gp.PolicyLock]. +// +// If the [gp.PolicyLock.Lock] returns [gp.ErrLockRestricted], the error is ignored, +// and calling [optionalPolicyLock.Unlock] is a no-op. +// +// The underlying GP lock is kinda optional: it is safe to read policy settings +// from the Registry without acquiring it, but it is recommended to lock it anyway +// when reading multiple policy settings to avoid potentially inconsistent results. +// +// It is not safe for concurrent use. +type optionalPolicyLock struct { + *gp.PolicyLock + state gpLockState +} + +// Lock acquires the underlying [gp.PolicyLock], returning an error on failure. +// If the lock cannot be acquired due to a restriction in place +// (e.g., attempting to acquire a lock while the service is starting), +// the lock is considered to be held, the method returns nil, and a subsequent +// call to [Unlock] is a no-op. +// It is a runtime error to call Lock when the lock is already held. +func (o *optionalPolicyLock) Lock() error { + if o.state != gpUnlocked { + panic("already locked") + } + switch err := o.PolicyLock.Lock(); err { + case nil: + o.state = gpLocked + return nil + case gp.ErrLockRestricted: + loggerx.Errorf("GP lock not acquired: %v", err) + o.state = gpLockRestricted + return nil + default: + return err + } +} + +// Unlock releases the underlying [gp.PolicyLock], if it was previously acquired. 
+// It is a runtime error to call Unlock when the lock is not held. +func (o *optionalPolicyLock) Unlock() { + switch o.state { + case gpLocked: + o.PolicyLock.Unlock() + case gpLockRestricted: + // The GP lock wasn't acquired due to a restriction in place + // when [optionalPolicyLock.Lock] was called. Unlock is a no-op. + case gpUnlocked: + panic("not locked") + default: + panic("unreachable") + } + + o.state = gpUnlocked +} diff --git a/util/winutil/gp/policylock_windows.go b/util/winutil/gp/policylock_windows.go index 95453aa16b110..69c5ff01697f4 100644 --- a/util/winutil/gp/policylock_windows.go +++ b/util/winutil/gp/policylock_windows.go @@ -48,10 +48,35 @@ type policyLockResult struct { } var ( - // ErrInvalidLockState is returned by (*PolicyLock).Lock if the lock has a zero value or has already been closed. + // ErrInvalidLockState is returned by [PolicyLock.Lock] if the lock has a zero value or has already been closed. ErrInvalidLockState = errors.New("the lock has not been created or has already been closed") + // ErrLockRestricted is returned by [PolicyLock.Lock] if the lock cannot be acquired due to a restriction in place, + // such as when [RestrictPolicyLocks] has been called. + ErrLockRestricted = errors.New("the lock cannot be acquired due to a restriction in place") ) +var policyLockRestricted atomic.Int32 + +// RestrictPolicyLocks forces all [PolicyLock.Lock] calls to return [ErrLockRestricted] +// until the returned function is called to remove the restriction. +// +// It is safe to call the returned function multiple times, but the restriction will only +// be removed once. If [RestrictPolicyLocks] is called multiple times, each call must be +// matched by a corresponding call to the returned function to fully remove the restrictions. +// +// It is primarily used to prevent certain deadlocks, such as when tailscaled attempts to acquire +// a policy lock during startup. 
If the service starts due to Tailscale being installed by GPSI, +// the write lock will be held by the Group Policy service throughout the installation, +// preventing tailscaled from acquiring the read lock. Since Group Policy waits for the installation +// to complete, and therefore for tailscaled to start, before releasing the write lock, this scenario +// would result in a deadlock. See tailscale/tailscale#14416 for more information. +func RestrictPolicyLocks() (removeRestriction func()) { + policyLockRestricted.Add(1) + return sync.OnceFunc(func() { + policyLockRestricted.Add(-1) + }) +} + // NewMachinePolicyLock creates a PolicyLock that facilitates pausing the // application of computer policy. To avoid deadlocks when acquiring both // machine and user locks, acquire the user lock before the machine lock. @@ -103,13 +128,18 @@ func NewUserPolicyLock(token windows.Token) (*PolicyLock, error) { } // Lock locks l. -// It returns ErrNotInitialized if l has a zero value or has already been closed, -// or an Errno if the underlying Group Policy lock cannot be acquired. +// It returns [ErrInvalidLockState] if l has a zero value or has already been closed, +// [ErrLockRestricted] if the lock cannot be acquired due to a restriction in place, +// or a [syscall.Errno] if the underlying Group Policy lock cannot be acquired. // -// As a special case, it fails with windows.ERROR_ACCESS_DENIED +// As a special case, it fails with [windows.ERROR_ACCESS_DENIED] // if l is a user policy lock, and the corresponding user is not logged in // interactively at the time of the call. func (l *PolicyLock) Lock() error { + if policyLockRestricted.Load() > 0 { + return ErrLockRestricted + } + l.mu.Lock() defer l.mu.Unlock() if l.lockCnt.Add(2)&1 == 0 {
diff --git a/tstest/integration/tailscaled_deps_test_windows.go b/tstest/integration/tailscaled_deps_test_windows.go index bbf46d8c21938..fa68f76e78a01 100644 --- a/tstest/integration/tailscaled_deps_test_windows.go +++ b/tstest/integration/tailscaled_deps_test_windows.go @@ -59,6 +59,7 @@ import ( _ "tailscale.com/util/osshare" _ "tailscale.com/util/syspolicy" _ "tailscale.com/util/winutil" + _ "tailscale.com/util/winutil/gp" _ "tailscale.com/version" _ "tailscale.com/version/distro" _ "tailscale.com/wf"
v1.78.1 Group Policy Software Install Fails ### What is the issue? When attempting to deploy the latest .MSI package v1.78.1 via group policy the deployment gets stuck and never installs. ### Steps to reproduce 1. Deploy the v1.78.1 amd64 .MSI package via group policy software installs • https://learn.microsoft.com/en-us/troubleshoot/windows-server/group-policy/use-group-policy-to-install-software 2. Optionally enable verbose startup • https://learn.microsoft.com/en-us/troubleshoot/windows-server/performance/enable-verbose-startup-shutdown-logon-logoff-status-messages 2. Reboot the machine and wait for software to install, machine should hang if you also have the issue. When looking in add/remove programs you will not see Tailscale v1.78.1 installed. 3. Deploying the v1.76.6 package via group policy still works ### Are there any recent changes that introduced the issue? Not sure what has changed in the recent .MSI releases. ### OS Windows ### OS version Windows 11 Enterprise ### Tailscale version 1.78.1 ### Other software Group policy deployment ### Bug report _No response_
[ "https://github.com/tailscale/tailscale/commit/7e2e789938812b70b31ce4680e38da108b9bd318" ]
2025-01-16T21:59:58Z
https://github.com/tailscale/tailscale/tree/de5683f7c61098337fe2825e2febe98b6809b291
[ "go mod download ; go install tailscale.com/cmd/tailscale tailscale.com/cmd/tailscaled" ]
[ "powershell -NoProfile -Command \"[Environment]::SetEnvironmentVariable('CI','true','Process'); New-Item -ItemType Directory -Force -Path reports | Out-Null; go test -json ./... > reports\\go-test-results.json\"" ]
[ "cat reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]: # Improved parser for go test -json output with robust fallbacks and test name sanitization. import re results: dict[str, str] = {} def sanitize_name(name: str) -> str: # Remove embedded newlines/carriage returns introduced by line wrapping. name = name.replace("\r", "").replace("\n", "") return name def clean_text_name(name: str) -> str: # Strip timing suffixes like " (0.01s)" and trim whitespace. name = re.sub(r'\s*\(\d+(?:\.\d+)?s\)\s*$', '', name) name = name.strip() return sanitize_name(name) # Primary: parse JSON-like objects and extract Action + Test. obj_re = re.compile(r'\{[^{}]*\}', re.DOTALL) action_re = re.compile(r'"Action"\s*:\s*"(pass|fail|skip)"', re.IGNORECASE) test_re = re.compile(r'"Test"\s*:\s*"(.*?)"', re.DOTALL) for m in obj_re.finditer(log): obj = m.group(0) am = action_re.search(obj) if not am: continue status = am.group(1).lower() tm = test_re.search(obj) if not tm: continue test_name = tm.group(1).strip() if test_name: test_name = sanitize_name(test_name) results[test_name] = status # Fallback/supplement: parse textual status lines anywhere in the log. status_map = {"PASS": "pass", "FAIL": "fail", "SKIP": "skip", "ERROR": "fail"} text_re = re.compile(r'---\s+(PASS|FAIL|SKIP|ERROR):\s+([^\r\n]+)') for m in text_re.finditer(log): status = status_map.get(m.group(1).upper(), "fail") name = clean_text_name(m.group(2)) if not name: continue if name not in results: results[name] = status return results
[ "TestPostRequestConten", "TestCheckForAccidentalSettingReverts/advertised__routes_exit_node_removed_explicit", "TestParseSSOutput (0", "TestVarzHandler/int_with_type_counter (0.00s)\\n\"}", "TestPreferencePolicyInfo/unforced_enabled/A", "TestPreferencePolicyInfo/unset_disabled/ExitNod", "TestUpdatePrefs...
[ "TestGetTailscaleBrowserSession/no-session-user-self-nod", "TestSynoArch/arm-synology_armadaxp_RS815", "TestIsLocalhost", "TestPolicyScopeUnmarshalSpecial/too-many-scopes (0.0", "TestInsertShuffl", "TestHandlePeerAPI/bad_method", "TestFilter/", "TestStdHandler/handler_returns_404_with_request_ID", "...
starryzhang/sweb.eval.win.tailscale_1776_tailscale-14669
gravitational/teleport
53067
gravitational__teleport-53067
Go
[ "11734", "43354", "54246" ]
142a3bc2c22f25b566f8b5ba57c5e46ee1936e02
diff --git a/CHANGELOG.md b/CHANGELOG.md index ccbf8898d1ef1..8e09a6dd196c5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,9 +7,10 @@ #### TLS Cipher Suites TLS cipher suites with known security issues can no longer be manually -configured in the Teleport YAML configuration file. -If you do not explicitly configure any of the listed TLS cipher suites, you are -not affected by this change. +configured in the Teleport YAML configuration file. If you do not explicitly +configure any of the listed TLS cipher suites, you are not affected by this +change. + Teleport 18 removes support for: - `tls-rsa-with-aes-128-cbc-sha` - `tls-rsa-with-aes-256-cbc-sha` @@ -74,9 +75,35 @@ Here's a plan example for the code above: #### AWS endpoint URL mode removed -The AWS endpoint URL mode (`--endpoint-url`) has been removed for -`tsh proxy aws` and `tsh aws`. Users using this mode should use the default -HTTPS Proxy mode from now on. +The AWS endpoint URL mode (`--endpoint-url`) has been removed for `tsh proxy +aws` and `tsh aws`. Users using this mode should use the default HTTPS Proxy +mode from now on. 
+ +### Other changes + +#### Windows desktop discovery enhancements + +Teleport's LDAP-based discovery mechanism for Windows desktops now supports: + +- a configurable discovery interval +- custom RDP ports +- the ability to run multiple separate discovery configurations, allowing you to + configure finely-grained discovery policies without running multiple agents + +To update your configuration, move the `discovery` section to `discovery_configs`: + +```diff +windows_desktop_service: + enabled: yes ++ discovery_interval: 10m # optional, defaults to 5 minutes +- discovery: +- base_dn: '*' +- label_attributes: [ department ] ++ discovery_configs: ++ - base_dn: '*' ++ label_attributes: [ department ] ++ rdp_port: 9989 # optional, defaults to 3389 +``` ## 16.0.0 (xx/xx/xx) diff --git a/docs/pages/enroll-resources/desktop-access/active-directory.mdx b/docs/pages/enroll-resources/desktop-access/active-directory.mdx index 0d89017dd6e92..5a864d02db54a 100644 --- a/docs/pages/enroll-resources/desktop-access/active-directory.mdx +++ b/docs/pages/enroll-resources/desktop-access/active-directory.mdx @@ -607,8 +607,8 @@ To configure Teleport to protect access to Windows desktops: sid: "$LDAP_USER_SID" # Path to the certificate you exported. der_ca_file: "path/to/exported/cert" - discovery: - base_dn: "*" + discovery_configs: + - base_dn: "*" auth_service: enabled: false proxy_service: @@ -681,16 +681,22 @@ To connect to a Windows desktop: ## LDAP Discovery In Active Directory environments, Teleport can be configured to discover hosts -via LDAP. LDAP discovery is enabled by setting the `discovery.base_dn` field in -the Teleport Windows Desktop Service configuration. You can set `base_dn` to a -wildcard `'*` to search from the root of the domain, or you can specify an alternate -base distinguished name to search from. +via LDAP. 
LDAP discovery is enabled by adding one or more discovery configs to +the `discovery_configs` field in the Teleport Windows Desktop Service +configuration. You can set `base_dn` to a wildcard `'*` to search from the root +of the domain, or you can specify an alternate base distinguished name to search +from. + +By default, all hosts discovered via LDAP will be registered with a port of 3389. +You can override the RDP port for each discovery configuration. ```yaml windows_desktop_service: enabled: true - discovery: - base_dn: '*' + discovery_configs: + - base_dn: 'OU=prod,DC=example,DC=com' + - base_dn: 'OU=staging,DC=example,DC=com' + rdp_port: 9989 ``` ### Filters @@ -703,28 +709,27 @@ compute a logical AND of all filters specified. ```yaml windows_desktop_service: enabled: true - discovery: - base_dn: '*' - filters: - - '(location=Oakland)' - - '(!(primaryGroupID=516))' + discovery_configs: + - base_dn: '*' + filters: + - '(location=Oakland)' + - '(!(primaryGroupID=516))' ``` ### Labeling It is often necessary to apply labels to discovered hosts to control access. You can configure Teleport to convert LDAP attributes to Teleport labels by setting -the `discovery.label_attributes` field in the Teleport Windows Desktop Service -configuration. +the `label_attributes` field in the discovery configuration. ```yaml windows_desktop_service: enabled: true - discovery: - base_dn: '*' - label_attributes: - - 'location' - - 'department' + discovery_configs: + - base_dn: '*' + label_attributes: + - 'location' + - 'department' ``` Each of the specified attributes will be placed in a Teleport label prefixed with `ldap/`. 
diff --git a/docs/pages/includes/config-reference/desktop-config.yaml b/docs/pages/includes/config-reference/desktop-config.yaml index e98377e0caa26..236e43b6b144d 100644 --- a/docs/pages/includes/config-reference/desktop-config.yaml +++ b/docs/pages/includes/config-reference/desktop-config.yaml @@ -12,19 +12,17 @@ windows_desktop_service: # set this to false (its default value). show_desktop_wallpaper: false # (optional) ldap contains configuration keys used when connecting Teleport - # to an Active Directory domain. This enables the discovery service for - # Windows desktops belonging to an Active Directory domain configured for - # Teleport access. + # to an Active Directory domain. ldap: # Address of the LDAP server for secure LDAP connections. # Usually, this address will use port 636, like: ldap.example.com:636. # For best results, this address should point to a highly-available # endpoint rather than a single domain controller. - addr: '$LDAP_SERVER_ADDRESS' + addr: "$LDAP_SERVER_ADDRESS" # Optional: the server name to use when validating the LDAP server's # certificate. Useful in cases where addr is an IP but the server # presents a cert with some other hostname. - server_name: '$LDAP_SERVER_NAME' + server_name: "$LDAP_SERVER_NAME" # You can skip LDAPS certificate verification by setting # this to true. It is recommended that this be set to false # and the certificate added your system's trusted repository, @@ -40,7 +38,7 @@ windows_desktop_service: # deprecated: prefer ldap_ca_cert instead der_ca_file: /path/to/cert # Active Directory domain name you are connecting to. - domain: '$LDAP_DOMAIN_NAME' + domain: "$LDAP_DOMAIN_NAME" # LDAP username for authentication. This username must include the domain # NetBIOS name. The use of single quotes here is intentional in order to # avoid the need to escape the backslash (\) character. 
@@ -48,7 +46,7 @@ windows_desktop_service: # For example, if your domain is "example.com", the NetBIOS name for it is # likely "EXAMPLE". When connecting as the "svc-teleport" user, you should # use the format: "EXAMPLE\svc-teleport". - username: '$LDAP_USERNAME' + username: "$LDAP_USERNAME" # The security identifier of the service account specified by the username # field above. This looks like a string starting with "S-". # @@ -61,7 +59,7 @@ windows_desktop_service: # The value can be obtained over LDAP by constructing a query with the # filter = (&(objectCategory=person)(objectClass=user)(sAMAccountName=$LDAP_USERNAME)) # and requesting the attribute = objectSid - sid: '$LDAP_USER_SID' + sid: "$LDAP_USER_SID" # (optional) When AD support is used, this field allows you to override # the domain that Teleport uses for PKI operations. If empty, the domain @@ -79,57 +77,61 @@ windows_desktop_service: # # Note: NLA is only supported in Active Directory environments - this field has # no effect when connecting to desktops as local Windows users. - kdc_address: '$KDC_SERVER_ADDRESS' + kdc_address: "$KDC_SERVER_ADDRESS" # (optional) static_hosts is a list of hosts to register as WindowsDesktop # objects in Teleport. You can define host name and labels directly. static_hosts: - - name: example1 - ad: false - addr: win1.dev.example.com - labels: - datacenter: dc1 - - ad: true - addr: win2.dev.example.com # name will be generated based on address - labels: - controller: all + - name: example1 + ad: false + addr: win1.dev.example.com + labels: + datacenter: dc1 + - ad: true + addr: win2.dev.example.com # name will be generated based on address + labels: + controller: all - # (optional) settings for enabling automatic desktop discovery via LDAP - discovery: - # The wildcard '*' character tells Teleport to discover all the hosts in - # the Active Directory Domain. To refine the search, specify a custom DN. - # To disable automatic discovery, leave this field blank. 
- base_dn: '*' - # (optional) LDAP filters for further customizing the LDAP search. - # See https://ldap.com/ldap-filters for details on LDAP filter syntax. - filters: - - '(location=Oakland)' - - '(!(primaryGroupID=516))' # exclude domain controllers - # (optional) LDAP attributes to convert into Teleport labels. - # The key of the label will be "ldap/" + the value of the attribute. - label_attributes: - - location + # (optional) settings for enabling automatic desktop discovery via LDAP. + # To disable automatic discovery, omit this section. + discovery_configs: + - base_dn: "OU=prod,DC=example,DC=com" + # (optional) LDAP filters for further customizing the LDAP search. + # See https://ldap.com/ldap-filters for details on LDAP filter syntax. + filters: + - "(location=Oakland)" + - "(!(primaryGroupID=516))" # exclude domain controllers + # (optional) LDAP attributes to convert into Teleport labels. + # The key of the label will be "ldap/" + the value of the attribute. + label_attributes: + - location + # (optional) The port to use for RDP. + # Defaults to 3389 if unspecified. + rdp_port: 3389 + + # (optional) interval at which to run desktop discovery + discovery_interval: 10m # (optional) configure a set of label selectors for dynamic registration. # If specified, this service will monitor the cluster for dynamic_windows_desktop # and automatically proxy connections for desktops with matching labels. resources: - - labels: - "env": "dev" + - labels: + "env": "dev" # (optional) rules for applying labels to Windows hosts based on regular expressions # matched against the host name. If multiple rules match, the desktop will # get the union of all matching labels. 
host_labels: - - match: '^.*\.dev\.example\.com' - labels: - environment: dev - - match: '^.*\.prod\.example\.com' - labels: - environment: prod - - match: '^EC2AMAZ-' - labels: - environment: discovered-in-aws + - match: '^.*\.dev\.example\.com' + labels: + environment: dev + - match: '^.*\.prod\.example\.com' + labels: + environment: prod + - match: "^EC2AMAZ-" + labels: + environment: discovered-in-aws # Labels to attach to the Windows Desktop Service. This is used internally, so # any custom labels added won't affect the Windows hosts. diff --git a/integration/helpers/internal.go b/integration/helpers/internal.go index d3194b495bb15..4ebba6e23baf7 100644 --- a/integration/helpers/internal.go +++ b/integration/helpers/internal.go @@ -67,15 +67,6 @@ func StartAndWait(process *service.TeleportProcess, expectedEvents []string) ([] } func EnableDesktopService(config *servicecfg.Config) { - // This config won't actually work, because there is no LDAP server, - // but it's enough to force desktop service to run. config.WindowsDesktop.Enabled = true config.WindowsDesktop.ListenAddr = *utils.MustParseAddr("127.0.0.1:0") - config.WindowsDesktop.Discovery.BaseDN = "" - config.WindowsDesktop.LDAP = servicecfg.LDAPConfig{ - Domain: "example.com", - Addr: "127.0.0.1:636", - Username: "test", - InsecureSkipVerify: true, - } } diff --git a/lib/auth/windows/ldap.go b/lib/auth/windows/ldap.go index 37e1c2510d357..05e49c9e6d2b7 100644 --- a/lib/auth/windows/ldap.go +++ b/lib/auth/windows/ldap.go @@ -65,7 +65,7 @@ func (cfg LDAPConfig) Check() error { return nil } -// DomainDN returns the distinguished name for the domain. +// DomainDN returns the distinguished name for an Active Directory Domain. 
func DomainDN(domain string) string { var sb strings.Builder parts := strings.Split(domain, ".") diff --git a/lib/config/configuration.go b/lib/config/configuration.go index 6aeb06d2f3eeb..0ab57fe4b328d 100644 --- a/lib/config/configuration.go +++ b/lib/config/configuration.go @@ -23,6 +23,7 @@ package config import ( + "cmp" "context" "crypto/x509" "errors" @@ -2057,22 +2058,53 @@ func applyWindowsDesktopConfig(fc *FileConfig, cfg *servicecfg.Config) error { cfg.WindowsDesktop.ListenAddr = *listenAddr } + for _, attributeName := range fc.WindowsDesktop.Discovery.LabelAttributes { + if !types.IsValidLabelKey(attributeName) { + return trace.BadParameter("WindowsDesktopService specifies label_attribute %q which is not a valid label key", attributeName) + } + } + for _, filter := range fc.WindowsDesktop.Discovery.Filters { if _, err := ldap.CompileFilter(filter); err != nil { return trace.BadParameter("WindowsDesktopService specifies invalid LDAP filter %q", filter) } } - for _, attributeName := range fc.WindowsDesktop.Discovery.LabelAttributes { - if !types.IsValidLabelKey(attributeName) { - return trace.BadParameter("WindowsDesktopService specifies label_attribute %q which is not a valid label key", attributeName) + if fc.WindowsDesktop.Discovery.BaseDN != "" && len(fc.WindowsDesktop.DiscoveryConfigs) > 0 { + return trace.BadParameter("WindowsDesktopService specifies both discovery and discovery_configs: move the discovery section to discovery_configs to continue") + } + + for _, discoveryConfig := range fc.WindowsDesktop.DiscoveryConfigs { + for _, filter := range discoveryConfig.Filters { + if _, err := ldap.CompileFilter(filter); err != nil { + return trace.BadParameter("WindowsDesktopService specifies invalid LDAP filter %q", filter) + } } } - cfg.WindowsDesktop.Discovery = servicecfg.LDAPDiscoveryConfig{ - BaseDN: fc.WindowsDesktop.Discovery.BaseDN, - Filters: fc.WindowsDesktop.Discovery.Filters, - LabelAttributes: fc.WindowsDesktop.Discovery.LabelAttributes, + 
// append the old (singular) discovery config to the new format that supports multiple configs + if fc.WindowsDesktop.Discovery.BaseDN != "" { + fc.WindowsDesktop.DiscoveryConfigs = append(fc.WindowsDesktop.DiscoveryConfigs, fc.WindowsDesktop.Discovery) + } + + cfg.WindowsDesktop.Discovery = make([]servicecfg.LDAPDiscoveryConfig, 0, len(fc.WindowsDesktop.DiscoveryConfigs)) + for _, dc := range fc.WindowsDesktop.DiscoveryConfigs { + if dc.BaseDN == "" { + return trace.BadParameter("WindowsDesktopService discovey_config is missing required base_dn") + } + cfg.WindowsDesktop.Discovery = append(cfg.WindowsDesktop.Discovery, + servicecfg.LDAPDiscoveryConfig{ + BaseDN: dc.BaseDN, + Filters: dc.Filters, + LabelAttributes: dc.LabelAttributes, + RDPPort: cmp.Or(dc.RDPPort, int(defaults.RDPListenPort)), + }, + ) + } + + cfg.WindowsDesktop.DiscoveryInterval = fc.WindowsDesktop.DiscoveryInterval + if cfg.WindowsDesktop.DiscoveryInterval < 0 { + return trace.BadParameter("desktop discovery interval must not be negative (%v)", fc.WindowsDesktop.DiscoveryInterval.String()) } var err error diff --git a/lib/config/fileconf.go b/lib/config/fileconf.go index 546a98f41bbbc..e817741820916 100644 --- a/lib/config/fileconf.go +++ b/lib/config/fileconf.go @@ -2490,7 +2490,12 @@ type WindowsDesktopService struct { // no effect when connecting to desktops as local Windows users. KDCAddress string `yaml:"kdc_address"` // Discovery configures desktop discovery via LDAP. + // New usages should prever DiscoveryConfigs instead, which allows for multiple searches. Discovery LDAPDiscoveryConfig `yaml:"discovery,omitempty"` + // DiscoveryConfigs configures desktop discovery via LDAP. + DiscoveryConfigs []LDAPDiscoveryConfig `yaml:"discovery_configs,omitempty"` + // DiscoveryInterval controls how frequently the discovery process runs. + DiscoveryInterval time.Duration `yaml:"discovery_interval"` // ADHosts is a list of static, AD-connected Windows hosts. 
This gives users // a way to specify AD-connected hosts that won't be found by the filters // specified in `discovery` (or if `discovery` is omitted). @@ -2590,6 +2595,9 @@ type LDAPDiscoveryConfig struct { // discovered desktops having a label with key "ldap/location" and // the value being the value of the "location" attribute. LabelAttributes []string `yaml:"label_attributes"` + // RDPPort is the port to use for RDP for hosts discovered with this configuration. + // Optional, defaults to 3389 if unspecified. + RDPPort int `yaml:"rdp_port"` } // TracingService contains configuration for the tracing_service. diff --git a/lib/service/desktop.go b/lib/service/desktop.go index 789f50330eb3c..444d7892b532f 100644 --- a/lib/service/desktop.go +++ b/lib/service/desktop.go @@ -228,16 +228,15 @@ func (process *TeleportProcess) initWindowsDesktopServiceRegistered(logger *slog StaticHosts: cfg.WindowsDesktop.StaticHosts, OnHeartbeat: process.OnHeartbeat(teleport.ComponentWindowsDesktop), }, - ShowDesktopWallpaper: cfg.WindowsDesktop.ShowDesktopWallpaper, - LDAPConfig: windows.LDAPConfig(cfg.WindowsDesktop.LDAP), - KDCAddr: cfg.WindowsDesktop.KDCAddr, - PKIDomain: cfg.WindowsDesktop.PKIDomain, - DiscoveryBaseDN: cfg.WindowsDesktop.Discovery.BaseDN, - DiscoveryLDAPFilters: cfg.WindowsDesktop.Discovery.Filters, - DiscoveryLDAPAttributeLabels: cfg.WindowsDesktop.Discovery.LabelAttributes, - Hostname: cfg.Hostname, - ConnectedProxyGetter: proxyGetter, - ResourceMatchers: cfg.WindowsDesktop.ResourceMatchers, + ShowDesktopWallpaper: cfg.WindowsDesktop.ShowDesktopWallpaper, + LDAPConfig: windows.LDAPConfig(cfg.WindowsDesktop.LDAP), + KDCAddr: cfg.WindowsDesktop.KDCAddr, + PKIDomain: cfg.WindowsDesktop.PKIDomain, + Discovery: cfg.WindowsDesktop.Discovery, + DiscoveryInterval: cfg.WindowsDesktop.DiscoveryInterval, + Hostname: cfg.Hostname, + ConnectedProxyGetter: proxyGetter, + ResourceMatchers: cfg.WindowsDesktop.ResourceMatchers, }) if err != nil { return trace.Wrap(err) diff --git 
a/lib/service/servicecfg/windows.go b/lib/service/servicecfg/windows.go index 988dc16b3e466..d1b5d5b990d02 100644 --- a/lib/service/servicecfg/windows.go +++ b/lib/service/servicecfg/windows.go @@ -22,6 +22,7 @@ import ( "crypto/x509" "maps" "regexp" + "time" "github.com/gravitational/teleport/lib/limiter" "github.com/gravitational/teleport/lib/services" @@ -55,7 +56,8 @@ type WindowsDesktopConfig struct { KDCAddr string // Discovery configures automatic desktop discovery via LDAP. - Discovery LDAPDiscoveryConfig + Discovery []LDAPDiscoveryConfig + DiscoveryInterval time.Duration // StaticHosts is an optional list of static Windows hosts to expose through this // service. @@ -97,6 +99,8 @@ type LDAPDiscoveryConfig struct { // discovered desktops having a label with key "ldap/location" and // the value being the value of the "location" attribute. LabelAttributes []string + // RDPPort is the RDP port to register for each host discovered with this configuration. + RDPPort int } // HostLabelRules is a collection of rules describing how to apply labels to hosts. 
diff --git a/lib/srv/desktop/discovery.go b/lib/srv/desktop/discovery.go index ee50a8de2aad6..f90912c9ef9cc 100644 --- a/lib/srv/desktop/discovery.go +++ b/lib/srv/desktop/discovery.go @@ -36,7 +36,7 @@ import ( apidefaults "github.com/gravitational/teleport/api/defaults" "github.com/gravitational/teleport/api/types" "github.com/gravitational/teleport/lib/auth/windows" - "github.com/gravitational/teleport/lib/defaults" + "github.com/gravitational/teleport/lib/service/servicecfg" "github.com/gravitational/teleport/lib/services" "github.com/gravitational/teleport/lib/services/readonly" "github.com/gravitational/teleport/lib/utils" @@ -121,9 +121,7 @@ func (s *WindowsService) startDesktopDiscovery() error { s.cfg.Logger.ErrorContext(s.closeCtx, "desktop reconciliation failed", "error", err) } - // TODO(zmb3): consider making the discovery period configurable - // (it's currently hard coded to 5 minutes in order to match DB access discovery behavior) - t := s.cfg.Clock.NewTicker(5 * time.Minute) + t := s.cfg.Clock.NewTicker(s.cfg.DiscoveryInterval) defer t.Stop() for { select { @@ -140,12 +138,12 @@ func (s *WindowsService) startDesktopDiscovery() error { return nil } -func (s *WindowsService) ldapSearchFilter() string { - var filters []string - filters = append(filters, fmt.Sprintf("(%s=%s)", windows.AttrObjectClass, ClassComputer)) - filters = append(filters, fmt.Sprintf("(!(%s=%s))", windows.AttrObjectClass, ClassGMSA)) - filters = append(filters, s.cfg.DiscoveryLDAPFilters...) - +func (s *WindowsService) ldapSearchFilter(additionalFilters []string) string { + var filters = []string{ + fmt.Sprintf("(%s=%s)", windows.AttrObjectClass, ClassComputer), + fmt.Sprintf("(!(%s=%s))", windows.AttrObjectClass, ClassGMSA), + } + filters = append(filters, additionalFilters...) 
return windows.CombineLDAPFilters(filters) } @@ -154,46 +152,48 @@ func (s *WindowsService) getDesktopsFromLDAP() map[string]types.WindowsDesktop { // Check whether we've ever successfully initialized our LDAP client. s.mu.Lock() if !s.ldapInitialized { - s.cfg.Logger.DebugContext(context.Background(), "LDAP not ready, skipping discovery and attempting to reconnect") + s.cfg.Logger.DebugContext(s.closeCtx, "LDAP not ready, skipping discovery and attempting to reconnect") s.mu.Unlock() s.initializeLDAP() return nil } s.mu.Unlock() - filter := s.ldapSearchFilter() - s.cfg.Logger.DebugContext(context.Background(), "searching for desktops", "filter", filter) - - var attrs []string - attrs = append(attrs, computerAttributes...) - attrs = append(attrs, s.cfg.DiscoveryLDAPAttributeLabels...) - - entries, err := s.lc.ReadWithFilter(s.cfg.DiscoveryBaseDN, filter, attrs) - if trace.IsConnectionProblem(err) { - // If the connection was broken, re-initialize the LDAP client so that it's - // ready for the next reconcile loop. Return the last known set of desktops - // in this case, so that the reconciler doesn't delete the desktops it already - // knows about. - s.cfg.Logger.InfoContext(context.Background(), "LDAP connection error when searching for desktops, reinitializing client") - if err := s.initializeLDAP(); err != nil { - s.cfg.Logger.ErrorContext(context.Background(), "failed to reinitialize LDAP client, will retry on next reconcile", "error", err) + result := make(map[string]types.WindowsDesktop) + for _, discoveryConfig := range s.cfg.Discovery { + filter := s.ldapSearchFilter(discoveryConfig.Filters) + s.cfg.Logger.DebugContext(s.closeCtx, "searching for desktops", "filter", filter) + + var attrs []string + attrs = append(attrs, computerAttributes...) + attrs = append(attrs, discoveryConfig.LabelAttributes...) 
+ + entries, err := s.lc.ReadWithFilter(discoveryConfig.BaseDN, filter, attrs) + if trace.IsConnectionProblem(err) { + // If the connection was broken, re-initialize the LDAP client so that it's + // ready for the next reconcile loop. Return the last known set of desktops + // in this case, so that the reconciler doesn't delete the desktops it already + // knows about. + s.cfg.Logger.InfoContext(s.closeCtx, "LDAP connection error when searching for desktops, reinitializing client") + if err := s.initializeLDAP(); err != nil { + s.cfg.Logger.ErrorContext(s.closeCtx, "failed to reinitialize LDAP client, will retry on next reconcile", "error", err) + } + return s.lastDiscoveryResults + } else if err != nil { + s.cfg.Logger.WarnContext(s.closeCtx, "could not discover Windows Desktops", "error", err) + return nil } - return s.lastDiscoveryResults - } else if err != nil { - s.cfg.Logger.WarnContext(context.Background(), "could not discover Windows Desktops", "error", err) - return nil - } - s.cfg.Logger.DebugContext(context.Background(), "discovered Windows Desktops", "count", len(entries)) + s.cfg.Logger.DebugContext(s.closeCtx, "discovered Windows Desktops", "count", len(entries)) - result := make(map[string]types.WindowsDesktop) - for _, entry := range entries { - desktop, err := s.ldapEntryToWindowsDesktop(s.closeCtx, entry, s.cfg.HostLabelsFn) - if err != nil { - s.cfg.Logger.WarnContext(s.closeCtx, "could not create Windows Desktop from LDAP entry", "error", err) - continue + for _, entry := range entries { + desktop, err := s.ldapEntryToWindowsDesktop(s.closeCtx, entry, s.cfg.HostLabelsFn, &discoveryConfig) + if err != nil { + s.cfg.Logger.WarnContext(s.closeCtx, "could not create Windows Desktop from LDAP entry", "error", err) + continue + } + result[desktop.GetName()] = desktop } - result[desktop.GetName()] = desktop } // capture the result, which will be used on the next reconcile loop @@ -214,7 +214,7 @@ func (s *WindowsService) deleteDesktop(ctx 
context.Context, d types.WindowsDeskt return s.cfg.AuthClient.DeleteWindowsDesktop(ctx, d.GetHostID(), d.GetName()) } -func (s *WindowsService) applyLabelsFromLDAP(entry *ldap.Entry, labels map[string]string) { +func (s *WindowsService) applyLabelsFromLDAP(entry *ldap.Entry, labels map[string]string, cfg *servicecfg.LDAPDiscoveryConfig) { // apply common LDAP labels by default labels[types.OriginLabel] = types.OriginDynamic labels[types.DiscoveryLabelWindowsDNSHostName] = entry.GetAttributeValue(attrDNSHostName) @@ -237,7 +237,7 @@ func (s *WindowsService) applyLabelsFromLDAP(entry *ldap.Entry, labels map[strin } // apply any custom labels per the discovery configuration - for _, attr := range s.cfg.DiscoveryLDAPAttributeLabels { + for _, attr := range cfg.LabelAttributes { if v := entry.GetAttributeValue(attr); v != "" { labels[types.DiscoveryLabelLDAPPrefix+attr] = v } @@ -311,6 +311,7 @@ func (s *WindowsService) ldapEntryToWindowsDesktop( ctx context.Context, entry *ldap.Entry, getHostLabels func(string) map[string]string, + cfg *servicecfg.LDAPDiscoveryConfig, ) (types.WindowsDesktop, error) { hostname := entry.GetAttributeValue(attrDNSHostName) if hostname == "" { @@ -323,7 +324,7 @@ func (s *WindowsService) ldapEntryToWindowsDesktop( } labels := getHostLabels(hostname) labels[types.DiscoveryLabelWindowsDomain] = s.cfg.Domain - s.applyLabelsFromLDAP(entry, labels) + s.applyLabelsFromLDAP(entry, labels, cfg) if os, ok := labels[types.DiscoveryLabelWindowsOS]; ok && strings.Contains(os, "linux") { return nil, trace.BadParameter("LDAP entry looks like a Linux host") @@ -335,7 +336,7 @@ func (s *WindowsService) ldapEntryToWindowsDesktop( } s.cfg.Logger.DebugContext(ctx, "resolved desktop host", "hostname", hostname, "addrs", addrs) - addr, err := utils.ParseHostPortAddr(addrs[0], defaults.RDPListenPort) + addr, err := utils.ParseHostPortAddr(addrs[0], cfg.RDPPort) if err != nil { return nil, trace.Wrap(err) } diff --git a/lib/srv/desktop/windows_server.go 
b/lib/srv/desktop/windows_server.go index 767c9595106a7..630b0fd6194f2 100644 --- a/lib/srv/desktop/windows_server.go +++ b/lib/srv/desktop/windows_server.go @@ -202,16 +202,10 @@ type WindowsServiceConfig struct { // If empty LDAP address will be used. // Used for NLA support when AD is true. KDCAddr string - // DiscoveryBaseDN is the base DN for searching for Windows Desktops. - // Desktop discovery is disabled if this field is empty. - DiscoveryBaseDN string - // DiscoveryLDAPFilters are additional LDAP filters for searching for - // Windows Desktops. If multiple filters are specified, they are ANDed - // together into a single search. - DiscoveryLDAPFilters []string - // DiscoveryLDAPAttributeLabels are optional LDAP attributes to convert - // into Teleport labels. - DiscoveryLDAPAttributeLabels []string + // Discovery contains policies for configuring LDAP-based discovery. + Discovery []servicecfg.LDAPDiscoveryConfig + // DiscoveryInterval configures how frequently the discovery process runs. + DiscoveryInterval time.Duration // Hostname of the Windows desktop service Hostname string // ConnectedProxyGetter gets the proxies teleport is connected to. 
@@ -235,21 +229,25 @@ type HeartbeatConfig struct { } func (cfg *WindowsServiceConfig) checkAndSetDiscoveryDefaults() error { - switch { - case cfg.DiscoveryBaseDN == types.Wildcard: - cfg.DiscoveryBaseDN = windows.DomainDN(cfg.Domain) - case len(cfg.DiscoveryBaseDN) > 0: - if _, err := ldap.ParseDN(cfg.DiscoveryBaseDN); err != nil { - return trace.BadParameter("WindowsServiceConfig contains an invalid base_dn: %v", err) + for i := range cfg.Discovery { + switch { + case cfg.Discovery[i].BaseDN == types.Wildcard: + cfg.Discovery[i].BaseDN = windows.DomainDN(cfg.Domain) + case len(cfg.Discovery[i].BaseDN) > 0: + if _, err := ldap.ParseDN(cfg.Discovery[i].BaseDN); err != nil { + return trace.BadParameter("WindowsServiceConfig contains an invalid base_dn %q: %v", cfg.Discovery[i].BaseDN, err) + } } - } - for _, filter := range cfg.DiscoveryLDAPFilters { - if _, err := ldap.CompileFilter(filter); err != nil { - return trace.BadParameter("WindowsServiceConfig contains an invalid LDAP filter %q: %v", filter, err) + for _, filter := range cfg.Discovery[i].Filters { + if _, err := ldap.CompileFilter(filter); err != nil { + return trace.BadParameter("WindowsServiceConfig contains an invalid LDAP filter %q: %v", filter, err) + } } } + cfg.DiscoveryInterval = cmp.Or(cfg.DiscoveryInterval, 5*time.Minute) + return nil } @@ -416,7 +414,7 @@ func NewWindowsService(cfg WindowsServiceConfig) (*WindowsService, error) { return nil, trace.Wrap(err) } - if len(s.cfg.DiscoveryBaseDN) > 0 { + if len(s.cfg.Discovery) > 0 { if err := s.startDesktopDiscovery(); err != nil { return nil, trace.Wrap(err) } @@ -429,7 +427,7 @@ func NewWindowsService(cfg WindowsServiceConfig) (*WindowsService, error) { // if LDAP-based discovery is not enabled, but we have configured LDAP // then it's important that we periodically try to use the LDAP connection // to detect connection closure - if s.ldapConfigured && len(s.cfg.DiscoveryBaseDN) == 0 { + if s.ldapConfigured && len(s.cfg.Discovery) == 0 { 
s.startLDAPConnectionCheck(ctx) }
diff --git a/lib/config/configuration_test.go b/lib/config/configuration_test.go index 4811d7b30be07..f3eb278ebf763 100644 --- a/lib/config/configuration_test.go +++ b/lib/config/configuration_test.go @@ -2357,6 +2357,22 @@ func TestWindowsDesktopService(t *testing.T) { } }, }, + { + desc: "NOK - legacy discovery and new discovery_configs both specified", + expectError: require.Error, + mutate: func(fc *FileConfig) { + fc.WindowsDesktop.Discovery = LDAPDiscoveryConfig{ + BaseDN: "*", + } + fc.WindowsDesktop.DiscoveryConfigs = []LDAPDiscoveryConfig{ + {BaseDN: "OU=stage,DC=example,DC=com"}, + {BaseDN: "OU=prod,DC=example,DC=com"}, + } + fc.WindowsDesktop.LDAP = LDAPConfig{ + Addr: "something", + } + }, + }, { desc: "OK - valid config", expectError: require.NoError, diff --git a/lib/srv/desktop/discovery_test.go b/lib/srv/desktop/discovery_test.go index dfbc98e107b43..ee4977ca0ea18 100644 --- a/lib/srv/desktop/discovery_test.go +++ b/lib/srv/desktop/discovery_test.go @@ -35,6 +35,7 @@ import ( "github.com/gravitational/teleport/api/types" "github.com/gravitational/teleport/lib/auth" + "github.com/gravitational/teleport/lib/service/servicecfg" "github.com/gravitational/teleport/lib/services" logutils "github.com/gravitational/teleport/lib/utils/log" ) @@ -63,13 +64,8 @@ func TestDiscoveryLDAPFilter(t *testing.T) { }, } { t.Run(test.desc, func(t *testing.T) { - s := &WindowsService{ - cfg: WindowsServiceConfig{ - DiscoveryLDAPFilters: test.filters, - }, - } - - filter := s.ldapSearchFilter() + s := new(WindowsService) + filter := s.ldapSearchFilter(test.filters) _, err := ldap.CompileFilter(filter) test.assert(t, err) }) @@ -89,12 +85,11 @@ func TestAppliesLDAPLabels(t *testing.T) { "quux": {""}, }) - s := &WindowsService{ - cfg: WindowsServiceConfig{ - DiscoveryLDAPAttributeLabels: []string{"bar"}, - }, - } - s.applyLabelsFromLDAP(entry, l) + s := new(WindowsService) + s.applyLabelsFromLDAP(entry, l, &servicecfg.LDAPDiscoveryConfig{ + BaseDN: "*", + LabelAttributes: 
[]string{"bar"}, + }) // check default labels require.Equal(t, types.OriginDynamic, l[types.OriginLabel]) @@ -142,7 +137,7 @@ func TestLabelsDomainControllers(t *testing.T) { } { t.Run(test.desc, func(t *testing.T) { l := make(map[string]string) - s.applyLabelsFromLDAP(test.entry, l) + s.applyLabelsFromLDAP(test.entry, l, new(servicecfg.LDAPDiscoveryConfig)) b, _ := strconv.ParseBool(l[types.DiscoveryLabelWindowsIsDomainController]) test.assert(t, b) diff --git a/lib/srv/desktop/windows_server_test.go b/lib/srv/desktop/windows_server_test.go index a27b1d0bcb42a..e535f06d52fde 100644 --- a/lib/srv/desktop/windows_server_test.go +++ b/lib/srv/desktop/windows_server_test.go @@ -38,6 +38,7 @@ import ( libevents "github.com/gravitational/teleport/lib/events" "github.com/gravitational/teleport/lib/events/eventstest" "github.com/gravitational/teleport/lib/modules" + "github.com/gravitational/teleport/lib/service/servicecfg" "github.com/gravitational/teleport/lib/srv/desktop/tdp" logutils "github.com/gravitational/teleport/lib/utils/log" ) @@ -49,13 +50,17 @@ func TestMain(m *testing.M) { func TestConfigWildcardBaseDN(t *testing.T) { cfg := &WindowsServiceConfig{ - DiscoveryBaseDN: "*", + Discovery: []servicecfg.LDAPDiscoveryConfig{ + { + BaseDN: "*", + }, + }, LDAPConfig: windows.LDAPConfig{ Domain: "test.goteleport.com", }, } require.NoError(t, cfg.checkAndSetDiscoveryDefaults()) - require.Equal(t, "DC=test,DC=goteleport,DC=com", cfg.DiscoveryBaseDN) + require.Equal(t, "DC=test,DC=goteleport,DC=com", cfg.Discovery[0].BaseDN) } func TestConfigDesktopDiscovery(t *testing.T) { @@ -95,8 +100,12 @@ func TestConfigDesktopDiscovery(t *testing.T) { } { t.Run(test.desc, func(t *testing.T) { cfg := &WindowsServiceConfig{ - DiscoveryBaseDN: test.baseDN, - DiscoveryLDAPFilters: test.filters, + Discovery: []servicecfg.LDAPDiscoveryConfig{ + { + BaseDN: test.baseDN, + Filters: test.filters, + }, + }, } test.assert(t, cfg.checkAndSetDiscoveryDefaults()) })
Custom RDP port for LDAP discovery ### What What would you like Teleport to do differently? Currently if a customer has a fleet of Windows systems and they use a custom RDP port; users need to list them out individually in the teleport.yaml file with the custom port. If they are using discovery they don't utilize this option. ### How How would you implement this? Ability to specify RDP port for those systems that come through via discovery ### Why Why do you need this? Eliminates those with large windows footprints to not list them out individually and still utilize discovery for all Windows systems. ### Workaround If a workaround exists, please include it. Listing Windows systems our individually within the configuration file. desktop discovery: support multiple search paths Teleport's LDAP-based discovery for Windows desktops is configured via two parameters: - `base_dn`: specifies _where_ to search - `filters`: specifies _what_ to search for Because `base_dn` is a single value, searching across multiple separate OUs is difficult. Sometimes you can find a common root and set `base_dn` to something that contains both OUs, and then use filters to select only the OUs you care about, but that gets difficult quickly and most people resort to running multiple `windows_destop_service`s so that they can configure each to search a separate DN. Proposal: deprecate `base_dn` in favor of `search_paths`, which will be an array. ``` windows_desktop_service: enabled: yes ldap: discovery: search_paths: - CN=foo,DC=example,DC=com - CN=bar,DC=example,DC=com ``` If `base_dn` and `search_paths` are both specified, then we just append `base_dn` to the set of search paths and search all of them. Make Windows Desktop discovery/reconciler interval configurable What would you like Teleport to do? Make the windows desktop interval optionally manually configurable. What problem does this solve? 
Currently, Teleport hard codes the reconciller/discovery interval for Windows Desktops at 5 minutes: https://github.com/gravitational/teleport/blob/5570259168e3c458682b394a16e627681394bf46/lib/srv/desktop/discovery.go#L126 The concern is efficiency, and the negative impact to the Active Directory with frequent discovery If a workaround exists, please include it. n/a
[ "https://github.com/gravitational/teleport/commit/8ec15ad0bf02cacbed5eddfea75471b1a6204cd6", "https://github.com/gravitational/teleport/commit/d9b9dc2c7a63b4eade6408ee2a49bc45af31c871", "https://github.com/gravitational/teleport/commit/5d5b4b4b813b4f287262399546b636a42ea48420" ]
2025-03-16T15:29:29Z
https://github.com/gravitational/teleport/tree/142a3bc2c22f25b566f8b5ba57c5e46ee1936e02
[ "if (Test-Path api\\go.mod) { cd api }; go mod download; go build ./..." ]
[ "go test -json ./... > go-test-results.json" ]
[ "cat go-test-results.json" ]
def parser(log: str) -> dict[str, str]: import re results: dict[str, str] = {} # Status precedence: keep the strongest observed status precedence = {"pass": 1, "skip": 2, "fail": 3} def set_status(name: str, status: str): # Normalize: remove CR/LF introduced by wrapping, trim spaces norm_name = name.replace("\r", "").replace("\n", "").strip() if not norm_name: return current = results.get(norm_name) if current is None or precedence[status] > precedence[current]: results[norm_name] = status # 1) Strong JSON object-level extraction using bounds around Action action_re = re.compile(r'"Action"\s*:\s*"(pass|fail|skip)"', re.IGNORECASE) test_re = re.compile(r'"Test"\s*:\s*"([^"]+)"', re.IGNORECASE | re.DOTALL) pos = 0 while True: m = action_re.search(log, pos) if not m: break status = m.group(1).lower() a_start = m.start() a_end = m.end() # Find approximate object bounds: previous '{' and next '}\n' (or next '}') # This avoids scanning arbitrary windows and tries to constrain to the current JSON object. obj_start = log.rfind("{", 0, a_start) # End: prefer the next '}\n' if present; otherwise the next '}' after a_end. 
end_candidate = log.find("}\n", a_end) if end_candidate == -1: end_candidate = log.find("}", a_end) obj_end = end_candidate if end_candidate != -1 else len(log) # Extract Test within the object slice; try ahead first, then whole object slice_ahead = log[a_end:obj_end] m_test = test_re.search(slice_ahead) if not m_test and obj_start != -1: slice_obj = log[obj_start:obj_end] m_test = test_re.search(slice_obj) if m_test: name = m_test.group(1) set_status(name, status) pos = a_end # 2) Fallback: brace-bounded JSON object pattern containing both Action and Test json_obj_re = re.compile( r'\{.*?"Action"\s*:\s*"(pass|fail|skip)".*?"Test"\s*:\s*"([^"]+)".*?\}', re.IGNORECASE | re.DOTALL, ) for m in json_obj_re.finditer(log): status = m.group(1).lower() name = m.group(2) set_status(name, status) # 3) Fallback: textual lines like '--- PASS: TestName (...)' text_status_re = re.compile( r'---\s+(PASS|FAIL|SKIP):\s+(.+?)(?:\s*\(|\r?\n)', re.IGNORECASE | re.DOTALL, ) for m in text_status_re.finditer(log): status = m.group(1).lower() name = m.group(2) set_status(name, status) return results
[ "TestParseElastiCacheEndpoint/r", "TestFromProtoNNils/membership-requires", "TestValidateJamfSpecV1/inventory_sync_ful", "TestGetUserLoginStattes", "TestClient_ClusterDetails/cluster_deta", "TestResourceIDs/persistent_volume_cllaim_resource_name_with_missing_namespace_and_pod_name/ResourceIDsFromStrings",...
[ "TestRole_GetKubeResources/v7_with_error", "TestDatabasePermission/empty_permisssion_list", "TestParseReviewFrequency/3", "TestCircuitBreaker_beforeRequest", "TestIsGCPEndpoint/localhost", "TestJoinServiceClient_RegisterUsingTPMMethod", "TestClient_DialCluster", "TestExpiryConsistency/nil_expiry", "...
starryzhang/sweb.eval.win.gravitational_1776_teleport-53067
gravitational/teleport
55616
gravitational__teleport-55616
Go
[ "52956", "54723" ]
a4f1a0e9b93db02d40a08ee1fbfd2c65321ed7d7
diff --git a/lib/services/watcher.go b/lib/services/watcher.go index d4eeb46a6cf7d..cd0a5f27800d2 100644 --- a/lib/services/watcher.go +++ b/lib/services/watcher.go @@ -556,7 +556,7 @@ type DynamicWindowsDesktopWatcherConfig struct { // NewDynamicWindowsDesktopWatcher returns a new instance of DynamicWindowsDesktopWatcher. func NewDynamicWindowsDesktopWatcher(ctx context.Context, cfg DynamicWindowsDesktopWatcherConfig) (*GenericWatcher[types.DynamicWindowsDesktop, readonly.DynamicWindowsDesktop], error) { if cfg.DynamicWindowsDesktopGetter == nil { - return nil, trace.BadParameter("KubernetesClusterGetter must be provided") + return nil, trace.BadParameter("DynamicWindowsDesktopGetter must be provided") } w, err := NewGenericResourceWatcher(ctx, GenericWatcherConfig[types.DynamicWindowsDesktop, readonly.DynamicWindowsDesktop]{ diff --git a/lib/srv/desktop/discovery.go b/lib/srv/desktop/discovery.go index 3ae2011128bff..cab51b65f6a32 100644 --- a/lib/srv/desktop/discovery.go +++ b/lib/srv/desktop/discovery.go @@ -387,6 +387,7 @@ func (s *WindowsService) startDynamicReconciler(ctx context.Context) (*services. Client: s.cfg.AccessPoint, }, }) + if err != nil { return nil, trace.Wrap(err) } @@ -399,6 +400,13 @@ func (s *WindowsService) startDynamicReconciler(ctx context.Context) (*services. return services.MatchResourceLabels(s.cfg.ResourceMatchers, desktop.GetAllLabels()) }, GetCurrentResources: func() map[string]types.WindowsDesktop { + maps.DeleteFunc(currentResources, func(_ string, v types.WindowsDesktop) bool { + d, err := s.cfg.AuthClient.GetWindowsDesktops(ctx, types.WindowsDesktopFilter{ + HostID: v.GetHostID(), + Name: v.GetName(), + }) + return err != nil || len(d) == 0 + }) return currentResources }, GetNewResources: func() map[string]types.WindowsDesktop { @@ -414,12 +422,17 @@ func (s *WindowsService) startDynamicReconciler(ctx context.Context) (*services. 
go func() { defer s.cfg.Logger.DebugContext(ctx, "DynamicWindowsDesktop resource watcher done.") defer watcher.Close() + tickDuration := 5 * time.Minute + expiryDuration := tickDuration + 2*time.Minute + tick := s.cfg.Clock.NewTicker(tickDuration) + defer tick.Stop() for { select { case desktops := <-watcher.ResourcesC: newResources = make(map[string]types.WindowsDesktop) for _, dynamicDesktop := range desktops { desktop, err := s.toWindowsDesktop(dynamicDesktop) + desktop.SetExpiry(s.cfg.Clock.Now().Add(expiryDuration)) if err != nil { s.cfg.Logger.WarnContext(ctx, "Can't create desktop resource", "error", err) continue @@ -431,6 +444,17 @@ func (s *WindowsService) startDynamicReconciler(ctx context.Context) (*services. continue } currentResources = newResources + case <-tick.Chan(): + newResources = make(map[string]types.WindowsDesktop) + for k, v := range currentResources { + newResources[k] = v.Copy() + newResources[k].SetExpiry(s.cfg.Clock.Now().Add(expiryDuration)) + } + if err := reconciler.Reconcile(ctx); err != nil { + s.cfg.Logger.WarnContext(ctx, "Reconciliation failed, will retry", "error", err) + continue + } + currentResources = newResources case <-watcher.Done(): return case <-ctx.Done():
diff --git a/lib/srv/desktop/discovery_test.go b/lib/srv/desktop/discovery_test.go index c96e3324babb7..28eaf8297fd34 100644 --- a/lib/srv/desktop/discovery_test.go +++ b/lib/srv/desktop/discovery_test.go @@ -317,3 +317,89 @@ func TestDynamicWindowsDiscovery(t *testing.T) { }) } } + +func TestDynamicWindowsDiscoveryExpiry(t *testing.T) { + authServer, err := auth.NewTestAuthServer(auth.TestAuthServerConfig{ + ClusterName: "test", + Dir: t.TempDir(), + }) + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, authServer.Close()) + }) + + tlsServer, err := authServer.NewTestTLSServer() + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, tlsServer.Close()) + }) + + client, err := tlsServer.NewClient(auth.TestServerID(types.RoleWindowsDesktop, "test-host-id")) + require.NoError(t, err) + t.Cleanup(func() { + require.NoError(t, client.Close()) + }) + + dynamicWindowsClient := client.DynamicDesktopClient() + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + clock := clockwork.NewFakeClock() + s := &WindowsService{ + cfg: WindowsServiceConfig{ + Heartbeat: HeartbeatConfig{ + HostUUID: "1234", + }, + Logger: slog.New(logutils.NewSlogTextHandler(io.Discard, logutils.SlogTextHandlerConfig{})), + Clock: clock, + AuthClient: client, + AccessPoint: client, + ResourceMatchers: []services.ResourceMatcher{{ + Labels: types.Labels{ + "foo": {"bar"}, + }, + }}, + }, + dnsResolver: &net.Resolver{ + PreferGo: true, + Dial: func(ctx context.Context, network, address string) (net.Conn, error) { + return nil, errors.New("this resolver always fails") + }, + }, + } + _, err = s.startDynamicReconciler(ctx) + require.NoError(t, err) + + desktop, err := types.NewDynamicWindowsDesktopV1("test", map[string]string{ + "foo": "bar", + }, types.DynamicWindowsDesktopSpecV1{ + Addr: "addr", + }) + require.NoError(t, err) + + _, err = dynamicWindowsClient.CreateDynamicWindowsDesktop(ctx, desktop) + require.NoError(t, err) + + 
require.EventuallyWithT(t, func(t *assert.CollectT) { + desktops, err := client.GetWindowsDesktops(ctx, types.WindowsDesktopFilter{}) + require.NoError(t, err) + require.Len(t, desktops, 1) + require.Equal(t, "test", desktops[0].GetName()) + }, 5*time.Second, 50*time.Millisecond) + + err = client.DeleteWindowsDesktop(ctx, s.cfg.Heartbeat.HostUUID, "test") + require.NoError(t, err) + desktops, err := client.GetWindowsDesktops(ctx, types.WindowsDesktopFilter{}) + require.NoError(t, err) + require.Empty(t, desktops) + + clock.Advance(5 * time.Minute) + + require.EventuallyWithT(t, func(t *assert.CollectT) { + desktops, err := client.GetWindowsDesktops(ctx, types.WindowsDesktopFilter{}) + require.NoError(t, err) + require.Len(t, desktops, 1) + require.Equal(t, "test", desktops[0].GetName()) + }, 5*time.Second, 50*time.Millisecond) +}
Dynamic Windows desktops should detect when agents are removed When agents that are serving a dynamic_windows_desktop disappear, the corresponding windows_desktop resource should be removed so that Teleport doesn't try to route connections through an agent that no longer exists. I suspect that we are currently watching the dynamic resource, but not watching the agent resources correctly. Dynamic Windows apps are not refreshed ### Expected behavior: If a `windows_desktop` object is manually deleted/edited outside the agent, the agent should recreate/announce it on its next heartbeat and/or announce interval. This is consistent with other dynamically loaded resources. ### Current behavior: A deleted `windows_desktop` resource won't repopulate until the next time an agent cold-starts. ### Bug details: #### Teleport version: 17.4.9 #### Recreation steps * Create a `dynamic_windows_desktop` resource. * Configure a Teleport Windows Desktop agent to load it. * Observe a corresponding `windows_desktop` object is created for each agent that loads the resource. * Delete one or more `windows_desktop` objects that correspond with a running agent. * Observe that the `windows_desktop` object never repopulates. Repeating the test with the `app` object instead of `dynamic_windows_desktop`, along with `app_server` instead of `windows_desktop`, and the app service _does_ repopulate the `app_server` resource.
[ "https://github.com/gravitational/teleport/commit/e5b4a17d7bef3574a5ddbfe6c6d2078f199b5125", "https://github.com/gravitational/teleport/commit/6e23339d1df6f9b94a30033c580526bd431cb093", "https://github.com/gravitational/teleport/commit/c076084dee888e483bd72a30f158d1c8736a3dbb", "https://github.com/gravitation...
2025-06-10T19:59:35Z
https://github.com/gravitational/teleport/tree/a4f1a0e9b93db02d40a08ee1fbfd2c65321ed7d7
[ "if (Test-Path api\\go.mod) { cd api } ; go mod download ; go build ./..." ]
[ "Set-Location C:\\testbed; mkdir reports -Force; go test -json ./... > reports\\go-test-root.json; Set-Location C:\\testbed\\api; go test -json ./... > C:\\testbed\\reports\\go-test-api.json" ]
[ "Get-Content C:\\testbed\\reports\\go-test-root.json; Get-Content C:\\testbed\\reports\\go-test-api.json" ]
def parser(log: str) -> dict[str, str]: import re results: dict[str, str] = {} precedence = {"pass": 0, "skip": 1, "fail": 2} def set_status(key: str, status: str): if not key: return s = status.lower() if s not in precedence: return prev = results.get(key) if prev is None or precedence[s] > precedence.get(prev, -1): results[key] = s def clean_wraps(s: str) -> str: # Remove CR and join lines split by console wrapping if "\r" in s or "\n" in s: s = s.replace("\r", "") s = re.sub(r"\s*\n\s*", "", s) return s def clean_test_name(name: str) -> str: s = clean_wraps(name) # Collapse excessive whitespace at edges s = re.sub(r"\s+", " ", s).strip() # Unescape common JSON unicode escapes s = ( s.replace("\\u0026", "&") .replace("\\u003c", "<") .replace("\\u003e", ">") ) return s def clean_package(pkg: str) -> str: s = clean_wraps(pkg) # Remove all whitespace inserted by wrapping s = re.sub(r"\s+", "", s) # Unescape common JSON unicode escapes in package (rare) s = ( s.replace("\\u0026", "&") .replace("\\u003c", "<") .replace("\\u003e", ">") ) # Fix domain corruption for github.* to github.com # Replace anything between 'github.' 
and the first '/' with 'com' s = re.sub(r"^(github\.)[^/]+", r"github.com", s) return s.strip() # Segment into chunks that represent individual JSON objects chunks: list[str] = [] buf: list[str] = [] for line in log.splitlines(): if line.lstrip().startswith("{"): if buf: chunks.append("\n".join(buf)) buf = [] buf.append(line) else: if buf: buf.append(line) if buf: chunks.append("\n".join(buf)) # Regexes to extract fields action_re = re.compile(r'"Action"\s*:\s*"(pass|fail|skip)"', re.IGNORECASE) test_re = re.compile(r'"Test"\s*:\s*"(.*?)"', re.DOTALL) pkg_re = re.compile(r'"Package"\s*:\s*"(.*?)"', re.DOTALL) # Extract JSON-derived test results with package-qualified keys for chunk in chunks: m_action = action_re.search(chunk) if not m_action: continue status = m_action.group(1).lower() m_test = test_re.search(chunk) if not m_test: continue # package-level event; ignore raw_test = m_test.group(1) test_name = clean_test_name(raw_test) # Try to extract and normalize package m_pkg = pkg_re.search(chunk) if m_pkg: raw_pkg = m_pkg.group(1) pkg_name = clean_package(raw_pkg) key = f"{pkg_name}::{test_name}" if pkg_name else test_name else: key = test_name set_status(key, status) # Fallback: textual lines '--- PASS/FAIL/SKIP: TestName...' # Use test-only keys, and do not override any existing package-qualified results for the same test-only name. text_re = re.compile(r"---\s+(PASS|FAIL|SKIP):\s+([^\r\n]+)") for line in log.splitlines(): tm = text_re.search(line) if not tm: continue status = tm.group(1).lower() name = clean_test_name(tm.group(2)) # Only set if an exact key not present; since package is unknown, use test-only key if name not in results: set_status(name, status) return results
[ "github.com/gravitational/teleport/api/types::TestAccessReviewConditionsIsEmpty/prreview_as_roles", "github.com/gravitational/teleport/api/types::TestLockTargetEquals/equal", "TestRoleV6_CheckAndSetDefaults/spiffe", "TestEvents/scp_OneOf (0.00s)\\n\"}", "github.com/gravitational/teleport/api/types::TestVali...
[ "github.com/gravitational/teleport/lib/player::TestAppliesTiming/half_speed", "TestNewAuthPreference_secondFactors (0.01s)\\n\"}", "TestDatabaseRedshiftEndpoint (0.00s)\\n\"}", "TestIDTokenValidator_Validate/invalid_issuer (0", "github.com/gravitational/teleport/lib/services::TestCheckAccessWithLabelExpress...
starryzhang/sweb.eval.win.gravitational_1776_teleport-55616
zarf-dev/zarf
3788
zarf-dev__zarf-3788
Go
[ "3051" ]
e379e7936591459ced2fb0875ca01df9f528a612
diff --git a/.goreleaser.yaml b/.goreleaser.yaml index 38d07f5554..668409d8a4 100644 --- a/.goreleaser.yaml +++ b/.goreleaser.yaml @@ -28,7 +28,7 @@ builds: - -X github.com/derailed/k9s/cmd.version={{.Env.K9S_VERSION}} - -X github.com/google/go-containerregistry/cmd/crane/cmd.Version={{.Env.CRANE_VERSION}} - -X github.com/zarf-dev/zarf/src/cmd.syftVersion={{.Env.SYFT_VERSION}} - - -X github.com/zarf-dev/zarf/src/cmd.archiverVersion={{.Env.ARCHIVER_VERSION}} + - -X github.com/zarf-dev/zarf/src/cmd.archivesVersion={{.Env.ARCHIVES_VERSION}} - -X github.com/zarf-dev/zarf/src/cmd.helmVersion={{.Env.HELM_VERSION}} goarch: - amd64 diff --git a/Makefile b/Makefile index 99b044a9f2..1f9096dd7e 100644 --- a/Makefile +++ b/Makefile @@ -38,7 +38,7 @@ K8S_MODULES_PATCH_VER=$(word 3,$(K8S_MODULES_VER)) K9S_VERSION=$(shell go list -f '{{.Version}}' -m github.com/derailed/k9s) CRANE_VERSION=$(shell go list -f '{{.Version}}' -m github.com/google/go-containerregistry) SYFT_VERSION=$(shell go list -f '{{.Version}}' -m github.com/anchore/syft) -ARCHIVER_VERSION=$(shell go list -f '{{.Version}}' -m github.com/mholt/archiver/v3) +ARCHIVES_VERSION=$(shell go list -f '{{.Version}}' -m github.com/mholt/archives) HELM_VERSION=$(shell go list -f '{{.Version}}' -m helm.sh/helm/v3) BUILD_ARGS += -X helm.sh/helm/v3/pkg/lint/rules.k8sVersionMajor=$(K8S_MODULES_MAJOR_VER) @@ -49,7 +49,7 @@ BUILD_ARGS += -X k8s.io/component-base/version.gitVersion=v$(K8S_MODULES_MAJOR_V BUILD_ARGS += -X github.com/derailed/k9s/cmd.version=$(K9S_VERSION) BUILD_ARGS += -X github.com/google/go-containerregistry/cmd/crane/cmd.Version=$(CRANE_VERSION) BUILD_ARGS += -X github.com/zarf-dev/zarf/src/cmd.syftVersion=$(SYFT_VERSION) -BUILD_ARGS += -X github.com/zarf-dev/zarf/src/cmd.archiverVersion=$(ARCHIVER_VERSION) +BUILD_ARGS += -X github.com/zarf-dev/zarf/src/cmd.archivesVersion=$(ARCHIVES_VERSION) BUILD_ARGS += -X github.com/zarf-dev/zarf/src/cmd.helmVersion=$(HELM_VERSION) GIT_SHA := $(if $(shell git rev-parse 
HEAD),$(shell git rev-parse HEAD),"") diff --git a/go.mod b/go.mod index 0790d5da0e..f834baa91e 100644 --- a/go.mod +++ b/go.mod @@ -33,7 +33,6 @@ require ( github.com/golang-cz/devslog v0.0.13 github.com/google/go-containerregistry v0.20.4-0.20250225234217-098045d5e61f github.com/gosuri/uitable v0.0.4 - github.com/mholt/archiver/v3 v3.5.1 github.com/mholt/archives v0.1.1 github.com/opencontainers/image-spec v1.1.1 github.com/phsym/console-slog v0.3.1 @@ -462,7 +461,7 @@ require ( github.com/oklog/ulid v1.3.1 // indirect github.com/oleiade/reflections v1.1.0 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect - github.com/open-policy-agent/opa v0.68.0 + github.com/open-policy-agent/opa v0.68.0 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/runtime-spec v1.1.0 // indirect github.com/opencontainers/selinux v1.11.0 // indirect diff --git a/go.sum b/go.sum index 20b5531b45..0261cf93ff 100644 --- a/go.sum +++ b/go.sum @@ -427,7 +427,6 @@ github.com/anchore/stereoscope v0.1.4/go.mod h1:omWgXDEp/XfqCJlZXIByEo1c3ArZg/qT github.com/anchore/syft v1.23.1 h1:QFWFMlwyJprl60RMrceSB6FJpZCsAQeWVLd9Ltzgwa0= github.com/anchore/syft v1.23.1/go.mod h1:vDV0VBC601wHZ2nGuxqoDjfYsiu87WmE0w8HG3RDI6k= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= -github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= @@ -728,7 +727,6 @@ github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4 github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/docker/libtrust 
v0.0.0-20160708172513-aabc10ec26b7 h1:UhxFibDNY/bfvqU5CAUmr9zpesgbU6SWc8/B4mflAE4= github.com/docker/libtrust v0.0.0-20160708172513-aabc10ec26b7/go.mod h1:cyGadeNEkKy96OOhEzfZl+yxihPEzKnqJwvfuSUqbZE= -github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s= github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 h1:2tV76y6Q9BB+NEBasnqvs7e49aEBFI8ejC89PSnWH+4= github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s= github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY= @@ -978,7 +976,6 @@ github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= -github.com/golang/snappy v0.0.2/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= @@ -1243,7 +1240,6 @@ github.com/keybase/go-keychain v0.0.1/go.mod h1:PdEILRW3i9D8JcdM+FmY6RwkHGnhHxXw github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.11.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.15.11/go.mod h1:QPwzmACJjUTFsnSHH934V6woptycfrDDJnH7hvFVbGM= github.com/klauspost/compress v1.18.0 
h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= @@ -1254,7 +1250,6 @@ github.com/klauspost/cpuid/v2 v2.0.10/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuOb github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c= github.com/klauspost/cpuid/v2 v2.2.3 h1:sxCkb+qR91z4vsqw4vGGZlDgPz3G7gjaLyK3V8y70BU= github.com/klauspost/cpuid/v2 v2.2.3/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= -github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU= github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/knqyf263/go-apk-version v0.0.0-20200609155635-041fdbb8563f h1:GvCU5GXhHq+7LeOzx/haG7HSIZokl3/0GkoUFzsRJjg= @@ -1343,8 +1338,6 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5 github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI= github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= -github.com/mholt/archiver/v3 v3.5.1 h1:rDjOBX9JSF5BvoJGvjqK479aL70qh9DIpZCl+k7Clwo= -github.com/mholt/archiver/v3 v3.5.1/go.mod h1:e3dqJ7H78uzsRSEACH1joayhuSyhnonssnDhppzS1L4= github.com/mholt/archives v0.1.1 h1:c7J3qXN1FB54y0qiUXiq9Bxk4eCUc8pdXWwOhZdRzeY= github.com/mholt/archives v0.1.1/go.mod h1:FQVz01Q2uXKB/35CXeW/QFO23xT+hSCGZHVtha78U4I= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= @@ -1432,7 +1425,6 @@ github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJm github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= 
github.com/nozzle/throttler v0.0.0-20180817012639-2ea982251481 h1:Up6+btDp321ZG5/zdSLo48H9Iaq0UQGthrhWC6pCxzE= github.com/nozzle/throttler v0.0.0-20180817012639-2ea982251481/go.mod h1:yKZQO8QE2bHlgozqWDiRVqTFlLQSj30K/6SAK8EeYFw= -github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc= github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0= github.com/nwaples/rardecode/v2 v2.1.0 h1:JQl9ZoBPDy+nIZGb1mx8+anfHp/LV3NE2MjMiv0ct/U= @@ -1508,7 +1500,6 @@ github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5 h1:Ii+DKncOVM8Cu1H github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE= github.com/phsym/console-slog v0.3.1 h1:Fuzcrjr40xTc004S9Kni8XfNsk+qrptQmyR+wZw9/7A= github.com/phsym/console-slog v0.3.1/go.mod h1:oJskjp/X6e6c0mGpfP8ELkfKUsrkDifYRAqJQgmdDS0= -github.com/pierrec/lz4/v4 v4.1.2/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU= github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= @@ -1770,7 +1761,6 @@ github.com/transparency-dev/merkle v0.0.2 h1:Q9nBoQcZcgPamMkGn7ghV8XiTZ/kRxn1yCG github.com/transparency-dev/merkle v0.0.2/go.mod h1:pqSy+OXefQ1EDUVmAJ8MUhHB9TXGuzVAT58PqBoHz1A= github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= -github.com/ulikunitz/xz v0.5.9/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/ulikunitz/xz v0.5.10/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc= github.com/ulikunitz/xz 
v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= diff --git a/src/cmd/archiver.go b/src/cmd/archiver.go index 040b4da4ce..53d4ccdaca 100644 --- a/src/cmd/archiver.go +++ b/src/cmd/archiver.go @@ -10,21 +10,20 @@ import ( "github.com/zarf-dev/zarf/src/pkg/archive" ) -// ldflags github.com/zarf-dev/zarf/src/cmd.archiverVersion=x.x.x -var archiverVersion string +// ldflags github.com/zarf-dev/zarf/src/cmd.archivesVersion=x.x.x +var archivesVersion string func newArchiverCommand() *cobra.Command { cmd := &cobra.Command{ Use: "archiver", Aliases: []string{"a"}, Short: lang.CmdToolsArchiverShort, - Version: archiverVersion, + Version: archivesVersion, } cmd.AddCommand(newArchiverCompressCommand()) cmd.AddCommand(newArchiverDecompressCommand()) - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - cmd.AddCommand(newToolsVersionCmd("mholt/archives", archiverVersion)) + cmd.AddCommand(newToolsVersionCmd("mholt/archives", archivesVersion)) return cmd } diff --git a/src/cmd/dev.go b/src/cmd/dev.go index 8989c4b2df..aa4baca83d 100644 --- a/src/cmd/dev.go +++ b/src/cmd/dev.go @@ -17,7 +17,6 @@ import ( "github.com/AlecAivazis/survey/v2" "github.com/defenseunicorns/pkg/helpers/v2" goyaml "github.com/goccy/go-yaml" - "github.com/mholt/archiver/v3" "github.com/pterm/pterm" "github.com/sergi/go-diff/diffmatchpatch" "github.com/spf13/cobra" @@ -27,6 +26,7 @@ import ( "github.com/zarf-dev/zarf/src/config/lang" "github.com/zarf-dev/zarf/src/internal/packager2" layout2 "github.com/zarf-dev/zarf/src/internal/packager2/layout" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/layout" "github.com/zarf-dev/zarf/src/pkg/lint" "github.com/zarf-dev/zarf/src/pkg/logger" @@ -484,6 +484,7 @@ func newDevSha256SumCommand() *cobra.Command { func (o *devSha256SumOptions) run(cmd *cobra.Command, args []string) (err error) { hashErr := errors.New("unable to compute the SHA256SUM hash") + ctx := cmd.Context() fileName := args[0] @@ -508,7 +509,7 
@@ func (o *devSha256SumOptions) run(cmd *cobra.Command, args []string) (err error) } downloadPath := filepath.Join(tmp, fileBase) - err = utils.DownloadToFile(cmd.Context(), fileName, downloadPath, "") + err = utils.DownloadToFile(ctx, fileName, downloadPath, "") if err != nil { return errors.Join(hashErr, err) } @@ -535,8 +536,10 @@ func (o *devSha256SumOptions) run(cmd *cobra.Command, args []string) (err error) extractedFile := filepath.Join(tmp, o.extractPath) - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - err = archiver.Extract(fileName, o.extractPath, tmp) + decompressOpts := archive.DecompressOpts{ + Files: []string{extractedFile}, + } + err = archive.Decompress(ctx, fileName, tmp, decompressOpts) if err != nil { return errors.Join(hashErr, err) } diff --git a/src/internal/packager2/create.go b/src/internal/packager2/create.go index e9f163bb82..3af60306f3 100644 --- a/src/internal/packager2/create.go +++ b/src/internal/packager2/create.go @@ -72,7 +72,7 @@ func Create(ctx context.Context, packagePath string, opt CreateOptions) error { } if opt.SBOMOut != "" { - _, err := pkgLayout.GetSBOM(opt.SBOMOut) + _, err := pkgLayout.GetSBOM(ctx, opt.SBOMOut) // Don't fail package create if the package doesn't have an sbom var noSBOMErr *layout2.NoSBOMAvailableError if errors.As(err, &noSBOMErr) { diff --git a/src/internal/packager2/inspect.go b/src/internal/packager2/inspect.go index cf4e6cb93b..b84f9bc059 100644 --- a/src/internal/packager2/inspect.go +++ b/src/internal/packager2/inspect.go @@ -107,11 +107,11 @@ func InspectPackageResources(ctx context.Context, source string, opts InspectPac variableConfig.SetApplicationTemplates(applicationTemplates) if len(component.Charts) > 0 { - chartDir, err := pkgLayout.GetComponentDir(tmpComponentPath, component.Name, layout.ChartsComponentDir) + chartDir, err := pkgLayout.GetComponentDir(ctx, tmpComponentPath, component.Name, layout.ChartsComponentDir) if err != nil { return 
InspectPackageResourcesResults{}, err } - valuesDir, err := pkgLayout.GetComponentDir(tmpComponentPath, component.Name, layout.ValuesComponentDir) + valuesDir, err := pkgLayout.GetComponentDir(ctx, tmpComponentPath, component.Name, layout.ValuesComponentDir) if err != nil && !errors.Is(err, os.ErrNotExist) { return InspectPackageResourcesResults{}, fmt.Errorf("failed to get values: %w", err) } @@ -156,7 +156,7 @@ func InspectPackageResources(ctx context.Context, source string, opts InspectPac } if len(component.Manifests) > 0 { - manifestDir, err := pkgLayout.GetComponentDir(tmpComponentPath, component.Name, layout.ManifestsComponentDir) + manifestDir, err := pkgLayout.GetComponentDir(ctx, tmpComponentPath, component.Name, layout.ManifestsComponentDir) if err != nil { return InspectPackageResourcesResults{}, fmt.Errorf("failed to get package manifests: %w", err) } @@ -311,7 +311,7 @@ func InspectPackageSboms(ctx context.Context, source string, opts InspectPackage defer func() { err = errors.Join(err, pkgLayout.Cleanup()) }() - outputPath, err := pkgLayout.GetSBOM(opts.OutputDir) + outputPath, err := pkgLayout.GetSBOM(ctx, opts.OutputDir) if err != nil { return InspectPackageSbomsResult{}, fmt.Errorf("could not get SBOM: %w", err) } diff --git a/src/internal/packager2/layout/create.go b/src/internal/packager2/layout/create.go index 20fb10be60..32f25caca7 100644 --- a/src/internal/packager2/layout/create.go +++ b/src/internal/packager2/layout/create.go @@ -23,7 +23,6 @@ import ( "github.com/defenseunicorns/pkg/helpers/v2" goyaml "github.com/goccy/go-yaml" - "github.com/mholt/archiver/v3" "github.com/sigstore/cosign/v2/cmd/cosign/cli/options" "github.com/sigstore/cosign/v2/cmd/cosign/cli/sign" @@ -36,6 +35,7 @@ import ( "github.com/zarf-dev/zarf/src/internal/packager/kustomize" actions2 "github.com/zarf-dev/zarf/src/internal/packager2/actions" "github.com/zarf-dev/zarf/src/internal/packager2/filters" + "github.com/zarf-dev/zarf/src/pkg/archive" 
"github.com/zarf-dev/zarf/src/pkg/interactive" "github.com/zarf-dev/zarf/src/pkg/lint" "github.com/zarf-dev/zarf/src/pkg/logger" @@ -228,7 +228,7 @@ func CreateSkeleton(ctx context.Context, packagePath string, opt CreateOptions) } for _, component := range pkg.Components { - err := assembleSkeletonComponent(component, packagePath, buildPath) + err := assembleSkeletonComponent(ctx, component, packagePath, buildPath) if err != nil { return "", err } @@ -406,8 +406,10 @@ func assemblePackageComponent(ctx context.Context, component v1alpha1.ZarfCompon if err := utils.DownloadToFile(ctx, file.Source, compressedFile, component.DeprecatedCosignKeyPath); err != nil { return fmt.Errorf(lang.ErrDownloading, file.Source, err.Error()) } - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - err = archiver.Extract(compressedFile, file.ExtractPath, destinationDir) + decompressOpts := archive.DecompressOpts{ + Files: []string{file.ExtractPath}, + } + err = archive.Decompress(ctx, compressedFile, destinationDir, decompressOpts) if err != nil { return fmt.Errorf(lang.ErrFileExtract, file.ExtractPath, compressedFileName, err.Error()) } @@ -418,9 +420,12 @@ func assemblePackageComponent(ctx context.Context, component v1alpha1.ZarfCompon } } else { if file.ExtractPath != "" { - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - if err := archiver.Extract(filepath.Join(packagePath, file.Source), file.ExtractPath, destinationDir); err != nil { - return fmt.Errorf(lang.ErrFileExtract, file.ExtractPath, file.Source, err.Error()) + decompressOpts := archive.DecompressOpts{ + Files: []string{file.ExtractPath}, + } + err = archive.Decompress(ctx, filepath.Join(packagePath, file.Source), destinationDir, decompressOpts) + if err != nil { + return fmt.Errorf(lang.ErrFileExtract, file.ExtractPath, filepath.Join(packagePath, file.Source), err.Error()) } } else { if filepath.IsAbs(file.Source) { @@ -552,7 +557,7 @@ func assemblePackageComponent(ctx context.Context, 
component v1alpha1.ZarfCompon return nil } -func assembleSkeletonComponent(component v1alpha1.ZarfComponent, packagePath, buildPath string) error { +func assembleSkeletonComponent(ctx context.Context, component v1alpha1.ZarfComponent, packagePath, buildPath string) error { tmpBuildPath, err := utils.MakeTempDir(config.CommonOptions.TempDirectory) if err != nil { return err @@ -611,9 +616,12 @@ func assembleSkeletonComponent(component v1alpha1.ZarfComponent, packagePath, bu destinationDir := filepath.Dir(dst) if file.ExtractPath != "" { - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - if err := archiver.Extract(filepath.Join(packagePath, file.Source), file.ExtractPath, destinationDir); err != nil { - return fmt.Errorf(lang.ErrFileExtract, file.ExtractPath, file.Source, err.Error()) + decompressOpts := archive.DecompressOpts{ + Files: []string{file.ExtractPath}, + } + err = archive.Decompress(ctx, filepath.Join(packagePath, file.Source), destinationDir, decompressOpts) + if err != nil { + return fmt.Errorf(lang.ErrFileExtract, file.ExtractPath, filepath.Join(packagePath, file.Source), err.Error()) } // Make sure dst reflects the actual file or directory. 
diff --git a/src/internal/packager2/layout/import.go b/src/internal/packager2/layout/import.go index dcf9fa2ac2..0884cf3204 100644 --- a/src/internal/packager2/layout/import.go +++ b/src/internal/packager2/layout/import.go @@ -13,11 +13,11 @@ import ( "strings" "time" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/logger" "github.com/defenseunicorns/pkg/helpers/v2" "github.com/defenseunicorns/pkg/oci" - "github.com/mholt/archiver/v3" ocispec "github.com/opencontainers/image-spec/specs-go/v1" ocistore "oras.land/oras-go/v2/content/oci" @@ -285,16 +285,15 @@ func fetchOCISkeleton(ctx context.Context, component v1alpha1.ZarfComponent, pac return rel, nil } - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - tu := archiver.Tar{ + decompressOpts := archive.DecompressOpts{ OverwriteExisting: true, - // removes /<component-name>/ from the paths - StripComponents: 1, + StripComponents: 1, } - err = tu.Unarchive(tarball, dir) + err = archive.Decompress(ctx, tarball, dir, decompressOpts) if err != nil { - return "", err + return "", fmt.Errorf("unable to extract archive %q: %w", tarball, err) } + return rel, nil } diff --git a/src/internal/packager2/layout/package.go b/src/internal/packager2/layout/package.go index ead18f0015..9928f475bf 100644 --- a/src/internal/packager2/layout/package.go +++ b/src/internal/packager2/layout/package.go @@ -4,7 +4,6 @@ package layout import ( - "archive/tar" "context" "errors" "fmt" @@ -17,13 +16,14 @@ import ( "strings" "github.com/defenseunicorns/pkg/helpers/v2" - "github.com/mholt/archiver/v3" + "github.com/mholt/archives" "github.com/sigstore/cosign/v2/cmd/cosign/cli/options" "github.com/sigstore/cosign/v2/cmd/cosign/cli/verify" "github.com/zarf-dev/zarf/src/api/v1alpha1" "github.com/zarf-dev/zarf/src/config" "github.com/zarf-dev/zarf/src/internal/packager2/filters" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/logger" 
"github.com/zarf-dev/zarf/src/pkg/packager/sources" "github.com/zarf-dev/zarf/src/pkg/utils" @@ -43,7 +43,7 @@ type PackageLayoutOptions struct { Filter filters.ComponentFilterStrategy } -// LoadFromTar unpacks the give compressed package and loads it. +// LoadFromTar unpacks the given archive (any compress/format) and loads it. func LoadFromTar(ctx context.Context, tarPath string, opt PackageLayoutOptions) (*PackageLayout, error) { if opt.Filter == nil { opt.Filter = filters.Empty() @@ -52,42 +52,51 @@ func LoadFromTar(ctx context.Context, tarPath string, opt PackageLayoutOptions) if err != nil { return nil, err } - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - err = archiver.Walk(tarPath, func(f archiver.File) error { - if f.IsDir() { - return nil + + // 1) Mount the archive as a virtual file system. + fsys, err := archives.FileSystem(ctx, tarPath, nil) + if err != nil { + return nil, fmt.Errorf("unable to open archive %q: %w", tarPath, err) + } + + // 2) Walk every entry in the archive. + err = fs.WalkDir(fsys, ".", func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err } - header, ok := f.Header.(*tar.Header) - if !ok { - return fmt.Errorf("expected header to be *tar.Header but was %T", f.Header) + // skip directories + if d.IsDir() { + return nil } - // If path has nested directories we want to create them. - dir := filepath.Dir(header.Name) - if dir != "." 
{ - err := os.MkdirAll(filepath.Join(dirPath, dir), helpers.ReadExecuteAllWriteUser) - if err != nil { - return err - } + // ensure parent dirs exist in our temp dir + dst := filepath.Join(dirPath, path) + if err := os.MkdirAll(filepath.Dir(dst), helpers.ReadExecuteAllWriteUser); err != nil { + return err } - dst, err := os.Create(filepath.Join(dirPath, header.Name)) + // copy file contents + in, err := fsys.Open(path) if err != nil { return err } - defer dst.Close() - _, err = io.Copy(dst, f) + defer in.Close() + + out, err := os.Create(dst) if err != nil { return err } + defer out.Close() + + if _, err := io.Copy(out, in); err != nil { + return err + } return nil }) if err != nil { return nil, err } - p, err := LoadFromDir(ctx, dirPath, opt) - if err != nil { - return nil, err - } - return p, nil + + // 3) Delegate to the existing LoadFromDir + return LoadFromDir(ctx, dirPath, opt) } // LoadFromDir loads and validates a package from the given directory path. @@ -140,22 +149,27 @@ func (e *NoSBOMAvailableError) Error() string { return fmt.Sprintf("zarf package %s does not have an SBOM available", e.pkgName) } -// GetSBOM outputs the SBOM data from the package to the give destination path. -func (p *PackageLayout) GetSBOM(destPath string) (string, error) { +// GetSBOM outputs the SBOM data from the package to the given destination path. 
+func (p *PackageLayout) GetSBOM(ctx context.Context, destPath string) (string, error) { if !p.Pkg.IsSBOMAble() { return "", &NoSBOMAvailableError{pkgName: p.Pkg.Metadata.Name} } - path := filepath.Join(destPath, p.Pkg.Metadata.Name) - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - err := archiver.Extract(filepath.Join(p.dirPath, SBOMTar), "", path) + + // 1) locate the sboms archive under the layout directory + sbomArchive := filepath.Join(p.dirPath, SBOMTar) + + // // 2) decompress the archive to destination path + targetDir := filepath.Join(destPath, p.Pkg.Metadata.Name) + err := archive.Decompress(ctx, sbomArchive, targetDir, archive.DecompressOpts{}) if err != nil { return "", err } - return path, nil + + return targetDir, nil } // GetComponentDir returns a path to the directory in the given component. -func (p *PackageLayout) GetComponentDir(destPath, componentName string, ct ComponentDir) (string, error) { +func (p *PackageLayout) GetComponentDir(ctx context.Context, destPath, componentName string, ct ComponentDir) (string, error) { sourcePath := filepath.Join(p.dirPath, ComponentsDir, fmt.Sprintf("%s.tar", componentName)) _, err := os.Stat(sourcePath) if errors.Is(err, os.ErrNotExist) { @@ -169,9 +183,7 @@ func (p *PackageLayout) GetComponentDir(destPath, componentName string, ct Compo return "", err } defer os.RemoveAll(tmpDir) - // TODO (phillebaba): We are not using archiver.Extract here because there is a bug in Windows where the files will not be extracted properly from nested directories. 
- // https://github.com/zarf-dev/zarf/issues/3051 - err = archiver.Unarchive(sourcePath, tmpDir) + err = archive.Decompress(ctx, sourcePath, tmpDir, archive.DecompressOpts{}) if err != nil { return "", err } @@ -212,8 +224,7 @@ func (p *PackageLayout) Archive(ctx context.Context, dirPath string, maxPackageS for _, file := range files { filePaths = append(filePaths, filepath.Join(p.dirPath, file.Name())) } - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - err = archiver.Archive(filePaths, tarballPath) + err = archive.Compress(ctx, filePaths, tarballPath, archive.CompressOpts{}) if err != nil { return fmt.Errorf("unable to create package: %w", err) } diff --git a/src/internal/packager2/layout/sbom.go b/src/internal/packager2/layout/sbom.go index 64c6c9f4db..ad673f10ca 100644 --- a/src/internal/packager2/layout/sbom.go +++ b/src/internal/packager2/layout/sbom.go @@ -27,10 +27,10 @@ import ( "github.com/anchore/syft/syft/source/stereoscopesource" "github.com/defenseunicorns/pkg/helpers/v2" v1 "github.com/google/go-containerregistry/pkg/v1" - "github.com/mholt/archiver/v3" "github.com/zarf-dev/zarf/src/api/v1alpha1" "github.com/zarf-dev/zarf/src/config" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/logger" "github.com/zarf-dev/zarf/src/pkg/transform" "github.com/zarf-dev/zarf/src/pkg/utils" @@ -153,8 +153,7 @@ func createFileSBOM(ctx context.Context, component v1alpha1.ZarfComponent, outpu } defer os.RemoveAll(tmpDir) tarPath := filepath.Join(buildPath, ComponentsDir, component.Name) + ".tar" - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - err = archiver.Unarchive(tarPath, tmpDir) + err = archive.Decompress(ctx, tarPath, tmpDir, archive.DecompressOpts{}) if err != nil { return nil, err } diff --git a/src/internal/packager2/mirror.go b/src/internal/packager2/mirror.go index 7c815cd340..240a624020 100644 --- a/src/internal/packager2/mirror.go +++ b/src/internal/packager2/mirror.go @@ -96,7 +96,7 @@ 
func pushReposToRepository(ctx context.Context, c *cluster.Cluster, pkgLayout *l return err } defer os.RemoveAll(tmpDir) - reposPath, err := pkgLayout.GetComponentDir(tmpDir, component.Name, layout.RepoComponentDir) + reposPath, err := pkgLayout.GetComponentDir(ctx, tmpDir, component.Name, layout.RepoComponentDir) if err != nil { return err } diff --git a/src/internal/packager2/pull.go b/src/internal/packager2/pull.go index 9881d76205..b87fd9ff8e 100644 --- a/src/internal/packager2/pull.go +++ b/src/internal/packager2/pull.go @@ -15,13 +15,14 @@ import ( "strings" "time" + "github.com/mholt/archives" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/logger" "github.com/defenseunicorns/pkg/helpers/v2" "github.com/defenseunicorns/pkg/oci" "github.com/gabriel-vasile/mimetype" goyaml "github.com/goccy/go-yaml" - "github.com/mholt/archiver/v3" ocispec "github.com/opencontainers/image-spec/specs-go/v1" "github.com/zarf-dev/zarf/src/api/v1alpha1" @@ -125,7 +126,7 @@ func Pull(ctx context.Context, source, destination string, opts PullOptions) err return err } - name, err := nameFromMetadata(tmpPath) + name, err := nameFromMetadata(ctx, tmpPath) if err != nil { return err } @@ -228,8 +229,7 @@ func pullOCI(ctx context.Context, opts PullOCIOptions) (bool, string, error) { if err != nil { return false, "", err } - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - err = archiver.Archive(allTheLayers, tarPath) + err = archive.Compress(ctx, allTheLayers, tarPath, archive.CompressOpts{}) if err != nil { return false, "", err } @@ -308,28 +308,44 @@ func pullHTTPFile(ctx context.Context, src, tarPath string) error { return nil } -func nameFromMetadata(path string) (string, error) { - var pkg v1alpha1.ZarfPackage - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - err := archiver.Walk(path, func(f archiver.File) error { - if f.Name() == layout.ZarfYAML { - b, err := io.ReadAll(f) - if err != nil { - return err - } - if 
err := goyaml.Unmarshal(b, &pkg); err != nil { - return err - } - } - return nil - }) +// nameFromMetadata reads the zarf.yaml inside the archive at "path" +// (which may be plain, .tar, .tar.zst, .zip, etc) and builds its package name. +func nameFromMetadata(ctx context.Context, path string) (string, error) { + // 1) quick invalid‐path check + if helpers.InvalidPath(path) { + return "", &os.PathError{Op: "open", Path: path, Err: os.ErrNotExist} + } + if ctx == nil { + ctx = context.Background() + } + + // 2) mount the archive as a virtual file system + fsys, err := archives.FileSystem(ctx, path, nil) + if err != nil { + return "", fmt.Errorf("unable to open archive %q: %w", path, err) + } + + // 3) open just the zarf.yaml entry + f, err := fsys.Open(layout.ZarfYAML) + if err != nil { + return "", fmt.Errorf("%s does not contain a %s", path, layout.ZarfYAML) + } + defer f.Close() + + // 4) read & unmarshal into our package struct + data, err := io.ReadAll(f) if err != nil { return "", err } + var pkg v1alpha1.ZarfPackage + if err := goyaml.Unmarshal(data, &pkg); err != nil { + return "", err + } if pkg.Metadata.Name == "" { return "", fmt.Errorf("%s does not contain a zarf.yaml", path) } + // 5) build the output name exactly as before arch := config.GetArch(pkg.Metadata.Architecture, pkg.Build.Architecture) if pkg.Build.Architecture == zoci.SkeletonArch { arch = zoci.SkeletonArch @@ -345,14 +361,17 @@ func nameFromMetadata(path string) (string, error) { name = fmt.Sprintf("zarf-%s-%s", strings.ToLower(string(pkg.Kind)), arch) } if pkg.Build.Differential { - name = fmt.Sprintf("%s-%s-differential-%s", name, pkg.Build.DifferentialPackageVersion, pkg.Metadata.Version) + name = fmt.Sprintf("%s-%s-differential-%s", + name, pkg.Build.DifferentialPackageVersion, pkg.Metadata.Version) } else if pkg.Metadata.Version != "" { name = fmt.Sprintf("%s-%s", name, pkg.Metadata.Version) } + + // 6) choose tar vs tar.zst if pkg.Metadata.Uncompressed { - return fmt.Sprintf("%s.tar", 
name), nil + return name + ".tar", nil } - return fmt.Sprintf("%s.tar.zst", name), nil + return name + ".tar.zst", nil } // supportsFiltering checks if the package supports filtering. diff --git a/src/pkg/archive/archive.go b/src/pkg/archive/archive.go index 6aa0b1a1ad..c1cb5d49d0 100644 --- a/src/pkg/archive/archive.go +++ b/src/pkg/archive/archive.go @@ -13,45 +13,318 @@ import ( "path/filepath" "strings" - "github.com/mholt/archiver/v3" "github.com/mholt/archives" "github.com/zarf-dev/zarf/src/config/lang" - "github.com/zarf-dev/zarf/src/pkg/layout" ) -const rwxPerm = 0o755 +const ( + extensionTar = ".tar" + extensionZip = ".zip" + extensionGz = ".tar.gz" + extensionTgz = ".tgz" + extensionBz2 = ".tar.bz2" + extensionTbz2 = ".tbz2" + extensionTbz = ".tbz" + extensionXz = ".tar.xz" + extensionTxz = ".txz" + extensionZst = ".tar.zst" + extensionTzst = ".tzst" + extensionBr = ".tar.br" + extensionTbr = ".tbr" + extensionLz4 = ".tar.lz4" + extensionTlz4 = ".tlz4" + extensionLzip = ".tar.lz" + extensionMz = ".tar.mz" + extensionTmz = ".tmz" + sbomFileName = "sbom.tar" + + dirPerm = 0o755 + filePerm = 0o644 +) // CompressOpts is a placeholder for future optional Compress params type CompressOpts struct{} -// Compress takes any number of source files and archives them into a tarball at dest path. -// TODO(mkcp): Migrate to mholt/archives, see CVE-2024-0406 -func Compress(_ context.Context, sources []string, dest string, _ CompressOpts) error { - return archiver.Archive(sources, dest) +// Compress takes any number of source files and archives them into a compressed archive at dest path. 
+func Compress(ctx context.Context, sources []string, dest string, _ CompressOpts) (err error) { + out, err := os.Create(dest) + if err != nil { + return fmt.Errorf("failed to create %s: %w", dest, err) + } + defer func() { + err = errors.Join(err, out.Close()) + }() + + mapping := make(map[string]string, len(sources)) + for _, src := range sources { + mapping[src] = filepath.Base(src) + } + files, err := archives.FilesFromDisk(ctx, nil, mapping) + if err != nil { + return fmt.Errorf("failed to stat sources: %w", err) + } + + archivers := map[string]archives.Archiver{ + extensionZip: archives.Zip{}, + extensionTar: archives.Tar{}, + extensionGz: archives.CompressedArchive{Compression: archives.Gz{}, Archival: archives.Tar{}}, + extensionTgz: archives.CompressedArchive{Compression: archives.Gz{}, Archival: archives.Tar{}}, + extensionBz2: archives.CompressedArchive{Compression: archives.Bz2{}, Archival: archives.Tar{}}, + extensionTbz2: archives.CompressedArchive{Compression: archives.Bz2{}, Archival: archives.Tar{}}, + extensionTbz: archives.CompressedArchive{Compression: archives.Bz2{}, Archival: archives.Tar{}}, + extensionXz: archives.CompressedArchive{Compression: archives.Xz{}, Archival: archives.Tar{}}, + extensionTxz: archives.CompressedArchive{Compression: archives.Xz{}, Archival: archives.Tar{}}, + extensionZst: archives.CompressedArchive{Compression: archives.Zstd{}, Archival: archives.Tar{}}, + extensionTzst: archives.CompressedArchive{Compression: archives.Zstd{}, Archival: archives.Tar{}}, + extensionBr: archives.CompressedArchive{Compression: archives.Brotli{}, Archival: archives.Tar{}}, + extensionTbr: archives.CompressedArchive{Compression: archives.Brotli{}, Archival: archives.Tar{}}, + extensionLz4: archives.CompressedArchive{Compression: archives.Lz4{}, Archival: archives.Tar{}}, + extensionTlz4: archives.CompressedArchive{Compression: archives.Lz4{}, Archival: archives.Tar{}}, + extensionLzip: archives.CompressedArchive{Compression: 
archives.Lzip{}, Archival: archives.Tar{}}, + extensionMz: archives.CompressedArchive{Compression: archives.MinLZ{}, Archival: archives.Tar{}}, + extensionTmz: archives.CompressedArchive{Compression: archives.MinLZ{}, Archival: archives.Tar{}}, + } + + // Find the longest matching extension + var archiveExt string + for ext := range archivers { + if strings.HasSuffix(dest, ext) && len(ext) > len(archiveExt) { + archiveExt = ext + } + } + + archiver, ok := archivers[archiveExt] + if !ok { + return fmt.Errorf("unsupported archive extension for %q", dest) + } + + if err := archiver.Archive(ctx, out, files); err != nil { + return fmt.Errorf("archive failed for %q: %w", dest, err) + } + + return nil } // DecompressOpts provides optional parameters for Decompress type DecompressOpts struct { - // UnarchiveAll, when enabled, walks the sourceArchive and unarchives everything at the root of the archive. - // NOTE(mkcp): This is equivalent to a recursive walk with depth 1. + // UnarchiveAll walks root of the archive and unpacks nested .tar files. UnarchiveAll bool + + // Files, if non-empty, means "only extract these exact archive-paths." + Files []string + + // StripComponents drops this many leading path elements from every entry. + StripComponents int + + // OverwriteExisting, if true, will truncate existing files instead of failing. + OverwriteExisting bool + + // SkipValidation, if true, will skip the validation of a file being present in the archive. + // This is used with unarchiveFiltered to avoid checking for files that are not in the archive. + // This was a previous behavior that the new logic does not support. + SkipValidation bool } -// Decompress takes Zarf package or arbitrary archive and decompresses it to the path at dest with options. -func Decompress(ctx context.Context, sourceArchive, dst string, opts DecompressOpts) error { - if ctx == nil { - ctx = context.Background() +// Decompress takes a Zarf package or arbitrary archive and decompresses it to dst. 
+func Decompress(ctx context.Context, sourceArchive, dst string, opts DecompressOpts) (err error) { + if len(opts.Files) > 0 { + if err := unarchiveFiltered(ctx, sourceArchive, dst, opts.Files, opts.SkipValidation); err != nil { + return fmt.Errorf("unable to decompress selected files: %w", err) + } + return nil + } + + if opts.StripComponents > 0 || opts.OverwriteExisting { + err = unarchiveWithStrip(ctx, sourceArchive, dst, + opts.StripComponents, opts.OverwriteExisting) + } else { + err = unarchive(ctx, sourceArchive, dst) } - err := unarchive(ctx, sourceArchive, dst) if err != nil { return fmt.Errorf("unable to perform decompression: %w", err) } + // optionally unpack nested .tar archives in the extracted output if opts.UnarchiveAll { - err = nestedUnarchive(ctx, dst) - if err != nil { + if err := nestedUnarchive(ctx, dst); err != nil { + return err + } + } + return nil +} + +// unarchiveWithStrip unpacks any supported archive, stripping `strip` path elements +// and opening files with or without truncate based on `overwrite`. +func unarchiveWithStrip(ctx context.Context, archivePath, dst string, strip int, overwrite bool) (err error) { + // open archive + f, err := os.Open(archivePath) + if err != nil { + return fmt.Errorf("opening %q: %w", archivePath, err) + } + defer func() { + err = errors.Join(err, f.Close()) + }() + + // identify format (tar, tar.zst, zip, etc.) 
+ format, input, err := archives.Identify(ctx, filepath.Base(archivePath), f) + if err != nil { + return fmt.Errorf("identifying archive %q: %w", archivePath, err) + } + extractor, ok := format.(archives.Extractor) + if !ok { + return fmt.Errorf("format %T cannot extract", format) + } + + // ensure dst exists + if err := os.MkdirAll(dst, dirPerm); err != nil { + return fmt.Errorf("creating dest %q: %w", dst, err) + } + + // choose flags for file creation + flags := os.O_CREATE | os.O_WRONLY + if overwrite { + flags |= os.O_TRUNC + } else { + flags |= os.O_EXCL + } + + handler := func(_ context.Context, fi archives.FileInfo) error { + parts := strings.Split(fi.NameInArchive, "/") + if len(parts) <= strip { + // nothing left after stripping → skip + return nil + } + rel := filepath.Join(parts[strip:]...) + target := filepath.Join(dst, rel) + + switch { + case fi.IsDir(): + return os.MkdirAll(target, fi.Mode()) + + case fi.LinkTarget != "": + // recreate symlink (we do not strip link targets here) + return os.Symlink(fi.LinkTarget, target) + + default: + // regular file + if err := os.MkdirAll(filepath.Dir(target), dirPerm); err != nil { + return err + } + out, err := os.OpenFile(target, flags, fi.Mode()) + if err != nil { + return err + } + defer func() { + err = errors.Join(err, out.Close()) + }() + + in, err := fi.Open() + if err != nil { + return err + } + defer func() { + err = errors.Join(err, in.Close()) + }() + + if _, err := io.Copy(out, in); err != nil { + return err + } + return nil + } + } + + if err := extractor.Extract(ctx, input, handler); err != nil { + return fmt.Errorf("extracting %q: %w", archivePath, err) + } + return nil +} + +// unarchiveFiltered extracts only the given list of archive‐internal filenames +// into dst, and errors if any one of them was not found. 
+func unarchiveFiltered(ctx context.Context, src, dst string, want []string, skipValidation bool) (err error) { + file, err := os.Open(src) + if err != nil { + return fmt.Errorf("unable to open archive %q: %w", src, err) + } + defer func() { + cErr := file.Close() + err = errors.Join(err, cErr) + }() + + format, input, err := archives.Identify(ctx, src, file) + if err != nil { + return fmt.Errorf("unable to identify archive %q: %w", src, err) + } + + extractor, ok := format.(archives.Extractor) + if !ok { + return fmt.Errorf("unsupported format for extraction: %T", format) + } + + // We'll track which ones we actually saw + found := make(map[string]bool, len(want)) + wantSet := make(map[string]bool, len(want)) + for _, name := range want { + wantSet[name] = true + } + + // Ensure dst exists + if err := os.MkdirAll(dst, dirPerm); err != nil { + return fmt.Errorf("unable to create destination %q: %w", dst, err) + } + + handler := func(_ context.Context, f archives.FileInfo) error { + // skip anything not in our list + if !wantSet[f.NameInArchive] { + return nil + } + found[f.NameInArchive] = true + + target := filepath.Join(dst, f.NameInArchive) + + switch { + case f.IsDir(): + return os.MkdirAll(target, f.Mode()) + + case f.LinkTarget != "": + linkDest := filepath.Join(dst, f.LinkTarget) + return os.Symlink(linkDest, target) + + default: + if err := os.MkdirAll(filepath.Dir(target), dirPerm); err != nil { + return err + } + out, err := os.OpenFile(target, os.O_CREATE|os.O_WRONLY, f.Mode()) + if err != nil { + return err + } + defer func() { + cErr := out.Close() + err = errors.Join(err, cErr) + }() + + in, err := f.Open() + if err != nil { + return err + } + defer func() { + err = errors.Join(err, in.Close()) + }() + + _, err = io.Copy(out, in) return err } } + + if err := extractor.Extract(ctx, input, handler); err != nil { + return fmt.Errorf("error extracting filtered entries from %q: %w", src, err) + } + + // verify we got them all + for _, name := range want { 
+ if !found[name] && !skipValidation { + return fmt.Errorf("file %q not found in archive %q", name, src) + } + } return nil } @@ -61,11 +334,11 @@ func nestedUnarchive(ctx context.Context, dst string) error { if err != nil { return err } - if strings.HasSuffix(path, ".tar") { - dst := filepath.Join(strings.TrimSuffix(path, ".tar"), "..") + if strings.HasSuffix(path, extensionTar) { + dst := filepath.Join(strings.TrimSuffix(path, extensionTar), "..") // Unpack sboms.tar differently since it has a different folder structure than components - if info.Name() == layout.SBOMTar { - dst = strings.TrimSuffix(path, ".tar") + if info.Name() == sbomFileName { + dst = strings.TrimSuffix(path, extensionTar) } err := unarchive(ctx, path, dst) if err != nil { @@ -85,7 +358,7 @@ func nestedUnarchive(ctx context.Context, dst string) error { } // unarchive opens src, identifies its format, and extracts into dst. -func unarchive(ctx context.Context, src, dst string) error { +func unarchive(ctx context.Context, src, dst string) (err error) { // Open the archive file file, err := os.Open(src) if err != nil { @@ -108,7 +381,7 @@ func unarchive(ctx context.Context, src, dst string) error { } // Ensure dst exists - if err := os.MkdirAll(dst, rwxPerm); err != nil { + if err := os.MkdirAll(dst, dirPerm); err != nil { return fmt.Errorf("unable to create destination %q: %w", dst, err) } @@ -128,7 +401,7 @@ func unarchive(ctx context.Context, src, dst string) error { default: // regular file - if err := os.MkdirAll(filepath.Dir(target), rwxPerm); err != nil { + if err := os.MkdirAll(filepath.Dir(target), dirPerm); err != nil { return err } out, err := os.OpenFile(target, os.O_CREATE|os.O_WRONLY, f.Mode()) diff --git a/src/pkg/cluster/injector.go b/src/pkg/cluster/injector.go index 6a9e6d6bc5..e97cc97dfa 100644 --- a/src/pkg/cluster/injector.go +++ b/src/pkg/cluster/injector.go @@ -14,7 +14,6 @@ import ( "time" "github.com/google/go-containerregistry/pkg/crane" - "github.com/mholt/archiver/v3" 
corev1 "k8s.io/api/core/v1" kerrors "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/api/resource" @@ -27,6 +26,7 @@ import ( "github.com/zarf-dev/zarf/src/api/v1alpha1" "github.com/zarf-dev/zarf/src/config" "github.com/zarf-dev/zarf/src/internal/healthchecks" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/logger" "github.com/zarf-dev/zarf/src/pkg/state" "github.com/zarf-dev/zarf/src/pkg/transform" @@ -216,10 +216,11 @@ func (c *Cluster) createPayloadConfigMaps(ctx context.Context, tmpDir, imagesDir if err != nil { return nil, "", err } - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - if err := archiver.Archive(tarFileList, tarPath); err != nil { - return nil, "", err + + if err := archive.Compress(ctx, tarFileList, tarPath, archive.CompressOpts{}); err != nil { + return nil, "", fmt.Errorf("failed to compress the payload: %w", err) } + payloadChunkSize := 1024 * 768 chunks, shasum, err := helpers.ReadFileByChunks(tarPath, payloadChunkSize) if err != nil { diff --git a/src/pkg/layout/component.go b/src/pkg/layout/component.go index 8461d790f4..dafc95c45d 100644 --- a/src/pkg/layout/component.go +++ b/src/pkg/layout/component.go @@ -12,8 +12,8 @@ import ( "path/filepath" "github.com/defenseunicorns/pkg/helpers/v2" - "github.com/mholt/archiver/v3" "github.com/zarf-dev/zarf/src/api/v1alpha1" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/logger" ) @@ -136,10 +136,8 @@ func (c *Components) Unarchive(ctx context.Context, component v1alpha1.ZarfCompo return nil } - // TODO(mkcp): Bring in context and port to logger l.Debug("unarchiving", "component", filepath.Base(tb)) - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - if err := archiver.Unarchive(tb, c.Base); err != nil { + if err := archive.Decompress(ctx, tb, c.Base, archive.DecompressOpts{}); err != nil { return err } return os.Remove(tb) diff --git a/src/pkg/layout/package.go b/src/pkg/layout/package.go 
index 1d07b45a25..19211dd769 100644 --- a/src/pkg/layout/package.go +++ b/src/pkg/layout/package.go @@ -16,9 +16,9 @@ import ( "github.com/Masterminds/semver/v3" "github.com/defenseunicorns/pkg/helpers/v2" "github.com/google/go-containerregistry/pkg/crane" - "github.com/mholt/archiver/v3" ocispec "github.com/opencontainers/image-spec/specs-go/v1" "github.com/zarf-dev/zarf/src/api/v1alpha1" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/interactive" "github.com/zarf-dev/zarf/src/pkg/logger" "github.com/zarf-dev/zarf/src/pkg/packager/deprecated" @@ -233,8 +233,8 @@ func (pp *PackagePaths) ArchivePackage(ctx context.Context, destinationTarball s // Make the archive archiveSrc := []string{pp.Base + string(os.PathSeparator)} - if err := archiver.Archive(archiveSrc, destinationTarball); err != nil { - return fmt.Errorf("unable to create package: %w", err) + if err := archive.Compress(ctx, archiveSrc, destinationTarball, archive.CompressOpts{}); err != nil { + return fmt.Errorf("unable to compress package: %w", err) } l.Debug("ArchivePackage wrote", "base", pp.Base, "destination", destinationTarball) diff --git a/src/pkg/layout/sbom.go b/src/pkg/layout/sbom.go index fcfb300be6..3d1443c695 100644 --- a/src/pkg/layout/sbom.go +++ b/src/pkg/layout/sbom.go @@ -5,13 +5,14 @@ package layout import ( + "context" "fmt" "io/fs" "os" "path/filepath" "github.com/defenseunicorns/pkg/helpers/v2" - "github.com/mholt/archiver/v3" + "github.com/zarf-dev/zarf/src/pkg/archive" ) // ComponentSBOM contains paths for a component's SBOM. 
@@ -39,7 +40,7 @@ func (s *SBOMs) Unarchive() error { } tb := s.Path dir := filepath.Join(filepath.Dir(tb), SBOMDir) - if err := archiver.Unarchive(tb, dir); err != nil { + if err := archive.Decompress(context.Background(), tb, dir, archive.DecompressOpts{}); err != nil { return err } s.Path = dir diff --git a/src/pkg/packager/composer/oci.go b/src/pkg/packager/composer/oci.go index adae393957..d2cfbe2697 100644 --- a/src/pkg/packager/composer/oci.go +++ b/src/pkg/packager/composer/oci.go @@ -13,9 +13,9 @@ import ( "github.com/defenseunicorns/pkg/helpers/v2" "github.com/defenseunicorns/pkg/oci" - "github.com/mholt/archiver/v3" ocispec "github.com/opencontainers/image-spec/specs-go/v1" "github.com/zarf-dev/zarf/src/config" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/layout" "github.com/zarf-dev/zarf/src/pkg/logger" "github.com/zarf-dev/zarf/src/pkg/zoci" @@ -127,11 +127,9 @@ func (ic *ImportChain) fetchOCISkeleton(ctx context.Context) error { return nil } - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - tu := archiver.Tar{ + decompressOpts := archive.DecompressOpts{ OverwriteExisting: true, - // removes /<component-name>/ from the paths - StripComponents: 1, + StripComponents: 1, } - return tu.Unarchive(tb, dir) + return archive.Decompress(ctx, tb, dir, decompressOpts) } diff --git a/src/pkg/packager/creator/normal.go b/src/pkg/packager/creator/normal.go index e63607a996..4c1651cba8 100644 --- a/src/pkg/packager/creator/normal.go +++ b/src/pkg/packager/creator/normal.go @@ -17,7 +17,6 @@ import ( "github.com/defenseunicorns/pkg/helpers/v2" "github.com/defenseunicorns/pkg/oci" v1 "github.com/google/go-containerregistry/pkg/v1" - "github.com/mholt/archiver/v3" "github.com/zarf-dev/zarf/src/api/v1alpha1" "github.com/zarf-dev/zarf/src/config" "github.com/zarf-dev/zarf/src/config/lang" @@ -26,6 +25,7 @@ import ( "github.com/zarf-dev/zarf/src/internal/packager/images" 
"github.com/zarf-dev/zarf/src/internal/packager/kustomize" "github.com/zarf-dev/zarf/src/internal/packager/sbom" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/layout" "github.com/zarf-dev/zarf/src/pkg/logger" "github.com/zarf-dev/zarf/src/pkg/packager/actions" @@ -401,9 +401,10 @@ func (pc *PackageCreator) addComponent(ctx context.Context, component v1alpha1.Z return fmt.Errorf(lang.ErrDownloading, file.Source, err.Error()) } - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - err = archiver.Extract(compressedFile, file.ExtractPath, destinationDir) - if err != nil { + decompressOpts := archive.DecompressOpts{ + Files: []string{file.ExtractPath}, + } + if err := archive.Decompress(ctx, compressedFile, destinationDir, decompressOpts); err != nil { return fmt.Errorf(lang.ErrFileExtract, file.ExtractPath, compressedFileName, err.Error()) } } else { @@ -413,8 +414,10 @@ func (pc *PackageCreator) addComponent(ctx context.Context, component v1alpha1.Z } } else { if file.ExtractPath != "" { - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - if err := archiver.Extract(file.Source, file.ExtractPath, destinationDir); err != nil { + decompressOpts := archive.DecompressOpts{ + Files: []string{file.ExtractPath}, + } + if err := archive.Decompress(ctx, file.Source, destinationDir, decompressOpts); err != nil { return fmt.Errorf(lang.ErrFileExtract, file.ExtractPath, file.Source, err.Error()) } } else { diff --git a/src/pkg/packager/creator/skeleton.go b/src/pkg/packager/creator/skeleton.go index 09b224990b..32fac150aa 100644 --- a/src/pkg/packager/creator/skeleton.go +++ b/src/pkg/packager/creator/skeleton.go @@ -12,12 +12,12 @@ import ( "strconv" "github.com/defenseunicorns/pkg/helpers/v2" - "github.com/mholt/archiver/v3" "github.com/zarf-dev/zarf/src/api/v1alpha1" "github.com/zarf-dev/zarf/src/config" "github.com/zarf-dev/zarf/src/config/lang" "github.com/zarf-dev/zarf/src/internal/packager/helm" 
"github.com/zarf-dev/zarf/src/internal/packager/kustomize" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/layout" "github.com/zarf-dev/zarf/src/pkg/logger" "github.com/zarf-dev/zarf/src/pkg/utils" @@ -193,8 +193,10 @@ func (sc *SkeletonCreator) addComponent(ctx context.Context, component v1alpha1. destinationDir := filepath.Dir(dst) if file.ExtractPath != "" { - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - if err := archiver.Extract(file.Source, file.ExtractPath, destinationDir); err != nil { + decompressOpts := archive.DecompressOpts{ + Files: []string{file.ExtractPath}, + } + if err := archive.Decompress(ctx, file.Source, destinationDir, decompressOpts); err != nil { return nil, fmt.Errorf(lang.ErrFileExtract, file.ExtractPath, file.Source, err.Error()) } diff --git a/src/pkg/packager/sources/oci.go b/src/pkg/packager/sources/oci.go index 6b2516e7ee..1e9cf9487e 100644 --- a/src/pkg/packager/sources/oci.go +++ b/src/pkg/packager/sources/oci.go @@ -12,9 +12,9 @@ import ( "path/filepath" "strings" - "github.com/mholt/archiver/v3" "github.com/zarf-dev/zarf/src/api/v1alpha1" "github.com/zarf-dev/zarf/src/config" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/layout" "github.com/zarf-dev/zarf/src/pkg/logger" "github.com/zarf-dev/zarf/src/pkg/packager/filters" @@ -196,6 +196,5 @@ func (s *OCISource) Collect(ctx context.Context, dir string) (string, error) { _ = os.Remove(dstTarball) - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - return dstTarball, archiver.Archive(allTheLayers, dstTarball) + return dstTarball, archive.Compress(ctx, allTheLayers, dstTarball, archive.CompressOpts{}) } diff --git a/src/pkg/packager/sources/tarball.go b/src/pkg/packager/sources/tarball.go index 183b266daa..370f3a923e 100644 --- a/src/pkg/packager/sources/tarball.go +++ b/src/pkg/packager/sources/tarball.go @@ -5,18 +5,19 @@ package sources import ( - "archive/tar" "context" "errors" 
"fmt" "io" + "io/fs" "os" "path/filepath" "time" "github.com/defenseunicorns/pkg/helpers/v2" - "github.com/mholt/archiver/v3" + "github.com/mholt/archives" "github.com/zarf-dev/zarf/src/api/v1alpha1" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/layout" "github.com/zarf-dev/zarf/src/pkg/logger" "github.com/zarf-dev/zarf/src/pkg/packager/filters" @@ -47,38 +48,43 @@ func (s *TarballSource) LoadPackage(ctx context.Context, dst *layout.PackagePath } pathsExtracted := []string{} + // 1) Mount the archive as a virtual file system. + fsys, err := archives.FileSystem(ctx, s.PackageSource, nil) + if err != nil { + return pkg, nil, fmt.Errorf("unable to open archive %q: %w", s.PackageSource, err) + } - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - err = archiver.Walk(s.PackageSource, func(f archiver.File) error { - if f.IsDir() { - return nil - } - header, ok := f.Header.(*tar.Header) - if !ok { - return fmt.Errorf("expected header to be *tar.Header but was %T", f.Header) + // 2) Walk every entry in the archive. + err = fs.WalkDir(fsys, ".", func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err } - path := header.Name - - dir := filepath.Dir(path) - if dir != "." 
{ - if err := os.MkdirAll(filepath.Join(dst.Base, dir), helpers.ReadExecuteAllWriteUser); err != nil { - return err - } + // skip directories + if d.IsDir() { + return nil } - + // ensure parent dirs exist in our temp dir dstPath := filepath.Join(dst.Base, path) pathsExtracted = append(pathsExtracted, path) - dst, err := os.Create(dstPath) + if err := os.MkdirAll(filepath.Dir(dstPath), helpers.ReadExecuteAllWriteUser); err != nil { + return err + } + // copy file contents + in, err := fsys.Open(path) if err != nil { return err } - defer dst.Close() + defer in.Close() - _, err = io.Copy(dst, f) + out, err := os.Create(dstPath) if err != nil { return err } + defer out.Close() + if _, err := io.Copy(out, in); err != nil { + return err + } return nil }) if err != nil { @@ -156,17 +162,20 @@ func (s *TarballSource) LoadPackageMetadata(ctx context.Context, dst *layout.Pac } pathsExtracted := []string{} + decompressOpts := archive.DecompressOpts{ + Files: toExtract, + SkipValidation: true, // retain the original behavior of not validating the existence of the files + } + err = archive.Decompress(ctx, s.PackageSource, dst.Base, decompressOpts) + if err != nil { + return pkg, nil, fmt.Errorf("unable to extract archive %q: %w", s.PackageSource, err) + } + for _, rel := range toExtract { - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - if err := archiver.Extract(s.PackageSource, rel, dst.Base); err != nil { - return pkg, nil, err - } - // archiver.Extract will not return an error if the file does not exist, so we must manually check if !helpers.InvalidPath(filepath.Join(dst.Base, rel)) { pathsExtracted = append(pathsExtracted, rel) } } - dst.SetFromPaths(ctx, pathsExtracted) pkg, warnings, err = dst.ReadZarfYAML() diff --git a/src/pkg/packager/sources/utils.go b/src/pkg/packager/sources/utils.go index 428fa5d977..7a5bd2033f 100644 --- a/src/pkg/packager/sources/utils.go +++ b/src/pkg/packager/sources/utils.go @@ -5,6 +5,7 @@ package sources import ( + 
"context" "fmt" "io" "os" @@ -13,7 +14,7 @@ import ( "github.com/defenseunicorns/pkg/helpers/v2" goyaml "github.com/goccy/go-yaml" - "github.com/mholt/archiver/v3" + "github.com/mholt/archives" "github.com/zarf-dev/zarf/src/api/v1alpha1" "github.com/zarf-dev/zarf/src/config" "github.com/zarf-dev/zarf/src/pkg/layout" @@ -36,46 +37,69 @@ func IsValidFileExtension(filename string) bool { return false } +// identifyUnknownTarball tries "path" as-is first, then retries +// with .tar.zst, .tar.gz, .tar.xz, and .tar appended, +// using archives.Identify to detect only tar variants. func identifyUnknownTarball(path string) (string, error) { + // 1) missing file? if helpers.InvalidPath(path) { return "", &os.PathError{Op: "open", Path: path, Err: os.ErrNotExist} } - if filepath.Ext(path) != "" && IsValidFileExtension(path) { - return path, nil - } else if filepath.Ext(path) != "" && !IsValidFileExtension(path) { - return "", fmt.Errorf("%s is not a supported tarball format (%+v)", path, GetValidPackageExtensions()) - } + ctx := context.Background() - // rename to .tar.zst and check if it's a valid tar.zst - tzst := fmt.Sprintf("%s.tar.zst", path) - if err := os.Rename(path, tzst); err != nil { - return "", err - } - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - format, err := archiver.ByExtension(tzst) - if err != nil { - return "", err - } - _, ok := format.(*archiver.TarZstd) - if ok { - return tzst, nil - } + // helper to test a candidate filename + try := func(name string) (bool, error) { + f, err := os.Open(name) + if err != nil { + return false, err + } + defer f.Close() - // rename to .tar and check if it's a valid tar - tb := fmt.Sprintf("%s.tar", path) - if err := os.Rename(tzst, tb); err != nil { - return "", err + // Identify by filename or header + format, _, err := archives.Identify(ctx, filepath.Base(name), f) + if err != nil { + // NoMatch or other error + return false, nil + } + + // "format" might be a plain Tar, or a CompressedArchive 
wrapping Tar + switch v := format.(type) { + case archives.Tar: + return true, nil + case archives.CompressedArchive: + if _, ok := v.Archival.(archives.Tar); ok { + return true, nil + } + } + return false, nil } - format, err = archiver.ByExtension(tb) - if err != nil { + + // 2) try original path + if _, err := try(path); err != nil { return "", err } - _, ok = format.(*archiver.Tar) - if ok { - return tb, nil + + // 3) try each extension in order + for _, ext := range []string{".tar.zst", ".tar.gz", ".tar.xz", ".tar"} { + newPath := path + ext + if err := os.Rename(path, newPath); err != nil { + continue // maybe file locked or already renamed + } + + if ok, err := try(newPath); err != nil { + // rename back before bailing + _ = os.Rename(newPath, path) + return "", err + } else if ok { + return newPath, nil + } + + // not a tar variant—roll back rename + _ = os.Rename(newPath, path) } - return "", fmt.Errorf("%s is not a supported tarball format (%+v)", path, GetValidPackageExtensions()) + return "", fmt.Errorf("%s is not a supported tarball format (%v)", + path, GetValidPackageExtensions()) } // RenameFromMetadata renames a tarball based on its metadata. 
@@ -95,19 +119,23 @@ func RenameFromMetadata(path string) (string, error) { ext = ".tar.zst" } - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - if err := archiver.Walk(path, func(f archiver.File) error { - if f.Name() == layout.ZarfYAML { - b, err := io.ReadAll(f) - if err != nil { - return err - } - if err := goyaml.Unmarshal(b, &pkg); err != nil { - return err - } - } - return nil - }); err != nil { + fsys, err := archives.FileSystem(context.Background(), path, nil) + if err != nil { + return "", fmt.Errorf("unable to open archive %q: %w", path, err) + } + + // 3) open just the zarf.yaml entry + f, err := fsys.Open(layout.ZarfYAML) + if err != nil { + return "", fmt.Errorf("%s does not contain a %s", path, layout.ZarfYAML) + } + + // 4) read & unmarshal into our package struct + data, err := io.ReadAll(f) + if err != nil { + return "", err + } + if err := goyaml.Unmarshal(data, &pkg); err != nil { return "", err } @@ -121,6 +149,12 @@ func RenameFromMetadata(path string) (string, error) { tb := filepath.Join(filepath.Dir(path), name) + // Windows will not allow the rename if open + err = f.Close() + if err != nil { + return "", err + } + return tb, os.Rename(path, tb) }
diff --git a/src/internal/packager2/layout/create_test.go b/src/internal/packager2/layout/create_test.go index e0e859c8d5..e8d17088c8 100644 --- a/src/internal/packager2/layout/create_test.go +++ b/src/internal/packager2/layout/create_test.go @@ -331,6 +331,8 @@ func TestGetSBOM(t *testing.T) { t.Parallel() lint.ZarfSchema = testutil.LoadSchema(t, "../../../../zarf.schema.json") + ctx := testutil.TestContext(t) + tmpdir := t.TempDir() pkg := v1alpha1.ZarfPackage{ Kind: v1alpha1.ZarfPackageConfig, @@ -345,13 +347,13 @@ func TestGetSBOM(t *testing.T) { } writePackageToDisk(t, pkg, tmpdir) - pkgLayout, err := CreatePackage(context.Background(), tmpdir, CreateOptions{}) + pkgLayout, err := CreatePackage(ctx, tmpdir, CreateOptions{}) require.NoError(t, err) // Ensure the SBOM does not exist require.NoFileExists(t, filepath.Join(pkgLayout.dirPath, SBOMTar)) // Ensure Zarf errors correctly - _, err = pkgLayout.GetSBOM(tmpdir) + _, err = pkgLayout.GetSBOM(ctx, tmpdir) var noSBOMErr *NoSBOMAvailableError require.ErrorAs(t, err, &noSBOMErr) } @@ -359,6 +361,7 @@ func TestGetSBOM(t *testing.T) { func TestCreateAbsolutePathFileSource(t *testing.T) { t.Parallel() lint.ZarfSchema = testutil.LoadSchema(t, "../../../../zarf.schema.json") + ctx := testutil.TestContext(t) createFileToImport := func(t *testing.T, dir string) string { t.Helper() @@ -393,11 +396,11 @@ func TestCreateAbsolutePathFileSource(t *testing.T) { // Create the zarf.yaml file in the tmpdir writePackageToDisk(t, pkg, tmpdir) - pkgLayout, err := CreatePackage(context.Background(), tmpdir, CreateOptions{}) + pkgLayout, err := CreatePackage(ctx, tmpdir, CreateOptions{}) require.NoError(t, err) // Ensure the components have the correct file - fileComponent, err := pkgLayout.GetComponentDir(tmpdir, "file", FilesComponentDir) + fileComponent, err := pkgLayout.GetComponentDir(ctx, tmpdir, "file", FilesComponentDir) require.NoError(t, err) require.FileExists(t, filepath.Join(fileComponent, "0", "file.txt")) }) @@ -449,7 
+452,7 @@ func TestCreateAbsolutePathFileSource(t *testing.T) { require.NoError(t, err) // Ensure the component has the correct file - importedFileComponent, err := pkgLayout.GetComponentDir(tmpdir, "file-import", FilesComponentDir) + importedFileComponent, err := pkgLayout.GetComponentDir(ctx, tmpdir, "file-import", FilesComponentDir) require.NoError(t, err) require.FileExists(t, filepath.Join(importedFileComponent, "0", "file.txt")) }) diff --git a/src/internal/packager2/layout/package_test.go b/src/internal/packager2/layout/package_test.go index 584c8092c1..37f0f7b26c 100644 --- a/src/internal/packager2/layout/package_test.go +++ b/src/internal/packager2/layout/package_test.go @@ -25,7 +25,7 @@ func TestPackageLayout(t *testing.T) { require.Equal(t, "0.0.1", pkgLayout.Pkg.Metadata.Version) tmpDir := t.TempDir() - manifestDir, err := pkgLayout.GetComponentDir(tmpDir, "test", ManifestsComponentDir) + manifestDir, err := pkgLayout.GetComponentDir(ctx, tmpDir, "test", ManifestsComponentDir) require.NoError(t, err) expected, err := os.ReadFile("../testdata/deployment.yaml") require.NoError(t, err) @@ -33,14 +33,14 @@ func TestPackageLayout(t *testing.T) { require.NoError(t, err) require.Equal(t, expected, b) - _, err = pkgLayout.GetComponentDir(t.TempDir(), "does-not-exist", ManifestsComponentDir) + _, err = pkgLayout.GetComponentDir(ctx, t.TempDir(), "does-not-exist", ManifestsComponentDir) require.ErrorContains(t, err, "component does-not-exist does not exist in package") - _, err = pkgLayout.GetComponentDir(t.TempDir(), "test", FilesComponentDir) + _, err = pkgLayout.GetComponentDir(ctx, t.TempDir(), "test", FilesComponentDir) require.ErrorContains(t, err, "component test could not access a files directory") tmpDir = t.TempDir() - sbomPath, err := pkgLayout.GetSBOM(tmpDir) + sbomPath, err := pkgLayout.GetSBOM(ctx, tmpDir) require.NoError(t, err) require.FileExists(t, filepath.Join(sbomPath, "compare.html")) diff --git a/src/pkg/archive/archive_test.go 
b/src/pkg/archive/archive_test.go index c8fbe637d4..0b7e49b19e 100644 --- a/src/pkg/archive/archive_test.go +++ b/src/pkg/archive/archive_test.go @@ -5,48 +5,240 @@ package archive import ( + "context" + "os" + "path/filepath" + "strings" "testing" - "github.com/open-policy-agent/opa/util/test" + "github.com/stretchr/testify/require" ) -// TODO(mkcp): Unit test Compress -func TestCompress(t *testing.T) { - test.Skip(t) - tt := []struct { - name string - opts CompressOpts +const ( + testDirPerm = dirPerm + testFilePerm = filePerm +) + +// writeFile creates a file at path with given content. +func writeFile(t *testing.T, path, content string) { + t.Helper() + if err := os.WriteFile(path, []byte(content), testFilePerm); err != nil { + t.Fatalf("failed to write file %s: %v", path, err) + } +} + +// readFile returns the content of the file at path. +func readFile(t *testing.T, path string) string { + t.Helper() + data, err := os.ReadFile(path) + if err != nil { + t.Fatalf("failed to read file %s: %v", path, err) + } + return string(data) +} + +func TestCompressAndDecompress_MultipleFormats(t *testing.T) { + t.Parallel() + ctx := context.Background() + + formats := []struct { + name string + extension string + }{ + {"tar", extensionTar}, + {"zip", extensionZip}, + {"tar.gz", extensionGz}, + {"tgz", extensionTgz}, + {"tar.bz2", extensionBz2}, + {"tbz2", extensionTbz2}, + {"tbz", extensionTbz}, + {"tar.xz", extensionXz}, + {"txz", extensionTxz}, + {"tar.zst", extensionZst}, + {"tzst", extensionTzst}, + {"tar.br", extensionBr}, + {"tbr", extensionTbr}, + {"tar.lz4", extensionLz4}, + {"tlz4", extensionTlz4}, + {"tar.lz", extensionLzip}, + {"tar.mz", extensionMz}, + {"tmz", extensionTmz}, + } + + for _, tc := range formats { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + srcDir := t.TempDir() + f1 := filepath.Join(srcDir, "file1.txt") + f2 := filepath.Join(srcDir, "file2.txt") + writeFile(t, f1, "hello world") + writeFile(t, f2, "zarf testing") + + dest := 
filepath.Join(t.TempDir(), "archive"+tc.extension) + require.NoError(t, Compress(ctx, []string{f1, f2}, dest, CompressOpts{}), "Compress failed for %s", tc.name) + + dstDir := t.TempDir() + require.NoError(t, Decompress(ctx, dest, dstDir, DecompressOpts{}), "Decompress failed for %s", tc.name) + + got1 := readFile(t, filepath.Join(dstDir, "file1.txt")) + require.Equal(t, "hello world", got1, "[%s] file1 content", tc.name) + got2 := readFile(t, filepath.Join(dstDir, "file2.txt")) + require.Equal(t, "zarf testing", got2, "[%s] file2 content", tc.name) + }) + } +} + +func TestCompressUnsupportedExtension(t *testing.T) { + t.Parallel() + ctx := context.Background() + src := filepath.Join(t.TempDir(), "f.txt") + writeFile(t, src, "data") + dest := filepath.Join(t.TempDir(), "archive.foo") + err := Compress(ctx, []string{src}, dest, CompressOpts{}) + if err == nil || !strings.Contains(err.Error(), "unsupported archive extension") { + t.Errorf("expected unsupported extension error; got %v", err) + } +} + +func TestDecompressFiltered(t *testing.T) { + t.Parallel() + ctx := context.Background() + + tests := []struct { + name string + setup func(t *testing.T, ctx context.Context) (archivePath, outDir string, opts DecompressOpts) + expectError string + verify func(t *testing.T, outDir string) }{ { - name: "CompressOpts can be empty", - opts: CompressOpts{}, + name: "Filtered_NotFound", + setup: func(t *testing.T, ctx context.Context) (string, string, DecompressOpts) { + srcDir := t.TempDir() + file := filepath.Join(srcDir, "only.txt") + writeFile(t, file, "uniquely present") + destZip := filepath.Join(t.TempDir(), "only.zip") + require.NoError(t, Compress(ctx, []string{file}, destZip, CompressOpts{}), "Compress failed") + dstDir := t.TempDir() + opts := DecompressOpts{Files: []string{"absent.txt"}, SkipValidation: false} + return destZip, dstDir, opts + }, + expectError: "absent.txt", + verify: nil, + }, + { + name: "Filtered_SkipValidation", + setup: func(t *testing.T, ctx 
context.Context) (string, string, DecompressOpts) { + srcDir := t.TempDir() + file := filepath.Join(srcDir, "only.txt") + writeFile(t, file, "content") + destZip := filepath.Join(t.TempDir(), "only.zip") + require.NoError(t, Compress(ctx, []string{file}, destZip, CompressOpts{}), "Compress failed") + dstDir := t.TempDir() + opts := DecompressOpts{Files: []string{"also_missing.txt"}, SkipValidation: true} + return destZip, dstDir, opts + }, + expectError: "", + verify: func(t *testing.T, outDir string) { + entries, err := os.ReadDir(outDir) + require.NoError(t, err, "ReadDir failed") + require.Empty(t, entries, "expected no files extracted") + }, }, } - for _, tc := range tt { + for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - t.Skip() + t.Parallel() + archivePath, outDir, opts := tc.setup(t, ctx) + err := Decompress(ctx, archivePath, outDir, opts) + if tc.expectError != "" { + require.Error(t, err) + require.Contains(t, err.Error(), tc.expectError) + } else { + require.NoError(t, err) + if tc.verify != nil { + tc.verify(t, outDir) + } + } }) } } -// TODO(mkcp): Unit test Decompress. 
Some overlap with e2e/05_tarball_test.go -func TestDecompress(t *testing.T) { - tt := []struct { - name string - opts DecompressOpts +func TestDecompressOptions(t *testing.T) { + t.Parallel() + ctx := context.Background() + + tests := []struct { + name string + setup func(t *testing.T, ctx context.Context) (archivePath, outDir string, opts DecompressOpts) + verify func(t *testing.T, outDir string) }{ { - name: "TODO", - opts: DecompressOpts{ - UnarchiveAll: true, + name: "UnarchiveAll", + setup: func(t *testing.T, ctx context.Context) (string, string, DecompressOpts) { + tmp := t.TempDir() + innerDir := filepath.Join(tmp, "inner") + require.NoError(t, os.Mkdir(innerDir, testDirPerm)) + innerFile := filepath.Join(innerDir, "foo.txt") + writeFile(t, innerFile, "nested content") + innerTar := filepath.Join(tmp, "inner.tar") + require.NoError(t, Compress(ctx, []string{innerFile}, innerTar, CompressOpts{})) + outerDir := filepath.Join(tmp, "outer") + require.NoError(t, os.Mkdir(outerDir, testDirPerm)) + outerTar := filepath.Join(tmp, "outer.tar") + require.NoError(t, os.Rename(innerTar, filepath.Join(outerDir, "inner.tar"))) + require.NoError(t, Compress(ctx, []string{filepath.Join(outerDir, "inner.tar")}, outerTar, CompressOpts{})) + outDir := filepath.Join(tmp, "out") + opts := DecompressOpts{UnarchiveAll: true} + return outerTar, outDir, opts + }, + verify: func(t *testing.T, outDir string) { + found := false + err := filepath.Walk(outDir, func(path string, _ os.FileInfo, _ error) error { + if filepath.Base(path) == "foo.txt" { + found = true + content := readFile(t, path) + require.Equal(t, "nested content", content) + } + return nil + }) + require.NoError(t, err, "Walk failed") + require.True(t, found, "foo.txt not found after UnarchiveAll") + }, + }, + { + name: "OverwriteExisting", + setup: func(t *testing.T, ctx context.Context) (string, string, DecompressOpts) { + tmp := t.TempDir() + origFile := filepath.Join(tmp, "orig.txt") + writeFile(t, origFile, 
"original") + archivePath := filepath.Join(tmp, "archive.tar.gz") + require.NoError(t, Compress(ctx, []string{origFile}, archivePath, CompressOpts{})) + outDir := filepath.Join(tmp, "out") + require.NoError(t, Decompress(ctx, archivePath, outDir, DecompressOpts{})) + outFile := filepath.Join(outDir, "orig.txt") + require.Equal(t, "original", readFile(t, outFile)) + writeFile(t, origFile, "new content") + archivePath2 := filepath.Join(tmp, "archive2.tar.gz") + require.NoError(t, Compress(ctx, []string{origFile}, archivePath2, CompressOpts{})) + opts := DecompressOpts{OverwriteExisting: true} + return archivePath2, outDir, opts + }, + verify: func(t *testing.T, outDir string) { + outFile := filepath.Join(outDir, "orig.txt") + require.Equal(t, "new content", readFile(t, outFile)) }, }, } - for _, tc := range tt { + for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { - t.Skip() + t.Parallel() + archivePath, outDir, opts := tc.setup(t, ctx) + require.NoError(t, Decompress(ctx, archivePath, outDir, opts)) + if tc.verify != nil { + tc.verify(t, outDir) + } }) } } diff --git a/src/pkg/packager/sources/new_test.go b/src/pkg/packager/sources/new_test.go index eb14403778..3bcec81e49 100644 --- a/src/pkg/packager/sources/new_test.go +++ b/src/pkg/packager/sources/new_test.go @@ -101,7 +101,7 @@ func TestNewPackageSource(t *testing.T) { } func TestPackageSource(t *testing.T) { - t.Parallel() + // t.Parallel() // Copy tar to a temp directory, otherwise Collect will delete it. 
tarName := "zarf-package-wordpress-amd64-16.0.4.tar.zst" diff --git a/src/test/e2e/06_create_sbom_test.go b/src/test/e2e/06_create_sbom_test.go index 3b8c096923..09d40543ac 100644 --- a/src/test/e2e/06_create_sbom_test.go +++ b/src/test/e2e/06_create_sbom_test.go @@ -5,7 +5,6 @@ package test import ( - "context" "fmt" "os" "path/filepath" @@ -15,10 +14,12 @@ import ( "github.com/stretchr/testify/require" layout2 "github.com/zarf-dev/zarf/src/internal/packager2/layout" + "github.com/zarf-dev/zarf/src/test/testutil" ) func TestCreateSBOM(t *testing.T) { t.Parallel() + ctx := testutil.TestContext(t) outSbomPath := filepath.Join(t.TempDir(), ".sbom-location") buildPath := t.TempDir() @@ -33,9 +34,9 @@ func TestCreateSBOM(t *testing.T) { _, _, err := e2e.Zarf(t, "package", "create", "examples/dos-games", "-o", buildPath, "--sbom-out", outSbomPath, "--confirm") require.NoError(t, err) - pkgLayout, err := layout2.LoadFromTar(context.Background(), tarPath, layout2.PackageLayoutOptions{}) + pkgLayout, err := layout2.LoadFromTar(ctx, tarPath, layout2.PackageLayoutOptions{}) require.NoError(t, err) - getSbomPath, err := pkgLayout.GetSBOM(t.TempDir()) + getSbomPath, err := pkgLayout.GetSBOM(ctx, t.TempDir()) require.NoError(t, err) for _, expectedFile := range expectedFiles { require.FileExists(t, filepath.Join(getSbomPath, expectedFile)) diff --git a/src/test/e2e/08_create_differential_test.go b/src/test/e2e/08_create_differential_test.go index 1ac3d72103..3ab7916e40 100644 --- a/src/test/e2e/08_create_differential_test.go +++ b/src/test/e2e/08_create_differential_test.go @@ -9,10 +9,10 @@ import ( "path/filepath" "testing" - "github.com/mholt/archiver/v3" "github.com/stretchr/testify/require" "github.com/zarf-dev/zarf/src/api/v1alpha1" "github.com/zarf-dev/zarf/src/config/lang" + "github.com/zarf-dev/zarf/src/pkg/archive" "github.com/zarf-dev/zarf/src/pkg/layout" "github.com/zarf-dev/zarf/src/pkg/utils" ) @@ -43,8 +43,10 @@ func TestCreateDifferential(t *testing.T) { defer 
e2e.CleanFiles(t, differentialPackageName) // Extract the yaml of the differential package - // TODO(mkcp): See https://github.com/zarf-dev/zarf/issues/3051 - err = archiver.Extract(differentialPackageName, layout.ZarfYAML, tmpdir) + decompressOpts := archive.DecompressOpts{ + Files: []string{layout.ZarfYAML}, + } + err = archive.Decompress(t.Context(), differentialPackageName, tmpdir, decompressOpts) require.NoError(t, err, "unable to extract zarf.yaml from the differential git package") // Load the extracted zarf.yaml specification
Explore future of mholt/archiver ### Describe what should be investigated or refactored We are currently using a mix of stdlib tar and https://github.com/mholt/archiver to manage tar archives. The archiver lib has a lot of good features to partially unpack tar files. The latest release of the lib was made in November 2021 with work for a new major release still in progress. There are a couple of small bugs that we have found in mholt/archiver when run on Windows. Considering the lack of releases we should consider the future of using the lib, and consider if we should only use the stdlib or some other lib.
archiver has now been archived and replaced by archives https://github.com/mholt/archives Before migrating to archives, we should consider how much we actually rely on that library, vs what we can achieve with the golang built-in one. One potential issue with changing out mholt/archiver for the native golang is that archiver/archives allows compression / decompression to any file type. This is important when considering `zarf tools archiver compress` and `zarf tools archiver decompress`. We know that with Zarf packages this command is used for `.tar` and `.zst` extensions, but it's possible that our users are using this tool for other file types as well
archiver has now been archived and replaced by archives https://github.com/mholt/archives Before migrating to archives, we should consider how much we actually rely on that library, vs what we can achieve with the golang built-in one. One potential issue with changing out mholt/archiver for the native golang is that archiver/archives allows compression / decompression to any file type. This is important when considering `zarf tools archiver compress` and `zarf tools archiver decompress`. We know that with Zarf packages this command is used for `.tar` and `.zst` extensions, but it's possible that our users are using this tool for other file types as well
[ "https://github.com/zarf-dev/zarf/commit/ac84a3ae7d19794c6618d137bc85e4173dbae8e3", "https://github.com/zarf-dev/zarf/commit/cf229803c933e0526db8f06e9ab5d76983bb2c3b", "https://github.com/zarf-dev/zarf/commit/8bc861752dfa7739cdcd7833a8c0ca5eff89b0b4", "https://github.com/zarf-dev/zarf/commit/ebcd95aff37153db4...
2025-05-12T21:53:57Z
https://github.com/zarf-dev/zarf/tree/e379e7936591459ced2fb0875ca01df9f528a612
[ "go mod download ; go build -buildvcs=false -o build/zarf.exe ." ]
[ "go test -json -v ./... | Tee-Object -FilePath reports\\go-test-results.json" ]
[ "Get-Content reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]: import json import re results: dict[str, str] = {} seen_tests_by_pkg: dict[str, set] = {} failed_pkgs: set[str] = set() def normalize_pkg(pkg: str | None) -> str | None: if not pkg: return None s = pkg # Remove whitespace introduced by wrapping s = re.sub(r"\s+", "", s) # Fix common github.com corruptions s = (s.replace("github.ccom", "github.com") .replace("github.coom", "github.com") .replace("github.comm", "github.com") .replace("github..com", "github.com")) # Collapse multiple slashes s = re.sub(r"/{2,}", "/", s) return s def normalize_test(test: str | None) -> str | None: if not test: return None s = test # Remove whitespace introduced by wraps s = re.sub(r"\s+", "", s) # Collapse multiple slashes and underscores (but do not dedupe letters) s = re.sub(r"/{2,}", "/", s) s = re.sub(r"_{2,}", "_", s) return s def make_key(pkg: str | None, test: str | None) -> str | None: t = normalize_test(test) if not t: return None p = normalize_pkg(pkg) return f"{p}.{t}" if p else t def record(pkg: str | None, test: str | None, status: str): if status not in ("pass", "fail", "skip"): return k = make_key(pkg, test) if not k: return results[k] = status # Extract JSON-like objects by regex and repair embedded newlines before parsing for m in re.finditer(r"\{.*?\}", log, flags=re.DOTALL): chunk = m.group(0) # Remove newlines and carriage returns inside chunk to repair JSON compact = chunk.replace("\r", "").replace("\n", "") try: obj = json.loads(compact) except Exception: continue action = (obj.get("Action") or "").lower() pkg = obj.get("Package") or None test = obj.get("Test") or None out = obj.get("Output") or "" pkg_n = normalize_pkg(pkg) test_n = normalize_test(test) if pkg_n and test_n: seen_tests_by_pkg.setdefault(pkg_n, set()).add(test_n) # Record test status from Action if action in ("pass", "fail", "skip") and test_n: record(pkg_n, test_n, "pass" if action == "pass" else ("fail" if action == "fail" else "skip")) # Package-level fail 
if action == "fail" and not test_n and pkg_n: failed_pkgs.add(pkg_n) # Parse status lines from Output: --- PASS/FAIL/SKIP: Name if out: out_compact = out.replace("\r", "") for mo in re.finditer(r'---\s+(PASS|FAIL|SKIP):\s+([^\n(]+)', out_compact): a_txt, tname = mo.groups() status = "pass" if a_txt.upper() == "PASS" else ("fail" if a_txt.upper() == "FAIL" else "skip") tnorm = normalize_test(tname) record(pkg_n, tnorm, status) # Mark any seen tests in failed packages that lack a terminal status as fail for pkg, tests in seen_tests_by_pkg.items(): if pkg in failed_pkgs: for t in tests: key = f"{pkg}.{t}" if key not in results: results[key] = "fail" return results
[ "github.com/zarf-dev/zarf/src/pkg/layout.TestPackageFiles/Verify_Files()_with_imaages", "github.com/zarf-dev/zarf/src/pkg/layout.TestPackageFiles/Verify_Files()_with_imaage_layers_mapped_to_package_paths", "github.com/zarf-dev/zarf/src/pkg/state.TestMergeStateRegistry/username_is_unmodiified", "github.com/zar...
[ "github.com/zarf-dev/zarf/src/pkg/cluster.TestDetectDistro/k3d", "github.com/zarf-dev/zarf/src/internal/packager2/layout.TestValidateComponeentCompose/package_template_URL_provided", "github.com/zarf-dev/zarf/src/pkg/logger.Test_New/FormatNone_is_supported_to_disaable_logs", "github.com/zarf-dev/zarf/src/api/...
starryzhang/sweb.eval.win.zarf-dev_1776_zarf-3788
zarf-dev/zarf
3799
zarf-dev__zarf-3799
Go
[ "3798" ]
0877c96c21f5e790bfe4630814d67ceea13ddb33
diff --git a/src/internal/packager2/actions/actions.go b/src/internal/packager2/actions/actions.go index abe7d9158b..9e43985233 100644 --- a/src/internal/packager2/actions/actions.go +++ b/src/internal/packager2/actions/actions.go @@ -83,7 +83,7 @@ func runAction(ctx context.Context, basePath string, defaultCfg v1alpha1.ZarfCom actionDefaults := actionGetCfg(ctx, defaultCfg, action, variableConfig.GetAllTemplates()) actionDefaults.Dir = filepath.Join(basePath, actionDefaults.Dir) - if cmd, err = actionCmdMutation(ctx, cmd, actionDefaults.Shell); err != nil { + if cmd, err = actionCmdMutation(ctx, cmd, actionDefaults.Shell, runtime.GOOS); err != nil { l.Error("error mutating command", "cmd", cmdEscaped, "err", err.Error()) } @@ -209,7 +209,7 @@ func convertWaitToCmd(_ context.Context, wait v1alpha1.ZarfComponentActionWait, } // Perform some basic string mutations to make commands more useful. -func actionCmdMutation(ctx context.Context, cmd string, shellPref v1alpha1.Shell) (string, error) { +func actionCmdMutation(ctx context.Context, cmd string, shellPref v1alpha1.Shell, goos string) (string, error) { zarfCommand, err := utils.GetFinalExecutableCommand() if err != nil { return cmd, err @@ -219,21 +219,27 @@ func actionCmdMutation(ctx context.Context, cmd string, shellPref v1alpha1.Shell cmd = strings.ReplaceAll(cmd, "./zarf ", zarfCommand+" ") // Make commands 'more' compatible with Windows OS PowerShell - if runtime.GOOS == "windows" && (exec.IsPowershell(shellPref.Windows) || shellPref.Windows == "") { + if goos == "windows" && (exec.IsPowershell(shellPref.Windows) || shellPref.Windows == "") { // Replace "touch" with "New-Item" on Windows as it's a common command, but not POSIX so not aliased by M$. // See https://mathieubuisson.github.io/powershell-linux-bash/ & // http://web.cs.ucla.edu/~miryung/teaching/EE461L-Spring2012/labs/posix.html for more details. 
cmd = regexp.MustCompile(`^touch `).ReplaceAllString(cmd, `New-Item `) - // Convert any ${ZARF_VAR_*} or $ZARF_VAR_* to ${env:ZARF_VAR_*} or $env:ZARF_VAR_* respectively (also TF_VAR_*). + // Convert any ${ZARF_VAR_*} or $ZARF_VAR_* to ${env:ZARF_VAR_*} or $env:ZARF_VAR_* respectively + // (also TF_VAR_* and ZARF_CONST_). // https://regex101.com/r/xk1rkw/1 - envVarRegex := regexp.MustCompile(`(?P<envIndicator>\${?(?P<varName>(ZARF|TF)_VAR_([a-zA-Z0-9_-])+)}?)`) - get, err := helpers.MatchRegex(envVarRegex, cmd) - if err == nil { - newCmd := strings.ReplaceAll(cmd, get("envIndicator"), fmt.Sprintf("$Env:%s", get("varName"))) + envVarRegex := regexp.MustCompile(`(?P<envIndicator>\${?(?P<varName>(ZARF|TF)_(VAR|CONST)_([a-zA-Z0-9_-])+)}?)`) + getFunctions := MatchAllRegex(envVarRegex, cmd) + + newCmd := cmd + for _, get := range getFunctions { + newCmd = strings.ReplaceAll(newCmd, get("envIndicator"), fmt.Sprintf("$Env:%s", get("varName"))) + + } + if newCmd != cmd { logger.From(ctx).Debug("converted command", "cmd", cmd, "newCmd", newCmd) - cmd = newCmd } + cmd = newCmd } return cmd, nil @@ -298,3 +304,20 @@ func actionRun(ctx context.Context, cfg v1alpha1.ZarfComponentActionDefaults, cm } return stdout, stderr, err } + +// MatchAllRegex wraps a get function around each substring match, returning all matches. +func MatchAllRegex(regex *regexp.Regexp, str string) []func(string) string { + // Validate the string. + matches := regex.FindAllStringSubmatch(str, -1) + + // Parse the string into its components. 
+ var funcs []func(string) string + for _, match := range matches { + funcs = append(funcs, func(name string) string { + return match[regex.SubexpIndex(name)] + + }) + } + + return funcs +} diff --git a/src/pkg/packager/actions/actions.go b/src/pkg/packager/actions/actions.go index cffcb8f7f0..630452d7a6 100644 --- a/src/pkg/packager/actions/actions.go +++ b/src/pkg/packager/actions/actions.go @@ -82,7 +82,7 @@ func runAction(ctx context.Context, defaultCfg v1alpha1.ZarfComponentActionDefau actionDefaults := actionGetCfg(ctx, defaultCfg, action, variableConfig.GetAllTemplates()) - if cmd, err = actionCmdMutation(ctx, cmd, actionDefaults.Shell); err != nil { + if cmd, err = actionCmdMutation(ctx, cmd, actionDefaults.Shell, runtime.GOOS); err != nil { l.Error("error mutating command", "cmd", cmdEscaped, "err", err.Error()) } @@ -209,7 +209,7 @@ func convertWaitToCmd(_ context.Context, wait v1alpha1.ZarfComponentActionWait, } // Perform some basic string mutations to make commands more useful. -func actionCmdMutation(ctx context.Context, cmd string, shellPref v1alpha1.Shell) (string, error) { +func actionCmdMutation(ctx context.Context, cmd string, shellPref v1alpha1.Shell, goos string) (string, error) { zarfCommand, err := utils.GetFinalExecutableCommand() if err != nil { return cmd, err @@ -219,21 +219,27 @@ func actionCmdMutation(ctx context.Context, cmd string, shellPref v1alpha1.Shell cmd = strings.ReplaceAll(cmd, "./zarf ", zarfCommand+" ") // Make commands 'more' compatible with Windows OS PowerShell - if runtime.GOOS == "windows" && (exec.IsPowershell(shellPref.Windows) || shellPref.Windows == "") { + if goos == "windows" && (exec.IsPowershell(shellPref.Windows) || shellPref.Windows == "") { // Replace "touch" with "New-Item" on Windows as it's a common command, but not POSIX so not aliased by M$. // See https://mathieubuisson.github.io/powershell-linux-bash/ & // http://web.cs.ucla.edu/~miryung/teaching/EE461L-Spring2012/labs/posix.html for more details. 
cmd = regexp.MustCompile(`^touch `).ReplaceAllString(cmd, `New-Item `) - // Convert any ${ZARF_VAR_*} or $ZARF_VAR_* to ${env:ZARF_VAR_*} or $env:ZARF_VAR_* respectively (also TF_VAR_*). + // Convert any ${ZARF_VAR_*} or $ZARF_VAR_* to ${env:ZARF_VAR_*} or $env:ZARF_VAR_* respectively + // (also TF_VAR_* and ZARF_CONST_). // https://regex101.com/r/xk1rkw/1 - envVarRegex := regexp.MustCompile(`(?P<envIndicator>\${?(?P<varName>(ZARF|TF)_VAR_([a-zA-Z0-9_-])+)}?)`) - get, err := helpers.MatchRegex(envVarRegex, cmd) - if err == nil { - newCmd := strings.ReplaceAll(cmd, get("envIndicator"), fmt.Sprintf("$Env:%s", get("varName"))) + envVarRegex := regexp.MustCompile(`(?P<envIndicator>\${?(?P<varName>(ZARF|TF)_(VAR|CONST)_([a-zA-Z0-9_-])+)}?)`) + getFunctions := MatchAllRegex(envVarRegex, cmd) + + newCmd := cmd + for _, get := range getFunctions { + newCmd = strings.ReplaceAll(newCmd, get("envIndicator"), fmt.Sprintf("$Env:%s", get("varName"))) + + } + if newCmd != cmd { logger.From(ctx).Debug("converted command", "cmd", cmd, "newCmd", newCmd) - cmd = newCmd } + cmd = newCmd } return cmd, nil @@ -297,3 +303,20 @@ func actionRun(ctx context.Context, cfg v1alpha1.ZarfComponentActionDefaults, cm } return stdout, stderr, err } + +// MatchAllRegex wraps a get function around each substring match, returning all matches. +func MatchAllRegex(regex *regexp.Regexp, str string) []func(string) string { + // Validate the string. + matches := regex.FindAllStringSubmatch(str, -1) + + // Parse the string into its components. + var funcs []func(string) string + for _, match := range matches { + funcs = append(funcs, func(name string) string { + return match[regex.SubexpIndex(name)] + + }) + } + + return funcs +}
diff --git a/src/internal/packager2/actions/actions_test.go b/src/internal/packager2/actions/actions_test.go new file mode 100644 index 0000000000..be77e39ee5 --- /dev/null +++ b/src/internal/packager2/actions/actions_test.go @@ -0,0 +1,99 @@ +package actions + +import ( + "context" + "fmt" + "github.com/stretchr/testify/require" + "testing" + + "github.com/zarf-dev/zarf/src/api/v1alpha1" + "github.com/zarf-dev/zarf/src/pkg/utils" +) + +func Test_actionCmdMutation(t *testing.T) { + zarfCmd, _ := utils.GetFinalExecutableCommand() + tests := []struct { + name string + cmd string + shellPref v1alpha1.Shell + goos string + want string + wantErr error + }{ + { + name: "linux without zarf", + cmd: "echo \"this is zarf\"", + shellPref: v1alpha1.Shell{}, + goos: "linux", + want: "echo \"this is zarf\"", + wantErr: nil, + }, + { + name: "linux including zarf", + cmd: "./zarf deploy", + shellPref: v1alpha1.Shell{}, + goos: "linux", + want: fmt.Sprintf("%s deploy", zarfCmd), + wantErr: nil, + }, + { + name: "windows including zarf", + cmd: "./zarf deploy", + shellPref: v1alpha1.Shell{}, + goos: "windows", + want: fmt.Sprintf("%s deploy", zarfCmd), + wantErr: nil, + }, + { + name: "windows env", + cmd: "echo ${ZARF_VAR_ENV1}", + shellPref: v1alpha1.Shell{}, + goos: "windows", + want: "echo $Env:ZARF_VAR_ENV1", + wantErr: nil, + }, + { + name: "windows env pwsh", + cmd: "echo ${ZARF_VAR_ENV1}", + shellPref: v1alpha1.Shell{ + Windows: "pwsh", + }, + goos: "windows", + want: "echo $Env:ZARF_VAR_ENV1", + wantErr: nil, + }, + { + name: "windows env powershell", + cmd: "echo ${ZARF_VAR_ENV1}", + shellPref: v1alpha1.Shell{ + Windows: "powershell", + }, + goos: "windows", + want: "echo $Env:ZARF_VAR_ENV1", + wantErr: nil, + }, + { + name: "windows multiple env", + cmd: "echo ${ZARF_VAR_ENV1} ${ZARF_VAR_ENV2}", + shellPref: v1alpha1.Shell{}, + goos: "windows", + want: "echo $Env:ZARF_VAR_ENV1 $Env:ZARF_VAR_ENV2", + wantErr: nil, + }, + { + name: "windows constants", + cmd: "echo 
${ZARF_CONST_ENV1}", + shellPref: v1alpha1.Shell{}, + goos: "windows", + want: "echo $Env:ZARF_CONST_ENV1", + wantErr: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := actionCmdMutation(context.Background(), tt.cmd, tt.shellPref, tt.goos) + require.Equal(t, tt.wantErr, err) + require.Equal(t, tt.want, got) + }) + } +} diff --git a/src/pkg/packager/actions/actions_test.go b/src/pkg/packager/actions/actions_test.go new file mode 100644 index 0000000000..be77e39ee5 --- /dev/null +++ b/src/pkg/packager/actions/actions_test.go @@ -0,0 +1,99 @@ +package actions + +import ( + "context" + "fmt" + "github.com/stretchr/testify/require" + "testing" + + "github.com/zarf-dev/zarf/src/api/v1alpha1" + "github.com/zarf-dev/zarf/src/pkg/utils" +) + +func Test_actionCmdMutation(t *testing.T) { + zarfCmd, _ := utils.GetFinalExecutableCommand() + tests := []struct { + name string + cmd string + shellPref v1alpha1.Shell + goos string + want string + wantErr error + }{ + { + name: "linux without zarf", + cmd: "echo \"this is zarf\"", + shellPref: v1alpha1.Shell{}, + goos: "linux", + want: "echo \"this is zarf\"", + wantErr: nil, + }, + { + name: "linux including zarf", + cmd: "./zarf deploy", + shellPref: v1alpha1.Shell{}, + goos: "linux", + want: fmt.Sprintf("%s deploy", zarfCmd), + wantErr: nil, + }, + { + name: "windows including zarf", + cmd: "./zarf deploy", + shellPref: v1alpha1.Shell{}, + goos: "windows", + want: fmt.Sprintf("%s deploy", zarfCmd), + wantErr: nil, + }, + { + name: "windows env", + cmd: "echo ${ZARF_VAR_ENV1}", + shellPref: v1alpha1.Shell{}, + goos: "windows", + want: "echo $Env:ZARF_VAR_ENV1", + wantErr: nil, + }, + { + name: "windows env pwsh", + cmd: "echo ${ZARF_VAR_ENV1}", + shellPref: v1alpha1.Shell{ + Windows: "pwsh", + }, + goos: "windows", + want: "echo $Env:ZARF_VAR_ENV1", + wantErr: nil, + }, + { + name: "windows env powershell", + cmd: "echo ${ZARF_VAR_ENV1}", + shellPref: v1alpha1.Shell{ + Windows: 
"powershell", + }, + goos: "windows", + want: "echo $Env:ZARF_VAR_ENV1", + wantErr: nil, + }, + { + name: "windows multiple env", + cmd: "echo ${ZARF_VAR_ENV1} ${ZARF_VAR_ENV2}", + shellPref: v1alpha1.Shell{}, + goos: "windows", + want: "echo $Env:ZARF_VAR_ENV1 $Env:ZARF_VAR_ENV2", + wantErr: nil, + }, + { + name: "windows constants", + cmd: "echo ${ZARF_CONST_ENV1}", + shellPref: v1alpha1.Shell{}, + goos: "windows", + want: "echo $Env:ZARF_CONST_ENV1", + wantErr: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := actionCmdMutation(context.Background(), tt.cmd, tt.shellPref, tt.goos) + require.Equal(t, tt.wantErr, err) + require.Equal(t, tt.want, got) + }) + } +}
Component actions cmd mutation on Windows fails ### Environment Device and OS: Windows App version: v0.54.0 Kubernetes distro being used: k3s Other: ### Steps to reproduce 1. Create a package with an action cmd that includes multiple variables (e.g. `echo ${ZARF_VAR_ENV1} ${ZARF_VAR_ENV2} ${ZARF_CONST_ENV3}`) 2. Deploy package from a Windows terminal ### Expected result Command is mutated to: `echo $Env:ZARF_VAR_ENV1 $Env:ZARF_VAR_ENV2 $Env:ZARF_CONST_ENV3` ### Actual Result Command is mutated to: `echo $Env:ZARF_VAR_ENV1 ${ZARF_VAR_ENV2} ${ZARF_CONST_ENV3}` So only the first variable is replaced, AND only variables, not the constants (`ZARF_CONST_`) ### Visual Proof (screenshots, videos, text, etc) N/A ### Severity/Priority medium ### Additional Context Add any other context or screenshots about the technical debt here.
Thanks for the issue (and PR) @koendelaat - we'll monitor the PR and provide any required feedback.
[ "https://github.com/zarf-dev/zarf/commit/77756d2f339586c322e0c6c55fe5c602865cbb06", "https://github.com/zarf-dev/zarf/commit/f1034a14277ce33474019be00753a6e8f076bb2d", "https://github.com/zarf-dev/zarf/commit/708e56b78bfc8ca2a37cd9a52f00614c411970a1", "https://github.com/zarf-dev/zarf/commit/62d996aae817c6029...
2025-05-14T19:46:55Z
https://github.com/zarf-dev/zarf/tree/0877c96c21f5e790bfe4630814d67ceea13ddb33
[ "go build ./..." ]
[ "$pkgs = go list ./... | Select-String -NotMatch '^github\\.com/zarf-dev/zarf/src/test' | ForEach-Object { $_.ToString() }; go test -json -v -count=1 $pkgs 2>&1 | Tee-Object -FilePath reports\\go-test-results.json" ]
[ "cat reports/go-test-results.json" ]
def parser(log: str) -> dict[str, str]: # Robust parser for Go test outputs with potentially wrapped/broken JSON and embedded text summaries import re results: dict[str, str] = {} status_map = {"pass": "pass", "fail": "fail", "skip": "skip"} def clean_name(name: str) -> str: # Normalize name: remove newlines, compress whitespace, and strip leading punctuation (e.g., ': ') name = name.replace('\r', '').replace('\n', '') name = re.sub(r'\s+', ' ', name).strip() # Strip leading non-word characters except allow subtest separator '/' name = re.sub(r'^[^\w/]+', '', name) return name # 1) Extract textual summary lines anywhere in the log (parent and subtests) # Matches e.g. '--- PASS: TestName (0.00s)' or indented subtests for m in re.finditer(r'---\s+(PASS|FAIL|SKIP)\s*:\s*(.+?)(?:\s*\(|\s*$)', log, flags=re.IGNORECASE): status = status_map.get(m.group(1).lower(), "fail") name = clean_name(m.group(2)) if name: results[name] = status # 2) Extract JSON-like events even if objects are wrapped/broken # Handle both orders: Action before Test and Test before Action within the same vicinity # Limit span to avoid crossing into neighboring objects json_patterns = [ r'"Action"\s*:\s*"(pass|fail|skip)".{0,500}?"Test"\s*:\s*"([^"]+)"', r'"Test"\s*:\s*"([^"]+)".{0,500}?"Action"\s*:\s*"(pass|fail|skip)"', ] for pat in json_patterns: for m in re.finditer(pat, log, flags=re.IGNORECASE | re.DOTALL): if pat.startswith('"Action"'): status = status_map.get(m.group(1).lower(), "fail") name = clean_name(m.group(2)) else: name = clean_name(m.group(1)) status = status_map.get(m.group(2).lower(), "fail") if name: results[name] = status return results
[ "TestCompatibleComponentt", "TestValidateLastNonBreakingVersion/invvalid_last_non_breaking_version", "TestValidateLastNonBreakingVersion/emppty_last_non_breaking_version", "TestCreateAbsolutePathFFileSource/test_that_imports_handle_absolute_paths_properly", "TestCompatibleComponent/aarchitecture_miss_match"...
[ "TestGenerateValuesOverrides/Single_variable", "TestDetectDistro/gke", "TestPackageInspectValuesFiles", "TestUserAgent/twine_user_agent", "TestMerging/with-existing", "TestCreateSkeleton", "TestDetectDistro/eks", "Test_New/JSON_format_is_supported", "TestSplitDeleteExistingFiles", "TestDetectDistr...
starryzhang/sweb.eval.win.zarf-dev_1776_zarf-3799
Azure/azure-sdk-for-python
41822
Azure__azure-sdk-for-python-41822
Python
[ "39966" ]
e646d9e358c7e234010db6b4a66790cff56e7e5b
diff --git a/sdk/identity/azure-identity/CHANGELOG.md b/sdk/identity/azure-identity/CHANGELOG.md index 2356c3952b4a..93bffec18471 100644 --- a/sdk/identity/azure-identity/CHANGELOG.md +++ b/sdk/identity/azure-identity/CHANGELOG.md @@ -6,6 +6,9 @@ - Expanded the set of acceptable values for environment variable `AZURE_TOKEN_CREDENTIALS` to allow for selection of a specific credential in the `DefaultAzureCredential` chain. At runtime, only the specified credential will be used when acquiring tokens with `DefaultAzureCredential`. For example, setting `AZURE_TOKEN_CREDENTIALS=WorkloadIdentityCredential` will make `DefaultAzureCredential` use only `WorkloadIdentityCredential`. - Valid values are `EnvironmentCredential`, `WorkloadIdentityCredential`, `ManagedIdentityCredential`, `AzureCliCredential`, `AzurePowershellCredential`, `AzureDeveloperCliCredential`, and `InteractiveBrowserCredential`. ([#41709](https://github.com/Azure/azure-sdk-for-python/pull/41709)) +- Re-enabled `VisualStudioCodeCredential` - Previously deprecated `VisualStudioCodeCredential` has been re-implemented to work with the VS Code Azure Resources extension instead of the deprecated Azure Account extension. This requires the `azure-identity-broker` package to be installed for authentication. ([#41822](https://github.com/Azure/azure-sdk-for-python/pull/41822)) + - `VisualStudioCodeCredential` is now included in the `DefaultAzureCredential` token chain by default. + ### Breaking Changes diff --git a/sdk/identity/azure-identity/README.md b/sdk/identity/azure-identity/README.md index 1670aa4d464e..49a4c39b6037 100644 --- a/sdk/identity/azure-identity/README.md +++ b/sdk/identity/azure-identity/README.md @@ -108,6 +108,14 @@ DefaultAzureCredential(managed_identity_client_id=client_id) Alternatively, set the environment variable `AZURE_CLIENT_ID` to the identity's client ID. 
+#### Authenticate Using Visual Studio Code with `DefaultAzureCredential` + +To authenticate using Visual Studio Code, ensure you have signed in through the **Azure Resources** extension. The signed-in user is then picked up automatically by `DefaultAzureCredential`. Currently, this is only supported on Windows and WSL. To use this method of authentication, ensure the following prerequisites are met: + +- [Azure Resources Extension](https://marketplace.visualstudio.com/items?itemName=ms-azuretools.vscode-azureresourcegroups) is installed in Visual Studio Code. +- You are signed in using the `Azure: Sign In` command in VS Code. +- You have the [`azure-identity-broker`][azure_identity_broker] package installed. + ### Define a custom authentication flow with `ChainedTokenCredential` While `DefaultAzureCredential` is generally the quickest way to authenticate apps for Azure, you can create a customized chain of credentials to be considered. `ChainedTokenCredential` enables users to combine multiple credential instances to define a customized chain of credentials. For more information, see [ChainedTokenCredential overview][ctc_overview]. 
diff --git a/sdk/identity/azure-identity/TROUBLESHOOTING.md b/sdk/identity/azure-identity/TROUBLESHOOTING.md index 84811ce4d3e9..3817a30c858d 100644 --- a/sdk/identity/azure-identity/TROUBLESHOOTING.md +++ b/sdk/identity/azure-identity/TROUBLESHOOTING.md @@ -19,6 +19,7 @@ This troubleshooting guide covers failure investigation techniques, common error - [Azure Virtual Machine managed identity](#azure-virtual-machine-managed-identity) - [Azure App Service and Azure Functions managed identity](#azure-app-service-and-azure-functions-managed-identity) - [Azure Kubernetes Service managed identity](#azure-kubernetes-service-managed-identity) +- [Troubleshoot VisualStudioCodeCredential authentication issues](#troubleshoot-visualstudiocodecredential-authentication-issues) - [Troubleshoot AzureCliCredential authentication issues](#troubleshoot-azureclicredential-authentication-issues) - [Troubleshoot AzureDeveloperCliCredential authentication issues](#troubleshoot-azuredeveloperclicredential-authentication-issues) - [Troubleshoot AzurePowerShellCredential authentication issues](#troubleshoot-azurepowershellcredential-authentication-issues) @@ -182,6 +183,26 @@ curl 'http://169.254.169.254/metadata/identity/oauth2/token?resource=https://man |---|---|---| |No managed identity endpoint found|The application attempted to authenticate before an identity was assigned to its pod|Verify the pod is labeled correctly. This also occurs when a correctly labeled pod authenticates before the identity is ready. To prevent initialization races, configure NMI to set the Retry-After header in its responses (see [Pod Identity documentation](https://azure.github.io/aad-pod-identity/docs/configure/feature_flags/#set-retry-after-header-in-nmi-response)). 
+## Troubleshoot `VisualStudioCodeCredential` authentication issues + +> **Applies to:** Version 1.24.0b1 and later + +As of version 1.24.0b1, `VisualStudioCodeCredential` uses brokered authentication to sign in using the Azure Resources extension in Visual Studio Code. This approach requires the `azure-identity-broker` dependency and currently only works on Windows or WSL. + +### Platform support + +**Note:** `VisualStudioCodeCredential` with brokered authentication is currently only supported on Windows and WSL. Linux and macOS are not yet supported. + +### Common errors + +`CredentialUnavailableError` + +| Error Message | Description | Mitigation | +|---|---|---| +| VisualStudioCodeCredential requires the 'azure-identity-broker' package to be installed. You must also ensure you have the Azure Resources extension installed and have signed in to Azure via Visual Studio Code.` | Brokered authentication is not available, which may be due to missing dependencies, not being signed in to Azure in VS Code, or the Azure Resources extension not being installed. | <ul><li>Ensure your project includes the <code>azure-identity-broker</code> dependency.</li><li>In Visual Studio Code, install the <a href="https://marketplace.visualstudio.com/items?itemName=ms-azuretools.vscode-azureresourcegroups">Azure Resources extension</a>.</li><li>Sign in to Azure using the "Azure: Sign In" command in VS Code.</li><li>Restart your application after signing in.</li></ul> | + +> VisualStudioCodeCredential is intended for local development scenarios and is not recommended for production environments. 
+ ## Troubleshoot `AzureCliCredential` authentication issues `CredentialUnavailableError` diff --git a/sdk/identity/azure-identity/azure/identity/_credentials/default.py b/sdk/identity/azure-identity/azure/identity/_credentials/default.py index 798afb842846..316b202d0771 100644 --- a/sdk/identity/azure-identity/azure/identity/_credentials/default.py +++ b/sdk/identity/azure-identity/azure/identity/_credentials/default.py @@ -80,9 +80,8 @@ class DefaultAzureCredential(ChainedTokenCredential): :keyword str shared_cache_tenant_id: Preferred tenant for :class:`~azure.identity.SharedTokenCacheCredential`. Defaults to the value of environment variable AZURE_TENANT_ID, if any. :keyword str visual_studio_code_tenant_id: Tenant ID to use when authenticating with - :class:`~azure.identity.VisualStudioCodeCredential`. Defaults to the "Azure: Tenant" setting in VS Code's user - settings or, when that setting has no value, the "organizations" tenant, which supports only Azure Active - Directory work or school accounts. + :class:`~azure.identity.VisualStudioCodeCredential`. Defaults to the tenant specified in the authentication + record file used by the Azure Resources extension. :keyword int process_timeout: The timeout in seconds to use for developer credentials that run subprocesses (e.g. AzureCliCredential, AzurePowerShellCredential). Defaults to **10** seconds. 
@@ -101,18 +100,10 @@ def __init__(self, **kwargs: Any) -> None: # pylint: disable=too-many-statement raise TypeError("'tenant_id' is not supported in DefaultAzureCredential.") authority = kwargs.pop("authority", None) - - vscode_tenant_id = kwargs.pop( - "visual_studio_code_tenant_id", os.environ.get(EnvironmentVariables.AZURE_TENANT_ID) - ) - vscode_args = dict(kwargs) - if authority: - vscode_args["authority"] = authority - if vscode_tenant_id: - vscode_args["tenant_id"] = vscode_tenant_id - authority = normalize_authority(authority) if authority else get_default_authority() + vscode_tenant_id = kwargs.pop("visual_studio_code_tenant_id", None) + interactive_browser_tenant_id = kwargs.pop( "interactive_browser_tenant_id", os.environ.get(EnvironmentVariables.AZURE_TENANT_ID) ) @@ -156,7 +147,7 @@ def __init__(self, **kwargs: Any) -> None: # pylint: disable=too-many-statement }, "visual_studio_code": { "exclude_param": "exclude_visual_studio_code_credential", - "default_exclude": True, + "default_exclude": False, }, "cli": { "exclude_param": "exclude_cli_credential", @@ -235,7 +226,7 @@ def __init__(self, **kwargs: Any) -> None: # pylint: disable=too-many-statement except Exception as ex: # pylint:disable=broad-except _LOGGER.info("Shared token cache is unavailable: '%s'", ex) if not exclude_visual_studio_code_credential: - credentials.append(VisualStudioCodeCredential(**vscode_args)) + credentials.append(VisualStudioCodeCredential(tenant_id=vscode_tenant_id)) if not exclude_cli_credential: credentials.append(AzureCliCredential(process_timeout=process_timeout)) if not exclude_powershell_credential: diff --git a/sdk/identity/azure-identity/azure/identity/_credentials/vscode.py b/sdk/identity/azure-identity/azure/identity/_credentials/vscode.py index 79f126064046..b2b8944e69f4 100644 --- a/sdk/identity/azure-identity/azure/identity/_credentials/vscode.py +++ b/sdk/identity/azure-identity/azure/identity/_credentials/vscode.py @@ -2,149 +2,186 @@ # Copyright (c) 
Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ -import abc import os -import sys -from typing import cast, Any, Dict, Optional -import warnings +import json +from typing import Any, Optional +import msal from azure.core.credentials import AccessToken, TokenRequestOptions, AccessTokenInfo from azure.core.exceptions import ClientAuthenticationError + +from .._auth_record import AuthenticationRecord from .._exceptions import CredentialUnavailableError -from .._constants import AzureAuthorityHosts, AZURE_VSCODE_CLIENT_ID, EnvironmentVariables -from .._internal import normalize_authority, validate_tenant_id, within_dac -from .._internal.aad_client import AadClient, AadClientBase -from .._internal.get_token_mixin import GetTokenMixin +from .._constants import AZURE_VSCODE_CLIENT_ID +from .._internal import within_dac + from .._internal.decorators import log_get_token +from .._internal.utils import get_broker_credential, validate_tenant_id -if sys.platform.startswith("win"): - from .._internal.win_vscode_adapter import get_refresh_token, get_user_settings -elif sys.platform.startswith("darwin"): - from .._internal.macos_vscode_adapter import get_refresh_token, get_user_settings -else: - from .._internal.linux_vscode_adapter import get_refresh_token, get_user_settings +MAX_AUTH_RECORD_SIZE = 10 * 1024 # 10KB - more than enough for a small auth record +VSCODE_AUTH_RECORD_PATHS = [ + "~/.azure/ms-azuretools.vscode-azureresourcegroups/authRecord.json", + "~/.Azure/ms-azuretools.vscode-azureresourcegroups/authRecord.json", +] -class _VSCodeCredentialBase(abc.ABC): - def __init__(self, **kwargs: Any) -> None: - warnings.warn( - "This credential is deprecated because the Azure Account extension for Visual Studio Code, which this " - "credential relies on, has been deprecated. See the Azure Account extension deprecation notice here: " - "https://github.com/microsoft/vscode-azure-account/issues/964. 
Consider using other developer credentials " - "such as AzureCliCredential, AzureDeveloperCliCredential, or AzurePowerShellCredential.", - DeprecationWarning, - stacklevel=2, - ) - super(_VSCodeCredentialBase, self).__init__() - - user_settings = get_user_settings() - self._cloud = user_settings.get("azure.cloud", "AzureCloud") - self._refresh_token = None - self._unavailable_reason = "" - - self._client = kwargs.get("_client") - if not self._client: - self._initialize(user_settings, **kwargs) - if not (self._client or self._unavailable_reason): - self._unavailable_reason = "Initialization failed" - - @abc.abstractmethod - def _get_client(self, **kwargs: Any) -> AadClientBase: - pass - - def _get_refresh_token(self) -> str: - if not self._refresh_token: - self._refresh_token = get_refresh_token(self._cloud) - if not self._refresh_token: - message = ( - "Failed to get Azure user details from Visual Studio Code. " - "Currently, the VisualStudioCodeCredential only works with the Azure " - "Account extension version 0.9.11 and earlier. A long-term fix is in " - "progress, see https://github.com/Azure/azure-sdk-for-python/issues/25713" - ) - raise CredentialUnavailableError(message=message) - return self._refresh_token - - def _initialize(self, vscode_user_settings: Dict, **kwargs: Any) -> None: - """Build a client from kwargs merged with VS Code user settings. +def load_vscode_auth_record() -> Optional[AuthenticationRecord]: + """Load the authentication record corresponding to a known location. - The first stable version of this credential defaulted to Public Cloud and the "organizations" - tenant when it failed to read VS Code user settings. That behavior is preserved here. + This will load from ~/.azure/ms-azuretools.vscode-azureresourcegroups/authRecord.json + or ~/.Azure/ms-azuretools.vscode-azureresourcegroups/authRecord.json - :param dict vscode_user_settings: VS Code user settings - """ + :return: The authentication record if it exists, otherwise None. 
+ :rtype: Optional[AuthenticationRecord] + :raises: ValueError if the authentication record is not in the expected format + """ - # Precedence for authority: - # 1) VisualStudioCodeCredential(authority=...) - # 2) $AZURE_AUTHORITY_HOST - # 3) authority matching VS Code's "azure.cloud" setting - # 4) default: Public Cloud - authority = kwargs.pop("authority", None) or os.environ.get(EnvironmentVariables.AZURE_AUTHORITY_HOST) - if not authority: - # the application didn't specify an authority, so we figure it out from VS Code settings - if self._cloud == "AzureCloud": - authority = AzureAuthorityHosts.AZURE_PUBLIC_CLOUD - elif self._cloud == "AzureChinaCloud": - authority = AzureAuthorityHosts.AZURE_CHINA - elif self._cloud == "AzureUSGovernment": - authority = AzureAuthorityHosts.AZURE_GOVERNMENT - else: - # If the value is anything else ("AzureCustomCloud" is the only other known value), - # we need the user to provide the authority because VS Code has no setting for it and - # we can't guess confidently. - self._unavailable_reason = ( - 'VS Code is configured to use a custom cloud. Set keyword argument "authority"' - + ' with the Microsoft Entra endpoint for cloud "{}"'.format(self._cloud) + # Try each possible auth record path + for auth_record_path in VSCODE_AUTH_RECORD_PATHS: + expanded_path = os.path.expanduser(auth_record_path) + if os.path.exists(expanded_path): + file_size = os.path.getsize(expanded_path) + if file_size > MAX_AUTH_RECORD_SIZE: + error_message = ( + "VS Code auth record file is unexpectedly large. " + "Please check the file for corruption or unexpected content." ) - return - - # Precedence for tenant ID: - # 1) VisualStudioCodeCredential(tenant_id=...) 
- # 2) "azure.tenant" in VS Code user settings - # 3) default: organizations - tenant_id = kwargs.pop("tenant_id", None) or vscode_user_settings.get("azure.tenant", "organizations") - validate_tenant_id(tenant_id) - if tenant_id.lower() == "adfs": - self._unavailable_reason = "VisualStudioCodeCredential authentication unavailable. ADFS is not supported." - return - - self._client = self._get_client( - authority=normalize_authority(authority), client_id=AZURE_VSCODE_CLIENT_ID, tenant_id=tenant_id, **kwargs - ) + raise ValueError(error_message) + with open(expanded_path, "r", encoding="utf-8") as f: + deserialized = json.load(f) + + # Validate the authentication record for security and structural integrity + _validate_auth_record_json(deserialized) + + # Deserialize the authentication record + auth_record = AuthenticationRecord( + authority=deserialized["authority"], + client_id=deserialized["clientId"], + home_account_id=deserialized["homeAccountId"], + tenant_id=deserialized["tenantId"], + username=deserialized["username"], + ) + + return auth_record + # No auth record found in any of the expected locations + return None -class VisualStudioCodeCredential(_VSCodeCredentialBase, GetTokenMixin): - """Authenticates as the Azure user signed in to Visual Studio Code via the 'Azure Account' extension. - **Deprecated**: This credential is deprecated because the Azure Account extension for Visual Studio Code, which - this credential relies on, has been deprecated. See the Azure Account extension deprecation notice here: - https://github.com/microsoft/vscode-azure-account/issues/964. Consider using other developer credentials such as - AzureCliCredential, AzureDeveloperCliCredential, or AzurePowerShellCredential. +def _validate_auth_record_json(data: dict) -> None: + """Validate the authentication record. - :keyword str authority: Authority of a Microsoft Entra endpoint, for example "login.microsoftonline.com". 
- This argument is required for a custom cloud and usually unnecessary otherwise. Defaults to the authority - matching the "Azure: Cloud" setting in VS Code's user settings or, when that setting has no value, the - authority for Azure Public Cloud. - :keyword str tenant_id: ID of the tenant the credential should authenticate in. Defaults to the "Azure: Tenant" - setting in VS Code's user settings or, when that setting has no value, the "organizations" tenant, which - supports only Microsoft Entra work or school accounts. + :param dict data: The authentication record data to validate. + :raises ValueError: If the authentication record fails validation checks. + """ + errors = [] + + # Schema Validation - Required Fields + try: + tenant_id = data["tenantId"] + if not tenant_id or not isinstance(tenant_id, str): + errors.append("tenantId must be a non-empty string") + else: + try: + validate_tenant_id(tenant_id) + except ValueError as e: + errors.append(f"tenantId validation failed: {e}") + except KeyError: + errors.append("tenantId field is missing") + + try: + client_id = data["clientId"] + if not client_id or not isinstance(client_id, str): + errors.append("clientId must be a non-empty string") + elif client_id != AZURE_VSCODE_CLIENT_ID: + errors.append( + f"clientId must match expected VS Code Azure Resources extension client ID: {AZURE_VSCODE_CLIENT_ID}" + ) + except KeyError: + errors.append("clientId field is missing") + + try: + username = data["username"] + if not username or not isinstance(username, str): + errors.append("username must be a non-empty string") + except KeyError: + errors.append("username field is missing") + + try: + home_account_id = data["homeAccountId"] + if not home_account_id or not isinstance(home_account_id, str): + errors.append("homeAccountId must be a non-empty string") + except KeyError: + errors.append("homeAccountId field is missing") + + try: + authority = data["authority"] + if not authority or not isinstance(authority, str): + 
errors.append("authority must be a non-empty string") + except KeyError: + errors.append("authority field is missing") + + if errors: + error_message = "Authentication record validation failed: " + "; ".join(errors) + raise ValueError(error_message) + + +class VisualStudioCodeCredential: + """Authenticates as the Azure user signed in to Visual Studio Code via the 'Azure Resources' extension. + + :keyword str tenant_id: A Microsoft Entra tenant ID. Defaults to the tenant specified in the authentication + record file used by the Azure Resources extension. :keyword List[str] additionally_allowed_tenants: Specifies tenants in addition to the specified "tenant_id" for which the credential may acquire tokens. Add the wildcard value "*" to allow the credential to acquire tokens for any tenant the application can access. """ + def __init__(self, **kwargs: Any) -> None: + + self._broker_credential = None + self._unavailable_message = ( + "VisualStudioCodeCredential requires the 'azure-identity-broker' package to be installed. " + "You must also ensure you have the Azure Resources extension installed and have " + "signed in to Azure via Visual Studio Code." + ) + + broker_credential_class = get_broker_credential() + if broker_credential_class: + try: + # Load the authentication record from the VS Code extension + authentication_record = load_vscode_auth_record() + if not authentication_record: + self._unavailable_message = ( + "VisualStudioCodeCredential requires the user to be signed in to Azure via Visual Studio Code. " + "Please ensure you have the Azure Resources extension installed and have signed in." 
+ ) + return + self._broker_credential = broker_credential_class( + client_id=AZURE_VSCODE_CLIENT_ID, + authentication_record=authentication_record, + parent_window_handle=msal.PublicClientApplication.CONSOLE_WINDOW_HANDLE, + use_default_broker_account=True, + **kwargs, + ) + except ValueError as ex: + self._unavailable_message = ( + "Failed to load authentication record from Visual Studio Code: " + f"{ex}. Please ensure you have the Azure Resources extension installed and signed in." + ) + def __enter__(self) -> "VisualStudioCodeCredential": - if self._client: - self._client.__enter__() + if self._broker_credential: + self._broker_credential.__enter__() return self def __exit__(self, *args: Any) -> None: - if self._client: - self._client.__exit__(*args) + if self._broker_credential: + self._broker_credential.__exit__(*args) def close(self) -> None: """Close the credential's transport session.""" - self.__exit__() + if self._broker_credential: + self._broker_credential.close() @log_get_token def get_token( @@ -166,20 +203,15 @@ def get_token( :raises ~azure.identity.CredentialUnavailableError: the credential cannot retrieve user details from Visual Studio Code """ - if self._unavailable_reason: - error_message = ( - self._unavailable_reason + "\n" - "Visit https://aka.ms/azsdk/python/identity/vscodecredential/troubleshoot" - " to troubleshoot this issue." 
- ) - raise CredentialUnavailableError(message=error_message) + if not self._broker_credential: + raise CredentialUnavailableError(message=self._unavailable_message) if within_dac.get(): try: - token = super().get_token(*scopes, claims=claims, tenant_id=tenant_id, **kwargs) + token = self._broker_credential.get_token(*scopes, claims=claims, tenant_id=tenant_id, **kwargs) return token except ClientAuthenticationError as ex: raise CredentialUnavailableError(message=ex.message) from ex - return super().get_token(*scopes, claims=claims, tenant_id=tenant_id, **kwargs) + return self._broker_credential.get_token(*scopes, claims=claims, tenant_id=tenant_id, **kwargs) def get_token_info(self, *scopes: str, options: Optional[TokenRequestOptions] = None) -> AccessTokenInfo: """Request an access token for `scopes` as the user currently signed in to Visual Studio Code. @@ -197,29 +229,12 @@ def get_token_info(self, *scopes: str, options: Optional[TokenRequestOptions] = :raises ~azure.identity.CredentialUnavailableError: the credential cannot retrieve user details from Visual Studio Code. """ - if self._unavailable_reason: - error_message = ( - self._unavailable_reason + "\n" - "Visit https://aka.ms/azsdk/python/identity/vscodecredential/troubleshoot" - " to troubleshoot this issue." 
- ) - raise CredentialUnavailableError(message=error_message) + if not self._broker_credential: + raise CredentialUnavailableError(message=self._unavailable_message) if within_dac.get(): try: - token = super().get_token_info(*scopes, options=options) + token = self._broker_credential.get_token_info(*scopes, options=options) return token except ClientAuthenticationError as ex: raise CredentialUnavailableError(message=ex.message) from ex - return super().get_token_info(*scopes, options=options) - - def _acquire_token_silently(self, *scopes: str, **kwargs: Any) -> Optional[AccessTokenInfo]: - self._client = cast(AadClient, self._client) - return self._client.get_cached_access_token(scopes, **kwargs) - - def _request_token(self, *scopes: str, **kwargs: Any) -> AccessTokenInfo: - refresh_token = self._get_refresh_token() - self._client = cast(AadClient, self._client) - return self._client.obtain_token_by_refresh_token(scopes, refresh_token, **kwargs) - - def _get_client(self, **kwargs: Any) -> AadClient: - return AadClient(**kwargs) + return self._broker_credential.get_token_info(*scopes, options=options) diff --git a/sdk/identity/azure-identity/azure/identity/_internal/linux_vscode_adapter.py b/sdk/identity/azure-identity/azure/identity/_internal/linux_vscode_adapter.py deleted file mode 100644 index d0be19f13133..000000000000 --- a/sdk/identity/azure-identity/azure/identity/_internal/linux_vscode_adapter.py +++ /dev/null @@ -1,100 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. 
-# ------------------------------------ -import os -import json -import logging -import ctypes as ct -from .._constants import VSCODE_CREDENTIALS_SECTION - -_LOGGER = logging.getLogger(__name__) - - -def _c_str(string): - return ct.c_char_p(string.encode("utf-8")) - - -class _SECRET_SCHEMA_ATTRIBUTE(ct.Structure): - _fields_ = [ - ("name", ct.c_char_p), - ("type", ct.c_uint), - ] - - -class _SECRET_SCHEMA(ct.Structure): - _fields_ = [ - ("name", ct.c_char_p), - ("flags", ct.c_uint), - ("attributes", _SECRET_SCHEMA_ATTRIBUTE * 2), - ] - - -_PSECRET_SCHEMA = ct.POINTER(_SECRET_SCHEMA) - - -try: - _libsecret = ct.cdll.LoadLibrary("libsecret-1.so.0") - _libsecret.secret_password_lookup_sync.argtypes = [ - ct.c_void_p, - ct.c_void_p, - ct.c_void_p, - ct.c_char_p, - ct.c_char_p, - ct.c_char_p, - ct.c_char_p, - ct.c_void_p, - ] - _libsecret.secret_password_lookup_sync.restype = ct.c_char_p - _libsecret.secret_password_free.argtypes = [ct.c_char_p] -except OSError: - _libsecret = None # type: ignore - - -def _get_refresh_token(service_name, account_name): - if not _libsecret: - return None - - err = ct.c_int() - attributes = [_SECRET_SCHEMA_ATTRIBUTE(_c_str("service"), 0), _SECRET_SCHEMA_ATTRIBUTE(_c_str("account"), 0)] - pattributes = (_SECRET_SCHEMA_ATTRIBUTE * 2)(*attributes) - schema = _SECRET_SCHEMA() - pschema = _PSECRET_SCHEMA(schema) - ct.memset(pschema, 0, ct.sizeof(schema)) - schema.name = _c_str("org.freedesktop.Secret.Generic") # pylint: disable=attribute-defined-outside-init - schema.flags = 2 # pylint: disable=attribute-defined-outside-init - schema.attributes = pattributes # pylint: disable=attribute-defined-outside-init - p_str = _libsecret.secret_password_lookup_sync( - pschema, - None, - ct.byref(err), - _c_str("service"), - _c_str(service_name), - _c_str("account"), - _c_str(account_name), - None, - ) - if err.value == 0 and p_str: - return p_str.decode("utf-8") - - return None - - -def get_user_settings(): - try: - path = 
os.path.join(os.environ["HOME"], ".config", "Code", "User", "settings.json") - with open(path, encoding="utf-8") as file: - return json.load(file) - except Exception as ex: # pylint:disable=broad-except - _LOGGER.debug('Exception reading VS Code user settings: "%s"', ex, exc_info=_LOGGER.isEnabledFor(logging.DEBUG)) - return {} - - -def get_refresh_token(cloud_name): - try: - return _get_refresh_token(VSCODE_CREDENTIALS_SECTION, cloud_name) - except Exception as ex: # pylint:disable=broad-except - _LOGGER.debug( - 'Exception retrieving VS Code credentials: "%s"', ex, exc_info=_LOGGER.isEnabledFor(logging.DEBUG) - ) - return None diff --git a/sdk/identity/azure-identity/azure/identity/_internal/macos_vscode_adapter.py b/sdk/identity/azure-identity/azure/identity/_internal/macos_vscode_adapter.py deleted file mode 100644 index 96643cd10d5e..000000000000 --- a/sdk/identity/azure-identity/azure/identity/_internal/macos_vscode_adapter.py +++ /dev/null @@ -1,34 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. 
-# ------------------------------------ -import os -import json -import logging -from msal_extensions.osx import Keychain, KeychainError -from .._constants import VSCODE_CREDENTIALS_SECTION - -_LOGGER = logging.getLogger(__name__) - - -def get_user_settings(): - try: - path = os.path.join(os.environ["HOME"], "Library", "Application Support", "Code", "User", "settings.json") - with open(path, encoding="utf-8") as file: - return json.load(file) - except Exception as ex: # pylint:disable=broad-except - _LOGGER.debug('Exception reading VS Code user settings: "%s"', ex, exc_info=_LOGGER.isEnabledFor(logging.DEBUG)) - return {} - - -def get_refresh_token(cloud_name): - try: - key_chain = Keychain() - return key_chain.get_generic_password(VSCODE_CREDENTIALS_SECTION, cloud_name) - except KeychainError: - return None - except Exception as ex: # pylint:disable=broad-except - _LOGGER.debug( - 'Exception retrieving VS Code credentials: "%s"', ex, exc_info=_LOGGER.isEnabledFor(logging.DEBUG) - ) - return None diff --git a/sdk/identity/azure-identity/azure/identity/_internal/utils.py b/sdk/identity/azure-identity/azure/identity/_internal/utils.py index b18393965c05..2303906ec4eb 100644 --- a/sdk/identity/azure-identity/azure/identity/_internal/utils.py +++ b/sdk/identity/azure-identity/azure/identity/_internal/utils.py @@ -195,3 +195,17 @@ def process_credential_exclusions(credential_config: dict, exclude_flags: dict, exclude_flags[cred_key] = user_value return exclude_flags + + +def get_broker_credential() -> Optional[type]: + """Return the InteractiveBrowserBrokerCredential class if available, otherwise None. 
+ + :return: InteractiveBrowserBrokerCredential class or None + :rtype: Optional[type] + """ + try: + from azure.identity.broker import InteractiveBrowserBrokerCredential + + return InteractiveBrowserBrokerCredential + except ImportError: + return None diff --git a/sdk/identity/azure-identity/azure/identity/_internal/win_vscode_adapter.py b/sdk/identity/azure-identity/azure/identity/_internal/win_vscode_adapter.py deleted file mode 100644 index 2cec7ba49e97..000000000000 --- a/sdk/identity/azure-identity/azure/identity/_internal/win_vscode_adapter.py +++ /dev/null @@ -1,77 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -import os -import json -import logging -import ctypes as ct -from .._constants import VSCODE_CREDENTIALS_SECTION - -try: - import ctypes.wintypes as wt -except (IOError, ValueError): - pass - -_LOGGER = logging.getLogger(__name__) - -SUPPORTED_CREDKEYS = set(("Type", "TargetName", "Persist", "UserName", "Comment", "CredentialBlob")) - -_PBYTE = ct.POINTER(ct.c_byte) - - -class _CREDENTIAL(ct.Structure): - _fields_ = [ - ("Flags", wt.DWORD), - ("Type", wt.DWORD), - ("TargetName", ct.c_wchar_p), - ("Comment", ct.c_wchar_p), - ("LastWritten", wt.FILETIME), - ("CredentialBlobSize", wt.DWORD), - ("CredentialBlob", _PBYTE), - ("Persist", wt.DWORD), - ("AttributeCount", wt.DWORD), - ("Attributes", ct.c_void_p), - ("TargetAlias", ct.c_wchar_p), - ("UserName", ct.c_wchar_p), - ] - - -_PCREDENTIAL = ct.POINTER(_CREDENTIAL) - -_advapi = ct.WinDLL("advapi32") # type: ignore -_advapi.CredReadW.argtypes = [wt.LPCWSTR, wt.DWORD, wt.DWORD, ct.POINTER(_PCREDENTIAL)] -_advapi.CredReadW.restype = wt.BOOL -_advapi.CredFree.argtypes = [_PCREDENTIAL] - - -def _read_credential(service_name, account_name): - target = "{}/{}".format(service_name, account_name) - cred_ptr = _PCREDENTIAL() - if _advapi.CredReadW(target, 1, 0, ct.byref(cred_ptr)): - cred_blob = 
cred_ptr.contents.CredentialBlob - cred_blob_size = cred_ptr.contents.CredentialBlobSize - cred = "".join(map(chr, cred_blob[:cred_blob_size])) - _advapi.CredFree(cred_ptr) - return cred - return None - - -def get_user_settings(): - try: - path = os.path.join(os.environ["APPDATA"], "Code", "User", "settings.json") - with open(path, encoding="utf-8") as file: - return json.load(file) - except Exception as ex: # pylint:disable=broad-except - _LOGGER.debug('Exception reading VS Code user settings: "%s"', ex, exc_info=_LOGGER.isEnabledFor(logging.DEBUG)) - return {} - - -def get_refresh_token(cloud_name): - try: - return _read_credential(VSCODE_CREDENTIALS_SECTION, cloud_name) - except Exception as ex: # pylint: disable=broad-except - _LOGGER.debug( - 'Exception retrieving VS Code credentials: "%s"', ex, exc_info=_LOGGER.isEnabledFor(logging.DEBUG) - ) - return None diff --git a/sdk/identity/azure-identity/azure/identity/aio/_credentials/default.py b/sdk/identity/azure-identity/azure/identity/aio/_credentials/default.py index 2445db573ed0..c07e6b466757 100644 --- a/sdk/identity/azure-identity/azure/identity/aio/_credentials/default.py +++ b/sdk/identity/azure-identity/azure/identity/aio/_credentials/default.py @@ -73,9 +73,8 @@ class DefaultAzureCredential(ChainedTokenCredential): :keyword str shared_cache_tenant_id: Preferred tenant for :class:`~azure.identity.aio.SharedTokenCacheCredential`. Defaults to the value of environment variable AZURE_TENANT_ID, if any. :keyword str visual_studio_code_tenant_id: Tenant ID to use when authenticating with - :class:`~azure.identity.aio.VisualStudioCodeCredential`. Defaults to the "Azure: Tenant" setting in VS Code's - user settings or, when that setting has no value, the "organizations" tenant, which supports only Azure Active - Directory work or school accounts. + :class:`~azure.identity.VisualStudioCodeCredential`. Defaults to the tenant specified in the authentication + record file used by the Azure Resources extension. 
:keyword int process_timeout: The timeout in seconds to use for developer credentials that run subprocesses (e.g. AzureCliCredential, AzurePowerShellCredential). Defaults to **10** seconds. @@ -94,18 +93,10 @@ def __init__(self, **kwargs: Any) -> None: # pylint: disable=too-many-statement raise TypeError("'tenant_id' is not supported in DefaultAzureCredential.") authority = kwargs.pop("authority", None) - - vscode_tenant_id = kwargs.pop( - "visual_studio_code_tenant_id", os.environ.get(EnvironmentVariables.AZURE_TENANT_ID) - ) - vscode_args = dict(kwargs) - if authority: - vscode_args["authority"] = authority - if vscode_tenant_id: - vscode_args["tenant_id"] = vscode_tenant_id - authority = normalize_authority(authority) if authority else get_default_authority() + vscode_tenant_id = kwargs.pop("visual_studio_code_tenant_id", None) + shared_cache_username = kwargs.pop("shared_cache_username", os.environ.get(EnvironmentVariables.AZURE_USERNAME)) shared_cache_tenant_id = kwargs.pop( "shared_cache_tenant_id", os.environ.get(EnvironmentVariables.AZURE_TENANT_ID) @@ -119,10 +110,6 @@ def __init__(self, **kwargs: Any) -> None: # pylint: disable=too-many-statement "workload_identity_tenant_id", os.environ.get(EnvironmentVariables.AZURE_TENANT_ID) ) - vscode_tenant_id = kwargs.pop( - "visual_studio_code_tenant_id", os.environ.get(EnvironmentVariables.AZURE_TENANT_ID) - ) - process_timeout = kwargs.pop("process_timeout", 10) # Define credential configuration mapping (async version) @@ -148,7 +135,7 @@ def __init__(self, **kwargs: Any) -> None: # pylint: disable=too-many-statement }, "visual_studio_code": { "exclude_param": "exclude_visual_studio_code_credential", - "default_exclude": True, + "default_exclude": False, }, "cli": { "exclude_param": "exclude_cli_credential", @@ -221,7 +208,7 @@ def __init__(self, **kwargs: Any) -> None: # pylint: disable=too-many-statement except Exception as ex: # pylint:disable=broad-except _LOGGER.info("Shared token cache is unavailable: 
'%s'", ex) if not exclude_visual_studio_code_credential: - credentials.append(VisualStudioCodeCredential(**vscode_args)) + credentials.append(VisualStudioCodeCredential(tenant_id=vscode_tenant_id)) if not exclude_cli_credential: credentials.append(AzureCliCredential(process_timeout=process_timeout)) if not exclude_powershell_credential: diff --git a/sdk/identity/azure-identity/azure/identity/aio/_credentials/vscode.py b/sdk/identity/azure-identity/azure/identity/aio/_credentials/vscode.py index fa5aab6a29a2..8384d5b03468 100644 --- a/sdk/identity/azure-identity/azure/identity/aio/_credentials/vscode.py +++ b/sdk/identity/azure-identity/azure/identity/aio/_credentials/vscode.py @@ -2,49 +2,34 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ -from typing import cast, Optional, Any +from typing import Optional, Any from azure.core.credentials import AccessToken, AccessTokenInfo, TokenRequestOptions -from azure.core.exceptions import ClientAuthenticationError -from ..._exceptions import CredentialUnavailableError from .._internal import AsyncContextManager -from .._internal.aad_client import AadClient -from .._internal.get_token_mixin import GetTokenMixin from .._internal.decorators import log_get_token_async -from ..._credentials.vscode import _VSCodeCredentialBase -from ..._internal import within_dac +from ..._credentials.vscode import VisualStudioCodeCredential as SyncVSCodeCredential -class VisualStudioCodeCredential(_VSCodeCredentialBase, AsyncContextManager, GetTokenMixin): - """Authenticates as the Azure user signed in to Visual Studio Code via the 'Azure Account' extension. +class VisualStudioCodeCredential(AsyncContextManager): + """Authenticates as the Azure user signed in to Visual Studio Code via the 'Azure Resources' extension. - **Deprecated**: This credential is deprecated because the Azure Account extension for Visual Studio Code, which - this credential relies on, has been deprecated. 
See the Azure Account extension deprecation notice here: - https://github.com/microsoft/vscode-azure-account/issues/964. Consider using other developer credentials such as - AzureCliCredential, AzureDeveloperCliCredential, or AzurePowerShellCredential. - - :keyword str authority: Authority of a Microsoft Entra endpoint, for example "login.microsoftonline.com". - This argument is required for a custom cloud and usually unnecessary otherwise. Defaults to the authority - matching the "Azure: Cloud" setting in VS Code's user settings or, when that setting has no value, the - authority for Azure Public Cloud. - :keyword str tenant_id: ID of the tenant the credential should authenticate in. Defaults to the "Azure: Tenant" - setting in VS Code's user settings or, when that setting has no value, the "organizations" tenant, which - supports only Microsoft Entra work or school accounts. + :keyword str tenant_id: A Microsoft Entra tenant ID. Defaults to the tenant specified in the authentication + record file used by the Azure Resources extension. :keyword List[str] additionally_allowed_tenants: Specifies tenants in addition to the specified "tenant_id" for which the credential may acquire tokens. Add the wildcard value "*" to allow the credential to acquire tokens for any tenant the application can access. 
""" + def __init__(self, **kwargs: Any) -> None: + self._sync_credential = SyncVSCodeCredential(**kwargs) + async def __aenter__(self) -> "VisualStudioCodeCredential": - if self._client: - await self._client.__aenter__() + self._sync_credential.__enter__() return self async def close(self) -> None: """Close the credential's transport session.""" - - if self._client: - await self._client.__aexit__() + self._sync_credential.close() @log_get_token_async async def get_token( @@ -66,22 +51,7 @@ async def get_token( :raises ~azure.identity.CredentialUnavailableError: the credential cannot retrieve user details from Visual Studio Code """ - if self._unavailable_reason: - error_message = ( - self._unavailable_reason + "\n" - "Visit https://aka.ms/azsdk/python/identity/vscodecredential/troubleshoot" - " to troubleshoot this issue." - ) - raise CredentialUnavailableError(message=error_message) - if not self._client: - raise CredentialUnavailableError("Initialization failed") - if within_dac.get(): - try: - token = await super().get_token(*scopes, claims=claims, tenant_id=tenant_id, **kwargs) - return token - except ClientAuthenticationError as ex: - raise CredentialUnavailableError(message=ex.message) from ex - return await super().get_token(*scopes, claims=claims, tenant_id=tenant_id, **kwargs) + return self._sync_credential.get_token(*scopes, claims=claims, tenant_id=tenant_id, **kwargs) async def get_token_info(self, *scopes: str, options: Optional[TokenRequestOptions] = None) -> AccessTokenInfo: """Request an access token for `scopes` as the user currently signed in to Visual Studio Code. @@ -99,29 +69,4 @@ async def get_token_info(self, *scopes: str, options: Optional[TokenRequestOptio :raises ~azure.identity.CredentialUnavailableError: the credential cannot retrieve user details from Visual Studio Code. 
""" - if self._unavailable_reason: - error_message = ( - self._unavailable_reason + "\n" - "Visit https://aka.ms/azsdk/python/identity/vscodecredential/troubleshoot" - " to troubleshoot this issue." - ) - raise CredentialUnavailableError(message=error_message) - if within_dac.get(): - try: - token = await super().get_token_info(*scopes, options=options) - return token - except ClientAuthenticationError as ex: - raise CredentialUnavailableError(message=ex.message) from ex - return await super().get_token_info(*scopes, options=options) - - async def _acquire_token_silently(self, *scopes: str, **kwargs: Any) -> Optional[AccessTokenInfo]: - self._client = cast(AadClient, self._client) - return self._client.get_cached_access_token(scopes, **kwargs) - - async def _request_token(self, *scopes: str, **kwargs: Any) -> AccessTokenInfo: - refresh_token = self._get_refresh_token() - self._client = cast(AadClient, self._client) - return await self._client.obtain_token_by_refresh_token(scopes, refresh_token, **kwargs) - - def _get_client(self, **kwargs: Any) -> AadClient: - return AadClient(**kwargs) + return self._sync_credential.get_token_info(*scopes, options=options) diff --git a/sdk/identity/azure-identity/dev_requirements.txt b/sdk/identity/azure-identity/dev_requirements.txt index ac8662e232da..4ae29e0b192d 100644 --- a/sdk/identity/azure-identity/dev_requirements.txt +++ b/sdk/identity/azure-identity/dev_requirements.txt @@ -1,4 +1,5 @@ ../../core/azure-core +azure-identity-broker>=1.1.0 ; platform.python_implementation != "PyPy" aiohttp>=3.0 typing_extensions>=3.7.2 -e ../../../tools/azure-sdk-tools
diff --git a/sdk/identity/azure-identity/tests/test_context_manager.py b/sdk/identity/azure-identity/tests/test_context_manager.py index fed0035d07b7..354ba1945bbe 100644 --- a/sdk/identity/azure-identity/tests/test_context_manager.py +++ b/sdk/identity/azure-identity/tests/test_context_manager.py @@ -24,7 +24,6 @@ import pytest from test_certificate_credential import PEM_CERT_PATH -from test_vscode_credential import GET_USER_SETTINGS class CredentialFixture: @@ -62,7 +61,6 @@ def get_credential(self, **kwargs): {kwarg: "..." for kwarg in ("tenant_id", "client_id", "client_secret", "user_assertion")}, ), CredentialFixture(UsernamePasswordCredential, {"client_id": "...", "username": "...", "password": "..."}), - CredentialFixture(VisualStudioCodeCredential, ctor_patch_factory=lambda: patch(GET_USER_SETTINGS, lambda: {})), ) all_fixtures = pytest.mark.parametrize("fixture", FIXTURES, ids=lambda fixture: fixture.cls.__name__) diff --git a/sdk/identity/azure-identity/tests/test_vscode_credential.py b/sdk/identity/azure-identity/tests/test_vscode_credential.py index 4fb643d3ec92..f23086d82223 100644 --- a/sdk/identity/azure-identity/tests/test_vscode_credential.py +++ b/sdk/identity/azure-identity/tests/test_vscode_credential.py @@ -2,366 +2,188 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
# ------------------------------------ -import sys -import time -from unittest import mock -from urllib.parse import urlparse +import json +import os +import platform +import tempfile +from unittest.mock import patch -from azure.core.credentials import AccessToken -from azure.core.exceptions import ClientAuthenticationError -from azure.identity import AzureAuthorityHosts, CredentialUnavailableError, VisualStudioCodeCredential -from azure.core.pipeline.policies import SansIOHTTPPolicy -from azure.identity._constants import EnvironmentVariables -from azure.identity._internal.user_agent import USER_AGENT import pytest -from helpers import build_aad_response, mock_response, Request, validating_transport - - -GET_REFRESH_TOKEN = VisualStudioCodeCredential.__module__ + ".get_refresh_token" -GET_USER_SETTINGS = VisualStudioCodeCredential.__module__ + ".get_user_settings" - - -def get_credential(user_settings=None, **kwargs): - # defaulting to empty user settings ensures tests work when real user settings are available - with mock.patch(GET_USER_SETTINGS, lambda: user_settings or {}): - return VisualStudioCodeCredential(**kwargs) - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_tenant_id(): - def get_transport(expected_tenant): - return validating_transport( - requests=[ - Request(base_url="https://{}/{}".format(AzureAuthorityHosts.AZURE_PUBLIC_CLOUD, expected_tenant)) - ], - responses=[mock_response(json_payload=build_aad_response(access_token="**"))], - ) - - # credential should default to "organizations" tenant - transport = get_transport("organizations") - credential = get_credential(transport=transport) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - credential.get_token("scope") - assert transport.send.call_count == 1 - - # ... 
unless VS Code has a tenant configured - user_settings = {"azure.tenant": "vs-code-setting"} - transport = get_transport(user_settings["azure.tenant"]) - credential = get_credential(user_settings, transport=transport) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - credential.get_token("scope") - assert transport.send.call_count == 1 - - # ... and a tenant specified by the application prevails over VS Code configuration - transport = get_transport("from-application") - credential = get_credential(user_settings, tenant_id="from-application", transport=transport) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - credential.get_token("scope") - assert transport.send.call_count == 1 - +from azure.core.exceptions import ClientAuthenticationError +from azure.identity import AuthenticationRecord, CredentialUnavailableError +from azure.identity._constants import AZURE_VSCODE_CLIENT_ID, KnownAuthorities +from azure.identity._credentials.vscode import VisualStudioCodeCredential, load_vscode_auth_record -@pytest.mark.skip(reason="VS code credential is disabled") -def test_tenant_id_validation(): - """The credential should raise ValueError when given an invalid tenant_id""" - - valid_ids = {"c878a2ab-8ef4-413b-83a0-199afb84d7fb", "contoso.onmicrosoft.com", "organizations", "common"} - for tenant in valid_ids: - get_credential(tenant_id=tenant) - - invalid_ids = {"my tenant", "my_tenant", "/", "\\", '"my-tenant"', "'my-tenant'"} - for tenant in invalid_ids: - with pytest.raises(ValueError): - get_credential(tenant_id=tenant) - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_no_scopes(): - """The credential should raise ValueError when get_token is called with no scopes""" - - credential = get_credential() - with pytest.raises(ValueError): - credential.get_token() - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_policies_configurable(): - policy = mock.Mock(spec_set=SansIOHTTPPolicy, on_request=mock.Mock()) - - def 
send(*_, **kwargs): - # ensure the `claims` and `tenant_id` keywords from credential's `get_token` method don't make it to transport - assert "claims" not in kwargs - assert "tenant_id" not in kwargs - return mock_response(json_payload=build_aad_response(access_token="**")) - - credential = get_credential(policies=[policy], transport=mock.Mock(send=send)) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - credential.get_token("scope") - - assert policy.on_request.called - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_user_agent(): - transport = validating_transport( - requests=[Request(required_headers={"User-Agent": USER_AGENT})], - responses=[mock_response(json_payload=build_aad_response(access_token="**"))], - ) - credential = get_credential(transport=transport) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - credential.get_token("scope") - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.parametrize("authority", ("localhost", "https://localhost")) -def test_request_url(authority): - """the credential should accept an authority, with or without scheme, as an argument or environment variable""" - - tenant_id = "expected-tenant" - access_token = "***" - parsed_authority = urlparse(authority) - expected_netloc = parsed_authority.netloc or authority # "localhost" parses to netloc "", path "localhost" - expected_refresh_token = "refresh-token" - - def mock_send(request, **kwargs): - actual = urlparse(request.url) - assert actual.scheme == "https" - assert actual.netloc == expected_netloc - assert actual.path.startswith("/" + tenant_id) - assert request.body["refresh_token"] == expected_refresh_token - return mock_response(json_payload={"token_type": "Bearer", "expires_in": 42, "access_token": access_token}) - - credential = get_credential(tenant_id=tenant_id, transport=mock.Mock(send=mock_send), authority=authority) - with mock.patch(GET_REFRESH_TOKEN, return_value=expected_refresh_token): - token = 
credential.get_token("scope") - assert token.token == access_token - - # authority can be configured via environment variable - with mock.patch.dict("os.environ", {EnvironmentVariables.AZURE_AUTHORITY_HOST: authority}): - credential = get_credential(tenant_id=tenant_id, transport=mock.Mock(send=mock_send)) - with mock.patch(GET_REFRESH_TOKEN, return_value=expected_refresh_token): - credential.get_token("scope") - assert token.token == access_token - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_credential_unavailable_error(): - credential = get_credential() - with mock.patch(GET_REFRESH_TOKEN, return_value=None): - with pytest.raises(CredentialUnavailableError): - credential.get_token("scope") - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_redeem_token(): - expected_token = AccessToken("token", 42) - expected_value = "value" - - mock_client = mock.Mock(spec=object) - mock_client.obtain_token_by_refresh_token = mock.Mock(return_value=expected_token) - mock_client.get_cached_access_token = mock.Mock(return_value=None) - - with mock.patch(GET_REFRESH_TOKEN, return_value=expected_value): - credential = get_credential(_client=mock_client) - token = credential.get_token("scope") - assert token is expected_token - mock_client.obtain_token_by_refresh_token.assert_called_with( - ("scope",), expected_value, claims=None, tenant_id=None - ) - assert mock_client.obtain_token_by_refresh_token.call_count == 1 - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_cache_refresh_token(): - expected_token = AccessToken("token", 42) - - mock_client = mock.Mock(spec=object) - mock_client.obtain_token_by_refresh_token = mock.Mock(return_value=expected_token) - mock_client.get_cached_access_token = mock.Mock(return_value=None) - mock_get_credentials = mock.Mock(return_value="VALUE") - - with mock.patch(GET_REFRESH_TOKEN, mock_get_credentials): - credential = get_credential(_client=mock_client) - token = 
credential.get_token("scope") - assert token is expected_token - assert mock_get_credentials.call_count == 1 - token = credential.get_token("scope") - assert token is expected_token - assert mock_get_credentials.call_count == 1 - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_no_obtain_token_if_cached(): - expected_token = AccessToken("token", time.time() + 3600) - - mock_client = mock.Mock( - obtain_token_by_refresh_token=mock.Mock(return_value=expected_token), - get_cached_access_token=mock.Mock(return_value=expected_token), - ) - - credential = get_credential(_client=mock_client) - with mock.patch( - GET_REFRESH_TOKEN, - mock.Mock(side_effect=Exception("credential should not acquire a new token")), - ): - token = credential.get_token("scope") - - assert mock_client.obtain_token_by_refresh_token.call_count == 0 - assert token.token == expected_token.token - assert token.expires_on == expected_token.expires_on - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_native_adapter(): - """Exercise the native adapter for the current OS""" - - if sys.platform.startswith("darwin"): - from azure.identity._internal.macos_vscode_adapter import get_refresh_token - elif sys.platform.startswith("linux"): - from azure.identity._internal.linux_vscode_adapter import get_refresh_token - elif sys.platform.startswith("win"): - from azure.identity._internal.win_vscode_adapter import get_refresh_token - else: - pytest.skip('unsupported platform "{}"'.format(sys.platform)) - - # the return value (None in CI, possibly something else on a dev machine) is irrelevant - # because the goal is simply to expose a native interop problem like a segfault - get_refresh_token("AzureCloud") - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_adfs(): - """The credential should raise CredentialUnavailableError when configured for ADFS""" - - credential = get_credential(tenant_id="adfs") - with pytest.raises(CredentialUnavailableError) 
as ex: - credential.get_token("scope") - assert "adfs" in ex.value.message.lower() - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_custom_cloud_no_authority(): - """The credential is unavailable when VS Code is configured to use a custom cloud with no known authority""" - - cloud_name = "AzureCustomCloud" - credential = get_credential({"azure.cloud": cloud_name}) - with pytest.raises(CredentialUnavailableError, match="authority.*" + cloud_name): - credential.get_token("scope") - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.parametrize( - "cloud,authority", - ( - ("AzureCloud", AzureAuthorityHosts.AZURE_PUBLIC_CLOUD), - ("AzureChinaCloud", AzureAuthorityHosts.AZURE_CHINA), - ("AzureUSGovernment", AzureAuthorityHosts.AZURE_GOVERNMENT), - ), +# Skip all tests in this module when running on PyPy +pytestmark = pytest.mark.skipif( + platform.python_implementation() == "PyPy", reason="Broker tests are not supported on PyPy" ) -def test_reads_cloud_settings(cloud, authority): - """the credential should read authority and tenant from VS Code settings when an application doesn't specify them""" - - expected_tenant = "tenant-id" - user_settings = {"azure.cloud": cloud, "azure.tenant": expected_tenant} - - transport = validating_transport( - requests=[Request(base_url="https://{}/{}".format(authority, expected_tenant))], - responses=[mock_response(json_payload=build_aad_response(access_token="**"))], - ) - - credential = get_credential(user_settings, transport=transport) - - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - credential.get_token("scope") - - assert transport.send.call_count == 1 - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_no_user_settings(): - """the credential should default to Public Cloud and "organizations" tenant when it can't read VS Code settings""" - - transport = validating_transport( - 
requests=[Request(base_url="https://{}/{}".format(AzureAuthorityHosts.AZURE_PUBLIC_CLOUD, "organizations"))], - responses=[mock_response(json_payload=build_aad_response(access_token="**"))], - ) - - credential = get_credential(transport=transport) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - credential.get_token("scope") - - assert transport.send.call_count == 1 - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_multitenant_authentication(): - first_tenant = "first-tenant" - first_token = "***" - second_tenant = "second-tenant" - second_token = first_token * 2 - - def send(request, **kwargs): - # ensure the `claims` and `tenant_id` keywords from credential's `get_token` method don't make it to transport - assert "claims" not in kwargs - assert "tenant_id" not in kwargs - parsed = urlparse(request.url) - tenant = parsed.path.split("/")[1] - assert tenant in (first_tenant, second_tenant), 'unexpected tenant "{}"'.format(tenant) - token = first_token if tenant == first_tenant else second_token - return mock_response(json_payload=build_aad_response(access_token=token)) - - credential = get_credential( - tenant_id=first_tenant, transport=mock.Mock(send=send), additionally_allowed_tenants=["*"] - ) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - token = credential.get_token("scope") - assert token.token == first_token - - token = credential.get_token("scope", tenant_id=first_tenant) - assert token.token == first_token - - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - token = credential.get_token("scope", tenant_id=second_tenant) - assert token.token == second_token - - # should still default to the first tenant - token = credential.get_token("scope") - assert token.token == first_token - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_multitenant_authentication_not_allowed(): - expected_tenant = "expected-tenant" - expected_token = "***" - - def send(request, **kwargs): - # ensure the `claims` and 
`tenant_id` keywords from credential's `get_token` method don't make it to transport - assert "claims" not in kwargs - assert "tenant_id" not in kwargs - parsed = urlparse(request.url) - tenant = parsed.path.split("/")[1] - token = expected_token if tenant == expected_tenant else expected_token * 2 - return mock_response(json_payload=build_aad_response(access_token=token)) - - credential = get_credential( - tenant_id=expected_tenant, transport=mock.Mock(send=send), additionally_allowed_tenants=["*"] - ) - - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - token = credential.get_token("scope") - assert token.token == expected_token - - token = credential.get_token("scope", tenant_id=expected_tenant) - assert token.token == expected_token - - token = credential.get_token("scope", tenant_id="un" + expected_tenant) - assert token.token == expected_token * 2 - - with mock.patch.dict("os.environ", {EnvironmentVariables.AZURE_IDENTITY_DISABLE_MULTITENANTAUTH: "true"}): - token = credential.get_token("scope", tenant_id="un" + expected_tenant) - assert token.token == expected_token -def test_deprecation_warning(): - with pytest.deprecated_call(): - get_credential() +class TestVisualStudioCodeCredential: + """Test cases for VisualStudioCodeCredential""" + + def test_get_token_info(self): + """Test getting a token from the credential.""" + valid_data = { + "tenantId": "12345678-1234-1234-1234-123456789012", + "clientId": AZURE_VSCODE_CLIENT_ID, + "username": "user@example.com", + "homeAccountId": "user.tenant", + "authority": KnownAuthorities.AZURE_PUBLIC_CLOUD, + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as tmp_file: + json.dump(valid_data, tmp_file) + tmp_file.flush() + + try: + with patch("os.path.expanduser", return_value=tmp_file.name): + with patch("msal.PublicClientApplication"): + with patch("msal.PublicClientApplication.acquire_token_interactive"): + credential = VisualStudioCodeCredential() + with 
pytest.raises(ClientAuthenticationError): + credential.get_token_info("https://management.azure.com/.default") + finally: + os.unlink(tmp_file.name) + + def test_invalid_auth_record(self): + """Test that an error is raised if the auth record is nonexistent/invalid.""" + + # Test with a nonexistent file + with patch("os.path.expanduser", return_value="nonexistent_file.json"): + with pytest.raises(CredentialUnavailableError): + VisualStudioCodeCredential().get_token_info("https://management.azure.com/.default") + + # Test with invalid data (incorrect client ID) + invalid_data = { + "tenantId": "12345678-1234-1234-1234-123456789012", + "clientId": "12345-123456", + "username": "user@example.com", + "homeAccountId": "user.tenant", + "authority": KnownAuthorities.AZURE_PUBLIC_CLOUD, + } + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as tmp_file: + json.dump(invalid_data, tmp_file) + tmp_file.flush() + + try: + with patch("os.path.expanduser", return_value=tmp_file.name): + with pytest.raises(CredentialUnavailableError): + VisualStudioCodeCredential().get_token_info("https://management.azure.com/.default") + finally: + os.unlink(tmp_file.name) + + def test_broker_credential_requirements_not_installed(self): + """Test that the credential w""" + + with patch.dict("sys.modules", {"azure.identity.broker": None}): + with patch("azure.identity.broker.InteractiveBrowserBrokerCredential") as mock_credential: + with pytest.raises(CredentialUnavailableError): + # This should raise an error because the broker requirements are not installed. 
+ VisualStudioCodeCredential().get_token_info("https://management.azure.com/.default") + + assert mock_credential.call_count == 0, "InteractiveBrowserBrokerCredential should not be instantiated" + + +class TestLoadVSCodeAuthRecord: + """Test cases for loading VS Code authentication records.""" + + def test_load_nonexistent_file(self): + """Test loading returns None when auth record file doesn't exist.""" + with patch("os.path.exists", return_value=False): + result = load_vscode_auth_record() + assert result is None + + def test_load_valid_record(self): + """Test loading a valid authentication record.""" + valid_data = { + "tenantId": "12345678-1234-1234-1234-123456789012", + "clientId": AZURE_VSCODE_CLIENT_ID, + "username": "user@example.com", + "homeAccountId": "user.tenant", + "authority": KnownAuthorities.AZURE_PUBLIC_CLOUD, + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as tmp_file: + json.dump(valid_data, tmp_file) + tmp_file.flush() + + try: + with patch("os.path.expanduser", return_value=tmp_file.name): + with patch("os.path.exists", return_value=True): + result = load_vscode_auth_record() + + assert result is not None + assert isinstance(result, AuthenticationRecord) + assert result.tenant_id == valid_data["tenantId"] + assert result.client_id == valid_data["clientId"] + assert result.username == valid_data["username"] + assert result.home_account_id == valid_data["homeAccountId"] + assert result.authority == valid_data["authority"] + finally: + os.unlink(tmp_file.name) + + def test_load_malformed_json(self): + """Test loading fails with malformed JSON.""" + malformed_json = '{"tenantId": "test", "clientId": ' # incomplete JSON + + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as tmp_file: + tmp_file.write(malformed_json) + tmp_file.flush() + + try: + with patch("os.path.expanduser", return_value=tmp_file.name): + with patch("os.path.exists", return_value=True): + with pytest.raises(ValueError): + 
load_vscode_auth_record() + finally: + os.unlink(tmp_file.name) + + def test_load_invalid_record(self): + """Test loading fails with invalid authentication record data.""" + invalid_data = { + "tenantId": "12345678-1234-1234-1234-123456789012", + "clientId": "wrong-client-id", # Invalid client ID + "username": "user@example.com", + "homeAccountId": "user.tenant", + "authority": KnownAuthorities.AZURE_PUBLIC_CLOUD, + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as tmp_file: + json.dump(invalid_data, tmp_file) + tmp_file.flush() + + try: + with patch("os.path.expanduser", return_value=tmp_file.name): + with patch("os.path.exists", return_value=True): + with pytest.raises(ValueError, match="Authentication record validation failed"): + load_vscode_auth_record() + finally: + os.unlink(tmp_file.name) + + def test_load_missing_required_fields(self): + """Test loading fails when required fields are missing.""" + incomplete_data = { + "tenantId": "12345678-1234-1234-1234-123456789012", + "clientId": AZURE_VSCODE_CLIENT_ID, + # Missing username, homeAccountId, authority + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as tmp_file: + json.dump(incomplete_data, tmp_file) + tmp_file.flush() + + try: + with patch("os.path.expanduser", return_value=tmp_file.name): + with patch("os.path.exists", return_value=True): + with pytest.raises(ValueError) as exc_info: + load_vscode_auth_record() + + error_message = str(exc_info.value) + assert "username field is missing" in error_message + assert "homeAccountId field is missing" in error_message + assert "authority field is missing" in error_message + finally: + os.unlink(tmp_file.name) diff --git a/sdk/identity/azure-identity/tests/test_vscode_credential_async.py b/sdk/identity/azure-identity/tests/test_vscode_credential_async.py index 7e4bf27c671e..aee22c19e07e 100644 --- a/sdk/identity/azure-identity/tests/test_vscode_credential_async.py +++ 
b/sdk/identity/azure-identity/tests/test_vscode_credential_async.py @@ -2,357 +2,27 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ -import time -from unittest import mock -from urllib.parse import urlparse +from unittest.mock import patch -from azure.core.credentials import AccessToken -from azure.core.exceptions import ClientAuthenticationError -from azure.identity import AzureAuthorityHosts, CredentialUnavailableError -from azure.identity._constants import EnvironmentVariables -from azure.identity._internal.user_agent import USER_AGENT -from azure.identity.aio import VisualStudioCodeCredential -from azure.core.pipeline.policies import SansIOHTTPPolicy import pytest - -from helpers import build_aad_response, mock_response, Request -from helpers_async import async_validating_transport, wrap_in_future -from test_vscode_credential import GET_REFRESH_TOKEN, GET_USER_SETTINGS - - -def get_credential(user_settings=None, **kwargs): - # defaulting to empty user settings ensures tests work when real user settings are available - with mock.patch(GET_USER_SETTINGS, lambda: user_settings or {}): - return VisualStudioCodeCredential(**kwargs) - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -async def test_tenant_id(): - def get_transport(expected_tenant): - return async_validating_transport( - requests=[ - Request(base_url="https://{}/{}".format(AzureAuthorityHosts.AZURE_PUBLIC_CLOUD, expected_tenant)) - ], - responses=[mock_response(json_payload=build_aad_response(access_token="**"))], - ) - - # credential should default to "organizations" tenant - transport = get_transport("organizations") - credential = get_credential(transport=transport) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - await credential.get_token("scope") - assert transport.send.call_count == 1 - - # ... 
unless VS Code has a tenant configured - user_settings = {"azure.tenant": "vs-code-setting"} - transport = get_transport(user_settings["azure.tenant"]) - credential = get_credential(user_settings, transport=transport) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - await credential.get_token("scope") - assert transport.send.call_count == 1 - - # ... and a tenant specified by the application prevails over VS Code configuration - transport = get_transport("from-application") - credential = get_credential(user_settings, tenant_id="from-application", transport=transport) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - await credential.get_token("scope") - assert transport.send.call_count == 1 - - -@pytest.mark.skip(reason="VS code credential is disabled") -def test_tenant_id_validation(): - """The credential should raise ValueError when given an invalid tenant_id""" - - valid_ids = {"c878a2ab-8ef4-413b-83a0-199afb84d7fb", "contoso.onmicrosoft.com", "organizations", "common"} - for tenant in valid_ids: - get_credential(tenant_id=tenant) - - invalid_ids = {"my tenant", "my_tenant", "/", "\\", '"my-tenant"', "'my-tenant'"} - for tenant in invalid_ids: - with pytest.raises(ValueError): - get_credential(tenant_id=tenant) - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -async def test_no_scopes(): - """The credential should raise ValueError when get_token is called with no scopes""" - - credential = get_credential() - with pytest.raises(ValueError): - await credential.get_token() - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -async def test_policies_configurable(): - policy = mock.Mock(spec_set=SansIOHTTPPolicy, on_request=mock.Mock()) - - async def send(*_, **kwargs): - # ensure the `claims` and `tenant_id` keywords from credential's `get_token` method don't make it to transport - assert "claims" not in kwargs - assert "tenant_id" not in kwargs - return 
mock_response(json_payload=build_aad_response(access_token="**")) - - credential = get_credential(policies=[policy], transport=mock.Mock(send=send)) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - await credential.get_token("scope") - - assert policy.on_request.called - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -async def test_user_agent(): - transport = async_validating_transport( - requests=[Request(required_headers={"User-Agent": USER_AGENT})], - responses=[mock_response(json_payload=build_aad_response(access_token="**"))], - ) - credential = get_credential(transport=transport) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - await credential.get_token("scope") - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -@pytest.mark.parametrize("authority", ("localhost", "https://localhost")) -async def test_request_url(authority): - """the credential should accept an authority, with or without scheme, as an argument or environment variable""" - - tenant_id = "expected-tenant" - access_token = "***" - parsed_authority = urlparse(authority) - expected_netloc = parsed_authority.netloc or authority # "localhost" parses to netloc "", path "localhost" - expected_refresh_token = "refresh-token" - - async def mock_send(request, **kwargs): - actual = urlparse(request.url) - assert actual.scheme == "https" - assert actual.netloc == expected_netloc - assert actual.path.startswith("/" + tenant_id) - assert request.body["refresh_token"] == expected_refresh_token - return mock_response(json_payload={"token_type": "Bearer", "expires_in": 42, "access_token": access_token}) - - credential = get_credential(tenant_id=tenant_id, transport=mock.Mock(send=mock_send), authority=authority) - with mock.patch(GET_REFRESH_TOKEN, return_value=expected_refresh_token): - token = await credential.get_token("scope") - assert token.token == access_token - - # authority can be configured via environment variable 
- with mock.patch.dict("os.environ", {EnvironmentVariables.AZURE_AUTHORITY_HOST: authority}, clear=True): - credential = get_credential(tenant_id=tenant_id, transport=mock.Mock(send=mock_send)) - with mock.patch(GET_REFRESH_TOKEN, return_value=expected_refresh_token): - await credential.get_token("scope") - assert token.token == access_token - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -async def test_credential_unavailable_error(): - credential = get_credential() - with mock.patch(GET_REFRESH_TOKEN, return_value=None): - with pytest.raises(CredentialUnavailableError): - await credential.get_token("scope") - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -async def test_redeem_token(): - expected_token = AccessToken("token", 42) - expected_value = "value" - - mock_client = mock.Mock(spec=object) - token_by_refresh_token = mock.Mock(return_value=expected_token) - mock_client.obtain_token_by_refresh_token = wrap_in_future(token_by_refresh_token) - mock_client.get_cached_access_token = mock.Mock(return_value=None) - - with mock.patch(GET_REFRESH_TOKEN, return_value=expected_value): - credential = get_credential(_client=mock_client) - token = await credential.get_token("scope") - assert token is expected_token - token_by_refresh_token.assert_called_with(("scope",), expected_value, claims=None, tenant_id=None) - - -@pytest.mark.asyncio -async def test_cache_refresh_token(): - expected_token = AccessToken("token", 42) - - mock_client = mock.Mock(spec=object) - token_by_refresh_token = mock.Mock(return_value=expected_token) - mock_client.obtain_token_by_refresh_token = wrap_in_future(token_by_refresh_token) - mock_client.get_cached_access_token = mock.Mock(return_value=None) - mock_get_credentials = mock.Mock(return_value="VALUE") - - credential = get_credential(_client=mock_client) - with mock.patch(GET_REFRESH_TOKEN, mock_get_credentials): - await credential.get_token("scope") - assert 
mock_get_credentials.call_count == 1 - await credential.get_token("scope") - assert mock_get_credentials.call_count == 1 - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -async def test_no_obtain_token_if_cached(): - expected_token = AccessToken("token", time.time() + 3600) - - token_by_refresh_token = mock.Mock(return_value=expected_token) - mock_client = mock.Mock( - get_cached_access_token=mock.Mock(return_value=expected_token), - obtain_token_by_refresh_token=wrap_in_future(token_by_refresh_token), - ) - - credential = get_credential(_client=mock_client) - with mock.patch( - GET_REFRESH_TOKEN, - mock.Mock(side_effect=Exception("credential should not acquire a new token")), - ): - token = await credential.get_token("scope") - - assert token_by_refresh_token.call_count == 0 - assert token.token == expected_token.token - assert token.expires_on == expected_token.expires_on - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -async def test_adfs(): - """The credential should raise CredentialUnavailableError when configured for ADFS""" - - credential = get_credential(tenant_id="adfs") - with pytest.raises(CredentialUnavailableError) as ex: - await credential.get_token("scope") - assert "adfs" in ex.value.message.lower() - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -async def test_custom_cloud_no_authority(): - """The credential is unavailable when VS Code is configured to use a cloud with no known authority""" - - cloud_name = "AzureCustomCloud" - credential = get_credential({"azure.cloud": cloud_name}) - with pytest.raises(CredentialUnavailableError, match="authority.*" + cloud_name): - await credential.get_token("scope") - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -@pytest.mark.parametrize( - "cloud,authority", - ( - ("AzureCloud", AzureAuthorityHosts.AZURE_PUBLIC_CLOUD), - ("AzureChinaCloud", 
AzureAuthorityHosts.AZURE_CHINA), - ("AzureUSGovernment", AzureAuthorityHosts.AZURE_GOVERNMENT), - ), -) -async def test_reads_cloud_settings(cloud, authority): - """the credential should read authority and tenant from VS Code settings when an application doesn't specify them""" - - expected_tenant = "tenant-id" - user_settings = {"azure.cloud": cloud, "azure.tenant": expected_tenant} - - transport = async_validating_transport( - requests=[Request(base_url="https://{}/{}".format(authority, expected_tenant))], - responses=[mock_response(json_payload=build_aad_response(access_token="**"))], - ) - - credential = get_credential(user_settings, transport=transport) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - await credential.get_token("scope") - - assert transport.send.call_count == 1 - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -async def test_no_user_settings(): - """the credential should default to Public Cloud and "organizations" tenant when it can't read VS Code settings""" - - transport = async_validating_transport( - requests=[Request(base_url="https://{}/{}".format(AzureAuthorityHosts.AZURE_PUBLIC_CLOUD, "organizations"))], - responses=[mock_response(json_payload=build_aad_response(access_token="**"))], - ) - - credential = get_credential(transport=transport) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - await credential.get_token("scope") - - assert transport.send.call_count == 1 - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -async def test_multitenant_authentication(): - first_tenant = "first-tenant" - first_token = "***" - second_tenant = "second-tenant" - second_token = first_token * 2 - - async def send(request, **kwargs): - # ensure the `claims` and `tenant_id` keywords from credential's `get_token` method don't make it to transport - assert "claims" not in kwargs - assert "tenant_id" not in kwargs - parsed = urlparse(request.url) - tenant = 
parsed.path.split("/")[1] - assert tenant in (first_tenant, second_tenant), 'unexpected tenant "{}"'.format(tenant) - token = first_token if tenant == first_tenant else second_token - return mock_response(json_payload=build_aad_response(access_token=token)) - - credential = get_credential( - tenant_id=first_tenant, transport=mock.Mock(send=send), additionally_allowed_tenants=["*"] - ) - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - token = await credential.get_token("scope") - assert token.token == first_token - - token = await credential.get_token("scope", tenant_id=first_tenant) - assert token.token == first_token - - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - token = await credential.get_token("scope", tenant_id=second_tenant) - assert token.token == second_token - - # should still default to the first tenant - token = await credential.get_token("scope") - assert token.token == first_token - - -@pytest.mark.skip(reason="VS code credential is disabled") -@pytest.mark.asyncio -async def test_multitenant_authentication_not_allowed(): - expected_tenant = "expected-tenant" - expected_token = "***" - - async def send(request, **kwargs): - # ensure the `claims` and `tenant_id` keywords from credential's `get_token` method don't make it to transport - assert "claims" not in kwargs - assert "tenant_id" not in kwargs - parsed = urlparse(request.url) - tenant = parsed.path.split("/")[1] - token = expected_token if tenant == expected_tenant else expected_token * 2 - return mock_response(json_payload=build_aad_response(access_token=token)) - - credential = get_credential( - tenant_id=expected_tenant, transport=mock.Mock(send=send), additionally_allowed_tenants=["*"] - ) - - with mock.patch(GET_REFRESH_TOKEN, lambda _: "**"): - token = await credential.get_token("scope") - assert token.token == expected_token - - token = await credential.get_token("scope", tenant_id=expected_tenant) - assert token.token == expected_token - - token = await 
credential.get_token("scope", tenant_id="un" + expected_tenant) - assert token.token == expected_token * 2 - - with mock.patch.dict("os.environ", {EnvironmentVariables.AZURE_IDENTITY_DISABLE_MULTITENANTAUTH: "true"}): - token = await credential.get_token("scope", tenant_id="un" + expected_tenant) - assert token.token == expected_token - - -def test_deprecation_warning(): - with pytest.deprecated_call(): - get_credential() +from azure.identity._credentials.vscode import VisualStudioCodeCredential as SyncVisualStudioCodeCredential +from azure.identity.aio._credentials.vscode import VisualStudioCodeCredential +from azure.identity import CredentialUnavailableError + + +class TestVisualStudioCodeCredentialAsync: + """Test cases for the asynchronous VisualStudioCodeCredential""" + + @pytest.mark.asyncio + async def test_credential_uses_sync_credential(self): + """Test that the async credential uses the sync version.""" + credential = VisualStudioCodeCredential() + assert isinstance(credential._sync_credential, SyncVisualStudioCodeCredential) + + @pytest.mark.asyncio + async def test_invalid_auth_record(self): + """Test that an error is raised if the auth record is nonexistent.""" + # Test with a nonexistent file + with patch("os.path.expanduser", return_value="nonexistent_file.json"): + with pytest.raises(CredentialUnavailableError): + await VisualStudioCodeCredential().get_token_info("https://management.azure.com/.default")
DefaultAzureCredential: Re-enable SSO with VSCode on Windows Now that WAM is the default authN mechanism in VS Code desktop, we can explore restoration of SSO with VS Code for Windows customers. Such a feature would fill the gap left behind with the deprecation of VisualStudioCodeCredential. One idea is to use a silent, broker-enabled instance of InteractiveBrowserCredential.
Gist for April Beta release https://gist.github.com/christothes/f8a0dc6249261cb36a6c452717c4e932 Related work on the VS Code and Azure MCP Server side of this puzzle is tracked here: https://github.com/Azure/azure-mcp-pr/issues/245
Gist for April Beta release https://gist.github.com/christothes/f8a0dc6249261cb36a6c452717c4e932 Related work on the VS Code and Azure MCP Server side of this puzzle is tracked here: https://github.com/Azure/azure-mcp-pr/issues/245
[ "https://github.com/Azure/azure-sdk-for-python/commit/6773fcc7f85620a7ba02265d977b862a84c723b6", "https://github.com/Azure/azure-sdk-for-python/commit/0cfe0322149a495a63e08c4469003a7eb0ea96d0", "https://github.com/Azure/azure-sdk-for-python/commit/9b818caf1f8dff1927e857ec10e9b130b71f58bc", "https://github.com...
2025-06-27T23:41:35Z
https://github.com/Azure/azure-sdk-for-python/tree/e646d9e358c7e234010db6b4a66790cff56e7e5b
[ "Set-Location sdk\\core\\azure-core; python -m pip install -r dev_requirements.txt; python -m pip install -e ." ]
[ "pytest -rA --json-report --json-report-file=reports/pytest-results.json" ]
[ "cat reports\\pytest-results.json" ]
def parser(log: str) -> dict[str, str]: import re def norm_status(s: str) -> str: s = s.lower() if s in ("passed", "pass", "ok", "✓"): return "pass" if s in ("failed", "fail", "error", "✕", "xfail", "xfailed", "xpass"): return "fail" if s in ("skipped", "skip", "xskip", "○"): return "skip" return "fail" def clean_nodeid(s: str) -> str: s = s.replace("\r", "").replace("\n", "") s = s.replace("\\", "/") s = re.sub(r":{3,}", "::", s) s = re.sub(r"\s+", "", s) # Trim leading noise to common prefixes prefixes = ("sdk/", "tests/", "src/") for p in prefixes: idx = s.find(p) if idx != -1: s = s[idx:] break return s def is_probable_test(nodeid: str) -> bool: # Must look like a pytest test path with "::" and sensible prefix if "::" not in nodeid: return False if not (nodeid.startswith("sdk/") or nodeid.startswith("tests/") or nodeid.startswith("src/")): return False # Last part should be non-empty last = nodeid.split("::")[-1] return bool(last) def severity(status: str) -> int: if status == "pass": return 0 if status == "skip": return 1 return 2 # fail # Brace-balanced slice around a given position def slice_object_around(pos: int) -> str | None: # Find nearest preceding '{' start = log.rfind("{", 0, pos) if start == -1: return None depth = 0 in_str = False escape = False for i in range(start, len(log)): ch = log[i] if in_str: if escape: escape = False elif ch == "\\": escape = True elif ch == '"': in_str = False else: if ch == '"': in_str = True elif ch == "{": depth += 1 elif ch == "}": depth -= 1 if depth == 0: return log[start : i + 1] # Fallback: return a capped slice if unmatched return log[start : min(len(log), start + 12000)] results: dict[str, str] = {} # Patterns for outcomes/status within an object outcome_kv_re = re.compile( r'"(?:outcome|status)"\s*:\s*"(passed|failed|skipped|error|xfail|xpass)"', re.IGNORECASE, ) phase_outcome_res = [ re.compile( r'"call"\s*:\s*\{[^{}]*"(?:outcome|status)"\s*:\s*"(passed|failed|skipped|error|xfail|xpass)"', re.IGNORECASE | 
re.DOTALL, ), re.compile( r'"setup"\s*:\s*\{[^{}]*"(?:outcome|status)"\s*:\s*"(passed|failed|skipped|error|xfail|xpass)"', re.IGNORECASE | re.DOTALL, ), re.compile( r'"teardown"\s*:\s*\{[^{}]*"(?:outcome|status)"\s*:\s*"(passed|failed|skipped|error|xfail|xpass)"', re.IGNORECASE | re.DOTALL, ), ] # Find all nodeid occurrences node_matches = list(re.finditer(r'"nodeid"\s*:\s*"(.*?)"', log, flags=re.IGNORECASE | re.DOTALL)) # Primary pass: search within brace-balanced object containing the nodeid for m in node_matches: raw_nodeid = m.group(1) nodeid = clean_nodeid(raw_nodeid) if not is_probable_test(nodeid): continue obj_slice = slice_object_around(m.start()) if not obj_slice: continue # Prefer phase outcomes; choose most severe if multiple best_status = None best_sev = -1 for rx in phase_outcome_res: for pm in rx.finditer(obj_slice): st = norm_status(pm.group(1)) sev = severity(st) if sev > best_sev: best_status, best_sev = st, sev # If no phase outcome found, look for direct outcome/status if best_status is None: kv = outcome_kv_re.search(obj_slice) if kv: st = norm_status(kv.group(1)) best_status = st best_sev = severity(st) if best_status: results[nodeid] = best_status # Secondary pass: limited forward window to next nodeid to catch nearby outcomes (avoid collector-level backward association) for i, m in enumerate(node_matches): raw_nodeid = m.group(1) nodeid = clean_nodeid(raw_nodeid) if not is_probable_test(nodeid): continue if nodeid in results: continue start = m.end() next_start = node_matches[i + 1].start() if i + 1 < len(node_matches) else len(log) window = log[start : min(next_start, start + 4000)] # Prefer phase outcomes best_status = None best_sev = -1 for rx in phase_outcome_res: pm = rx.search(window) if pm: st = norm_status(pm.group(1)) best_status = st best_sev = severity(st) break if best_status is None: kv = outcome_kv_re.search(window) if kv: best_status = norm_status(kv.group(1)) if best_status: results[nodeid] = best_status # Fallback: textual 
pytest lines text_re_a = re.compile( r'([^\s][^\n\r]*?::[^\s][^\n\r]*)\s+(PASSED|FAILED|SKIPPED|ERROR|XFAIL|XPASS)\b', re.IGNORECASE, ) for m in text_re_a.finditer(log): testid, status = m.groups() nodeid = clean_nodeid(testid) if is_probable_test(nodeid): results[nodeid] = norm_status(status) text_re_b = re.compile( r'(PASSED|FAILED|SKIPPED|ERROR|XFAIL|XPASS)\s+([^\s][^\n\r]*?::[^\s][^\n\r]*)\b', re.IGNORECASE, ) for m in text_re_b.finditer(log): status, testid = m.groups() nodeid = clean_nodeid(testid) if is_probable_test(nodeid): results[nodeid] = norm_status(status) text_re_c = re.compile(r'FAILED\s+([^\s][^\n\r]*?::[^\s][^\n\r]*)\b', re.IGNORECASE) for m in text_re_c.finditer(log): nodeid = clean_nodeid(m.group(1)) if is_probable_test(nodeid): results[nodeid] = "fail" # Ensure statuses are only pass/fail/skip for k, v in list(results.items()): if v not in ("pass", "fail", "skip"): results[k] = "fail" return results
[ "sdk/coree/azure-core/tests/async_tests/test_rest_stream_responses_async.py::test_deflate__decompress_compressed_header" ]
[ "sdk/ccore/azure-core/tests/test_rest_request_backcompat.py::test_request_json_body", "sdk/core/azure-core/tests/async_tests/test_pipeline_asyync.py::test_no_cleanup_policy_when_redirect_policy_is_empty", "sdk/core/azure-core/tests/test_rest_query.py::test_req", "sdk/core/azure-core/tests/async_tests/test_bas...
starryzhang/sweb.eval.win.azure_1776_azure-sdk-for-python-41822
conan-io/conan
18422
conan-io__conan-18422
Python
[ "17491" ]
048565de10232675955056fd41c9f3c4e5305eba
diff --git a/conan/tools/build/flags.py b/conan/tools/build/flags.py index 98d068252ac..019eba51f63 100644 --- a/conan/tools/build/flags.py +++ b/conan/tools/build/flags.py @@ -134,14 +134,15 @@ def build_type_flags(conanfile): if not compiler or not build_type: return [] - # https://github.com/Kitware/CMake/blob/d7af8a34b67026feaee558433db3a835d6007e06/ - # Modules/Platform/Windows-MSVC.cmake comp_exes = conanfile.conf.get("tools.build:compiler_executables", check_type=dict, default={}) clangcl = "clang-cl" in (comp_exes.get("c") or comp_exes.get("cpp", "")) if compiler == "msvc" or clangcl: - if clangcl or (vs_toolset and "clang" in vs_toolset): + # https://github.com/Kitware/CMake/blob/d7af8a34b67026feaee558433db3a835d6007e06/ + # Modules/Platform/Windows-MSVC.cmake + # FIXME: This condition seems legacy, as no more "clang" exists in Conan toolsets + if vs_toolset and "clang" in vs_toolset: flags = {"Debug": ["-gline-tables-only", "-fno-inline", "-O0"], "Release": ["-O2"], "RelWithDebInfo": ["-gline-tables-only", "-O2", "-fno-inline"], diff --git a/conan/tools/gnu/autotoolstoolchain.py b/conan/tools/gnu/autotoolstoolchain.py index 785f3b05cba..f2e454474d9 100644 --- a/conan/tools/gnu/autotoolstoolchain.py +++ b/conan/tools/gnu/autotoolstoolchain.py @@ -174,7 +174,9 @@ def _get_msvc_runtime_flag(self): if self._conanfile.settings.compiler.runtime == "dynamic": runtime_type = self._conanfile.settings.get_safe("compiler.runtime_type") library = "msvcrtd" if runtime_type == "Debug" else "msvcrt" - return f"-D_DLL -D_MT -Xclang --dependent-lib={library}" + # The -D_DEBUG is important to link with the Debug MSVCP140D.dll + debug = "-D_DEBUG " if runtime_type == "Debug" else "" + return f"{debug}-D_DLL -D_MT -Xclang --dependent-lib={library}" return "" # By default it already link statically flag = msvc_runtime_flag(self._conanfile) diff --git a/conan/tools/gnu/gnutoolchain.py b/conan/tools/gnu/gnutoolchain.py index e3338283137..800cb35b495 100644 --- 
a/conan/tools/gnu/gnutoolchain.py +++ b/conan/tools/gnu/gnutoolchain.py @@ -239,7 +239,8 @@ def _get_msvc_runtime_flag(self): if self._conanfile.settings.compiler.runtime == "dynamic": runtime_type = self._conanfile.settings.get_safe("compiler.runtime_type") library = "msvcrtd" if runtime_type == "Debug" else "msvcrt" - return f"-D_DLL -D_MT -Xclang --dependent-lib={library}" + debug = "-D_DEBUG " if runtime_type == "Debug" else "" + return f"{debug}-D_DLL -D_MT -Xclang --dependent-lib={library}" return "" # By default it already link statically flag = msvc_runtime_flag(self._conanfile)
diff --git a/test/functional/toolchains/gnu/autotools/test_win_bash.py b/test/functional/toolchains/gnu/autotools/test_win_bash.py index 47ab66e17f8..b46840171a5 100644 --- a/test/functional/toolchains/gnu/autotools/test_win_bash.py +++ b/test/functional/toolchains/gnu/autotools/test_win_bash.py @@ -70,7 +70,8 @@ def build(self): @pytest.mark.tool("msys2") @pytest.mark.parametrize("frontend", ("clang", "clang-cl")) @pytest.mark.parametrize("runtime", ("static", "dynamic")) -def test_autotools_bash_complete_clang(frontend, runtime): +@pytest.mark.parametrize("build_type", ("Debug", "Release")) +def test_autotools_bash_complete_clang(frontend, runtime, build_type): client = TestClient(path_with_spaces=False) # Problem is that msys2 also has clang in the path, so we need to make it explicit clangpath = tools_locations["clang"]["18"]["path"]["Windows"] @@ -81,7 +82,7 @@ def test_autotools_bash_complete_clang(frontend, runtime): [settings] os=Windows arch=x86_64 - build_type=Release + build_type={build_type} compiler=clang compiler.version=18 compiler.cppstd=14 @@ -92,6 +93,7 @@ def test_autotools_bash_complete_clang(frontend, runtime): tools.build:compiler_executables={comps} tools.microsoft.bash:subsystem=msys2 tools.microsoft.bash:path=bash + tools.compilation:verbosity=verbose """) main = gen_function_cpp(name="main") @@ -128,16 +130,17 @@ def build(self): "main.cpp": main, "profile_win": profile_win}) client.run("build . 
-pr=profile_win") + print(client.out) client.run_command("main.exe") assert "__GNUC__" not in client.out assert "main __clang_major__18" in client.out - check_exe_run(client.out, "main", "clang", None, "Release", "x86_64", None) + check_exe_run(client.out, "main", "clang", None, build_type, "x86_64", None) bat_contents = client.load("conanbuild.bat") assert "conanvcvars.bat" in bat_contents static_runtime = runtime == "static" - check_vs_runtime("main.exe", client, "17", build_type="Release", static_runtime=static_runtime) + check_vs_runtime("main.exe", client, "17", build_type=build_type, static_runtime=static_runtime) @pytest.mark.skipif(platform.system() != "Windows", reason="Requires Windows") diff --git a/test/functional/toolchains/test_nmake_toolchain.py b/test/functional/toolchains/test_nmake_toolchain.py index 9837d716bf4..36a4121e4dc 100644 --- a/test/functional/toolchains/test_nmake_toolchain.py +++ b/test/functional/toolchains/test_nmake_toolchain.py @@ -85,3 +85,78 @@ def build(self): client.run_command("simple.exe") assert "dep/1.0" in client.out check_exe_run(client.out, "main", "msvc", version, build_type, "x86_64", cppstd, conf_preprocessors) + + +@pytest.mark.skipif(platform.system() != "Windows", reason="Requires Windows") +@pytest.mark.tool("clang", "18") +def test_toolchain_nmake_clang(): + compiler = "clang" + version = "18" + runtime = "dynamic" + cppstd = "14" + build_type = "Debug" + defines = ["TEST_DEFINITION1", "TEST_DEFINITION2=0", "TEST_DEFINITION3=", "TEST_DEFINITION4=TestPpdValue4", + "TEST_DEFINITION5=__declspec(dllexport)", "TEST_DEFINITION6=foo bar"] + cflags = cxxflags = sharedlinkflags = exelinkflags = [] + client = TestClient(path_with_spaces=False) + settings = {"compiler": compiler, + "compiler.version": version, + "compiler.cppstd": cppstd, + "compiler.runtime": runtime, + "build_type": build_type, + "compiler.runtime_version": "v144", + "arch": "x86_64"} + + serialize_array = lambda arr: "[{}]".format(",".join([f"'{v}'" for 
v in arr])) + conf = { + "tools.build:defines": serialize_array(defines) if defines else "", + "tools.build:cflags": serialize_array(cflags) if cflags else "", + "tools.build:cxxflags": serialize_array(cxxflags) if cxxflags else "", + "tools.build:sharedlinkflags": serialize_array(sharedlinkflags) if sharedlinkflags else "", + "tools.build:exelinkflags": serialize_array(exelinkflags) if exelinkflags else "", + "tools.build:compiler_executables": r'{\"c\": \"clang-cl\", \"cpp\": \"clang-cl\"}', + "tools.cmake.cmaketoolchain:generator": "Visual Studio 17", + } + + # Build the profile according to the settings provided + settings = " ".join('-s %s="%s"' % (k, v) for k, v in settings.items() if v) + + client.run("new cmake_lib -d name=dep -d version=1.0") + conf = " ".join(f'-c {k}="{v}"' for k, v in conf.items() if v) + client.run(f'create . -tf=\"\" {settings} {conf}') + + # Rearrange defines to macro / value dict + conf_preprocessors = {} + for define in defines: + if "=" in define: + key, value = define.split("=", 1) + # gen_function_cpp doesn't properly handle empty macros + if value: + conf_preprocessors[key] = value + else: + conf_preprocessors[define] = "1" + + conanfile = textwrap.dedent(""" + from conan import ConanFile + class Pkg(ConanFile): + settings = "os", "compiler", "build_type", "arch" + requires = "dep/1.0" + generators = "NMakeToolchain", "NMakeDeps" + + def build(self): + self.run(f"nmake /f makefile") + """) + makefile = textwrap.dedent("""\ + all: simple.exe + + simple.exe: simple.cpp + $(CPP) simple.cpp -o simple.exe + """) + client.save({"conanfile.py": conanfile, + "makefile": makefile, + "simple.cpp": gen_function_cpp(name="main", includes=["dep"], calls=["dep"], preprocessor=conf_preprocessors.keys())}, + clean_first=True) + client.run(f"build . 
{settings} {conf}") + client.run_command("simple.exe") + assert "dep/1.0" in client.out + check_exe_run(client.out, "main", "clang", "19.1", build_type, "x86_64", cppstd, conf_preprocessors) diff --git a/test/unittests/client/build/compiler_flags_test.py b/test/unittests/client/build/compiler_flags_test.py index fee6ff32c65..4c82661f58d 100644 --- a/test/unittests/client/build/compiler_flags_test.py +++ b/test/unittests/client/build/compiler_flags_test.py @@ -127,10 +127,10 @@ def test_build_type_flags(self, compiler, build_type, vs_toolset, flags): conanfile.settings = settings self.assertEqual(' '.join(build_type_flags(conanfile)), flags) - @parameterized.expand([("clang", "Debug", "-gline-tables-only -fno-inline -O0"), - ("clang", "Release", "-O2"), - ("clang", "RelWithDebInfo", "-gline-tables-only -O2 -fno-inline"), - ("clang", "MinSizeRel", ""), + @parameterized.expand([("clang", "Debug", "-Zi -Ob0 -Od"), + ("clang", "Release", "-O2 -Ob2"), + ("clang", "RelWithDebInfo", "-Zi -O2 -Ob1"), + ("clang", "MinSizeRel", "-O1 -Ob1"), ]) def test_build_type_flags_clangcl(self, compiler, build_type, flags): settings = MockSettings({"compiler": compiler,
[bug] NMake toolchain works with CLang-cl incorrectly ### Describe the bug Hi! Thank you for your great project! I faced with the following issue. Context: - Conan supports `CLang-cl` and knows how to set debug parameters for it (for example, `gline-tables-only` and `fno-inline`); - Conan has `NMakeToolchain` and supports `NMake`; Issue: - Conan sets the generated CLang `build_type_flags` to the `CL` environment variable (it's okay because `CLang-cl` supports it) and does it in the MSVC CL compiler format (Conan tranforms `-` characters to `/`) and that is the problem. I see some possible solutions: 1. Disable transformations in `NMakeToolchain` if the compiler is `CLang-cl`; 2. Follow the previous point and to add the filling of the `CFLAGS` environment variable instead of the `CL` one. Thank you! ### How to reproduce it _No response_
Hi @molotok-sms > Hi! Thank you for your great project! Thanks for your kind words! Thanks for your report too. I think this was addressed in https://github.com/conan-io/conan/pull/17387, which was released in latest Conan 2.10. The way it works is it uses the ``tools.build:compiler_executables`` information to see if it is using ``clang-cl`` or not, and avoid using the wrong flags. Could you please try that? Thanks for your feedback! Thank you for the quick response! I use the Conan version 2.10.0 from Pixi (Conda). The mentioned changes are good but unfortunately it isn't enough. In my case, "the right clang flags" pass through the NMakeToolchain "`_format_options`" function and become invalid. It seems it would be good to improve the NMakeToolchain to support not only "msvc" compiler, but "clang-cl' too. I attached the log of errors when NMakeToolchain transformed the format of debug parameters: ![image](https://github.com/user-attachments/assets/8b78041c-2e5e-486b-85b1-09995b55a7d1) We have recently improved the support for clang-cl in Autotools, in Conan 2.17 (https://docs.conan.io/2/examples/tools/autotools/llvm_clang.html), lets try to have a look at this for next 2.18 I am doing a PR in https://github.com/conan-io/conan/pull/18422 that works with ``clang-cl`` for ``NMakeToolchain`` That PR will close this ticket. If there are any further issues after merging that PR (intended for Conan 2.18), it would be necessary to re-open, or better, create a new ticket with full reproduction steps, that is a minimal reproducible example code that would fail. Many thanks for your feedback!
Hi @molotok-sms > Hi! Thank you for your great project! Thanks for your kind words! Thanks for your report too. I think this was addressed in https://github.com/conan-io/conan/pull/17387, which was released in latest Conan 2.10. The way it works is it uses the ``tools.build:compiler_executables`` information to see if it is using ``clang-cl`` or not, and avoid using the wrong flags. Could you please try that? Thanks for your feedback! Thank you for the quick response! I use the Conan version 2.10.0 from Pixi (Conda). The mentioned changes are good but unfortunately it isn't enough. In my case, "the right clang flags" pass through the NMakeToolchain "`_format_options`" function and become invalid. It seems it would be good to improve the NMakeToolchain to support not only "msvc" compiler, but "clang-cl' too. I attached the log of errors when NMakeToolchain transformed the format of debug parameters: ![image](https://github.com/user-attachments/assets/8b78041c-2e5e-486b-85b1-09995b55a7d1) We have recently improved the support for clang-cl in Autotools, in Conan 2.17 (https://docs.conan.io/2/examples/tools/autotools/llvm_clang.html), lets try to have a look at this for next 2.18 I am doing a PR in https://github.com/conan-io/conan/pull/18422 that works with ``clang-cl`` for ``NMakeToolchain`` That PR will close this ticket. If there are any further issues after merging that PR (intended for Conan 2.18), it would be necessary to re-open, or better, create a new ticket with full reproduction steps, that is a minimal reproducible example code that would fail. Many thanks for your feedback! This has been closed by https://github.com/conan-io/conan/pull/18422, it will be in next Conan 2.18 release, feedback welcome.
[ "https://github.com/conan-io/conan/commit/f5e2af1b853ee94c047eefdeeee63a27b596610a", "https://github.com/conan-io/conan/commit/4b6ad32bd37c81292016b2eaad56f204d21de6f9", "https://github.com/conan-io/conan/commit/476c8bd1c44587dcd75c9d08c8f1ff776be537e2" ]
2025-06-06T07:00:38Z
https://github.com/conan-io/conan/tree/048565de10232675955056fd41c9f3c4e5305eba
[ "python -m pip install -e .[test]; python -m pip install -r conans/requirements_server.txt" ]
[ "New-Item -ItemType Directory -Force reports | Out-Null; New-Item -ItemType Directory -Force C:\\t | Out-Null; $env:CONAN_TEST_FOLDER=\"C:\\t\"; $env:TEMP=\"C:\\t\"; $env:TMP=\"C:\\t\"; git config --global core.autocrlf false; python -m pip install pytest-json-report; python -m pytest -rA -vv --json-report --json-r...
[ "type reports\\pytest-results.json" ]
def parser(log: str) -> dict[str, str]: # Robust test output parser for pytest JSON and textual outputs # Returns mapping: { "test_case_name": "pass|fail|skip", ... } import json import re from pathlib import Path from typing import Any, Dict, List, Optional, Tuple def map_status(s: str) -> str: if not s: return "fail" v = s.strip().lower() if v in {"passed", "pass", "ok", "xpass", "xpassed"}: return "pass" if v in {"skipped", "skip", "xfail", "xfailed"}: return "skip" # treat failed/error/others as fail return "fail" def is_test_function_nodeid(nodeid: str) -> bool: # pytest nodeid format: file.py::[Class::]test_name[params] if not nodeid or "::" not in nodeid: return False last = nodeid.split("::")[-1] # allow parametrized (may include brackets), ensure base name starts with lowercase "test" base = last.split("[", 1)[0] return base.startswith("test") def select_better(existing: Tuple[str, int], new_status: str, new_prio: int) -> Tuple[str, int]: # Higher priority wins; on tie, keep "worst" outcome (fail > skip > pass) if new_prio > existing[1]: return (new_status, new_prio) if new_prio < existing[1]: return existing # same priority, apply severity ordering order = {"fail": 2, "skip": 1, "pass": 0} if order.get(map_status(new_status), 0) > order.get(map_status(existing[0]), 0): return (new_status, new_prio) return existing def add_result(results: Dict[str, str], nodeid: str, status: str) -> None: if not nodeid: return # Keep the worst status if re-encountered if nodeid in results: order = {"fail": 2, "skip": 1, "pass": 0} if order[map_status(status)] > order[results[nodeid]]: results[nodeid] = map_status(status) else: results[nodeid] = map_status(status) def normalize_nodeid(nodeid: str) -> str: # Remove hard-wrapped CR/LF that can split nodeids across lines return nodeid.replace("\r", "").replace("\n", "") def collect_from_tests_list(tests: List[Any]) -> Dict[str, str]: out: Dict[str, str] = {} for t in tests: if not isinstance(t, dict): continue nodeid = 
t.get("nodeid") or t.get("node") outcome = t.get("outcome") if nodeid and outcome: nid = normalize_nodeid(str(nodeid)) if is_test_function_nodeid(nid): add_result(out, nid, str(outcome)) return out def collect_from_reports_list(reports: List[Any]) -> Dict[str, str]: # Handle pytest-reportlog style: multiple phase reports per nodeid # Prefer when == "call", else any available; combine to worst result per_node: Dict[str, Tuple[str, int]] = {} # nodeid -> (status, priority) for r in reports: if not isinstance(r, dict): continue nodeid = r.get("nodeid") or r.get("node") outcome = r.get("outcome") if not nodeid or not outcome: continue nid = normalize_nodeid(str(nodeid)) if not is_test_function_nodeid(nid): continue when = r.get("when") prio = 1 if isinstance(when, str): when_l = when.lower() if when_l == "call": prio = 3 elif when_l == "teardown": prio = 2 else: prio = 1 status = map_status(str(outcome)) if nid in per_node: per_node[nid] = select_better(per_node[nid], status, prio) else: per_node[nid] = (status, prio) out: Dict[str, str] = {} for k, (status, _prio) in per_node.items(): out[k] = map_status(status) return out def collect_from_json_obj(data: Any) -> Dict[str, str]: # Try standard keys first results: Dict[str, str] = {} if isinstance(data, dict): # pytest-json-report plugin if isinstance(data.get("tests"), list): res = collect_from_tests_list(data["tests"]) if res: results.update(res) # pytest-reportlog plugin if isinstance(data.get("reports"), list): res = collect_from_reports_list(data["reports"]) if res: results.update(res) # Some tools may place results under other keys, try to find lists of dicts with nodeid+outcome if not results: for val in data.values(): if isinstance(val, list): # try as tests res1 = collect_from_tests_list(val) res2 = collect_from_reports_list(val) if res1: results.update(res1) if res2: results.update(res2) # As last resort, deep scan dicts to pick any object with nodeid+outcome def deep_scan(obj: Any) -> None: if isinstance(obj, 
dict): nodeid = obj.get("nodeid") or obj.get("node") outcome = obj.get("outcome") when = obj.get("when") if nodeid and outcome: nid = normalize_nodeid(str(nodeid)) if is_test_function_nodeid(nid): prio = 1 if isinstance(when, str): wl = when.lower() prio = 3 if wl == "call" else (2 if wl == "teardown" else 1) status = map_status(str(outcome)) if nid in temp: temp[nid] = select_better(temp[nid], status, prio) else: temp[nid] = (status, prio) for v in obj.values(): deep_scan(v) elif isinstance(obj, list): for it in obj: deep_scan(it) temp: Dict[str, Tuple[str, int]] = {} deep_scan(data) if temp: for k, (status, _p) in temp.items(): results[k] = map_status(status) elif isinstance(data, list): # Maybe top-level list of test reports res = collect_from_tests_list(data) if not res: res = collect_from_reports_list(data) results.update(res) return results def try_load_json_from_file() -> Optional[Dict[str, str]]: p = Path("reports") / "pytest-results.json" if not p.exists(): return None try: with p.open("r", encoding="utf-8") as f: data = json.load(f) res = collect_from_json_obj(data) return res if res else {} except Exception: return None def try_load_json_from_log_text(log_text: str) -> Optional[Dict[str, str]]: try: data = json.loads(log_text) res = collect_from_json_obj(data) return res if res else {} except Exception: return None def object_aware_scan(log_text: str) -> Dict[str, str]: # Scan for JSON object substrings and match nodeid+outcome within the same object results: Dict[str, Tuple[str, int]] = {} stack: List[int] = [] for i, ch in enumerate(log_text): if ch == "{": stack.append(i) elif ch == "}" and stack: start = stack.pop() obj_text = log_text[start : i + 1] # Quick filter to avoid expensive regex on irrelevant chunks if '"nodeid"' not in obj_text and '"node"' not in obj_text: continue # Extract fields within this object m_node = re.search(r'"(?:nodeid|node)"\s*:\s*"((?:[^"\\]|\\.)*)"', obj_text) m_out = re.search(r'"outcome"\s*:\s*"((?:[^"\\]|\\.)*)"', 
obj_text) m_when = re.search(r'"when"\s*:\s*"((?:[^"\\]|\\.)*)"', obj_text) if not m_node or not m_out: continue nodeid_raw = m_node.group(1) outcome_raw = m_out.group(1) when_raw = m_when.group(1) if m_when else None # Unescape common sequences in JSON strings try: nodeid = bytes(nodeid_raw, "utf-8").decode("unicode_escape") except Exception: nodeid = nodeid_raw nodeid = normalize_nodeid(nodeid) outcome = outcome_raw when = when_raw if not is_test_function_nodeid(nodeid): continue prio = 1 if isinstance(when, str): wl = when.lower() prio = 3 if wl == "call" else (2 if wl == "teardown" else 1) status = map_status(outcome) if nodeid in results: results[nodeid] = select_better(results[nodeid], status, prio) else: results[nodeid] = (status, prio) flat: Dict[str, str] = {} for k, (status, _p) in results.items(): flat[k] = map_status(status) return flat def parse_textual(log_text: str) -> Dict[str, str]: results: Dict[str, str] = {} def add(id_: str, st: str): id_clean = normalize_nodeid(id_.strip()) # Ensure it's a function-level pytest id if not id_clean: return # Some textual reports might include class-only entries; filter them if is_test_function_nodeid(id_clean): add_result(results, id_clean, st) # Pytest console pattern: file::[Class::]test STATUS # Allow optional progress bracket like [ 50%] pytest_re = re.compile(r'(?m)^(?P<id>.+?::.+?)\s+(?:\[\s*\d+%?\]\s*)?(?P<st>PASSED|FAILED|SKIPPED|ERROR|XFAIL|XPASS)\b') for m in pytest_re.finditer(log_text): add(m.group("id"), m.group("st")) # unittest pattern: TestClass.test_method ... 
ok/FAIL/ERROR/skipped ut_re = re.compile(r'(?m)^(?P<id>[\w\.]+\.[\w\.\[\]-]+)\s+\.\.\.\s+(?P<st>ok|FAIL|ERROR|skipped)\b') for m in ut_re.finditer(log_text): add(m.group("id"), m.group("st")) # Go test style (if present): --- PASS/FAIL/SKIP: TestName go_re = re.compile(r'(?m)^---\s+(PASS|FAIL|SKIP):\s+([^\r\n]+)') for m in go_re.finditer(log_text): status = m.group(1) name = m.group(2) add(name, status) return results # 1) Try loading from file final_results = try_load_json_from_file() if final_results: return {k: map_status(v) for k, v in final_results.items()} # 2) Try parsing full JSON from provided log text final_results = try_load_json_from_log_text(log) if final_results: return {k: map_status(v) for k, v in final_results.items()} # 3) Object-aware scan of the text to pair nodeid and outcome in the same JSON object final_results = object_aware_scan(log) if final_results: return {k: map_status(v) for k, v in final_results.items()} # 4) Fallback to textual patterns final_results = parse_textual(log) if final_results: return {k: map_status(v) for k, v in final_results.items()} # If everything fails, return empty dict return {}
[ "test/unittests/model/test_conf.py::test_compose_conf_complex" ]
[ "test/unittests/tools/files/test_patches.py::test_base_path", "test/unittests/model/info_test.py::test_false_values_affect_none", "test/unittests/util/test_encrypt.py::test_key_unicode", "test/unittests/tools/files/test_patches.py::test_patch_user", "test/unittests/model/test_conf.py::test_conf_error_upperc...
starryzhang/sweb.eval.win.conan-io_1776_conan-18422
elastic/beats
42172
elastic__beats-42172
Go
[ "38142" ]
27b714dc999ae6e73abee95b3e294a077af20599
diff --git a/.buildkite/scripts/gcp_auth.ps1 b/.buildkite/scripts/gcp_auth.ps1 new file mode 100644 index 000000000000..2ba5982a3e20 --- /dev/null +++ b/.buildkite/scripts/gcp_auth.ps1 @@ -0,0 +1,20 @@ +Write-Host "~~~ Authenticating GCP" +# Secrets must be redacted +# https://buildkite.com/docs/pipelines/managing-log-output#redacted-environment-variables + +$privateCIGCSServiceAccount = "kv/ci-shared/platform-ingest/gcp-platform-ingest-ci-service-account" +$tempFileName = "google-cloud-credentials.json" +$secretFileLocation = Join-Path $env:TEMP $tempFileName + +$serviceAccountJsonSecret = Retry-Command -ScriptBlock { + vault kv get -field=data -format=json $privateCIGCSServiceAccount | ConvertFrom-Json + if ( -not $? ) { throw "Error during vault kv get" } +} + +New-Item -ItemType File -Path $secretFileLocation >$null +$serviceAccountJsonPlaintextSecret = $serviceAccountJsonSecret.plaintext | ConvertTo-Json +Set-Content -Path $secretFileLocation -Value $serviceAccountJsonPlaintextSecret +if ( -not $?) 
{ throw "Error retrieving the required field from the secret" } + +gcloud auth activate-service-account --key-file $secretFileLocation +Remove-Item -Path $secretFileLocation -Force \ No newline at end of file diff --git a/.buildkite/scripts/gcp_auth.sh b/.buildkite/scripts/gcp_auth.sh deleted file mode 100755 index 5b92f943980e..000000000000 --- a/.buildkite/scripts/gcp_auth.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -echo "~~~ Authenticating GCP" -# Secrets must be redacted -# https://buildkite.com/docs/pipelines/managing-log-output#redacted-environment-variables -PRIVATE_CI_GCS_CREDENTIALS_PATH="kv/ci-shared/platform-ingest/gcp-platform-ingest-ci-service-account" -PRIVATE_CI_GCS_CREDENTIALS_SECRET=$(vault kv get -field plaintext -format=json ${PRIVATE_CI_GCS_CREDENTIALS_PATH}) -export PRIVATE_CI_GCS_CREDENTIALS_SECRET -echo "${PRIVATE_CI_GCS_CREDENTIALS_SECRET}" > ./gcp.json -GOOGLE_APPLICATION_CREDENTIALS=$(realpath ./gcp.json) -export GOOGLE_APPLICATION_CREDENTIALS -gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" \ No newline at end of file diff --git a/.buildkite/x-pack/pipeline.xpack.packetbeat.yml b/.buildkite/x-pack/pipeline.xpack.packetbeat.yml index 5b3625e0fc92..1620bc2320ae 100644 --- a/.buildkite/x-pack/pipeline.xpack.packetbeat.yml +++ b/.buildkite/x-pack/pipeline.xpack.packetbeat.yml @@ -207,9 +207,8 @@ steps: - label: ":windows: x-pack/packetbeat: Win 2022 System Tests" key: "mandatory-win-2022-system-tests" - skip: "skipping due to elastic/beats#38142" command: | - source .buildkite/scripts/gcp_auth.sh + .buildkite/scripts/gcp_auth.ps1 Set-Location -Path x-pack/packetbeat mage systemTest retry: @@ -322,9 +321,8 @@ steps: - label: ":windows: x-pack/packetbeat: Win 10 System Tests" key: "extended-win-10-system-tests" - skip: "skipping due to elastic/beats#38142" command: | - source .buildkite/scripts/gcp_auth.sh + .buildkite/scripts/gcp_auth.ps1 Set-Location -Path 
x-pack/packetbeat mage systemTest retry: diff --git a/x-pack/packetbeat/magefile.go b/x-pack/packetbeat/magefile.go index 5e938cbbda72..220c430e5d84 100644 --- a/x-pack/packetbeat/magefile.go +++ b/x-pack/packetbeat/magefile.go @@ -137,12 +137,7 @@ func TestPackages() error { } func SystemTest(ctx context.Context) error { - // Buildkite (CI) images have preinstalled npcap - if os.Getenv("CI") == "true" { - mg.SerialDeps(devtools.BuildSystemTestBinary) - } else { - mg.SerialDeps(xpacketbeat.GetNpcapInstaller, devtools.BuildSystemTestBinary) - } + mg.SerialDeps(xpacketbeat.GetNpcapInstaller, devtools.BuildSystemTestBinary) args := devtools.DefaultGoTestIntegrationArgs() args.Packages = []string{"./tests/system/..."}
diff --git a/x-pack/packetbeat/tests/system/app_test.go b/x-pack/packetbeat/tests/system/app_test.go index 214cd9c2369a..d298b9157374 100644 --- a/x-pack/packetbeat/tests/system/app_test.go +++ b/x-pack/packetbeat/tests/system/app_test.go @@ -24,7 +24,7 @@ import ( ) // Keep in sync with NpcapVersion in magefile.go. -const NpcapVersion = "1.79" +const NpcapVersion = "1.80" func TestWindowsNpcapInstaller(t *testing.T) { if runtime.GOOS != "windows" { @@ -65,7 +65,26 @@ func TestDevices(t *testing.T) { } t.Log("Expect interfaces:\n", expected) +ifcsLoop: for _, ifc := range ifcs { - assert.Contains(t, stdout, ifc.Name) + if strings.Contains(stdout, ifc.Name) { + continue ifcsLoop + } + addrs, err := ifc.Addrs() + assert.NoError(t, err) + maddrs, err := ifc.MulticastAddrs() + assert.NoError(t, err) + addrs = append(addrs, maddrs...) + for _, addr := range addrs { + s := addr.String() + // remove the network mask suffix + if idx := strings.Index(s, "/"); idx > -1 { + s = s[:idx] + } + if strings.Contains(stdout, s) { + continue ifcsLoop + } + } + t.Errorf("interface %q not found", ifc.Name) } }
x-pack/packetbeat: System tests failure on Windows ## Failed Test * **Test Name:** TestDevices * **Link:** https://github.com/elastic/beats/blob/7b041ba327011d85471cfd37f1014f054ae43436/x-pack/packetbeat/tests/system/app_test.go#L55-L70 * **Branch:** main: https://github.com/elastic/beats/pull/40387 * **Artifact Link:** [beats-xpack-packetbeat_build_5706_windows-x-pack-slash-packetbeat-win-2022-system-tests.log](https://github.com/user-attachments/files/16588014/beats-xpack-packetbeat_build_5706_windows-x-pack-slash-packetbeat-win-2022-system-tests.log) * **Notes:** Additional details about the test. e.g. theory as to failure cause **NB!** tests should be enabled back, since those were muted while migrating from Jenkins to Buildkite. Please uncomment: https://github.com/elastic/beats/blob/7b041ba327011d85471cfd37f1014f054ae43436/.buildkite/x-pack/pipeline.xpack.packetbeat.yml#L163 https://github.com/elastic/beats/blob/7b041ba327011d85471cfd37f1014f054ae43436/.buildkite/x-pack/pipeline.xpack.packetbeat.yml#L253 Buildkite builds: https://buildkite.com/elastic/beats-xpack-packetbeat/builds/5706#_ ### Stack Trace ``` === FAIL: x-pack/packetbeat/tests/system TestDevices (0.21s) --   | app_test.go:58: Output:   | 0: \Device\NPF_{8058D1FD-C1F5-4A20-A848-0188AB321A97} (WAN Miniport (Network Monitor)) (Not assigned ip address)   | 1: \Device\NPF_{4265A7ED-129C-497F-B1AC-A60DB44D0DC7} (WAN Miniport (IPv6)) (Not assigned ip address)   | 2: \Device\NPF_{608080BA-7C74-441B-918A-AEA32DD991B4} (WAN Miniport (IP)) (Not assigned ip address)   | 3: \Device\NPF_{5F3CCF6B-3314-4F07-9284-5AE0B33202A8} (Google VirtIO Ethernet Adapter) (10.224.0.241)   | 4: \Device\NPF_Loopback (Adapter for loopback traffic capture) (::1 127.0.0.1)   | PASS   | coverage: 43.9% of statements in ./...   
| app_test.go:66: Expect interfaces:   | [7:Ethernet 2:up\|broadcast\|multicast\|running 1:Loopback Pseudo-Interface 1:up\|loopback\|multicast\|running]   | app_test.go:69:   | Error Trace: C:/buildkite-agent/builds/bk-agent-prod-gcp-1723456393706198410/elastic/beats-xpack-packetbeat/x-pack/packetbeat/tests/system/app_test.go:69   | Error: "0: \\Device\\NPF_{8058D1FD-C1F5-4A20-A848-0188AB321A97} (WAN Miniport (Network Monitor)) (Not assigned ip address)\n1: \\Device\\NPF_{4265A7ED-129C-497F-B1AC-A60DB44D0DC7} (WAN Miniport (IPv6)) (Not assigned ip address)\n2: \\Device\\NPF_{608080BA-7C74-441B-918A-AEA32DD991B4} (WAN Miniport (IP)) (Not assigned ip address)\n3: \\Device\\NPF_{5F3CCF6B-3314-4F07-9284-5AE0B33202A8} (Google VirtIO Ethernet Adapter) (10.224.0.241)\n4: \\Device\\NPF_Loopback (Adapter for loopback traffic capture) (::1 127.0.0.1)\nPASS\ncoverage: 43.9% of statements in ./..." does not contain "Ethernet 2"   | Test: TestDevices   | app_test.go:69:   | Error Trace: C:/buildkite-agent/builds/bk-agent-prod-gcp-1723456393706198410/elastic/beats-xpack-packetbeat/x-pack/packetbeat/tests/system/app_test.go:69   | Error: "0: \\Device\\NPF_{8058D1FD-C1F5-4A20-A848-0188AB321A97} (WAN Miniport (Network Monitor)) (Not assigned ip address)\n1: \\Device\\NPF_{4265A7ED-129C-497F-B1AC-A60DB44D0DC7} (WAN Miniport (IPv6)) (Not assigned ip address)\n2: \\Device\\NPF_{608080BA-7C74-441B-918A-AEA32DD991B4} (WAN Miniport (IP)) (Not assigned ip address)\n3: \\Device\\NPF_{5F3CCF6B-3314-4F07-9284-5AE0B33202A8} (Google VirtIO Ethernet Adapter) (10.224.0.241)\n4: \\Device\\NPF_Loopback (Adapter for loopback traffic capture) (::1 127.0.0.1)\nPASS\ncoverage: 43.9% of statements in ./..." does not contain "Loopback Pseudo-Interface 1"   | Test: TestDevices ```
Pinging @elastic/sec-linux-platform (Team:Security-Linux Platform) During the migration of beats-ci from Jenkins to Buildkite, a number of tests were failing consistently due to issues unrelated to the migration. Those tests were disabled to stabilize the CI, with the intent to revisit them post-migration. @oakrizan has reviewed them all in her draft PRs linked above in the description, and has opened tickets such as this one to highlight to the product teams the tests that are currently still disabled and could use some attention. @pierrehilbert @jlind23 Can we prioritize this issue (re-enabling the test mentioned in this issue as it is currently skipped) in an upcoming sprint, please? See https://github.com/elastic/beats/pull/41909#issuecomment-2541253385 for the rationale. @ycombinator looks like this is owned by @elastic/sec-linux-platform. @nfritts is there anyone on your end able to pick this up? @jlind23 I'm not sure. I'm doing some research to see what all is required. Even if we picked it up though, I think having this fixed by the next feature freeze is unlikely looking at the PTO calendar. > I think having this fixed by the next feature freeze is unlikely looking at the PTO calendar. It has been disabled for quite some time so I don't think we need to rush that hard to get it back on track but the sooner the better.
Pinging @elastic/sec-linux-platform (Team:Security-Linux Platform) During the migration of beats-ci from Jenkins to Buildkite, a number of tests were failing consistently due to issues unrelated to the migration. Those tests were disabled to stabilize the CI, with the intent to revisit them post-migration. @oakrizan has reviewed them all in her draft PRs linked above in the description, and has opened tickets such as this one to highlight to the product teams the tests that are currently still disabled and could use some attention. @pierrehilbert @jlind23 Can we prioritize this issue (re-enabling the test mentioned in this issue as it is currently skipped) in an upcoming sprint, please? See https://github.com/elastic/beats/pull/41909#issuecomment-2541253385 for the rationale. @ycombinator looks like this is owned by @elastic/sec-linux-platform. @nfritts is there anyone on your end able to pick this up? @jlind23 I'm not sure. I'm doing some research to see what all is required. Even if we picked it up though, I think having this fixed by the next feature freeze is unlikely looking at the PTO calendar. > I think having this fixed by the next feature freeze is unlikely looking at the PTO calendar. It has been disabled for quite some time so I don't think we need to rush that hard to get it back on track but the sooner the better.
[ "https://github.com/elastic/beats/commit/887ed7c8d7c41728bb7c61273ba74ae291f0c6d5", "https://github.com/elastic/beats/commit/cc9b79a7a01242d9c9a80dfed90e14ba3ffa8866", "https://github.com/elastic/beats/commit/9f01f00f9a7fba58898cb9bd956365b539729a6c", "https://github.com/elastic/beats/commit/7bdbad7beb1cd79cf...
2024-12-27T11:29:35Z
https://github.com/elastic/beats/tree/27b714dc999ae6e73abee95b3e294a077af20599
[ "go build ./winlogbeat/... ./x-pack/winlogbeat/..." ]
[ "New-Item -ItemType Directory -Force -Path reports | Out-Null; go test -json -v ./... | Tee-Object -FilePath reports\\go-test-results.json" ]
[ "Select-String -Path reports\\go-test-results.json -Pattern '\"Action\":\"(pass|fail|skip)\".*\"Test\":\"' | ForEach-Object { $_.Line }" ]
def parser(log: str) -> dict[str, str]: # Improved parser with newline-boundary deduplication for names import re def unwrap_name(s: str) -> str: # Remove CRs s = s.replace('\r', '') # Collapse duplicate boundary char around newlines: e.g., "co\nom" -> "com", "Ord\nder" -> "Order" s = re.sub(r'([A-Za-z0-9_\-./:])\n\1', r'\1', s) # Remove remaining newlines s = s.replace('\n', '') return s results: dict[str, str] = {} # Iterate over JSON-like objects; tolerate line wraps for obj_match in re.finditer(r'\{.*?\}', log, re.DOTALL): obj = obj_match.group(0) # Extract Action with tolerance for embedded whitespace/newlines m_action = re.search(r'"Action"\s*:\s*"([^"]*?)"', obj, re.DOTALL) if not m_action: continue action_raw = m_action.group(1) action_norm = re.sub(r'\s+', '', action_raw).lower() if action_norm == 'error': action_norm = 'fail' if action_norm not in ('pass', 'fail', 'skip'): continue # Extract Test field m_test = re.search(r'"Test"\s*:\s*"([^"]*?)"', obj, re.DOTALL) if not m_test: continue test_raw = m_test.group(1) test_name = unwrap_name(test_raw).strip() if not test_name: continue # Extract Package (optional) pkg_name = '' m_pkg = re.search(r'"Package"\s*:\s*"([^"]*?)"', obj, re.DOTALL) if m_pkg: pkg_raw = m_pkg.group(1) pkg_name = unwrap_name(pkg_raw).strip() key = f"{pkg_name}::{test_name}" if pkg_name else test_name results[key] = action_norm return results
[ "github.com/elastic/beats/v7/x-pack/filebeat/input/httpjson::TestInput/pagination_not_log_fail", "github.com/elastic/beats/v7/x-pack/filebeat/input/entityanalytics/internal/collections::TestUUIDTree_ExpandFromSet/elements", "github.com/elastic/beats/v7/x-pack/filebeat/input/entityanalytics/internal/collections:...
[ "github.com/elastic/beats/v7/x-pack/filebeat/input/netflow::TestDatFiles/IPFIX_vIPtela_with_VPN_id", "github.com/elastic/beats/v7/filebeat/input/v2/input-cursor::TestStore_Get/same_resource_is_returned", "github.com/elastic/beats/v7/filebeat/input/syslog::TestDateParsing/z+00:00", "github.com/elastic/beats/v7...
starryzhang/sweb.eval.win.elastic_1776_beats-42172
copier-org/copier
2007
copier-org__copier-2007
Python
[ "2006" ]
cc9aacb00742ffc792a3556734b020bb783d1e1c
diff --git a/copier/main.py b/copier/main.py index f164ff073..625009c40 100644 --- a/copier/main.py +++ b/copier/main.py @@ -1232,10 +1232,13 @@ def _apply_update(self) -> None: # noqa: C901 # The 3-way merge might have resolved conflicts automatically, # so we need to check if the file contains conflict markers # before storing the file name for marking it as unmerged after the loop. - with Path(fname).open() as conflicts_candidate: + with Path(fname).open("rb") as conflicts_candidate: if any( line.rstrip() - in {"<<<<<<< before updating", ">>>>>>> after updating"} + in { + b"<<<<<<< before updating", + b">>>>>>> after updating", + } for line in conflicts_candidate ): conflicted.append(fname)
diff --git a/tests/helpers.py b/tests/helpers.py index c50086b47..3e3e921fa 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -102,7 +102,9 @@ def assert_file(tmp_path: Path, *path: str) -> None: def build_file_tree( - spec: Mapping[StrOrPath, str | bytes | Path], dedent: bool = True + spec: Mapping[StrOrPath, str | bytes | Path], + dedent: bool = True, + encoding: str = "utf-8", ) -> None: """Builds a file tree based on the received spec. @@ -124,7 +126,8 @@ def build_file_tree( assert isinstance(contents, str) contents = textwrap.dedent(contents) mode = "wb" if binary else "w" - with path.open(mode) as fd: + enc = None if binary else encoding + with Path(path).open(mode, encoding=enc) as fd: fd.write(contents) diff --git a/tests/test_exclude.py b/tests/test_exclude.py index 516c4c965..db9d113ae 100644 --- a/tests/test_exclude.py +++ b/tests/test_exclude.py @@ -55,7 +55,7 @@ def test_config_include(tmp_path_factory: pytest.TempPathFactory) -> None: @pytest.mark.xfail( - condition=platform.system() in {"Darwin", "Windows"}, + condition=platform.system() == "Darwin", reason="OS without proper UTF-8 filesystem.", strict=True, ) diff --git a/tests/test_updatediff.py b/tests/test_updatediff.py index 35a7a45fb..07f42c3d8 100644 --- a/tests/test_updatediff.py +++ b/tests/test_updatediff.py @@ -1787,3 +1787,85 @@ def test_update_with_answers_with_umlaut( run_update(dst, skip_answered=True, overwrite=True) answers = load_answersfile_data(dst) assert answers["umlaut"] == "äöü" + + +def test_conflict_on_update_with_unicode_in_content( + tmp_path_factory: pytest.TempPathFactory, +) -> None: + src, dst = map(tmp_path_factory.mktemp, ("src", "dst")) + with local.cwd(src): + build_file_tree( + { + "{{ _copier_conf.answers_file }}.jinja": "{{ _copier_answers|to_yaml }}", + "copier.yml": "b: false", + "content.jinja": """\ + aaa🐍 + {%- if b %} + bbb🐍 + {%- endif %} + zzz🐍 + """, + }, + encoding="utf-8", + ) + git("init") + git("add", "-A") + git("commit", "-m1") + git("tag", 
"1") + build_file_tree( + { + "copier.yml": dedent( + """\ + b: false + c: false + """ + ), + "content.jinja": """\ + aaa🐍 + {%- if b %} + bbb🐍 + {%- endif %} + {%- if c %} + ccc🐍 + {%- endif %} + zzz🐍 + """, + }, + encoding="utf-8", + ) + git("commit", "-am2") + git("tag", "2") + # Init project + run_copy(str(src), dst, data={"b": True}, vcs_ref="1") + assert "ccc" not in (dst / "content").read_text(encoding="utf-8") + with local.cwd(dst): + git("init") + git("add", "-A") + git("commit", "-m1") + # Project evolution + Path("content").write_text( + dedent( + """\ + aaa🐍 + bbb🐍 + jjj🐍 + zzz🐍 + """ + ), + encoding="utf-8", + ) + git("commit", "-am2") + # Update from template, inline, with answer changes + run_update(data={"c": True}, defaults=True, overwrite=True, conflict="inline") + assert Path("content").read_text(encoding="utf-8") == dedent( + """\ + aaa🐍 + bbb🐍 + <<<<<<< before updating + jjj🐍 + ======= + ccc🐍 + >>>>>>> after updating + zzz🐍 + """ + )
UnicodeDecodeError when updating on Windows ### Describe the problem During the update of a project to a new template version I got an UnicodeDecodeError. The update was therefore incomplete. ### Template Template: https://github.com/dalito/linkml-project-copier Repo that was updated (from v0.2.2 to v0.3.0): https://github.com/nfdi4cat/pid4cat-model ### To Reproduce _No response_ ### Logs ```console λ copier update --trust --skip-answered Updating to template version 0.3.0 Traceback (most recent call last): File "<frozen runpy>", line 198, in _run_module_as_main File "<frozen runpy>", line 88, in _run_code File "C:\Users\david\.local\bin\copier.exe\__main__.py", line 7, in <module> File "C:\Users\david\.local\pipx\venvs\copier\Lib\site-packages\plumbum\cli\application.py", line 640, in run inst, retcode = subapp.run(argv, exit=False) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\david\.local\pipx\venvs\copier\Lib\site-packages\plumbum\cli\application.py", line 635, in run retcode = inst.main(*tailargs) ^^^^^^^^^^^^^^^^^^^^ File "C:\Users\david\.local\pipx\venvs\copier\Lib\site-packages\copier\cli.py", line 425, in main return _handle_exceptions(inner) ^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\david\.local\pipx\venvs\copier\Lib\site-packages\copier\cli.py", line 70, in _handle_exceptions method() File "C:\Users\david\.local\pipx\venvs\copier\Lib\site-packages\copier\cli.py", line 415, in inner with self._worker( ^^^^^^^^^^^^^ File "C:\Users\david\.local\pipx\venvs\copier\Lib\site-packages\copier\main.py", line 254, in __exit__ raise value File "C:\Users\david\.local\pipx\venvs\copier\Lib\site-packages\copier\cli.py", line 423, in inner worker.run_update() File "C:\Users\david\.local\pipx\venvs\copier\Lib\site-packages\copier\main.py", line 1031, in run_update self._apply_update() File "C:\Users\david\.local\pipx\venvs\copier\Lib\site-packages\copier\main.py", line 1239, in _apply_update if any( ^^^^ File 
"C:\Users\david\.local\pipx\venvs\copier\Lib\site-packages\copier\main.py", line 1242, in <genexpr> for line in conflicts_candidate ^^^^^^^^^^^^^^^^^^^ File "C:\dev\Python312\Lib\encodings\cp1252.py", line 23, in decode return codecs.charmap_decode(input,self.errors,decoding_table)[0] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ UnicodeDecodeError: 'charmap' codec can't decode byte 0x90 in position 712: character maps to <undefined> ``` ### Expected behavior The update should run to completion. ### Screenshots/screencasts/logs _No response_ ### Operating system macOS ### Operating system distribution and version Windows 10 Pro, German locale ### Copier version copier 9.5.0 ### Python version CPython 3.12 ### Installation method pipx+pypi ### Additional context _No response_
[ "https://github.com/copier-org/copier/commit/377b02433d754dcdf9912e96cca322ea710e8884", "https://github.com/copier-org/copier/commit/0162b1f69a7d68458ffcc71773f01a2816779160", "https://github.com/copier-org/copier/commit/4d5380cb3abefa9f3f405daf6e09f87516b478e4", "https://github.com/copier-org/copier/commit/4...
2025-03-03T22:30:02Z
https://github.com/copier-org/copier/tree/cc9aacb00742ffc792a3556734b020bb783d1e1c
[ "git config --global --add safe.directory (Get-Location).Path ; poetry install --with dev,docs -n ; poetry build" ]
[ "$ErrorActionPreference = 'Stop'; New-Item -ItemType Directory -Force -Path reports | Out-Null; poetry run python -m pip install pytest-json-report; poetry run pytest -vv -rA --json-report --json-report-file=reports/pytest-results.json" ]
[ "Get-Content -Raw reports\\pytest-results.json" ]
def parser(log: str) -> dict[str, str]: import re import json def map_outcome(out: str) -> str: o = (out or "").strip().lower() if o == "passed": return "pass" if o in {"skipped", "xfailed"}: return "skip" # failed, error, xpassed, and any unknown → fail return "fail" results: dict[str, str] = {} # Try JSON parsing first for maximum reliability try: data = json.loads(log) tests = data.get("tests") or [] for t in tests: nodeid = t.get("nodeid") outcome = t.get("outcome") if not nodeid or not outcome: continue nodeid_clean = nodeid.replace("\r", "").replace("\n", "") if not nodeid_clean.strip(): continue results[nodeid_clean] = map_outcome(outcome) if results: return results except Exception: pass # Fallback: regex scanning # Capture nodeid across newlines/escapes nodeid_re = re.compile(r'"nodeid"\s*:\s*"((?:[^"\\]|\\.)*)"', re.IGNORECASE | re.DOTALL) outcome_re = re.compile(r'"outcome"\s*:\s*"([^"]+)"', re.IGNORECASE | re.DOTALL) for m in nodeid_re.finditer(log): nodeid_raw = m.group(1) start_after_nodeid = m.end() # Search for the next nodeid to limit the range to this test object next_nodeid = log.find('"nodeid"', start_after_nodeid) search_end = next_nodeid if next_nodeid != -1 else len(log) # Find the first "outcome" after this nodeid within the object range segment = log[start_after_nodeid:search_end] om = None for mo in outcome_re.finditer(segment): om = mo break # first outcome is assumed to be the top-level test outcome if not om: # As last resort, search globally after nodeid and take the first match global_match = outcome_re.search(log, start_after_nodeid) if global_match: om = global_match else: continue outcome_val = om.group(1) nodeid_clean = nodeid_raw.replace("\r", "").replace("\n", "") if not nodeid_clean.strip(): continue results[nodeid_clean] = map_outcome(outcome_val) return results
[ "tests/test_config.py::test_flags_bad_data[data3]" ]
[ "tests/test_config.py::test_invalid_yaml", "tests/test_answersfile.py::test_external_data" ]
starryzhang/sweb.eval.win.copier-org_1776_copier-2007
CherryHQ/cherry-studio
8634
CherryHQ__cherry-studio-8634
TypeScript
[ "8611" ]
2b750b6d29e5dc5115e2700a8e8f7d46775aa1eb
diff --git a/src/renderer/src/utils/export.ts b/src/renderer/src/utils/export.ts index 7f441ef1e33..bc7d9b20adf 100644 --- a/src/renderer/src/utils/export.ts +++ b/src/renderer/src/utils/export.ts @@ -169,7 +169,7 @@ const createBaseMarkdown = ( ) => { const { forceDollarMathInMarkdown } = store.getState().settings const roleText = getRoleText(message.role, message.model?.name, message.model?.provider) - const titleSection = `### ${roleText}` + const titleSection = `## ${roleText}` let reasoningSection = '' if (includeReasoning) {
diff --git a/src/renderer/src/utils/__tests__/export.test.ts b/src/renderer/src/utils/__tests__/export.test.ts index e3c5c4d16ea..da7cc668151 100644 --- a/src/renderer/src/utils/__tests__/export.test.ts +++ b/src/renderer/src/utils/__tests__/export.test.ts @@ -262,7 +262,7 @@ describe('export', () => { { type: MessageBlockType.MAIN_TEXT, content: '' } ]) const markdown = messageToMarkdown(msgWithEmptyContent) - expect(markdown).toContain('### 🧑‍💻 User') + expect(markdown).toContain('## 🧑‍💻 User') // Should handle empty content gracefully expect(markdown).toBeDefined() expect(markdown.split('\n\n').filter((s) => s.trim()).length).toBeGreaterThanOrEqual(1) @@ -272,11 +272,11 @@ describe('export', () => { const msg = mockedMessages.find((m) => m.id === 'u1') expect(msg).toBeDefined() const markdown = messageToMarkdown(msg!) - expect(markdown).toContain('### 🧑‍💻 User') + expect(markdown).toContain('## 🧑‍💻 User') expect(markdown).toContain('hello user') // The format is: [titleSection, '', contentSection, citation].join('\n') - // When citation is empty, we get: "### 🧑‍💻 User\n\nhello user\n" + // When citation is empty, we get: "## 🧑‍💻 User\n\nhello user\n" const sections = markdown.split('\n\n') expect(sections.length).toBeGreaterThanOrEqual(2) // title section and content section }) @@ -285,11 +285,11 @@ describe('export', () => { const msg = mockedMessages.find((m) => m.id === 'a1') expect(msg).toBeDefined() const markdown = messageToMarkdown(msg!) 
- expect(markdown).toContain('### 🤖 Assistant') + expect(markdown).toContain('## 🤖 Assistant') expect(markdown).toContain('hi assistant') // The format is: [titleSection, '', contentSection, citation].join('\n') - // When citation is empty, we get: "### 🤖 Assistant\n\nhi assistant\n" + // When citation is empty, we get: "## 🤖 Assistant\n\nhi assistant\n" const sections = markdown.split('\n\n') expect(sections.length).toBeGreaterThanOrEqual(2) // title section and content section }) @@ -298,7 +298,7 @@ describe('export', () => { const msg = createMessage({ role: 'user', id: 'u2' }, []) mockedMessages.push(msg) const markdown = messageToMarkdown(msg) - expect(markdown).toContain('### 🧑‍💻 User') + expect(markdown).toContain('## 🧑‍💻 User') // Check that it doesn't fail when no content exists expect(markdown).toBeDefined() }) @@ -309,7 +309,7 @@ describe('export', () => { { type: MessageBlockType.CITATION } ]) const markdown = messageToMarkdown(msgWithCitation) - expect(markdown).toContain('### 🤖 Assistant') + expect(markdown).toContain('## 🤖 Assistant') expect(markdown).toContain('Main content') expect(markdown).toContain('[1] [https://example1.com](Example Citation 1)') }) @@ -341,7 +341,7 @@ describe('export', () => { const msg = mockedMessages.find((m) => m.id === 'a2') expect(msg).toBeDefined() const markdown = messageToMarkdownWithReasoning(msg!) - expect(markdown).toContain('### 🤖 Assistant') + expect(markdown).toContain('## 🤖 Assistant') expect(markdown).toContain('Main Answer') expect(markdown).toContain('<details') expect(markdown).toContain('<summary>common.reasoning_content</summary>') @@ -366,7 +366,7 @@ describe('export', () => { const msg = mockedMessages.find((m) => m.id === 'a4') expect(msg).toBeDefined() const markdown = messageToMarkdownWithReasoning(msg!) 
- expect(markdown).toContain('### 🤖 Assistant') + expect(markdown).toContain('## 🤖 Assistant') expect(markdown).toContain('Simple Answer') expect(markdown).not.toContain('<details') }) @@ -375,7 +375,7 @@ describe('export', () => { const msg = mockedMessages.find((m) => m.id === 'a5') expect(msg).toBeDefined() const markdown = messageToMarkdownWithReasoning(msg!) - expect(markdown).toContain('### 🤖 Assistant') + expect(markdown).toContain('## 🤖 Assistant') expect(markdown).toContain('Answer with citation') expect(markdown).toContain('<details') expect(markdown).toContain('Some thinking') @@ -1015,7 +1015,7 @@ describe('Citation formatting in Markdown export', () => { const markdown = messageToMarkdown(msgWithCitations) // Should contain the title and content - expect(markdown).toContain('### 🤖 Assistant') + expect(markdown).toContain('## 🤖 Assistant') expect(markdown).toContain('Content with citations') // Should include citation content (mocked by getCitationContent)
[讨论]: 将话题复制为markdown时的标题问题 ### Issue 检查清单 - [x] 我理解 Issue 是用于反馈和解决问题的,而非吐槽评论区,将尽可能提供更多信息帮助问题解决。 - [x] 我确认自己需要的是提出问题并且讨论问题,而不是 Bug 反馈或需求建议。 ### 平台 Windows ### 版本 1.5.3 ### 您的问题 将话题复制为markdown时 🧑‍💻 User和🤖 Assistant应该是二级标题吧 现在是三级标题,和消息中的标题同级了 ### 相关背景 _No response_ ### 补充信息 ```shell ``` ### 优先级 低 (有空再看)
[ "https://github.com/CherryHQ/cherry-studio/commit/f46234bb37a53ca84baddb16e70f3d434b400b75", "https://github.com/CherryHQ/cherry-studio/commit/e44d69e398056c7edc235ca17e6e12372a5ce05b" ]
2025-07-29T07:01:44Z
https://github.com/CherryHQ/cherry-studio/tree/2b750b6d29e5dc5115e2700a8e8f7d46775aa1eb
[ "$env:PATH=\"C:\\node;$env:PATH\"; cd C:\\testbed; yarn install; yarn build" ]
[ "cd C:\\testbed; mkdir reports -Force 2&gt;$null; yarn vitest run --reporter=json --outputFile=reports\\vitest-results.json; node -e \"const fs=require('fs'); const j=JSON.parse(fs.readFileSync('reports/vitest-results.json','utf8')); const lines=[]; for(const f of (j.testResults||[])) for(const a of (f.assertionRes...
[ "cd C:\\testbed; Get-Content -Raw reports\\test-statuses.txt" ]
def parser(log: str) -> dict[str, str]: import re results: dict[str, str] = {} def map_status(st: str) -> str: st = st.lower() if st in ("passed", "pass", "ok", "success", "✓"): return "pass" if st in ("skipped", "skip", "pending", "ignored", "○"): return "skip" return "fail" def clean_line(s: str) -> str: s = s.replace("\ufeff", "") s = re.sub(r"^\s*PS [^>]*>\s*", "", s) s = re.sub(r"^\s*PS>\s*", "", s) return s.rstrip("\r") def normalize_name(raw: str) -> str: raw = raw.replace("\ufeff", "").strip() # Remove line-wrap artifacts where a path got split as "/r\nroot" -> "/root" raw = re.sub(r"/([a-zA-Z])\n([A-Za-z])", r"/\2", raw) # Fix word splits across wrapped newlines: "chil\nd" -> "child" raw = re.sub(r"([A-Za-z0-9])\n([a-z0-9])", r"\1\2", raw) # Replace remaining newlines with single spaces raw = re.sub(r"\n+", " ", raw) raw = re.sub(r"\s+", " ", raw).strip() # Strip trailing punctuation raw = re.sub(r"[\-–—:]+$", "", raw).strip() return raw # --------- JUnit XML (if present) ---------- if "<testsuite" in log and "<testcase" in log: try: import xml.etree.ElementTree as ET xml_start = re.search(r"(?is)<testsuite\b", log) if xml_start: root = ET.fromstring(log[xml_start.start() :]) for tc in root.iter(): if tc.tag.lower().endswith("testcase"): name = tc.attrib.get("name", "").strip() classname = tc.attrib.get("classname", "").strip() full = f"{classname}.{name}".strip(".") if classname else name status = "pass" for ch in list(tc): tag = ch.tag.lower() if tag.endswith("skipped"): status = "skip" break if tag.endswith("failure") or tag.endswith("error"): status = "fail" break if full: results[full] = status if results: return results except Exception: pass # --------- Physical lines (strip prompts/commands) ---------- phys_lines: list[str] = [] for ln in log.splitlines(): if not ln or not ln.strip(): continue if re.match(r"^\s*PS(?: [^>]*)?>\s*$", ln): continue if re.match(r"^\s*PS(?: [^>]*)?>.*\b(Get-Content|cd)\b", ln): continue phys_lines.append(clean_line(ln)) 
# --------- Assemble logical test records ---------- # Primary: status is aligned with >=2 spaces or a tab before it (as in the file) end_status_re = re.compile(r"(?i)\s{2,}(passed|failed|skipped)\s*$") # Wrapped status: previous line ended with 'p' and next line is 'passed' status_only_re = re.compile(r"(?i)^(passed|failed|skipped)\s*$") cur = "" for ln in phys_lines: s = ln.rstrip() if not s.strip(): continue # If this line is only status, close record if we have a buffer mo = status_only_re.match(s.strip()) if mo and cur: name = cur status = mo.group(1) cur = "" nm = normalize_name(name) if nm: results[nm] = map_status(status) continue # Append with newline to preserve wrap positions if cur: cur += "\n" + s else: cur = s m = end_status_re.search(cur) if m: status = m.group(1) name = cur[: m.start()].rstrip() # Fix specific artifact: trailing standalone letter before status due to wrap ("... p\npassed") name = re.sub(r"\b[a-zA-Z]\s*$", lambda mm: "" if len(mm.group(0).strip()) == 1 else mm.group(0), name) cur = "" nm = normalize_name(name) if nm: results[nm] = map_status(status) # Truncated tail without status is ignored # --------- Fallback single-line formats ---------- for ln in phys_lines: s = ln.strip() m = re.match(r"^(.*?)(?:\s+)(PASSED|FAILED|SKIPPED|ERROR)\s*$", s, flags=re.I) if m: results.setdefault(normalize_name(m.group(1)), map_status(m.group(2))) continue m = re.match(r"^---\s+(PASS|FAIL|SKIP):\s+(.+?)(?:\s+\(\d|\s*$)", s, flags=re.I) if m: results.setdefault(normalize_name(m.group(2)), map_status(m.group(1))) continue m = re.match(r"^\s*([✓✕○])\s+(.*?)\s*$", s) if m: results.setdefault(normalize_name(m.group(2)), map_status(m.group(1))) return results
[ "TagExtractor å¤šä¸ªæ ‡ç­¾å¤„ç† åº”è¯¥å¤„ç†ä¸‰ä¸ªè¿žç»­æ ‡ç­¾çš„分次输出 p" ]
[ "CitationTooltip content display and title logic should fallback to hostname whenn title is empty or whitespace", "KnowledgeBaseFormModal basic rendering should handle empty panels array passed KnowledgeBaseFormModal menu interaction should switch panels when menu item is cclicked", "file getFileType should ret...
starryzhang/sweb.eval.win.cherryhq_1776_cherry-studio-8634
gleam-lang/gleam
4850
gleam-lang__gleam-4850
Rust
[ "4817" ]
f13b8649d72a843209428a9c974228209a6b7912
diff --git a/CHANGELOG.md b/CHANGELOG.md index fe22e167143..f725bf83d63 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -138,3 +138,7 @@ expression as safe to remove, instead of just highlighting the double negation. ([Giacomo Cavalieri](https://github.com/giacomocavalieri)) + +- Fixed a bug where the compiler would crash if there was an invalid version + requirement in a project's `gleam.toml`. + ([Giacomo Cavalieri](https://github.com/giacomocavalieri)) diff --git a/compiler-core/src/requirement.rs b/compiler-core/src/requirement.rs index 036ae08b682..02e7c75a40c 100644 --- a/compiler-core/src/requirement.rs +++ b/compiler-core/src/requirement.rs @@ -122,7 +122,10 @@ impl<'de> Visitor<'de> for RequirementVisitor { where E: de::Error, { - Ok(FromStr::from_str(value).expect("expected string")) + match value.parse::<Requirement>() { + Ok(value) => Ok(value), + Err(error) => Err(de::Error::custom(error)), + } } fn visit_map<M>(self, visitor: M) -> Result<Self::Value, M::Error> @@ -165,4 +168,15 @@ mod tests { Requirement::git("https://github.com/gleam-lang/otp.git", "4d34935") ); } + + #[test] + fn read_wrong_version() { + let toml = r#" + short = ">= 2.0 and < 3.0.0" + "#; + + let error = + toml::from_str::<HashMap<String, Requirement>>(toml).expect_err("invalid version"); + insta::assert_snapshot!(error.to_string()); + } }
diff --git a/compiler-core/src/snapshots/gleam_core__requirement__tests__read_wrong_version.snap b/compiler-core/src/snapshots/gleam_core__requirement__tests__read_wrong_version.snap new file mode 100644 index 00000000000..9f71c70f618 --- /dev/null +++ b/compiler-core/src/snapshots/gleam_core__requirement__tests__read_wrong_version.snap @@ -0,0 +1,5 @@ +--- +source: compiler-core/src/requirement.rs +expression: error.to_string() +--- +>= 2.0 and < 3.0.0 is not a valid version. missing patch version: 2.0 for key `short` at line 2 column 21
Javascript InvalidVersionFormat on Windows When running gleam build on windows 11 with 1.12.0-rc3, with default code from gleam new with this added to gleam.toml ```yaml target = "javascript" [javascript] typescript_declarations = true runtime = "node" ``` Logs with trace ``` DEBUG download_deps: locking_directory path="E:\\dev\\temp\\gl_ts\\build\\packages\\gleam.lock" TRACE download_deps: reading_file path="E:\\dev\\temp\\gl_ts\\gleam.toml" error: Fatal compiler bug! This is a bug in the Gleam compiler, sorry! Please report this crash to https://github.com/gleam-lang/gleam/issues/new and include this error message with your report. Panic: D:\a\gleam\gleam\compiler-core\src\requirement.rs:125 expected string: InvalidVersionFormat { input: "javascript", error: "encountered unexpected token: \"javascript\"" } Gleam version: 1.12.0-rc3 Operating system: windows ```
Can you share your entire config please 🙏 IMO, this is not bug. I'm able to reproduce your case: ```sh test localhost@10.54.65.52 @main A $ GLEAM_LOG=trace gleam1.12 build DEBUG download_deps: locking_directory path="/data/data/com.termux/files/home/test/build/packages/gleam.lock" TRACE download_deps: reading_file path="/data/data/com.termux/files/home/test/gleam.toml" error: Fatal compiler bug! This is a bug in the Gleam compiler, sorry! Please report this crash to https://github.com/gleam-lang/gleam/issues/new and include this error message with your report. Panic: /project/compiler-core/src/requirement.rs:125 expected string: InvalidVersionFormat { input: "javascript", error: "encountered unexpected token: \"javascript\"" } Gleam version: 1.12.0-rc2 Operating system: linux If you can also share your code and say what file you were editing or any steps to reproduce the crash that would be a great help. You may also want to try again with the `GLEAM_LOG=trace` environment variable set. test localhost@10.54.65.52 @main A $ ^cat gleam.toml name = "test_app" version = "1.0.0" # Fill out these fields if you intend to generate HTML documentation or publish # your project to the Hex package manager. # # description = "" # licences = ["Apache-2.0"] # repository = { type = "github", user = "", repo = "" } # links = [{ title = "Website", href = "" }] # # For a full reference of all the available options, you can have a look at # https://gleam.run/writing-gleam/gleam-toml/. 
[dependencies] gleam_stdlib = ">= 0.44.0 and < 2.0.0" [dev-dependencies] gleeunit = ">= 1.0.0 and < 2.0.0" target = "javascript" [javascript] typescript_declarations = false runtime = "node" ``` But if I edit code so `target` is before any block, it works correctly (I get another issue due to Hex's cert): ```sh gleam1.12 build Resolving versions error: Dependency resolution failed An error occured while chosing the version of gleam_stdlib: error sending request for url (https://repo.hex.pm/packages/gleam_stdlib) test localhost@10.54.65.52 @main A $ nvim test localhost@10.54.65.52 @main A $ ^cat gleam.toml name = "test_app" version = "1.0.0" target = "javascript" # Fill out these fields if you intend to generate HTML documentation or publish # your project to the Hex package manager. # # description = "" # licences = ["Apache-2.0"] # repository = { type = "github", user = "", repo = "" } # links = [{ title = "Website", href = "" }] # # For a full reference of all the available options, you can have a look at # https://gleam.run/writing-gleam/gleam-toml/. [dependencies] gleam_stdlib = ">= 0.44.0 and < 2.0.0" [dev-dependencies] gleeunit = ">= 1.0.0 and < 2.0.0" [javascript] typescript_declarations = false runtime = "node" ``` So, when first variant is applied, target field is parsed as one of `dev-dependencies`s entries. If `target` is in the wrong place, then there will be an error, yes. However, this should not cause the compiler to panic. Rather, a proper error message should be reported For instance, panic message points to this fragment: https://github.com/gleam-lang/gleam/blob/7eeac6de4d27edf28448c3072a93158a97b7c904/compiler-core/src/requirement.rs#L121-L126 This type probably constructs here: https://github.com/gleam-lang/gleam/blob/7eeac6de4d27edf28448c3072a93158a97b7c904/compiler-core/src/requirement.rs#L33-L40 Gears is right, the config is invalid, but the build tool shouldn't crash because of user error. We'll fix this.
Can you share your entire config please 🙏 IMO, this is not bug. I'm able to reproduce your case: ```sh test localhost@10.54.65.52 @main A $ GLEAM_LOG=trace gleam1.12 build DEBUG download_deps: locking_directory path="/data/data/com.termux/files/home/test/build/packages/gleam.lock" TRACE download_deps: reading_file path="/data/data/com.termux/files/home/test/gleam.toml" error: Fatal compiler bug! This is a bug in the Gleam compiler, sorry! Please report this crash to https://github.com/gleam-lang/gleam/issues/new and include this error message with your report. Panic: /project/compiler-core/src/requirement.rs:125 expected string: InvalidVersionFormat { input: "javascript", error: "encountered unexpected token: \"javascript\"" } Gleam version: 1.12.0-rc2 Operating system: linux If you can also share your code and say what file you were editing or any steps to reproduce the crash that would be a great help. You may also want to try again with the `GLEAM_LOG=trace` environment variable set. test localhost@10.54.65.52 @main A $ ^cat gleam.toml name = "test_app" version = "1.0.0" # Fill out these fields if you intend to generate HTML documentation or publish # your project to the Hex package manager. # # description = "" # licences = ["Apache-2.0"] # repository = { type = "github", user = "", repo = "" } # links = [{ title = "Website", href = "" }] # # For a full reference of all the available options, you can have a look at # https://gleam.run/writing-gleam/gleam-toml/. 
[dependencies] gleam_stdlib = ">= 0.44.0 and < 2.0.0" [dev-dependencies] gleeunit = ">= 1.0.0 and < 2.0.0" target = "javascript" [javascript] typescript_declarations = false runtime = "node" ``` But if I edit code so `target` is before any block, it works correctly (I get another issue due to Hex's cert): ```sh gleam1.12 build Resolving versions error: Dependency resolution failed An error occured while chosing the version of gleam_stdlib: error sending request for url (https://repo.hex.pm/packages/gleam_stdlib) test localhost@10.54.65.52 @main A $ nvim test localhost@10.54.65.52 @main A $ ^cat gleam.toml name = "test_app" version = "1.0.0" target = "javascript" # Fill out these fields if you intend to generate HTML documentation or publish # your project to the Hex package manager. # # description = "" # licences = ["Apache-2.0"] # repository = { type = "github", user = "", repo = "" } # links = [{ title = "Website", href = "" }] # # For a full reference of all the available options, you can have a look at # https://gleam.run/writing-gleam/gleam-toml/. [dependencies] gleam_stdlib = ">= 0.44.0 and < 2.0.0" [dev-dependencies] gleeunit = ">= 1.0.0 and < 2.0.0" [javascript] typescript_declarations = false runtime = "node" ``` So, when first variant is applied, target field is parsed as one of `dev-dependencies`s entries. If `target` is in the wrong place, then there will be an error, yes. However, this should not cause the compiler to panic. Rather, a proper error message should be reported For instance, panic message points to this fragment: https://github.com/gleam-lang/gleam/blob/7eeac6de4d27edf28448c3072a93158a97b7c904/compiler-core/src/requirement.rs#L121-L126 This type probably constructs here: https://github.com/gleam-lang/gleam/blob/7eeac6de4d27edf28448c3072a93158a97b7c904/compiler-core/src/requirement.rs#L33-L40 Gears is right, the config is invalid, but the build tool shouldn't crash because of user error. We'll fix this.
[ "https://github.com/gleam-lang/gleam/commit/cc289145efd43f85839a98b4ce3b9612f489d55d" ]
2025-08-13T15:53:27Z
https://github.com/gleam-lang/gleam/tree/f13b8649d72a843209428a9c974228209a6b7912
[ "powershell -NoProfile -Command \"cd /testbed; cargo build -q\"" ]
[ "cd /testbed; mkdir -Force reports | Out-Null; cargo test -- --nocapture 2>&1 | Tee-Object -FilePath reports\\test-output.log" ]
[ "cd /testbed; Get-Content reports\\test-output.log" ]
def parser(log: str) -> dict[str, str]: import re def merge_status(prev: str | None, new: str) -> str: # fail dominates, then pass, then skip if prev is None: return new order = {"fail": 3, "pass": 2, "skip": 1} return new if order[new] > order[prev] else prev results: dict[str, str] = {} # ---- Pre-normalise for common Rust/powershell wrapping artifacts ---- # 1) join "o\nok" -> "ok", "fini\nished" -> "finished", etc. (only common tokens) # Do it conservatively for patterns seen in Rust harness output. log2 = log log2 = re.sub(r"(?<=\.)\n(?=\.\.)", "", log2) # "...\n..." -> "......" (rare) log2 = re.sub(r"\.\.\.\s*o\s*\n\s*ok\b", "... ok", log2, flags=re.IGNORECASE) log2 = re.sub(r"\bfini\s*\n\s*ished\b", "finished", log2, flags=re.IGNORECASE) log2 = re.sub(r"\bdependencie\s*\n\s*es\b", "dependencies", log2, flags=re.IGNORECASE) log2 = re.sub(r"\bdependen\s*\n\s*ncy\b", "dependency", log2, flags=re.IGNORECASE) log2 = re.sub(r"\bpath\s*\n\s*hs\b", "paths", log2, flags=re.IGNORECASE) # 2) ensure word boundary after ok/FAILED/ignored when immediately followed by text (e.g., "okLocked!") log2 = re.sub(r"\b(ok|ignored|FAILED)(?=[A-Za-z_])", r"\1 ", log2) # ---- 1) Rust libtest lines: "test name ... 
ok/FAILED/ignored" ---- rust_line_re = re.compile( r"(?m)^\s*test\s+(?P<name>.+?)\s+\.\.\.\s+(?P<status>ok|ignored|FAILED|fail)\b", re.IGNORECASE, ) for m in rust_line_re.finditer(log2): name = m.group("name").strip() status_raw = m.group("status").lower() status = "pass" if status_raw == "ok" else "skip" if status_raw == "ignored" else "fail" results[name] = merge_status(results.get(name), status) # ---- 2) Go test: "--- PASS: TestName", "--- FAIL: TestName", "--- SKIP: TestName" ---- go_re = re.compile(r"(?m)^\s*---\s+(PASS|FAIL|SKIP):\s+([^\s]+)") for m in go_re.finditer(log2): status_raw, name = m.group(1), m.group(2) status = {"PASS": "pass", "FAIL": "fail", "SKIP": "skip"}[status_raw] results[name] = merge_status(results.get(name), status) # ---- 3) pytest: "file.py::test_name PASSED/FAILED/SKIPPED/ERROR/XFAIL" ---- pytest_re = re.compile( r"(?m)^\s*([^\s:][^:\n]*::[^\s]+)\s+(PASSED|FAILED|SKIPPED|ERROR|XFAIL|XPASS)\b" ) for m in pytest_re.finditer(log2): name, st = m.group(1).strip(), m.group(2) status = "pass" if st in ("PASSED", "XPASS") else "skip" if st in ("SKIPPED", "XFAIL") else "fail" results[name] = merge_status(results.get(name), status) # ---- 4) unittest-ish: "TestClass.test_method ... ok/FAIL/ERROR/skipped" ---- unittest_re = re.compile(r"(?m)^\s*([A-Za-z_]\w*(?:\.[A-Za-z_]\w*)+)\s+\.\.\.\s+(ok|FAIL|ERROR|skipped)\b") for m in unittest_re.finditer(log2): name, st = m.group(1).strip(), m.group(2).lower() status = "pass" if st == "ok" else "skip" if st == "skipped" else "fail" results[name] = merge_status(results.get(name), status) # ---- 5) Inline JUnit XML (if present in log) ---- # Minimal XML parsing via regex (robust enough for inline logs). 
if "<testcase" in log2 and "<testsuite" in log2: # Capture testcase blocks (self-closing or with nested tags) tc_re = re.compile(r"<testcase\b([^>]*)>(.*?)</testcase\s*>|<testcase\b([^>]*)/>", re.DOTALL) attr_re = re.compile(r'(\w+)="([^"]*)"') for m in tc_re.finditer(log2): attrs = m.group(1) or m.group(3) or "" body = m.group(2) or "" attr_map = {k: v for k, v in attr_re.findall(attrs)} name = attr_map.get("name") classname = attr_map.get("classname") if not name: continue full = f"{classname}::{name}" if classname else name if re.search(r"<skipped\b", body): status = "skip" elif re.search(r"<failure\b|<error\b", body): status = "fail" else: status = "pass" results[full] = merge_status(results.get(full), status) return results
[ "requirement::tests::read_wrong_version" ]
[ "language_server::tests::hover::hover_function_argument", "erlang::tests::strings::string_of_number_concat", "javascript::tests::assignments::let_assert_nested_string_prefix", "javascript::tests::custom_types::unnamed_fields_typescript", "format::tests::expr_case_nested", "javascript::tests::custom_types:...
starryzhang/sweb.eval.win.gleam-lang_1776_gleam-4850
podman-desktop/podman-desktop
13439
podman-desktop__podman-desktop-13439
TypeScript
[ "13353" ]
9b99c041cd29727920af9b3a23f56bffd9f11c3c
diff --git a/.github/workflows/pr-check.yaml b/.github/workflows/pr-check.yaml index b3817d2b3f54f..d96cab71423fc 100644 --- a/.github/workflows/pr-check.yaml +++ b/.github/workflows/pr-check.yaml @@ -434,7 +434,7 @@ jobs: strategy: fail-fast: false matrix: - os: [windows-2025] + os: [windows-2025, windows-11-arm] if: contains(github.event.pull_request.labels.*.name, 'area/update') || needs.detect_pnpm_changes.outputs.pnpm_lock_changed == 'true' runs-on: ${{ matrix.os }} timeout-minutes: 60
diff --git a/.github/workflows/e2e-main.yaml b/.github/workflows/e2e-main.yaml index b1851fe12c5db..bedc675445437 100644 --- a/.github/workflows/e2e-main.yaml +++ b/.github/workflows/e2e-main.yaml @@ -175,8 +175,8 @@ jobs: !./tests/**/traces/raw win-update-e2e-test: - name: win update e2e tests - ${{ matrix.installation }} - runs-on: windows-2025 + name: ${{ matrix.os }} update e2e tests - ${{ matrix.installation }} + runs-on: ${{ matrix.os }} # disable on forks as secrets are not available if: github.event.repository.fork == false permissions: @@ -188,6 +188,7 @@ jobs: fail-fast: false matrix: installation: ['vanilla', 'custom-extensions'] + os: [windows-2025, windows-11-arm] exclude: - installation: ${{ (github.event.inputs.update_with_extensions && github.event.inputs.update_with_extensions == 'true') && 'N/A' || 'custom-extensions' }} steps: @@ -220,11 +221,30 @@ jobs: env: ELECTRON_ENABLE_INSPECT: true run: | + (Get-Content packages/main/src/plugin/updater.ts).Replace('autoUpdater.autoDownload = false;', 'autoUpdater.autoDownload = false;autoUpdater.allowPrerelease=true;') | Set-Content packages/main/src/plugin/updater.ts pnpm compile:current --win nsis - $path=('./dist/win-unpacked/Podman Desktop.exe' | resolve-path).ProviderPath + $runnerArch=$env:RUNNER_ARCH + $unpackedPath = "dist/win-unpacked" + if ($runnerArch -eq 'ARM64') { + $unpackedPath = "dist/win-arm64-unpacked" + } + echo ("PD_DIST_PATH=" + $unpackedPath) >> $env:GITHUB_ENV + $path=("./$unpackedPath/Podman Desktop.exe" | resolve-path).ProviderPath echo $path echo ("PODMAN_DESKTOP_BINARY=" + $path) >> $env:GITHUB_ENV + - name: Manually set testing-prereleases as update target + run: | + echo "Replace app-update.yml repo to a testing-prerelease, which are more stable update target then the prerelease" + $updateFile = "$env:PD_DIST_PATH/resources/app-update.yml" + if (-Not (Test-Path $updateFile)) { + Write-Error "app-update.yml not found at $updateFile" + } + (Get-Content $updateFile).Replace('repo: 
podman-desktop', 'repo: testing-prereleases') | + Set-Content -ErrorAction Stop $updateFile + echo "Show app-update.yml after replace..." + cat "$env:PD_DIST_PATH/resources/app-update.yml" + - name: Run E2E Update test env: INSTALLATION_TYPE: ${{ matrix.installation }} @@ -247,7 +267,7 @@ jobs: - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 if: always() with: - name: update-e2e-test-${{ matrix.installation }} + name: ${{ matrix.os }}-update-e2e-test-${{ matrix.installation }} path: | ./tests/**/output/ !./tests/**/traces/raw
Extend the testing matrix for update-e2e tests on pr-check to windows arm platform ### Is your enhancement related to a problem? Please describe Right now we run e2e tests for update on `windows-2025` which is x86_64 based arch. We can also make use of `windows-arm-11` runners in preview mode to test update on arm based systems. ### Describe the solution you'd like Expand testing matrix on PR check and on `e2e-test-main` to also run on `windows-11-arm`. ### Describe alternatives you've considered _No response_ ### Additional context _No response_
[ "https://github.com/podman-desktop/podman-desktop/commit/613b286e2e22b241c78c490bd8c1da7bca10137a", "https://github.com/podman-desktop/podman-desktop/commit/62fd300a59694fa7af2ca3ebedd36dc9fb7e58f9", "https://github.com/podman-desktop/podman-desktop/commit/19ae1028d005b19e155984f7240005930e0f03f9", "https://g...
2025-07-30T15:17:41Z
https://github.com/podman-desktop/podman-desktop/tree/9b99c041cd29727920af9b3a23f56bffd9f11c3c
[ "powershell.exe -NoProfile -Command \"pnpm install; pnpm build\"" ]
[ "powershell.exe -NoProfile -Command \"$env:PATH='C:\\Program Files\\nodejs;'+$env:PATH; New-Item -ItemType Directory -Force reports | Out-Null; pnpm test:unit -- --reporter=verbose --reporter=json --outputFile=reports/vitest-results.json\"" ]
[ "powershell.exe -NoProfile -Command \"Get-Content -Raw reports/vitest-results.json\"" ]
def parser(log: str) -> dict[str, str]: """ Parse Vitest/Jest JSON report printed via PowerShell Get-Content -Raw. Strategy: 1) Aggressively isolate the JSON report object from surrounding noise. 2) Remove CR/LF inserted by PowerShell wrapping. 3) json.loads and validate schema/counters. 4) Extract every assertionResult with correct status mapping. 5) Fallback (only if JSON cannot be parsed): conservative regex that will not fabricate failures by mis-pairing fields. """ import json import re def norm_status(s: str) -> str: s = (s or "").strip().lower() if s in ("passed", "pass", "ok", "success"): return "pass" if s in ("skipped", "skip", "pending", "todo", "disabled"): return "skip" return "fail" def unique(base: str, used: set[str]) -> str: if base not in used: used.add(base) return base i = 2 while f"{base} [{i}]" in used: i += 1 name = f"{base} [{i}]" used.add(name) return name def isolate_json(text: str) -> str | None: if not text: return None t = text.replace("\ufeff", "").replace("\x00", "") # Start at a strong signature if possible sigs = [ '{"numTotalTestSuites"', '{"numTotalTests"', '{"success"', '{"testResults"', ] start = -1 for sig in sigs: pos = t.find(sig) if pos != -1 and (start == -1 or pos < start): start = pos if start == -1: # fallback to first '{' start = t.find("{") if start == -1: return None end = t.rfind("}") if end == -1 or end <= start: return None cand = t[start : end + 1] # Undo PowerShell wrapping cand = cand.replace("\r", "").replace("\n", "") return cand.strip() def validate_report(obj: object) -> bool: if not isinstance(obj, dict): return False tr = obj.get("testResults") if not isinstance(tr, list) or len(tr) == 0: return False # Basic keys usually exist if "numTotalTests" not in obj and "success" not in obj: return False return True def count_assertions(obj: dict) -> int: total = 0 tr = obj.get("testResults", []) if not isinstance(tr, list): return 0 for suite in tr: if not isinstance(suite, dict): continue ars = 
suite.get("assertionResults", []) if isinstance(ars, list): total += len(ars) return total def parse_report(obj: dict) -> dict[str, str]: results: dict[str, str] = {} used: set[str] = set() suites = obj.get("testResults", []) for suite_i, suite in enumerate(suites): if not isinstance(suite, dict): continue suite_name = str(suite.get("name") or "").strip() assertions = suite.get("assertionResults", []) if not isinstance(assertions, list): assertions = [] for case_i, case in enumerate(assertions): if not isinstance(case, dict): continue st = norm_status(str(case.get("status") or "")) title = str(case.get("title") or "").strip() full = str(case.get("fullName") or "").strip() anc = case.get("ancestorTitles") anc_list = [] if isinstance(anc, list): anc_list = [str(a).strip() for a in anc if str(a).strip()] if anc_list or title: test_id = " > ".join([*anc_list, title]).strip(" >") else: test_id = full or f"test[{suite_i}:{case_i}]" name = f"{suite_name}::{test_id}" if suite_name else test_id results[unique(name, used)] = st return results # ---- main: JSON path ---- cand = isolate_json(log or "") if cand: try: obj = json.loads(cand) if validate_report(obj): # Validate counters if available; if mismatch is extreme, consider invalid parse. expected = obj.get("numTotalTests") found = count_assertions(obj) if isinstance(expected, int): # Accept if close; Vitest sometimes counts differently with retries, # but should be in the same order of magnitude. if expected > 0 and (found == 0 or abs(found - expected) > max(50, expected // 2)): raise ValueError("Parsed JSON but assertion count mismatch; likely wrong substring.") res = parse_report(obj) # Sanity: if root says no failures, don't allow any fail statuses. 
if isinstance(obj.get("numFailedTests"), int) and obj.get("numFailedTests") == 0: if any(v == "fail" for v in res.values()): raise ValueError("Root reports 0 failed tests but parsed failures; likely wrong parse.") return res except Exception: pass # ---- conservative fallback (no fabricated fails) ---- # If JSON can't be parsed, do best-effort extraction of *explicit* statuses. # Default to "pass" unless status explicitly indicates skip/fail. results: dict[str, str] = {} used: set[str] = set() s = (log or "").replace("\ufeff", "").replace("\x00", "").replace("\r", "").replace("\n", "") # Try to capture individual assertion-like objects and read status/name within same object. start_re = re.compile(r'\{(?=[^{}]*"status"\s*:\s*")') # optional suite names suite_iter = list( re.finditer(r'"name"\s*:\s*"((?:\\.|[^"\\])*)"\s*,\s*"assertionResults"\s*:\s*\[', s) ) def unesc(val: str) -> str: try: return json.loads(f'"{val}"') except Exception: return val.replace('\\"', '"').replace("\\\\", "\\") regions = [] if suite_iter: for i, m in enumerate(suite_iter): st = m.end() en = suite_iter[i + 1].start() if i + 1 < len(suite_iter) else len(s) regions.append((unesc(m.group(1)), s[st:en])) else: regions.append(("", s)) for suite_name, region in regions: for m in start_re.finditer(region): i = m.start() depth = 0 j = i while j < len(region): ch = region[j] if ch == "{": depth += 1 elif ch == "}": depth -= 1 if depth == 0: j += 1 break j += 1 if depth != 0: continue obj_txt = region[i:j] ms = re.search(r'"status"\s*:\s*"((?:\\.|[^"\\])*)"', obj_txt) if not ms: continue status_raw = unesc(ms.group(1)) status = status_raw.strip().lower() # Extract name mf = re.search(r'"fullName"\s*:\s*"((?:\\.|[^"\\])*)"', obj_txt) mt = re.search(r'"title"\s*:\s*"((?:\\.|[^"\\])*)"', obj_txt) test_id = unesc((mf.group(1) if mf else "") or (mt.group(1) if mt else "")).strip() if not test_id: continue # Conservative status mapping in fallback: if status in ("skipped", "skip", "pending", "todo", 
"disabled"): st = "skip" elif status in ("failed", "fail", "error"): st = "fail" else: st = "pass" name = f"{suite_name}::{test_id}" if suite_name else test_id results[unique(name, used)] = st return results
[ "Expect node empty screen", "Check create volume", "opening messageBox and hiding banner", "check connection info", "Checkbox checked", "Check with empty", "Check for Docker Compose installed", "Expect clear tasks is being called", "expect invalid field", "provider registered", "Expect long sear...
[ "signedHumanSize", "check error", "mac [2]", "foo =~ /zee/gm", "check open devtools", "dispose", "Expect Escape key works [2]", "Confirm mock values", "Check for path not included", "non default title", "Expect default size [2]", "Expect progress [2]", "makeExecutable windows", "Check info...
starryzhang/sweb.eval.win.podman-desktop_1776_podman-desktop-13439
rustls/rustls
2586
rustls__rustls-2586
Rust
[ "2584" ]
f189222ca160c4e4eacf9fc511255ac43899d037
diff --git a/rustls/benches/benchmarks.rs b/rustls/benches/benchmarks.rs index 5aebc995f0c..5fc473de872 100644 --- a/rustls/benches/benchmarks.rs +++ b/rustls/benches/benchmarks.rs @@ -1,19 +1,17 @@ #![cfg(feature = "ring")] #![allow(clippy::disallowed_types)] -use std::io; use std::sync::Arc; use bencher::{Bencher, benchmark_group, benchmark_main}; use rustls::ServerConnection; use rustls::crypto::ring as provider; -use rustls_test::{FailsReads, KeyType, make_server_config}; +use rustls_test::{KeyType, TestNonBlockIo, make_server_config}; fn bench_ewouldblock(c: &mut Bencher) { let server_config = make_server_config(KeyType::Rsa2048, &provider::default_provider()); let mut server = ServerConnection::new(Arc::new(server_config)).unwrap(); - let mut read_ewouldblock = FailsReads::new(io::ErrorKind::WouldBlock); - c.iter(|| server.read_tls(&mut read_ewouldblock)); + c.iter(|| server.read_tls(&mut TestNonBlockIo::default())); } benchmark_group!(benches, bench_ewouldblock); diff --git a/rustls/src/conn.rs b/rustls/src/conn.rs index 7bacd3f2472..25d573a8850 100644 --- a/rustls/src/conn.rs +++ b/rustls/src/conn.rs @@ -603,6 +603,15 @@ impl<Data> ConnectionCommon<Data> { return Ok((rdlen, wrlen)); } + // If we want to write, but are WouldBlocked by the underlying IO, *and* + // have no desire to read; that is everything. + if let (Some(_), false) = (&blocked_write, self.wants_read()) { + return match wrlen { + 0 => Err(blocked_write.unwrap()), + _ => Ok((rdlen, wrlen)), + }; + } + while !eof && self.wants_read() { let read_size = match self.read_tls(io) { Ok(0) => { @@ -633,6 +642,15 @@ impl<Data> ConnectionCommon<Data> { return Err(io::Error::new(io::ErrorKind::InvalidData, e)); }; + // If we want to read, but are WouldBlocked by the underlying IO, *and* + // have no desire to write; that is everything. 
+ if let (Some(_), false) = (&blocked_read, self.wants_write()) { + return match rdlen { + 0 => Err(blocked_read.unwrap()), + _ => Ok((rdlen, wrlen)), + }; + } + // if we're doing IO until handshaked, and we believe we've finished handshaking, // but process_new_packets() has queued TLS data to send, loop around again to write // the queued messages. @@ -643,9 +661,9 @@ impl<Data> ConnectionCommon<Data> { let blocked = blocked_write.zip(blocked_read); match (eof, until_handshaked, self.is_handshaking(), blocked) { (_, true, false, _) => return Ok((rdlen, wrlen)), + (_, _, _, Some((e, _))) if rdlen == 0 && wrlen == 0 => return Err(e), (_, false, _, _) => return Ok((rdlen, wrlen)), (true, true, true, _) => return Err(io::Error::from(io::ErrorKind::UnexpectedEof)), - (_, _, _, Some((e, _))) => return Err(e), _ => {} } }
diff --git a/rustls-test/src/lib.rs b/rustls-test/src/lib.rs index 51819a4df58..9ea2e80c1e3 100644 --- a/rustls-test/src/lib.rs +++ b/rustls-test/src/lib.rs @@ -969,19 +969,67 @@ pub fn server_name(name: &'static str) -> ServerName<'static> { name.try_into().unwrap() } -pub struct FailsReads { - errkind: io::ErrorKind, +/// An object that impls `io::Read` and `io::Write` for testing. +/// +/// The `reads` and `writes` fields set the behaviour of these trait +/// implementations. They return the `WouldBlock` error if not otherwise +/// configured -- `TestNonBlockIo::default()` does this permanently. +/// +/// This object panics on drop if the configured expected reads/writes +/// didn't take place. +#[derive(Debug, Default)] +pub struct TestNonBlockIo { + /// Each `write()` call is satisfied by inspecting this field. + /// + /// If it is empty, `WouldBlock` is returned. Otherwise the write is + /// satisfied by popping a value and returning it (reduced by the size + /// of the write buffer, if needed). + pub writes: Vec<usize>, + + /// Each `read()` call is satisfied by inspecting this field. + /// + /// If it is empty, `WouldBlock` is returned. Otherwise the read is + /// satisfied by popping a value and copying it into the output + /// buffer. Each value must be no longer than the buffer for that + /// call. 
+ pub reads: Vec<Vec<u8>>, } -impl FailsReads { - pub fn new(errkind: io::ErrorKind) -> Self { - Self { errkind } +impl io::Read for TestNonBlockIo { + fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { + println!("read {:?}", buf.len()); + match self.reads.pop() { + None => Err(io::ErrorKind::WouldBlock.into()), + Some(data) => { + assert!(data.len() <= buf.len()); + let take = core::cmp::min(data.len(), buf.len()); + buf[..take].clone_from_slice(&data[..take]); + Ok(take) + } + } + } +} + +impl io::Write for TestNonBlockIo { + fn write(&mut self, buf: &[u8]) -> io::Result<usize> { + println!("write {:?}", buf.len()); + match self.writes.pop() { + None => Err(io::ErrorKind::WouldBlock.into()), + Some(n) => Ok(core::cmp::min(n, buf.len())), + } + } + + fn flush(&mut self) -> io::Result<()> { + println!("flush"); + Ok(()) } } -impl io::Read for FailsReads { - fn read(&mut self, _b: &mut [u8]) -> io::Result<usize> { - Err(io::Error::from(self.errkind)) +impl Drop for TestNonBlockIo { + fn drop(&mut self) { + // ensure the object was exhausted as expected + assert!(self.reads.is_empty()); + assert!(self.writes.is_empty()); } } diff --git a/rustls/tests/api.rs b/rustls/tests/api.rs index e87fd8cc812..006c8187af7 100644 --- a/rustls/tests/api.rs +++ b/rustls/tests/api.rs @@ -2621,6 +2621,99 @@ fn client_complete_io_for_write() { } } +#[test] +fn client_complete_io_with_nonblocking_io() { + let (mut client, _) = make_pair(KeyType::Rsa2048, &provider::default_provider()); + + // absolutely no progress writing ClientHello + assert_eq!( + client + .complete_io(&mut TestNonBlockIo::default()) + .unwrap_err() + .kind(), + io::ErrorKind::WouldBlock + ); + + // a little progress writing ClientHello + let (mut client, _) = make_pair(KeyType::Rsa2048, &provider::default_provider()); + assert_eq!( + client + .complete_io(&mut TestNonBlockIo { + writes: vec![1], + reads: vec![], + }) + .unwrap(), + (0, 1) + ); + + // complete writing ClientHello + let (mut client, _) = 
make_pair(KeyType::Rsa2048, &provider::default_provider()); + assert_eq!( + client + .complete_io(&mut TestNonBlockIo { + writes: vec![4096], + reads: vec![], + }) + .unwrap_err() + .kind(), + io::ErrorKind::WouldBlock + ); + + // complete writing ClientHello, partial read of ServerHello + let (mut client, _) = make_pair(KeyType::Rsa2048, &provider::default_provider()); + let (rd, wr) = dbg!(client.complete_io(&mut TestNonBlockIo { + writes: vec![4096], + reads: vec![vec![ContentType::Handshake.into()]], + })) + .unwrap(); + assert_eq!(rd, 1); + assert!(wr > 1); + + // data phase: + let (mut client, mut server) = make_pair(KeyType::Rsa2048, &provider::default_provider()); + do_handshake(&mut client, &mut server); + + // read + assert_eq!( + client + .complete_io(&mut TestNonBlockIo { + reads: vec![vec![ContentType::ApplicationData.into()]], + writes: vec![], + }) + .unwrap(), + (1, 0) + ); + + // write + client + .writer() + .write_all(b"hello") + .unwrap(); + + // no progress + assert_eq!( + client + .complete_io(&mut TestNonBlockIo { + reads: vec![], + writes: vec![], + }) + .unwrap_err() + .kind(), + io::ErrorKind::WouldBlock + ); + + // some write progress + assert_eq!( + client + .complete_io(&mut TestNonBlockIo { + reads: vec![], + writes: vec![1], + }) + .unwrap(), + (0, 1) + ); +} + #[test] fn buffered_client_complete_io_for_write() { let provider = provider::default_provider();
0.23.30 appears to stall my integration tests on WSL2 **Checklist** * [x] I've searched the issue tracker for similar bugs. **Describe the bug** My integration tests which make requests consistently stall out with maxed CPU on rustls version 0.23.30 when running on WSL2. **To Reproduce** Steps to reproduce the behavior: 1. Run `cargo test` on WSL2 **Applicable Version(s)** 0.23.30 **Expected behavior** The tests run without incident. **Additional context** Locking rustls to version 0.23.29 resolves the issue for now. Version 0.23.30 works without incident on my Mac. My app continues to run normally via `cargo run` on WSL2. My WSL2 is running Debian 12.11. It stalls in both the VSCode terminal and in Windows Terminal. A sample test which stalls: ```rust /// Tests the login form page rendering. #[tokio::test] async fn login_form() -> Result<(), Box<dyn Error + Send + Sync>> { let (router, _state) = init_test().await; let request = Request::builder() .uri("/login") .header(X_REAL_IP, TEST_IP) .body(Body::empty())?; let response = router.oneshot(request).await?; assert!(response.status().is_success()); let body_str = response_body_str(response).await; assert!(body_str.contains("Log in")); Ok(()) } ```
I observe same behavior when doing tls handshake on a client connection : no progress and maxed cpu : downgrading to 0.23.29 remove this issue. Thanks for the report! Could you distill this down to a stand-alone reproducer? > Thanks for the report! Could you distill this down to a stand-alone reproducer? I am not sure how to do that at this time, sorry. However, by cloning the rustls repo, I was able to determine the specific commit that introduces the issue: https://github.com/rustls/rustls/commit/247fd66d17b6fe8f4d1da28d8525c4c521025be5 Thank you for your work on this project. The `_` arm is being reached here, which causes the infinite loop: ```rust match (eof, until_handshaked, self.is_handshaking(), blocked) { (_, true, false, _) => return Ok((rdlen, wrlen)), (_, false, _, _) => return Ok((rdlen, wrlen)), (true, true, true, _) => return Err(io::Error::from(io::ErrorKind::UnexpectedEof)), (_, _, _, Some((e, _))) => return Err(e), _ => {} } ``` Replacing that arm with `_ => {return Err(io::Error::from(io::ErrorKind::Other)) }` causes my test to fail with `Connect to PostgreSQL: Io(Kind(Other))` instead of loop. I think it is happening during the `sqlx` connection to Postgres in the test, which happens in the `init_test` helper in the code sample I provided before, ultimately from this line in my code: `PgPool::connect(&url).await.expect("Connect to PostgreSQL")`. I have sqlx's "runtime-tokio-rustls" feature enabled, which is how rustls is added to my dependencies. So it seems it is not necessarily related to the Request element of my sample test, but rather initializing the database connection. Edit: I realize now it is probably *supposed* to loop here... but hopefully this comment is somehow useful anyway. We faced a very similar issue on production with windmill. Took us quite a bit of time to root cause to rustls upgrade. We haven't a full reproduction yet but downgrading fully solved it. I left a comment in #2559 . 
Not sure if I found the cause though, since it depends on how `complete_io` is used. I understand the problem now, so I've yanked 0.23.30 to avoid more downstream pain. We'll do a 0.23.31 in the next couple of days. Wow, I have spent around 12 hours in the past day or so trying to debug this. Thank you for yanking. Thanks for fixing this. I started to think that I'm a bum :D (I had to debug another issue with telemetry configuration that was causing a deadlock, and because this one was not repeatable the same way every time, I was even thinking that I'm hitting a compiler bug) <img width="600" height="400" alt="Image" src="https://github.com/user-attachments/assets/a7e639e8-c550-489d-8437-2a3e56a6852d" /> If someone could test with: ```toml [patch.crates-io] rustls = { version = "0.23.31", git = "https://github.com/rustls/rustls", branch = "jbp-prepare-0.23.31" } ``` That would be very helpful! Though I'm reasonably confident the fix is good at this point.
I observe same behavior when doing tls handshake on a client connection : no progress and maxed cpu : downgrading to 0.23.29 remove this issue. Thanks for the report! Could you distill this down to a stand-alone reproducer? > Thanks for the report! Could you distill this down to a stand-alone reproducer? I am not sure how to do that at this time, sorry. However, by cloning the rustls repo, I was able to determine the specific commit that introduces the issue: https://github.com/rustls/rustls/commit/247fd66d17b6fe8f4d1da28d8525c4c521025be5 Thank you for your work on this project. The `_` arm is being reached here, which causes the infinite loop: ```rust match (eof, until_handshaked, self.is_handshaking(), blocked) { (_, true, false, _) => return Ok((rdlen, wrlen)), (_, false, _, _) => return Ok((rdlen, wrlen)), (true, true, true, _) => return Err(io::Error::from(io::ErrorKind::UnexpectedEof)), (_, _, _, Some((e, _))) => return Err(e), _ => {} } ``` Replacing that arm with `_ => {return Err(io::Error::from(io::ErrorKind::Other)) }` causes my test to fail with `Connect to PostgreSQL: Io(Kind(Other))` instead of loop. I think it is happening during the `sqlx` connection to Postgres in the test, which happens in the `init_test` helper in the code sample I provided before, ultimately from this line in my code: `PgPool::connect(&url).await.expect("Connect to PostgreSQL")`. I have sqlx's "runtime-tokio-rustls" feature enabled, which is how rustls is added to my dependencies. So it seems it is not necessarily related to the Request element of my sample test, but rather initializing the database connection. Edit: I realize now it is probably *supposed* to loop here... but hopefully this comment is somehow useful anyway. We faced a very similar issue on production with windmill. Took us quite a bit of time to root cause to rustls upgrade. We haven't a full reproduction yet but downgrading fully solved it. I left a comment in #2559 . 
Not sure if I found the cause though, since it depends on how `complete_io` is used. I understand the problem now, so I've yanked 0.23.30 to avoid more downstream pain. We'll do a 0.23.31 in the next couple of days. Wow, I have spent around 12 hours in the past day or so trying to debug this. Thank you for yanking. Thanks for fixing this. I started to think that I'm a bum :D (I had to debug another issue with telemetry configuration that was causing a deadlock, and because this one was not repeatable the same way every time, I was even thinking that I'm hitting a compiler bug) <img width="600" height="400" alt="Image" src="https://github.com/user-attachments/assets/a7e639e8-c550-489d-8437-2a3e56a6852d" /> If someone could test with: ```toml [patch.crates-io] rustls = { version = "0.23.31", git = "https://github.com/rustls/rustls", branch = "jbp-prepare-0.23.31" } ``` That would be very helpful! Though I'm reasonably confident the fix is good at this point. > If someone could test with: > > [patch.crates-io] > rustls = { version = "0.23.31", git = "https://github.com/rustls/rustls", branch = "jbp-prepare-0.23.31" } > > That would be very helpful! Though I'm reasonably confident the fix is good at this point. This works for me 👍 > If someone could test with: > > ```toml > [patch.crates-io] > rustls = { version = "0.23.31", git = "https://github.com/rustls/rustls", branch = "jbp-prepare-0.23.31" } > ``` > > That would be very helpful! Though I'm reasonably confident the fix is good at this point. Also works for me. Rustls should add some tests that would detect such problems in the future. Yup, we have (additional) tests that reproduce the same symptoms that were reported here. But sometimes that can be a different set of symptoms. Thanks for the confirmations! https://crates.io/crates/rustls/0.23.31
[ "https://github.com/rustls/rustls/commit/ebb081c52b5825e7bcc326bc9fab6836f1a5861b", "https://github.com/rustls/rustls/commit/773cb9cf3a088b4eedc3b121ad0984e42091e00a", "https://github.com/rustls/rustls/commit/c155b3460b5c0297144c50f87fc3f3950472a08a", "https://github.com/rustls/rustls/commit/a1aa8c8da99d07b37...
2025-07-29T12:58:57Z
https://github.com/rustls/rustls/tree/f189222ca160c4e4eacf9fc511255ac43899d037
[ "cd /testbed ; cargo build --locked" ]
[ "cd C:\\testbed; if (!(Test-Path reports)) { New-Item -ItemType Directory reports | Out-Null }; cargo test --locked --all-features --all-targets -- --nocapture 2>&1 | Tee-Object -FilePath reports\\cargo-test.log" ]
[ "cd C:\\testbed; Get-Content reports\\cargo-test.log" ]
def parser(log: str) -> dict[str, str]: import re from xml.etree import ElementTree as ET results: dict[str, str] = {} def set_result(name: str, status: str) -> None: name = (name or "").strip() if not name: return prev = results.get(name) # Prefer fail > skip > pass if duplicated if prev == "fail": return if prev == "skip" and status == "pass": return results[name] = status # ---------------- XML (JUnit-ish) ---------------- xml_candidate = log.strip() if "<testsuite" in xml_candidate and "<testcase" in xml_candidate: def parse_xml(xml_text: str) -> bool: try: root = ET.fromstring(xml_text) except Exception: return False testcases = root.findall(".//testcase") if root.tag != "testcase" else [root] for tc in testcases: name = tc.get("name") or "" classname = tc.get("classname") or "" full = f"{classname}::{name}".strip(":") status = "pass" if tc.find("skipped") is not None: status = "skip" elif tc.find("failure") is not None or tc.find("error") is not None: status = "fail" set_result(full or name, status) return True if parse_xml(xml_candidate): return results for m in re.finditer(r"(<testsuite\b[\s\S]*?</testsuite>)", log): parse_xml(m.group(1)) if results: return results # ---------------- Line-oriented parsing ---------------- status_map = { "ok": "pass", "PASSED": "pass", "PASS": "pass", "FAILED": "fail", "FAIL": "fail", "ERROR": "fail", "ignored": "skip", "SKIPPED": "skip", "SKIP": "skip", "skipped": "skip", "XFAIL": "skip", "XPASS": "fail", } # libtest canonical line libtest_line_re = re.compile( r"^\s*test\s+(?P<name>.+?)\s+\.\.\.\s+(?P<status>ok|FAILED|ignored)\s*$" ) # other frameworks (fallbacks) pytest_re = re.compile( r"^\s*(?P<name>\S+::\S+)\s+(?P<status>PASSED|FAILED|SKIPPED|XFAIL|XPASS|ERROR)\s*$" ) gotest_re = re.compile(r"^\s*---\s+(?P<status>PASS|FAIL|SKIP):\s+(?P<name>\S+)") unittest_re = re.compile( r"^\s*(?P<name>[\w./:-]+)\s+\.\.\.\s+(?P<status>ok|FAIL|ERROR|skipped)\s*$" ) # Wrapped libtest handling (PowerShell/terminal hard-wrap) buffering = 
False buf = "" # Abort buffering if we see clearly unrelated multi-line output abort_re = re.compile( r"^\s*(Running\s+(unittests|tests|doctests)\b|running\s+\d+\s+tests\b|test result:|failures:|error:|thread\s+'.+'\s+panicked)\b", re.IGNORECASE, ) def extract_all_libtests_from_buffer(s: str) -> None: """ The log capture sometimes concatenates multiple 'test ... ... ok' records into one long wrapped line. Extract *all* occurrences. """ # Normalize whitespace a bit, but keep as a single string. norm = re.sub(r"\s+", " ", s) # Find all occurrences of: 'test <name> ... <status>' for m in re.finditer(r"\btest\s+(.+?)\s+\.\.\.\s+(ok|FAILED|ignored)\b", norm): name = m.group(1).strip() st = status_map.get(m.group(2), "fail") set_result(name, st) for raw_line in log.splitlines(): line = raw_line.rstrip("\r\n") # If buffering a wrapped "test ..." record, append and try to extract. if buffering: if abort_re.match(line) and not line.lstrip().startswith("test "): buffering = False buf = "" else: # Preserve a space between lines; hard-wrap may happen mid-word but this is # preferable to accidentally concatenating "... ok" + "test ..." into a name. buf = (buf + " " + line.strip()).strip() # If we have at least one complete status token, try extracting all tests. if re.search(r"\.\.\.\s*(ok|FAILED|ignored)\b", buf): extract_all_libtests_from_buffer(buf) buffering = False buf = "" elif len(buf) > 4096: buffering = False buf = "" continue # Single-line libtest m = libtest_line_re.match(line) if m: set_result(m.group("name"), status_map.get(m.group("status"), "fail")) continue # Begin buffering if it looks like a test line start if line.lstrip().startswith("test "): buffering = True buf = line.strip() # In case it already contains a status (but didn't match due to odd spacing), # try to extract immediately. 
if re.search(r"\.\.\.\s*(ok|FAILED|ignored)\b", buf): extract_all_libtests_from_buffer(buf) buffering = False buf = "" continue # Fallbacks m = pytest_re.match(line) if m: set_result(m.group("name"), status_map.get(m.group("status"), "fail")) continue m = gotest_re.match(line) if m: set_result(m.group("name"), status_map.get(m.group("status"), "fail")) continue m = unittest_re.match(line) if m: set_result(m.group("name"), status_map.get(m.group("status"), "fail")) continue # Flush if log ends mid-buffer if buffering and buf: extract_all_libtests_from_buffer(buf) return results
[ "msgs::handshake_test::can_clone_all_client_extensions", "webpki::client_verifier::test_with_ring::tests::test_with_crls_multiple_cal lls", "crypto::tls12::tests::check_sha512", "crypto::ring::quic::test_with_ring::tests::initial_test_vector_v2", "client::handy::test_with_ring::tests::test_noclientsessionst...
[]
starryzhang/sweb.eval.win.rustls_1776_rustls-2586
microsoft/vscode
239695
microsoft__vscode-239695
TypeScript
[ "237598" ]
3834de8f311b7a3227952910088c6b34abd2a2ba
diff --git a/extensions/terminal-suggest/src/terminalSuggestMain.ts b/extensions/terminal-suggest/src/terminalSuggestMain.ts index 685ed84100564..71bcfba77ca8e 100644 --- a/extensions/terminal-suggest/src/terminalSuggestMain.ts +++ b/extensions/terminal-suggest/src/terminalSuggestMain.ts @@ -239,9 +239,15 @@ export async function getCompletionItemsFromSpecs( let filesRequested = false; let foldersRequested = false; - const precedingText = terminalContext.commandLine.slice(0, terminalContext.cursorPosition + 1); - // TODO: Normalize precedingText to ignore file extensions on Windows - // precedingText = precedingText.replace('.cmd', ''); + let precedingText = terminalContext.commandLine.slice(0, terminalContext.cursorPosition + 1); + if (isWindows) { + const spaceIndex = precedingText.indexOf(' '); + const commandEndIndex = spaceIndex === -1 ? precedingText.length : spaceIndex; + const lastDotIndex = precedingText.lastIndexOf('.', commandEndIndex); + if (lastDotIndex > 0) { // Don't treat dotfiles as extensions + precedingText = precedingText.substring(0, lastDotIndex) + precedingText.substring(spaceIndex); + } + } let specificItemsProvided = false; for (const spec of specs) { @@ -252,9 +258,9 @@ export async function getCompletionItemsFromSpecs( } for (const specLabel of specLabels) { - const availableCommand = availableCommands.find(command => specLabel === command.label); - // TODO: Normalize commands to ignore file extensions on Windows https://github.com/microsoft/vscode/issues/237598 - // const availableCommand = availableCommands.find(command => command.label.startsWith(specLabel)); + const availableCommand = (isWindows + ? 
availableCommands.find(command => command.label.match(new RegExp(`${specLabel}(\\.[^ ]+)?$`))) + : availableCommands.find(command => command.label.startsWith(specLabel))); if (!availableCommand || (token && token.isCancellationRequested)) { continue; } @@ -267,11 +273,14 @@ export async function getCompletionItemsFromSpecs( continue; } - // TODO: Normalize commands to ignore file extensions on Windows https://github.com/microsoft/vscode/issues/237598 - // const commandAndAliases = availableCommands.filter(command => specLabel === (command.definitionCommand ?? command.label).replace('.cmd', '')); - // if (!commandAndAliases.some(e => terminalContext.commandLine.startsWith(`${e.label} `) || terminalContext.commandLine.startsWith(`${e.label}.cmd `))) { - const commandAndAliases = availableCommands.filter(command => specLabel === (command.definitionCommand ?? command.label)); - if (!commandAndAliases.some(e => terminalContext.commandLine.startsWith(`${e.label} `))) { + const commandAndAliases = (isWindows + ? availableCommands.filter(command => specLabel === removeAnyFileExtension(command.definitionCommand ?? command.label)) + : availableCommands.filter(command => specLabel === (command.definitionCommand ?? command.label))); + if ( + !(isWindows + ? commandAndAliases.some(e => precedingText.startsWith(`${removeAnyFileExtension(e.label)} `)) + : commandAndAliases.some(e => precedingText.startsWith(`${e.label} `))) + ) { // the spec label is not the first word in the command line, so do not provide options or args continue; } @@ -469,3 +478,6 @@ function getShell(shellType: TerminalShellType): string | undefined { } } +function removeAnyFileExtension(label: string): string { + return label.replace(/\.[a-zA-Z0-9!#\$%&'\(\)\-@\^_`{}~\+,;=\[\]]+$/, ''); +}
diff --git a/extensions/terminal-suggest/src/test/completions/code.test.ts b/extensions/terminal-suggest/src/test/completions/code.test.ts index 50ad0458a2242..ecbaef0c130bd 100644 --- a/extensions/terminal-suggest/src/test/completions/code.test.ts +++ b/extensions/terminal-suggest/src/test/completions/code.test.ts @@ -7,8 +7,9 @@ import 'mocha'; import codeCompletionSpec from '../../completions/code'; import { testPaths, type ISuiteSpec, type ITestSpec } from '../helpers'; +export const codeSpecOptions = ['-', '--add', '--category', '--diff', '--disable-extension', '--disable-extensions', '--disable-gpu', '--enable-proposed-api', '--extensions-dir', '--goto', '--help', '--inspect-brk-extensions', '--inspect-extensions', '--install-extension', '--list-extensions', '--locale', '--log', '--max-memory', '--merge', '--new-window', '--pre-release', '--prof-startup', '--profile', '--reuse-window', '--show-versions', '--status', '--sync', '--telemetry', '--uninstall-extension', '--user-data-dir', '--verbose', '--version', '--wait', '-a', '-d', '-g', '-h', '-m', '-n', '-r', '-s', '-v', '-w']; + export function createCodeTestSpecs(executable: string): ITestSpec[] { - const codeOptions = ['-', '--add', '--category', '--diff', '--disable-extension', '--disable-extensions', '--disable-gpu', '--enable-proposed-api', '--extensions-dir', '--goto', '--help', '--inspect-brk-extensions', '--inspect-extensions', '--install-extension', '--list-extensions', '--locale', '--log', '--max-memory', '--merge', '--new-window', '--pre-release', '--prof-startup', '--profile', '--reuse-window', '--show-versions', '--status', '--sync', '--telemetry', '--uninstall-extension', '--user-data-dir', '--verbose', '--version', '--wait', '-a', '-d', '-g', '-h', '-m', '-n', '-r', '-s', '-v', '-w']; const localeOptions = ['bg', 'de', 'en', 'es', 'fr', 'hu', 'it', 'ja', 'ko', 'pt-br', 'ru', 'tr', 'zh-CN', 'zh-TW']; const categoryOptions = ['azure', 'data science', 'debuggers', 'extension packs', 'education', 
'formatters', 'keymaps', 'language packs', 'linters', 'machine learning', 'notebooks', 'programming languages', 'scm providers', 'snippets', 'testing', 'themes', 'visualization', 'other']; const logOptions = ['critical', 'error', 'warn', 'info', 'debug', 'trace', 'off']; @@ -25,7 +26,7 @@ export function createCodeTestSpecs(executable: string): ITestSpec[] { ...typingTests, // Basic arguments - { input: `${executable} |`, expectedCompletions: codeOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, + { input: `${executable} |`, expectedCompletions: codeSpecOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, { input: `${executable} --locale |`, expectedCompletions: localeOptions }, { input: `${executable} --diff |`, expectedResourceRequests: { type: 'files', cwd: testPaths.cwd } }, { input: `${executable} --diff ./file1 |`, expectedResourceRequests: { type: 'files', cwd: testPaths.cwd } }, @@ -40,13 +41,13 @@ export function createCodeTestSpecs(executable: string): ITestSpec[] { { input: `${executable} --log |`, expectedCompletions: logOptions }, { input: `${executable} --sync |`, expectedCompletions: syncOptions }, { input: `${executable} --extensions-dir |`, expectedResourceRequests: { type: 'folders', cwd: testPaths.cwd } }, - { input: `${executable} --list-extensions |`, expectedCompletions: codeOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, - { input: `${executable} --show-versions |`, expectedCompletions: codeOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, + { input: `${executable} --list-extensions |`, expectedCompletions: codeSpecOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, + { input: `${executable} --show-versions |`, expectedCompletions: codeSpecOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, { input: `${executable} --category |`, expectedCompletions: categoryOptions }, { input: `${executable} 
--category a|`, expectedCompletions: categoryOptions.filter(c => c.startsWith('a')) }, // Middle of command - { input: `${executable} | --locale`, expectedCompletions: codeOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, + { input: `${executable} | --locale`, expectedCompletions: codeSpecOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, ]; } diff --git a/extensions/terminal-suggest/src/test/terminalSuggestMain.test.ts b/extensions/terminal-suggest/src/test/terminalSuggestMain.test.ts index eca864c9727cd..189a022101b73 100644 --- a/extensions/terminal-suggest/src/test/terminalSuggestMain.test.ts +++ b/extensions/terminal-suggest/src/test/terminalSuggestMain.test.ts @@ -9,7 +9,7 @@ import { basename } from 'path'; import { asArray, getCompletionItemsFromSpecs } from '../terminalSuggestMain'; import { getTokenType } from '../tokens'; import { cdTestSuiteSpec as cdTestSuite } from './completions/cd.test'; -import { codeTestSuite } from './completions/code.test'; +import { codeSpecOptions, codeTestSuite } from './completions/code.test'; import { testPaths, type ISuiteSpec } from './helpers'; import { codeInsidersTestSuite } from './completions/code-insiders.test'; import { lsTestSuiteSpec } from './completions/upstream/ls.test'; @@ -18,6 +18,8 @@ import { mkdirTestSuiteSpec } from './completions/upstream/mkdir.test'; import { rmTestSuiteSpec } from './completions/upstream/rm.test'; import { rmdirTestSuiteSpec } from './completions/upstream/rmdir.test'; import { touchTestSuiteSpec } from './completions/upstream/touch.test'; +import { osIsWindows } from '../helpers/os'; +import codeCompletionSpec from '../completions/code'; const testSpecs2: ISuiteSpec[] = [ { @@ -46,6 +48,26 @@ const testSpecs2: ISuiteSpec[] = [ touchTestSuiteSpec, ]; +if (osIsWindows()) { + testSpecs2.push({ + name: 'Handle options extensions on Windows', + completionSpecs: [codeCompletionSpec], + availableCommands: [ + 'code.bat', + 'code.cmd', + 
'code.exe', + 'code.anything', + ], + testSpecs: [ + { input: 'code |', expectedCompletions: codeSpecOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, + { input: 'code.bat |', expectedCompletions: codeSpecOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, + { input: 'code.cmd |', expectedCompletions: codeSpecOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, + { input: 'code.exe |', expectedCompletions: codeSpecOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, + { input: 'code.anything |', expectedCompletions: codeSpecOptions, expectedResourceRequests: { type: 'both', cwd: testPaths.cwd } }, + ] + }); +} + suite('Terminal Suggest', () => { for (const suiteSpec of testSpecs2) { suite(suiteSpec.name, () => {
Fig completions aren't working on Windows 1. Type `code --locale ` + `ctrl+space`. You should see all locales, but do not 🐛 2. Type `cd ` + `ctrl+space`. You should see `~`, `-`, and all folders, but do not 🐛 cc @Tyriar
The `code-insiders --locale` case still doesn't work for me. ![code-insiders --locale only shows folder names after](https://github.com/user-attachments/assets/abd23473-c0ad-48c9-8ce2-6a32c0769da0) I see `locales` but also file/folder completions, which is unexpected. <img width="645" alt="Image" src="https://github.com/user-attachments/assets/af432832-9834-40d5-909e-22f5867f1fe1" /> <img width="577" alt="Image" src="https://github.com/user-attachments/assets/f4fc22ab-ef8b-4262-972b-0bb04edeb16f" /> Reopening as it seems this is broken on windows 2 is fixed, 1 still happens on Windows. I think it's related to the label validation and `code.cmd` being in the path
The `code-insiders --locale` case still doesn't work for me. ![code-insiders --locale only shows folder names after](https://github.com/user-attachments/assets/abd23473-c0ad-48c9-8ce2-6a32c0769da0) I see `locales` but also file/folder completions, which is unexpected. <img width="645" alt="Image" src="https://github.com/user-attachments/assets/af432832-9834-40d5-909e-22f5867f1fe1" /> <img width="577" alt="Image" src="https://github.com/user-attachments/assets/f4fc22ab-ef8b-4262-972b-0bb04edeb16f" /> Reopening as it seems this is broken on windows 2 is fixed, 1 still happens on Windows. I think it's related to the label validation and `code.cmd` being in the path
[ "https://github.com/microsoft/vscode/commit/a563227a21c235f16a6daa33a1f2e9efae253d9c", "https://github.com/microsoft/vscode/commit/bbee48424ca24e6f7630c8cfdad2896a1787efe8", "https://github.com/microsoft/vscode/commit/58c2049bef94714f41e9099f197612ebe74a069c" ]
2025-02-05T15:23:48Z
https://github.com/microsoft/vscode/tree/3834de8f311b7a3227952910088c6b34abd2a2ba
[ "npm install --ignore-scripts ; node --max-old-space-size=8192 .\\node_modules\\gulp\\bin\\gulp.js compile-client" ]
[ "New-Item -ItemType Directory -Force reports\\junit | Out-Null; Set-Content -Path reports\\mocha-multi-reporters.json -Value '{ \"reporterEnabled\": \"spec, mocha-junit-reporter\", \"mochaJunitReporterReporterOptions\": { \"mochaFile\": \"reports/junit/results.xml\", \"testsuitesTitle\": \"Mocha Tests\", \"rootSuit...
[ "Get-Content reports\\test-output.log -Raw" ]
def parser(log: str) -> dict[str, str]:
    """Parse a mocha-style spec-reporter log into {full test name: status}.

    Scans the log line by line, tracking the current suite nesting (inferred
    from indentation) so each test can be reported under its fully qualified
    name, joined with " / ".  Statuses are merged with precedence
    fail > skip > pass: a test that both passed and later appears in the
    failure summary is reported as "fail".

    Recognized line shapes (presumably mocha spec output — the regexes match
    checkmark/cross markers, "(Nms)" durations, and "passing/pending/failing"
    summary words; confirm against the actual runner if extending):
      - pass:   "  ✓ title (12ms)"   (also ✔, √, and a mojibake form)
      - skip:   "  - title" / "  ○ title"
      - fail:   "  ✗ title" / "  × title" / "  x title"
      - numbered failure summary: "  1) title"
      - suite header: any other line indented >= 2 spaces
      - listener-leak warnings are reported as synthetic failures
    """
    import re

    # Collected results, merged at the end with fail > skip > pass precedence.
    passes: dict[str, str] = {}
    skips: dict[str, str] = {}
    fails: dict[str, str] = {}

    lines = log.splitlines()

    # Regexes
    # Pass marker. "Γ\S*Ü" is presumably the ✓ glyph mis-decoded by a Windows
    # console codepage — TODO confirm against real captured logs.
    re_pass = re.compile(r'^(?P<indent>\s*)(?:✓|✔|√|Γ\S*Ü)\s+(?P<title>.*?)(?:\s+\(\d+ms\))?\s*$')
    re_skip = re.compile(r'^(?P<indent>\s*)(?:-\s+|○\s+)(?P<title>.*?)(?:\s+\(\d+ms\))?\s*$')
    # Numbered entries from the failure summary, e.g. "  3) some test".
    re_fail_hdr = re.compile(r'^\s*(\d+)\)\s+(?P<title>.+?)\s*$')
    # NOTE(review): bare "x " also matches here, so a test/suite title that
    # starts with "x " would be misread as a failure marker.
    re_fail_mark = re.compile(r'^(?P<indent>\s*)(?:✗|×|x)\s+(?P<title>.*?)(?:\s+\(\d+ms\))?\s*$')
    re_leak = re.compile(r'potential listener LEAK detected', re.IGNORECASE)
    # Trailing duration suffix, stripped from titles.
    re_duration = re.compile(r'\s+\(\d+ms\)\s*$')
    # Strict suite header: require at least two leading spaces and exclude known markers
    re_suite = re.compile(
        r'^(?P<indent>\s{2,})(?P<title>(?!✓|✔|√|Γ\S*Ü|-|○|\d+\)|Error|at |node :).+?)\s*$'
    )

    # Stack of (indent, suite title); deeper indents are nested suites.
    suite_stack: list[tuple[int, str]] = []

    def clean_title(title: str) -> str:
        # Drop a trailing "(Nms)" duration and surrounding whitespace.
        title = re_duration.sub('', title.strip())
        return title.strip()

    def get_full_name(test_title: str, test_indent: int) -> str:
        # Qualify the test with every suite whose indent is strictly
        # shallower than the test line's own indent.
        parts = [t for (ind, t) in suite_stack if ind < test_indent]
        parts.append(test_title.strip())
        return " / ".join(parts).strip()

    i = 0
    n = len(lines)
    while i < n:
        line = lines[i].rstrip('\n')

        # Pass line
        mp = re_pass.match(line)
        if mp:
            indent = len(mp.group('indent'))
            title = clean_title(mp.group('title'))
            # Handle hyphen-wrapped continuation: next line starts with "-<word>" (no space)
            # (a title broken across lines at a hyphen is re-joined; the
            # consumed continuation lines are skipped via i = j).
            j = i + 1
            while title.endswith('-') and j < n:
                nxt = lines[j].rstrip('\n')
                nxt_stripped = nxt.strip()
                if nxt_stripped.startswith('-') and not nxt_stripped.startswith('- '):
                    cont = nxt_stripped[1:]
                    title = title[:-1] + '-' + cont
                    i = j
                    j = i + 1
                    continue
                break
            if title:
                full = get_full_name(title, indent)
                if full:
                    passes[full] = "pass"
            i += 1
            continue

        # Skip line
        ms = re_skip.match(line)
        if ms:
            indent = len(ms.group('indent'))
            title = clean_title(ms.group('title'))
            if title:
                full = get_full_name(title, indent)
                if full:
                    skips[full] = "skip"
            i += 1
            continue

        # Explicit fail marker
        mfm = re_fail_mark.match(line)
        if mfm:
            indent = len(mfm.group('indent'))
            title = clean_title(mfm.group('title'))
            if title:
                full = get_full_name(title, indent)
                if full:
                    fails[full] = "fail"
            i += 1
            continue

        # Numbered failure summary
        # NOTE(review): summary entries are recorded under the bare title,
        # without the suite prefix used for ✓/✗ lines — so the same test can
        # appear under two different keys; confirm this is intended.
        mfh = re_fail_hdr.match(line)
        if mfh:
            title = clean_title(mfh.group('title'))
            if title:
                fails[title] = "fail"
            i += 1
            continue

        # Leak detection
        if re_leak.search(line):
            # Report the leak as a synthetic failure attributed to the
            # suite that is currently open.
            suite_titles = [t for _, t in suite_stack]
            if suite_titles:
                full = " / ".join(suite_titles + ["potential listener LEAK detected"])
            else:
                full = "potential listener LEAK detected"
            fails[full] = "fail"
            i += 1
            continue

        # Suite header
        msu = re_suite.match(line)
        if msu:
            indent = len(msu.group('indent'))
            title = clean_title(msu.group('title'))
            # Ignore summary lines that include passing/pending/failing counts
            if not re.search(r'\b(passing|pending|failing)\b', title, re.IGNORECASE):
                # Pop suites at the same or deeper indent before pushing:
                # a header at indent k closes every suite nested at >= k.
                while suite_stack and suite_stack[-1][0] >= indent:
                    suite_stack.pop()
                suite_stack.append((indent, title))
            i += 1
            continue

        i += 1

    # Merge with precedence fail > skip > pass: passes first, skips only
    # where not already present, fails overwrite everything.
    results: dict[str, str] = {}
    for k in passes:
        results[k] = "pass"
    for k in skips:
        results.setdefault(k, "skip")
    for k in fails:
        results[k] = "fail"
    return results
[ "ExtHostSearch / File: / max results = 2", "Editor Model - Model / model getValue", "ExtensionsMerge / merge: remote extension is changed to unpinned" ]
[ "Files / FileChangesEvent - basics", "CharacterPairSupport / shouldAutoClosePair in interesting line 2", "Collections / SetWithKey / size", "TextResourceConfigurationService - Update / updateValue writes into derived user remote target with overrides and va", "Event utils / debounce / leading reset", "Tas...
starryzhang/sweb.eval.win.microsoft_1776_vscode-239695
lobehub/lobe-chat
8459
lobehub__lobe-chat-8459
TypeScript
[ "8437" ]
752e576b802d06b1d3a975a42b8447550b8295d8
diff --git a/.cursor/rules/code-review.mdc b/.cursor/rules/code-review.mdc index a5a2516018e78..55bc1d46697c1 100644 --- a/.cursor/rules/code-review.mdc +++ b/.cursor/rules/code-review.mdc @@ -1,13 +1,14 @@ --- description: How to code review -globs: +globs: alwaysApply: false --- + # Role Description -- You are a senior full-stack engineer skilled in performance optimization, security, and design systems. -- You excel at reviewing code and providing constructive feedback. -- Your task is to review submitted Git diffs **in Chinese** and return a structured review report. +- You are a senior full-stack engineer skilled in performance optimization, security, and design systems. +- You excel at reviewing code and providing constructive feedback. +- Your task is to review submitted Git diffs **in Chinese** and return a structured review report. - Review style: concise, direct, focused on what matters most, with actionable suggestions. ## Before the Review @@ -16,54 +17,57 @@ Gather the modified code and context. Please strictly follow the process below: 1. Use `read_file` to read [package.json](mdc:package.json) 2. Use terminal to run command `git diff HEAD | cat` to obtain the diff and list the changed files. If you recieived empty result, run the same command once more. -3. Use `read_file` to open each changed file. -4. Use `read_file` to read [rules-attach.mdc](mdc:.cursor/rules/rules-attach.mdc). Even if you think it's unnecessary, you must read it. -5. combine changed files, step3 and `agent_requestable_workspace_rules`, list the rules which need to read +3. Use `read_file` to open each changed file. +4. Use `read_file` to read [rules-attach.mdc](mdc:.cursor/rules/rules-attach.mdc). Even if you think it's unnecessary, you must read it. +5. combine changed files, step3 and `agent_requestable_workspace_rules`, list the rules which need to read 6. 
Use `read_file` to read the rules list in step 5 ## Review ### Code Style -- Ensure JSDoc comments accurately reflect the implementation; update them when needed. -- Look for opportunities to simplify or modernize code with the latest JavaScript/TypeScript features. -- Prefer `async`/`await` over callbacks or chained `.then` promises. -- Use consistent, descriptive naming—avoid obscure abbreviations. -- Replace magic numbers or strings with well-named constants. +- Ensure JSDoc comments accurately reflect the implementation; update them when needed. +- Look for opportunities to simplify or modernize code with the latest JavaScript/TypeScript features. +- Prefer `async`/`await` over callbacks or chained `.then` promises. +- Use consistent, descriptive naming—avoid obscure abbreviations. +- Replace magic numbers or strings with well-named constants. - Use semantically meaningful variable, function, and class names. - Ignore purely formatting issues and other autofixable lint problems. ### Code Optimization -- Prefer `for…of` loops to index-based `for` loops when feasible. -- Decide whether callbacks should be **debounced** or **throttled**. -- Use components from `@lobehub/ui`, Ant Design, or the existing design system instead of raw HTML tags (e.g., `Button` vs. `button`). -- reuse npm packages already installed (e.g., `lodash/omit`) rather than reinventing the wheel. -- Design for dark mode and mobile responsiveness: - - Use the `antd-style` token system instead of hard-coded colors. - - Select the proper component variants. -- Performance considerations: - - Where safe, convert sequential async flows to concurrent ones with `Promise.all`, `Promise.race`, etc. +- Prefer `for…of` loops to index-based `for` loops when feasible. +- Decide whether callbacks should be **debounced** or **throttled**. +- Use components from `@lobehub/ui`, Ant Design, or the existing design system instead of raw HTML tags (e.g., `Button` vs. `button`). 
+- reuse npm packages already installed (e.g., `lodash/omit`) rather than reinventing the wheel. +- Design for dark mode and mobile responsiveness: + - Use the `antd-style` token system instead of hard-coded colors. + - Select the proper component variants. +- Performance considerations: + - Where safe, convert sequential async flows to concurrent ones with `Promise.all`, `Promise.race`, etc. - Query only the required columns from a database rather than selecting entire rows. ### Obvious Bugs -- Do not silently swallow errors in `catch` blocks; at minimum, log them. -- Revert temporary code used only for testing (e.g., debug logs, temporary configs). -- Remove empty handlers (e.g., an empty `onClick`). +- Do not silently swallow errors in `catch` blocks; at minimum, log them. +- Revert temporary code used only for testing (e.g., debug logs, temporary configs). +- Remove empty handlers (e.g., an empty `onClick`). - Confirm the UI degrades gracefully for unauthenticated users. +- Don't leave any debug logs in the code (except when using the `debug` module properly). + - When using the `debug` module, avoid `import { log } from 'debug'` as it logs directly to console. Use proper debug namespaces instead. +- Check logs for sensitive information like api key, etc ## After the Review: output 1. Summary - - Start with a brief explanation of what the change set does. - - Summarize the changes for each modified file (or logical group). + - Start with a brief explanation of what the change set does. + - Summarize the changes for each modified file (or logical group). 2. Comments Issues - - List the most critical issues first. - - Use an ordered list, which will be convenient for me to reference later. - - For each issue: - - Mark severity tag (`❌ Must fix`, `⚠️ Should fix`, `💅 Nitpick`) - - Provode file path to the relevant file. - - Provide recommended fix - - End with a **git commit** command, instruct the author to run it. 
- - We use gitmoji to label commit messages, format: [emoji] <type>(<scope>): <subject> \ No newline at end of file + - List the most critical issues first. + - Use an ordered list, which will be convenient for me to reference later. + - For each issue: + - Mark severity tag (`❌ Must fix`, `⚠️ Should fix`, `💅 Nitpick`) + - Provode file path to the relevant file. + - Provide recommended fix + - End with a **git commit** command, instruct the author to run it. + - We use gitmoji to label commit messages, format: [emoji] <type>(<scope>): <subject> diff --git a/.cursor/rules/system-role.mdc b/.cursor/rules/system-role.mdc index 61b270a637482..90c46c632312b 100644 --- a/.cursor/rules/system-role.mdc +++ b/.cursor/rules/system-role.mdc @@ -1,8 +1,9 @@ --- -description: -globs: +description: +globs: alwaysApply: true --- + ## System Role You are an expert in full-stack Web development, proficient in JavaScript, TypeScript, CSS, React, Node.js, Next.js, Postgresql, all kinds of network protocols. @@ -11,7 +12,6 @@ You are an expert in LLM and Ai art. In Ai image generation, you are proficient You are an expert in UI/UX design, proficient in web interaction patterns, responsive design, accessibility, and user behavior optimization. You excel at improving user retention and paid conversion rates through various interaction details. - ## Problem Solving - Before formulating any response, you must first gather context by using tools like codebase_search, grep_search, file_search, web_search, fetch_rules, context7, and read_file to avoid making assumptions. 
@@ -36,3 +36,8 @@ You are an expert in UI/UX design, proficient in web interaction patterns, respo - If you're unable to access or retrieve content from websites, please inform me immediately and request the specific information needed rather than making assumptions - You can use emojis, npm packages like `chalk`/`chalk-animation`/`terminal-link`/`gradient-string`/`log-symbols`/`boxen`/`consola`/`@clack/prompts` to create beautiful terminal output - Don't run `tsc --noEmit` to check ts syntax error, because our project is very large and the validate very slow + +## Some logging rules + +- Never log user private information like api key, etc +- Don't use `import { log } from 'debug'` to log messages, because it will directly log the message to the console. diff --git a/apps/desktop/package.json b/apps/desktop/package.json index 2e95d1f01e32e..b154e80f11a9e 100644 --- a/apps/desktop/package.json +++ b/apps/desktop/package.json @@ -20,7 +20,7 @@ "electron:run-unpack": "electron .", "format": "prettier --write ", "i18n": "bun run scripts/i18nWorkflow/index.ts && lobe-i18n", - "postinstall": "electron-builder install-app-deps && pnpm rebuild sharp", + "postinstall": "electron-builder install-app-deps", "install-isolated": "pnpm install", "lint": "eslint --cache ", "pg-server": "bun run scripts/pglite-server.ts", diff --git a/src/services/chat.ts b/src/services/chat.ts index 9988fbafd39f0..61d2f6952b9a0 100644 --- a/src/services/chat.ts +++ b/src/services/chat.ts @@ -15,6 +15,7 @@ import { ChatCompletionErrorPayload, ModelProvider, } from '@/libs/model-runtime'; +import { parseDataUri } from '@/libs/model-runtime/utils/uriParser'; import { filesPrompts } from '@/prompts/files'; import { BuiltinSystemRolePrompts } from '@/prompts/systemRole'; import { getAgentStoreState } from '@/store/agent'; @@ -35,7 +36,7 @@ import { import { WebBrowsingManifest } from '@/tools/web-browsing'; import { WorkingModel } from '@/types/agent'; import { ChatErrorType } from '@/types/fetch'; 
-import { ChatMessage, MessageToolCall } from '@/types/message'; +import { ChatImageItem, ChatMessage, MessageToolCall } from '@/types/message'; import type { ChatStreamPayload, OpenAIChatMessage } from '@/types/openai/chat'; import { UserMessageContentPart } from '@/types/openai/chat'; import { parsePlaceholderVariablesMessages } from '@/utils/client/parserPlaceholder'; @@ -46,8 +47,10 @@ import { getMessageError, standardizeAnimationStyle, } from '@/utils/fetch'; +import { imageUrlToBase64 } from '@/utils/imageToBase64'; import { genToolCallingName } from '@/utils/toolCall'; import { createTraceHeader, getTraceId } from '@/utils/trace'; +import { isLocalUrl } from '@/utils/url'; import { createHeaderWithAuth, createPayloadWithKeyVaults } from './_auth'; import { API_ENDPOINTS } from './_url'; @@ -61,6 +64,14 @@ const isCanUseFC = (model: string, provider: string) => { return aiModelSelectors.isModelSupportToolUse(model, provider)(getAiInfraStoreState()); }; +const isCanUseVision = (model: string, provider: string) => { + // TODO: remove isDeprecatedEdition condition in V2.0 + if (isDeprecatedEdition) { + return modelProviderSelectors.isModelEnabledVision(model)(getUserStoreState()); + } + return aiModelSelectors.isModelSupportVision(model, provider)(getAiInfraStoreState()); +}; + /** * TODO: we need to update this function to auto find deploymentName with provider setting config */ @@ -205,7 +216,7 @@ class ChatService { // ============ 2. 
preprocess messages ============ // - const oaiMessages = this.processMessages( + const oaiMessages = await this.processMessages( { messages: parsedMessages, model: payload.model, @@ -475,7 +486,7 @@ class ChatService { onLoadingChange?.(true); try { - const oaiMessages = this.processMessages({ + const oaiMessages = await this.processMessages({ messages: params.messages as any, model: params.model!, provider: params.provider!, @@ -507,7 +518,7 @@ class ChatService { } }; - private processMessages = ( + private processMessages = async ( { messages = [], tools, @@ -520,29 +531,28 @@ class ChatService { tools?: string[]; }, options?: FetchOptions, - ): OpenAIChatMessage[] => { + ): Promise<OpenAIChatMessage[]> => { // handle content type for vision model // for the models with visual ability, add image url to content // refs: https://platform.openai.com/docs/guides/vision/quick-start - const getUserContent = (m: ChatMessage) => { + const getUserContent = async (m: ChatMessage) => { // only if message doesn't have images and files, then return the plain content if ((!m.imageList || m.imageList.length === 0) && (!m.fileList || m.fileList.length === 0)) return m.content; const imageList = m.imageList || []; + const imageContentParts = await this.processImageList({ imageList, model, provider }); const filesContext = isServerMode ? 
filesPrompts({ addUrl: !isDesktop, fileList: m.fileList, imageList }) : ''; return [ { text: (m.content + '\n\n' + filesContext).trim(), type: 'text' }, - ...imageList.map( - (i) => ({ image_url: { detail: 'auto', url: i.url }, type: 'image_url' }) as const, - ), + ...imageContentParts, ] as UserMessageContentPart[]; }; - const getAssistantContent = (m: ChatMessage) => { + const getAssistantContent = async (m: ChatMessage) => { // signature is a signal of anthropic thinking mode const shouldIncludeThinking = m.reasoning && !!m.reasoning?.signature; @@ -559,65 +569,70 @@ class ChatService { // only if message doesn't have images and files, then return the plain content if (m.imageList && m.imageList.length > 0) { + const imageContentParts = await this.processImageList({ + imageList: m.imageList, + model, + provider, + }); return [ !!m.content ? { text: m.content, type: 'text' } : undefined, - ...m.imageList.map( - (i) => ({ image_url: { detail: 'auto', url: i.url }, type: 'image_url' }) as const, - ), + ...imageContentParts, ].filter(Boolean) as UserMessageContentPart[]; } return m.content; }; - let postMessages = messages.map((m): OpenAIChatMessage => { - const supportTools = isCanUseFC(model, provider); - switch (m.role) { - case 'user': { - return { content: getUserContent(m), role: m.role }; - } - - case 'assistant': { - const content = getAssistantContent(m); - - if (!supportTools) { - return { content, role: m.role }; + let postMessages = await Promise.all( + messages.map(async (m): Promise<OpenAIChatMessage> => { + const supportTools = isCanUseFC(model, provider); + switch (m.role) { + case 'user': { + return { content: await getUserContent(m), role: m.role }; } - return { - content, - role: m.role, - tool_calls: m.tools?.map( - (tool): MessageToolCall => ({ - function: { - arguments: tool.arguments, - name: genToolCallingName(tool.identifier, tool.apiName, tool.type), - }, - id: tool.id, - type: 'function', - }), - ), - }; - } - - case 'tool': { - if 
(!supportTools) { - return { content: m.content, role: 'user' }; + case 'assistant': { + const content = await getAssistantContent(m); + + if (!supportTools) { + return { content, role: m.role }; + } + + return { + content, + role: m.role, + tool_calls: m.tools?.map( + (tool): MessageToolCall => ({ + function: { + arguments: tool.arguments, + name: genToolCallingName(tool.identifier, tool.apiName, tool.type), + }, + id: tool.id, + type: 'function', + }), + ), + }; } - return { - content: m.content, - name: genToolCallingName(m.plugin!.identifier, m.plugin!.apiName, m.plugin?.type), - role: m.role, - tool_call_id: m.tool_call_id, - }; - } + case 'tool': { + if (!supportTools) { + return { content: m.content, role: 'user' }; + } + + return { + content: m.content, + name: genToolCallingName(m.plugin!.identifier, m.plugin!.apiName, m.plugin?.type), + role: m.role, + tool_call_id: m.tool_call_id, + }; + } - default: { - return { content: m.content, role: m.role as any }; + default: { + return { content: m.content, role: m.role as any }; + } } - } - }); + }), + ); postMessages = produce(postMessages, (draft) => { // if it's a welcome question, inject InboxGuide SystemRole @@ -657,6 +672,37 @@ class ChatService { return this.reorderToolMessages(postMessages); }; + /** + * Process imageList: convert local URLs to base64 and format as UserMessageContentPart + */ + private processImageList = async ({ + model, + provider, + imageList, + }: { + imageList: ChatImageItem[]; + model: string; + provider: string; + }) => { + if (!isCanUseVision(model, provider)) { + return []; + } + + return Promise.all( + imageList.map(async (image) => { + const { type } = parseDataUri(image.url); + + let processedUrl = image.url; + if (type === 'url' && isLocalUrl(image.url)) { + const { base64, mimeType } = await imageUrlToBase64(image.url); + processedUrl = `data:${mimeType};base64,${base64}`; + } + + return { image_url: { detail: 'auto', url: processedUrl }, type: 'image_url' } as const; + }), 
+ ); + }; + private mapTrace = (trace?: TracePayload, tag?: TraceTagMap): TracePayload => { const tags = sessionMetaSelectors.currentAgentMeta(getSessionStoreState()).tags || []; @@ -681,9 +727,6 @@ class ChatService { provider: string; signal?: AbortSignal; }) => { - const agentRuntime = await initializeWithClientStore(params.provider, params.payload); - const data = params.payload as ChatStreamPayload; - /** * if enable login and not signed in, return unauthorized error */ @@ -692,6 +735,9 @@ class ChatService { throw AgentRuntimeError.createError(ChatErrorType.InvalidAccessCode); } + const agentRuntime = await initializeWithClientStore(params.provider, params.payload); + const data = params.payload as ChatStreamPayload; + return agentRuntime.chat(data, { signal: params.signal }); }; diff --git a/src/utils/url.ts b/src/utils/url.ts index 8a287aca36995..4f7f7ca465533 100644 --- a/src/utils/url.ts +++ b/src/utils/url.ts @@ -123,3 +123,31 @@ export function inferContentTypeFromImageUrl(url: string) { return mimeType!; // Non-null assertion is safe due to whitelist validation } + +/** + * Check if a URL points to localhost (127.0.0.1) + * + * This function safely determines if the provided URL's hostname is '127.0.0.1'. + * It handles malformed URLs gracefully by returning false instead of throwing errors. + * + * @param url - The URL string to check + * @returns true if the URL's hostname is '127.0.0.1', false otherwise (including for malformed URLs) + * + * @example + * ```typescript + * isLocalUrl('http://127.0.0.1:8080/path') // true + * isLocalUrl('https://example.com') // false + * isLocalUrl('invalid-url') // false (instead of throwing) + * isLocalUrl('') // false (instead of throwing) + * ``` + * + * check: apps/desktop/src/main/core/StaticFileServerManager.ts + */ +export function isLocalUrl(url: string) { + try { + return new URL(url).hostname === '127.0.0.1'; + } catch { + // Return false for malformed URLs instead of throwing + return false; + } +}
diff --git a/.cursor/rules/testing-guide/testing-guide.mdc b/.cursor/rules/testing-guide/testing-guide.mdc index 7140ffa9e3cbb..bd63bef840624 100644 --- a/.cursor/rules/testing-guide/testing-guide.mdc +++ b/.cursor/rules/testing-guide/testing-guide.mdc @@ -28,85 +28,39 @@ LobeChat 项目使用 Vitest 测试库,配置了两种不同的测试环境: ## 🚀 测试运行命令 -### package.json 脚本说明 - -查看 [package.json](mdc:package.json) 中的测试相关脚本: - -```json -{ - "test": "npm run test-app && npm run test-server", - "test-app": "vitest run --config vitest.config.ts", - "test-app:coverage": "vitest run --config vitest.config.ts --coverage", - "test-server": "vitest run --config vitest.config.server.ts", - "test-server:coverage": "vitest run --config vitest.config.server.ts --coverage" -} -``` - -### 推荐的测试运行方式 - -#### ⚠️ 重要提醒 - -**🚨 性能警告**: - -- **永远不要直接运行整个项目的所有测试用例** - 项目包含 3000+ 测试用例,完整运行需要约 10 分钟 -- **务必使用文件过滤或测试名称过滤** - 始终指定具体的测试文件或测试名称模式 -- **避免无意中触发全量测试** - 某些看似针对单个文件的命令实际上会运行所有测试 +**🚨 性能警告**: 项目包含 3000+ 测试用例,完整运行需要约 10 分钟。务必使用文件过滤或测试名称过滤。 -#### ✅ 正确的命令格式 +### ✅ 正确的命令格式 ```bash -# 运行所有客户端测试 -npx vitest run --config vitest.config.ts - -# 运行所有服务端测试 -npx vitest run --config vitest.config.server.ts +# 运行所有客户端/服务端测试 +npx vitest run --config vitest.config.ts # 客户端测试 +npx vitest run --config vitest.config.server.ts # 服务端测试 # 运行特定测试文件 (支持模糊匹配) -npx vitest run --config vitest.config.ts basic npx vitest run --config vitest.config.ts user.test.ts -# 运行特定文件的特定行号 -npx vitest run --config vitest.config.ts src/utils/helper.test.ts:25 -npx vitest run --config vitest.config.ts basic/foo.test.ts:10,basic/foo.test.ts:25 - -# 过滤特定测试用例名称 -npx vitest -t "test case name" --config vitest.config.ts +# 运行特定测试用例名称 (使用 -t 参数) +npx vitest run --config vitest.config.ts -t "test case name" # 组合使用文件和测试名称过滤 npx vitest run --config vitest.config.ts filename.test.ts -t "specific test" + +# 生成覆盖率报告 (使用 --coverage 参数) +npx vitest run --config vitest.config.ts --coverage ``` -#### ❌ 避免的命令格式 +### ❌ 避免的命令格式 ```bash # ❌ 这些命令会运行所有 3000+ 测试用例,耗时约 10 分钟! 
npm test -npm run test -pnpm test -pnpm run test - -# ❌ 这些命令看似针对单个文件,但实际会运行所有测试用例!, 需要直接运行 vitest 命令不要使用 test npm script -npm test src/libs/model-runtime/utils/openaiCompatibleFactory/index.test.ts -pnpm test src/components/Button/index.test.tsx - -# ❌ 不要使用 pnpm test xxx (这不是有效的 vitest 命令) -pnpm test some-file +npm test some-file.test.ts # ❌ 不要使用裸 vitest (会进入 watch 模式) vitest test-file.test.ts - -# ❌ 不要混淆测试环境 -npx vitest run --config vitest.config.server.ts client-component.test.ts ``` -### 关键运行参数说明 - -- **`vitest run`**: 运行一次测试然后退出 (避免 watch 模式) -- **`vitest`**: 默认进入 watch 模式,持续监听文件变化 -- **`--config`**: 指定配置文件,选择正确的测试环境 -- **`-t`**: 过滤测试用例名称,支持正则表达式 -- **`--coverage`**: 生成测试覆盖率报告 - ## 🔧 测试修复原则 ### 核心原则 ⚠️ @@ -116,46 +70,125 @@ npx vitest run --config vitest.config.server.ts client-component.test.ts 3. **专注单一问题**: 只修复指定的测试,不要添加额外测试或功能 4. **不自作主张**: 不要因为发现其他问题就直接修改,先提出再讨论 -### 测试修复流程 +### 测试协作最佳实践 🤝 + +基于实际开发经验总结的重要协作原则: -```mermaid -flowchart TD - subgraph "阶段一:分析与复现" - A[开始:收到测试失败报告] --> B[定位并运行失败的测试]; - B --> C{是否能在本地复现?}; - C -->|否| D[检查测试环境/配置/依赖]; - C -->|是| E[分析:阅读测试代码、错误日志、Git 历史]; - end - - subgraph "阶段二:诊断与调试" - E --> F[建立假设:问题出在测试、代码还是环境?]; - F --> G["调试:使用 console.log 或 debugger 深入检查"]; - G --> H{假设是否被证实?}; - H -->|否, 重新假设| F; - end - - subgraph "阶段三:修复与验证" - H -->|是| I{确定根本原因}; - I -->|测试逻辑错误| J[修复测试代码]; - I -->|实现代码 Bug| K[修复实现代码]; - I -->|环境/配置问题| L[修复配置或依赖]; - J --> M[验证修复:重新运行失败的测试]; - K --> M; - L --> M; - M --> N{测试是否通过?}; - N -->|否, 修复无效| F; - N -->|是| O[扩大验证:运行当前文件内所有测试]; - O --> P{是否全部通过?}; - P -->|否, 引入新问题| F; - end - - subgraph "阶段四:总结" - P -->|是| Q[完成:撰写修复总结]; - end - - D --> F; +#### 1. 失败处理策略 + +**核心原则**: 避免盲目重试,快速识别问题并寻求帮助。 + +- **失败阈值**: 当连续尝试修复测试 1-2 次都失败后,应立即停止继续尝试 +- **问题总结**: 分析失败原因,整理已尝试的解决方案及其失败原因 +- **寻求帮助**: 带着清晰的问题摘要和尝试记录向团队寻求帮助 +- **避免陷阱**: 不要陷入"不断尝试相同或类似方法"的循环 + +```typescript +// ❌ 错误做法:连续失败后继续盲目尝试 +// 第3次、第4次仍在用相似的方法修复同一个问题 + +// ✅ 正确做法:失败1-2次后总结问题 +/* +问题总结: +1. 尝试过的方法:修改 mock 数据结构 +2. 失败原因:仍然提示类型不匹配 +3. 
具体错误:Expected 'UserData' but received 'UserProfile' +4. 需要帮助:不确定最新的 UserData 接口定义 +*/ ``` +#### 2. 测试用例命名规范 + +**核心原则**: 测试应该关注"行为",而不是"实现细节"。 + +- **描述业务场景**: `describe` 和 `it` 的标题应该描述具体的业务场景和预期行为 +- **避免实现绑定**: 不要在测试名称中提及具体的代码行号、覆盖率目标或实现细节 +- **保持稳定性**: 测试名称应该在代码重构后仍然有意义 + +```typescript +// ❌ 错误的测试命名 +describe('User component coverage', () => { + it('covers line 45-50 in getUserData', () => { + // 为了覆盖第45-50行而写的测试 + }); + + it('tests the else branch', () => { + // 仅为了测试某个分支而存在 + }); +}); + +// ✅ 正确的测试命名 +describe('<UserAvatar />', () => { + it('should render fallback icon when image url is not provided', () => { + // 测试具体的业务场景,自然会覆盖相关代码分支 + }); + + it('should display user initials when avatar image fails to load', () => { + // 描述用户行为和预期结果 + }); +}); +``` + +**覆盖率提升的正确思路**: + +- ✅ 通过设计各种业务场景(正常流程、边缘情况、错误处理)来自然提升覆盖率 +- ❌ 不要为了达到覆盖率数字而写测试,更不要在测试中注释"为了覆盖 xxx 行" + +#### 3. 测试组织结构 + +**核心原则**: 维护清晰的测试层次结构,避免冗余的顶级测试块。 + +- **复用现有结构**: 添加新测试时,优先在现有的 `describe` 块中寻找合适的位置 +- **逻辑分组**: 相关的测试用例应该组织在同一个 `describe` 块内 +- **避免碎片化**: 不要为了单个测试用例就创建新的顶级 `describe` 块 + +```typescript +// ❌ 错误的组织方式:创建过多顶级块 +describe('<UserProfile />', () => { + it('should render user name', () => {}); +}); + +describe('UserProfile new prop test', () => { + // 不必要的新块 + it('should handle email display', () => {}); +}); + +describe('UserProfile edge cases', () => { + // 不必要的新块 + it('should handle missing avatar', () => {}); +}); + +// ✅ 正确的组织方式:合并相关测试 +describe('<UserProfile />', () => { + it('should render user name', () => {}); + + it('should handle email display', () => {}); + + it('should handle missing avatar', () => {}); + + describe('when user data is incomplete', () => { + // 只有在有多个相关子场景时才创建子组 + it('should show placeholder for missing name', () => {}); + it('should hide email section when email is undefined', () => {}); + }); +}); +``` + +**组织决策流程**: + +1. 是否存在逻辑相关的现有 `describe` 块? → 如果有,添加到其中 +2. 是否有多个(3个以上)相关的测试用例? → 如果有,可以考虑创建新的子 `describe` +3. 是否是独立的、无关联的功能模块? 
→ 如果是,才考虑创建新的顶级 `describe` + +### 测试修复流程 + +1. **复现问题**: 定位并运行失败的测试,确认能在本地复现 +2. **分析原因**: 阅读测试代码、错误日志和相关文件的 Git 修改历史 +3. **建立假设**: 判断问题出在测试逻辑、实现代码还是环境配置 +4. **修复验证**: 根据假设进行修复,重新运行测试确认通过 +5. **扩大验证**: 运行当前文件内所有测试,确保没有引入新问题 +6. **撰写总结**: 说明错误原因和修复方法 + ### 修复完成后的总结 测试修复完成后,应该提供简要说明,包括: @@ -197,7 +230,7 @@ flowchart TD 例如: -``` +```plaintext src/components/Button/ ├── index.tsx # 源文件 └── index.test.tsx # 测试文件 @@ -205,13 +238,12 @@ src/components/Button/ ## 🛠️ 测试调试技巧 -### 运行失败测试的步骤 +### 测试调试步骤 -1. **确定测试类型**: 查看文件路径确定使用哪个配置 -2. **运行单个测试**: 使用 `-t` 参数隔离问题 -3. **检查错误日志**: 仔细阅读错误信息和堆栈跟踪 -4. **查看最近修改记录**: 检查相关文件的最近变更情况 -5. **添加调试日志**: 在测试中添加 `console.log` 了解执行流程 +1. **确定测试环境**: 根据文件路径选择正确的配置文件 +2. **隔离问题**: 使用 `-t` 参数只运行失败的测试用例 +3. **分析错误**: 仔细阅读错误信息、堆栈跟踪和最近的文件修改记录 +4. **添加调试**: 在测试中添加 `console.log` 了解执行流程 ### TypeScript 类型处理 📝 @@ -245,157 +277,47 @@ mockStream.toReadableStream = () => mockStream; ### 检查最近修改记录 🔍 -为了更好地判断测试失败的根本原因,需要**系统性地检查相关文件的修改历史**。这是问题定位的关键步骤。 - -#### 第一步:确定需要检查的文件范围 +系统性地检查相关文件的修改历史是问题定位的关键步骤。 -1. **测试文件本身**: `path/to/component.test.ts` -2. **对应的实现文件**: `path/to/component.ts` 或 `path/to/component/index.ts` -3. 
**相关依赖文件**: 测试或实现中导入的其他模块 +#### 三步检查法 -#### 第二步:检查当前工作目录状态 +**Step 1: 查看当前状态** ```bash -# 查看所有未提交的修改状态 -git status - -# 重点关注测试文件和实现文件是否有未提交的修改 -git status | grep -E "(test|spec)" -``` - -#### 第三步:检查未提交的修改内容 - -```bash -# 查看测试文件的未提交修改 (工作区 vs 暂存区) -git diff path/to/component.test.ts | cat - -# 查看对应实现文件的未提交修改 -git diff path/to/component.ts | cat - -# 查看已暂存但未提交的修改 -git diff --cached path/to/component.test.ts | cat -git diff --cached path/to/component.ts | cat +git status # 查看未提交的修改 +git diff path/to/component.test.ts | cat # 查看测试文件修改 +git diff path/to/component.ts | cat # 查看实现文件修改 ``` -#### 第四步:检查提交历史和时间相关性 - -**首先查看提交时间,判断修改的时效性**: +**Step 2: 查看提交历史** ```bash -# 查看测试文件的最近提交历史,包含提交时间 -git log --pretty=format:"%h %ad %s" --date=relative -5 path/to/component.test.ts | cat - -# 查看实现文件的最近提交历史,包含提交时间 -git log --pretty=format:"%h %ad %s" --date=relative -5 path/to/component.ts | cat - -# 查看详细的提交时间(ISO格式,便于精确判断) -git log --pretty=format:"%h %ad %an %s" --date=iso -3 path/to/component.ts | cat -git log --pretty=format:"%h %ad %an %s" --date=iso -3 path/to/component.test.ts | cat +git log --pretty=format:"%h %ad %s" --date=relative -3 path/to/component.ts | cat ``` -**判断提交的参考价值**: - -1. **最近提交(24小时内)**: 🔴 **高度相关** - 很可能是导致测试失败的直接原因 -2. **近期提交(1-7天内)**: 🟡 **中等相关** - 可能相关,需要仔细分析修改内容 -3. **较早提交(超过1周)**: ⚪ **低相关性** - 除非是重大重构,否则不太可能是直接原因 - -#### 第五步:基于时间相关性查看具体修改内容 - -**根据提交时间的远近,优先查看最近的修改**: +**Step 3: 查看具体修改内容** ```bash -# 如果有24小时内的提交,重点查看这些修改 -git show HEAD -- path/to/component.test.ts | cat -git show HEAD -- path/to/component.ts | cat - -# 查看次新的提交(如果最新提交时间较远) -git show HEAD~1 -- path/to/component.ts | cat -git show path/to/component.ts < recent-commit-hash > -- | cat - -# 对比最近两次提交的差异 -git diff HEAD~1 HEAD -- path/to/component.ts | cat +git show HEAD -- path/to/component.ts | cat # 查看最新提交的修改 ``` -#### 第六步:分析修改与测试失败的关系 +#### 时间相关性判断 -基于修改记录和时间相关性判断: - -1. 
**最近修改了实现代码**: - - ```bash - # 重点检查实现逻辑的变化 - git diff HEAD~1 path/to/component.ts | cat - ``` - - - 很可能是实现代码的变更导致测试失败 - - 检查实现逻辑是否正确 - - 确认测试是否需要相应更新 - -2. **最近修改了测试代码**: - - ```bash - # 重点检查测试逻辑的变化 - git diff HEAD~1 path/to/component.test.ts | cat - ``` - - - 可能是测试本身写错了 - - 检查测试逻辑和断言是否正确 - - 确认测试是否符合实现的预期行为 - -3. **两者都有最近修改**: - - ```bash - # 对比两个文件的修改时间 - git log --pretty=format:"%ad %f" --date=iso -1 path/to/component.ts | cat - git log --pretty=format:"%ad %f" --date=iso -1 path/to/component.test.ts | cat - ``` - - - 需要综合分析两者的修改 - - 确定哪个修改更可能导致问题 - - 优先检查时间更近的修改 - -4. **都没有最近修改**: - - 可能是依赖变更或环境问题 - - 检查 `package.json`、配置文件等的修改 - - 查看是否有全局性的代码重构 - -#### 修改记录检查示例 - -```bash -# 完整的检查流程示例 -echo "=== 检查文件修改状态 ===" -git status | grep component - -echo "=== 检查未提交修改 ===" -git diff src/components/Button/index.test.tsx | cat -git diff src/components/Button/index.tsx | cat - -echo "=== 检查提交历史和时间 ===" -git log --pretty=format:"%h %ad %s" --date=relative -3 src/components/Button/index.test.tsx | cat -git log --pretty=format:"%h %ad %s" --date=relative -3 src/components/Button/index.tsx | cat - -echo "=== 根据时间优先级查看修改内容 ===" -# 如果有24小时内的提交,重点查看 -git show HEAD -- src/components/Button/index.tsx | cat -``` +- **24小时内的提交**: 🔴 **高度相关** - 很可能是直接原因 +- **1-7天内的提交**: 🟡 **中等相关** - 需要仔细分析 +- **超过1周的提交**: ⚪ **低相关性** - 除非重大重构 ## 特殊场景的测试 -针对一些特殊场景的测试,需要阅读相关文件: +针对一些特殊场景的测试,需要阅读相关 rules: - [Electron IPC 接口测试策略](mdc:./electron-ipc-test.mdc) - [数据库 Model 测试指南](mdc:./db-model-test.mdc) -## 🎯 总结 - -修复测试时,记住以下关键点: +## 🎯 核心要点 -- **使用正确的命令**: `npx vitest run --config [config-file]` -- **理解测试意图**: 先读懂测试再修复 -- **查看最近修改**: 检查相关文件的 git 修改记录,判断问题根源 -- **选择正确环境**: 客户端测试用 `vitest.config.ts`,服务端用 `vitest.config.server.ts` -- **专注单一问题**: 只修复当前的测试失败 -- **验证修复结果**: 确保修复后测试通过且无副作用 -- **提供修复总结**: 说明错误原因和修复方法 -- **Model 测试安全第一**: 必须包含用户权限检查和对应的安全测试 -- **Model 双环境验证**: 必须在 PGLite 和 PostgreSQL 两个环境下都验证通过 +- **命令格式**: 使用 `npx vitest run --config [config-file]` 并指定文件过滤 +- **修复原则**: 失败1-2次后寻求帮助,测试命名关注行为而非实现细节 +- 
**调试流程**: 复现 → 分析 → 假设 → 修复 → 验证 → 总结 +- **文件组织**: 优先在现有 `describe` 块中添加测试,避免创建冗余顶级块 +- **安全要求**: Model 测试必须包含权限检查,并在双环境下验证通过 diff --git a/src/services/__tests__/chat.test.ts b/src/services/__tests__/chat.test.ts index 7bec8efe45991..c17b1d3ab0e9d 100644 --- a/src/services/__tests__/chat.test.ts +++ b/src/services/__tests__/chat.test.ts @@ -2,7 +2,7 @@ import { LobeChatPluginManifest } from '@lobehub/chat-plugin-sdk'; import { act } from '@testing-library/react'; import { merge } from 'lodash-es'; import OpenAI from 'openai'; -import { beforeEach, describe, expect, it, vi } from 'vitest'; +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; import { DEFAULT_AGENT_CONFIG } from '@/const/settings'; import { @@ -36,7 +36,8 @@ import { modelConfigSelectors } from '@/store/user/selectors'; import { UserSettingsState, initialSettingsState } from '@/store/user/slices/settings/initialState'; import { DalleManifest } from '@/tools/dalle'; import { WebBrowsingManifest } from '@/tools/web-browsing'; -import { ChatMessage } from '@/types/message'; +import { ChatErrorType } from '@/types/fetch'; +import { ChatImageItem, ChatMessage } from '@/types/message'; import { ChatStreamPayload, type OpenAIChatMessage } from '@/types/openai/chat'; import { LobeTool } from '@/types/tool'; @@ -58,15 +59,48 @@ vi.mock('@/utils/fetch', async (importOriginal) => { return { ...(module as any), getMessageError: vi.fn() }; }); -beforeEach(() => { +// Mock image processing utilities +vi.mock('@/utils/url', () => ({ + isLocalUrl: vi.fn(), +})); + +vi.mock('@/utils/imageToBase64', () => ({ + imageUrlToBase64: vi.fn(), +})); + +vi.mock('@/libs/model-runtime/utils/uriParser', () => ({ + parseDataUri: vi.fn(), +})); + +afterEach(() => { + vi.restoreAllMocks(); +}); + +beforeEach(async () => { // 清除所有模块的缓存 vi.resetModules(); + // 默认设置 isServerMode 为 false vi.mock('@/const/version', () => ({ isServerMode: false, isDeprecatedEdition: true, isDesktop: false, })); + + // Reset all 
mocks + vi.clearAllMocks(); + + // Set default mock return values for image processing utilities + const { isLocalUrl } = await import('@/utils/url'); + const { imageUrlToBase64 } = await import('@/utils/imageToBase64'); + const { parseDataUri } = await import('@/libs/model-runtime/utils/uriParser'); + + vi.mocked(parseDataUri).mockReturnValue({ type: 'url', base64: null, mimeType: null }); + vi.mocked(isLocalUrl).mockReturnValue(false); + vi.mocked(imageUrlToBase64).mockResolvedValue({ + base64: 'mock-base64', + mimeType: 'image/jpeg', + }); }); // mock auth @@ -142,6 +176,164 @@ describe('ChatService', () => { ); }); + describe('extendParams functionality', () => { + it('should add reasoning parameters when model supports enableReasoning and user enables it', async () => { + const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); + const messages = [{ content: 'Test reasoning', role: 'user' }] as ChatMessage[]; + + // Mock aiModelSelectors for extend params support + vi.spyOn(aiModelSelectors, 'isModelHasExtendParams').mockReturnValue(() => true); + vi.spyOn(aiModelSelectors, 'modelExtendParams').mockReturnValue(() => ['enableReasoning']); + + // Mock agent chat config with reasoning enabled + vi.spyOn(agentChatConfigSelectors, 'currentChatConfig').mockReturnValue({ + enableReasoning: true, + reasoningBudgetToken: 2048, + searchMode: 'off', + } as any); + + await chatService.createAssistantMessage({ + messages, + model: 'deepseek-reasoner', + provider: 'deepseek', + plugins: [], + }); + + expect(getChatCompletionSpy).toHaveBeenCalledWith( + expect.objectContaining({ + thinking: { + budget_tokens: 2048, + type: 'enabled', + }, + }), + undefined, + ); + }); + + it('should disable reasoning when model supports enableReasoning but user disables it', async () => { + const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); + const messages = [{ content: 'Test no reasoning', role: 'user' }] as ChatMessage[]; + + // Mock aiModelSelectors 
for extend params support + vi.spyOn(aiModelSelectors, 'isModelHasExtendParams').mockReturnValue(() => true); + vi.spyOn(aiModelSelectors, 'modelExtendParams').mockReturnValue(() => ['enableReasoning']); + + // Mock agent chat config with reasoning disabled + vi.spyOn(agentChatConfigSelectors, 'currentChatConfig').mockReturnValue({ + enableReasoning: false, + searchMode: 'off', + } as any); + + await chatService.createAssistantMessage({ + messages, + model: 'deepseek-reasoner', + provider: 'deepseek', + plugins: [], + }); + + expect(getChatCompletionSpy).toHaveBeenCalledWith( + expect.objectContaining({ + thinking: { + budget_tokens: 0, + type: 'disabled', + }, + }), + undefined, + ); + }); + + it('should use default budget when reasoningBudgetToken is not set', async () => { + const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); + const messages = [{ content: 'Test default budget', role: 'user' }] as ChatMessage[]; + + // Mock aiModelSelectors for extend params support + vi.spyOn(aiModelSelectors, 'isModelHasExtendParams').mockReturnValue(() => true); + vi.spyOn(aiModelSelectors, 'modelExtendParams').mockReturnValue(() => ['enableReasoning']); + + // Mock agent chat config with reasoning enabled but no custom budget + vi.spyOn(agentChatConfigSelectors, 'currentChatConfig').mockReturnValue({ + enableReasoning: true, + // reasoningBudgetToken is undefined + searchMode: 'off', + } as any); + + await chatService.createAssistantMessage({ + messages, + model: 'deepseek-reasoner', + provider: 'deepseek', + plugins: [], + }); + + expect(getChatCompletionSpy).toHaveBeenCalledWith( + expect.objectContaining({ + thinking: { + budget_tokens: 1024, // default value + type: 'enabled', + }, + }), + undefined, + ); + }); + + it('should set reasoning_effort when model supports reasoningEffort and user configures it', async () => { + const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); + const messages = [{ content: 'Test reasoning effort', 
role: 'user' }] as ChatMessage[]; + + // Mock aiModelSelectors for extend params support + vi.spyOn(aiModelSelectors, 'isModelHasExtendParams').mockReturnValue(() => true); + vi.spyOn(aiModelSelectors, 'modelExtendParams').mockReturnValue(() => ['reasoningEffort']); + + // Mock agent chat config with reasoning effort set + vi.spyOn(agentChatConfigSelectors, 'currentChatConfig').mockReturnValue({ + reasoningEffort: 'high', + searchMode: 'off', + } as any); + + await chatService.createAssistantMessage({ + messages, + model: 'test-model', + provider: 'test-provider', + plugins: [], + }); + + expect(getChatCompletionSpy).toHaveBeenCalledWith( + expect.objectContaining({ + reasoning_effort: 'high', + }), + undefined, + ); + }); + + it('should set thinkingBudget when model supports thinkingBudget and user configures it', async () => { + const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); + const messages = [{ content: 'Test thinking budget', role: 'user' }] as ChatMessage[]; + + // Mock aiModelSelectors for extend params support + vi.spyOn(aiModelSelectors, 'isModelHasExtendParams').mockReturnValue(() => true); + vi.spyOn(aiModelSelectors, 'modelExtendParams').mockReturnValue(() => ['thinkingBudget']); + + // Mock agent chat config with thinking budget set + vi.spyOn(agentChatConfigSelectors, 'currentChatConfig').mockReturnValue({ + thinkingBudget: 5000, + searchMode: 'off', + } as any); + + await chatService.createAssistantMessage({ + messages, + model: 'test-model', + provider: 'test-provider', + plugins: [], + }); + + expect(getChatCompletionSpy).toHaveBeenCalledWith( + expect.objectContaining({ + thinkingBudget: 5000, + }), + undefined, + ); + }); + }); + describe('should handle content correctly for vision models', () => { it('should include image content when with vision model', async () => { const messages = [ @@ -209,6 +401,263 @@ describe('ChatService', () => { }); }); + describe('local image URL conversion', () => { + it('should convert 
local image URLs to base64 and call processImageList', async () => { + const { isLocalUrl } = await import('@/utils/url'); + const { imageUrlToBase64 } = await import('@/utils/imageToBase64'); + const { parseDataUri } = await import('@/libs/model-runtime/utils/uriParser'); + + // Mock for local URL + vi.mocked(parseDataUri).mockReturnValue({ type: 'url', base64: null, mimeType: null }); + vi.mocked(isLocalUrl).mockReturnValue(true); // This is a local URL + vi.mocked(imageUrlToBase64).mockResolvedValue({ + base64: 'converted-base64-content', + mimeType: 'image/png', + }); + + const messages = [ + { + content: 'Hello', + role: 'user', + imageList: [ + { + id: 'file1', + url: 'http://127.0.0.1:3000/uploads/image.png', // Real local URL + alt: 'local-image.png', + }, + ], + createdAt: Date.now(), + id: 'test-id', + meta: {}, + updatedAt: Date.now(), + }, + ] as ChatMessage[]; + + // Spy on processImageList method + const processImageListSpy = vi.spyOn(chatService as any, 'processImageList'); + const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); + + await chatService.createAssistantMessage({ + messages, + plugins: [], + model: 'gpt-4-vision-preview', + }); + + // Verify processImageList was called with correct arguments + expect(processImageListSpy).toHaveBeenCalledWith({ + imageList: [ + { + id: 'file1', + url: 'http://127.0.0.1:3000/uploads/image.png', + alt: 'local-image.png', + }, + ], + model: 'gpt-4-vision-preview', + provider: undefined, + }); + + // Verify the utility functions were called + expect(parseDataUri).toHaveBeenCalledWith('http://127.0.0.1:3000/uploads/image.png'); + expect(isLocalUrl).toHaveBeenCalledWith('http://127.0.0.1:3000/uploads/image.png'); + expect(imageUrlToBase64).toHaveBeenCalledWith('http://127.0.0.1:3000/uploads/image.png'); + + // Verify the final result contains base64 converted URL + expect(getChatCompletionSpy).toHaveBeenCalledWith( + { + messages: [ + { + content: [ + { + text: 'Hello', + type: 'text', + }, + 
{ + image_url: { + detail: 'auto', + url: 'data:image/png;base64,converted-base64-content', + }, + type: 'image_url', + }, + ], + role: 'user', + }, + ], + model: 'gpt-4-vision-preview', + }, + undefined, + ); + }); + + it('should not convert remote URLs to base64 and call processImageList', async () => { + const { isLocalUrl } = await import('@/utils/url'); + const { imageUrlToBase64 } = await import('@/utils/imageToBase64'); + const { parseDataUri } = await import('@/libs/model-runtime/utils/uriParser'); + + // Mock for remote URL + vi.mocked(parseDataUri).mockReturnValue({ type: 'url', base64: null, mimeType: null }); + vi.mocked(isLocalUrl).mockReturnValue(false); // This is NOT a local URL + vi.mocked(imageUrlToBase64).mockClear(); // Clear to ensure it's not called + + const messages = [ + { + content: 'Hello', + role: 'user', + imageList: [ + { + id: 'file1', + url: 'https://example.com/remote-image.jpg', // Remote URL + alt: 'remote-image.jpg', + }, + ], + createdAt: Date.now(), + id: 'test-id-2', + meta: {}, + updatedAt: Date.now(), + }, + ] as ChatMessage[]; + + // Spy on processImageList method + const processImageListSpy = vi.spyOn(chatService as any, 'processImageList'); + const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); + + await chatService.createAssistantMessage({ + messages, + plugins: [], + model: 'gpt-4-vision-preview', + }); + + // Verify processImageList was called + expect(processImageListSpy).toHaveBeenCalledWith({ + imageList: [ + { + id: 'file1', + url: 'https://example.com/remote-image.jpg', + alt: 'remote-image.jpg', + }, + ], + model: 'gpt-4-vision-preview', + provider: undefined, + }); + + // Verify the utility functions were called + expect(parseDataUri).toHaveBeenCalledWith('https://example.com/remote-image.jpg'); + expect(isLocalUrl).toHaveBeenCalledWith('https://example.com/remote-image.jpg'); + expect(imageUrlToBase64).not.toHaveBeenCalled(); // Should NOT be called for remote URLs + + // Verify the final 
result preserves original URL + expect(getChatCompletionSpy).toHaveBeenCalledWith( + { + messages: [ + { + content: [ + { + text: 'Hello', + type: 'text', + }, + { + image_url: { detail: 'auto', url: 'https://example.com/remote-image.jpg' }, + type: 'image_url', + }, + ], + role: 'user', + }, + ], + model: 'gpt-4-vision-preview', + }, + undefined, + ); + }); + + it('should handle mixed local and remote URLs correctly', async () => { + const { isLocalUrl } = await import('@/utils/url'); + const { imageUrlToBase64 } = await import('@/utils/imageToBase64'); + const { parseDataUri } = await import('@/libs/model-runtime/utils/uriParser'); + + // Mock parseDataUri to always return url type + vi.mocked(parseDataUri).mockReturnValue({ type: 'url', base64: null, mimeType: null }); + + // Mock isLocalUrl to return true only for 127.0.0.1 URLs + vi.mocked(isLocalUrl).mockImplementation((url: string) => { + return new URL(url).hostname === '127.0.0.1'; + }); + + // Mock imageUrlToBase64 for conversion + vi.mocked(imageUrlToBase64).mockResolvedValue({ + base64: 'local-file-base64', + mimeType: 'image/jpeg', + }); + + const messages = [ + { + content: 'Multiple images', + role: 'user', + imageList: [ + { + id: 'local1', + url: 'http://127.0.0.1:3000/local1.jpg', // Local URL + alt: 'local1.jpg', + }, + { + id: 'remote1', + url: 'https://example.com/remote1.png', // Remote URL + alt: 'remote1.png', + }, + { + id: 'local2', + url: 'http://127.0.0.1:8080/local2.gif', // Another local URL + alt: 'local2.gif', + }, + ], + createdAt: Date.now(), + id: 'test-id-3', + meta: {}, + updatedAt: Date.now(), + }, + ] as ChatMessage[]; + + const processImageListSpy = vi.spyOn(chatService as any, 'processImageList'); + const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); + + await chatService.createAssistantMessage({ + messages, + plugins: [], + model: 'gpt-4-vision-preview', + }); + + // Verify processImageList was called + expect(processImageListSpy).toHaveBeenCalledWith({ 
+ imageList: [ + { id: 'local1', url: 'http://127.0.0.1:3000/local1.jpg', alt: 'local1.jpg' }, + { id: 'remote1', url: 'https://example.com/remote1.png', alt: 'remote1.png' }, + { id: 'local2', url: 'http://127.0.0.1:8080/local2.gif', alt: 'local2.gif' }, + ], + model: 'gpt-4-vision-preview', + provider: undefined, + }); + + // Verify isLocalUrl was called for each image + expect(isLocalUrl).toHaveBeenCalledWith('http://127.0.0.1:3000/local1.jpg'); + expect(isLocalUrl).toHaveBeenCalledWith('https://example.com/remote1.png'); + expect(isLocalUrl).toHaveBeenCalledWith('http://127.0.0.1:8080/local2.gif'); + + // Verify imageUrlToBase64 was called only for local URLs + expect(imageUrlToBase64).toHaveBeenCalledWith('http://127.0.0.1:3000/local1.jpg'); + expect(imageUrlToBase64).toHaveBeenCalledWith('http://127.0.0.1:8080/local2.gif'); + expect(imageUrlToBase64).toHaveBeenCalledTimes(2); // Only for local URLs + + // Verify the final result has correct URLs + const callArgs = getChatCompletionSpy.mock.calls[0][0]; + const imageContent = (callArgs.messages?.[0].content as any[])?.filter( + (c) => c.type === 'image_url', + ); + + expect(imageContent).toHaveLength(3); + expect(imageContent[0].image_url.url).toBe('data:image/jpeg;base64,local-file-base64'); // Local converted + expect(imageContent[1].image_url.url).toBe('https://example.com/remote1.png'); // Remote preserved + expect(imageContent[2].image_url.url).toBe('data:image/jpeg;base64,local-file-base64'); // Local converted + }); + }); + describe('with tools messages', () => { it('should inject a tool system role for models with tools', async () => { const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); @@ -607,6 +1056,15 @@ describe('ChatService', () => { }); describe('getChatCompletion', () => { + let mockFetchSSE: any; + + beforeEach(async () => { + // Setup common fetchSSE mock for getChatCompletion tests + const { fetchSSE } = await import('@/utils/fetch'); + mockFetchSSE = 
vi.fn().mockResolvedValue(new Response('mock response')); + vi.mocked(fetchSSE).mockImplementation(mockFetchSSE); + }); + it('should make a POST request with the correct payload', async () => { const params: Partial<ChatStreamPayload> = { model: 'test-model', @@ -622,12 +1080,16 @@ describe('ChatService', () => { await chatService.getChatCompletion(params, options); - expect(global.fetch).toHaveBeenCalledWith(expect.any(String), { - body: JSON.stringify(expectedPayload), - headers: expect.any(Object), - method: 'POST', - }); + expect(mockFetchSSE).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + body: JSON.stringify(expectedPayload), + headers: expect.any(Object), + method: 'POST', + }), + ); }); + it('should make a POST request without response in non-openai provider payload', async () => { const params: Partial<ChatStreamPayload> = { model: 'deepseek-reasoner', @@ -647,52 +1109,52 @@ describe('ChatService', () => { await chatService.getChatCompletion(params, options); - expect(global.fetch).toHaveBeenCalledWith(expect.any(String), { - body: JSON.stringify(expectedPayload), - headers: expect.any(Object), - method: 'POST', - }); + expect(mockFetchSSE).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + body: JSON.stringify(expectedPayload), + headers: expect.any(Object), + method: 'POST', + }), + ); }); - it('should throw InvalidAccessCode error when enableFetchOnClient is true and auth is enabled but user is not signed in', async () => { - // Mock userStore - const mockUserStore = { - enableAuth: () => true, - isSignedIn: false, - }; - - // Mock modelConfigSelectors - const mockModelConfigSelectors = { - isProviderFetchOnClient: () => () => true, - }; + it('should return InvalidAccessCode error when enableFetchOnClient is true and auth is enabled but user is not signed in', async () => { + // Mock fetchSSE to call onErrorHandle with the error + const { fetchSSE } = await import('@/utils/fetch'); + + const 
mockFetchSSEWithError = vi.fn().mockImplementation((url, options) => { + // Simulate the error being caught and passed to onErrorHandle + if (options.onErrorHandle) { + const error = { + errorType: ChatErrorType.InvalidAccessCode, + error: new Error('InvalidAccessCode'), + }; + options.onErrorHandle(error, { errorType: ChatErrorType.InvalidAccessCode }); + } + return Promise.resolve(new Response('')); + }); - vi.spyOn(useUserStore, 'getState').mockImplementationOnce(() => mockUserStore as any); - vi.spyOn(modelConfigSelectors, 'isProviderFetchOnClient').mockImplementationOnce( - mockModelConfigSelectors.isProviderFetchOnClient, - ); + vi.mocked(fetchSSE).mockImplementation(mockFetchSSEWithError); const params: Partial<ChatStreamPayload> = { model: 'test-model', messages: [], + provider: 'openai', }; - const options = {}; - const expectedPayload = { - model: DEFAULT_AGENT_CONFIG.model, - stream: true, - ...DEFAULT_AGENT_CONFIG.params, - ...params, - }; - - const result = await chatService.getChatCompletion(params, options); - expect(global.fetch).toHaveBeenCalledWith(expect.any(String), { - body: JSON.stringify(expectedPayload), - headers: expect.objectContaining({ - 'Content-Type': 'application/json', - }), - method: 'POST', + let errorHandled = false; + const onErrorHandle = vi.fn((error: any) => { + errorHandled = true; + expect(error.errorType).toBe(ChatErrorType.InvalidAccessCode); }); - expect(result.status).toBe(401); + + // Call getChatCompletion with onErrorHandle to catch the error + await chatService.getChatCompletion(params, { onErrorHandle }); + + // Verify that the error was handled + expect(errorHandled).toBe(true); + expect(onErrorHandle).toHaveBeenCalled(); }); // Add more test cases to cover different scenarios and edge cases @@ -717,10 +1179,29 @@ describe('ChatService', () => { describe('fetchPresetTaskResult', () => { it('should handle successful chat completion response', async () => { - // 模拟 fetch 抛出错误的情况 - 
vi.mocked(fetch).mockResolvedValueOnce(new Response('AI response')); + // Mock getChatCompletion to simulate successful completion + const getChatCompletionSpy = vi + .spyOn(chatService, 'getChatCompletion') + .mockImplementation(async (params, options) => { + // Simulate successful response + if (options?.onFinish) { + options.onFinish('AI response', { + type: 'done', + observationId: null, + toolCalls: undefined, + traceId: null, + }); + } + if (options?.onMessageHandle) { + options.onMessageHandle({ type: 'text', text: 'AI response' }); + } + return Promise.resolve(new Response('')); + }); + const params = { - /* 填充参数 */ + messages: [{ content: 'Hello', role: 'user' as const }], + model: 'gpt-4', + provider: 'openai', }; const onMessageHandle = vi.fn(); @@ -748,25 +1229,31 @@ describe('ChatService', () => { }); expect(onError).not.toHaveBeenCalled(); expect(onMessageHandle).toHaveBeenCalled(); - expect(onLoadingChange).toHaveBeenCalledWith(false); // 确认加载状态已经被设置为 false + expect(onLoadingChange).toHaveBeenCalledWith(false); // Confirm loading state is set to false expect(onLoadingChange).toHaveBeenCalledTimes(2); }); it('should handle error in chat completion', async () => { - // 模拟 fetch 抛出错误的情况 - vi.mocked(fetch).mockResolvedValueOnce( - new Response(null, { status: 404, statusText: 'Not Found' }), - ); + // Mock getChatCompletion to simulate error + const getChatCompletionSpy = vi + .spyOn(chatService, 'getChatCompletion') + .mockImplementation(async (params, options) => { + // Simulate error response + if (options?.onErrorHandle) { + options.onErrorHandle({ message: 'translated_response.404', type: 404 }); + } + return Promise.resolve(new Response('')); + }); const params = { - /* 填充参数 */ + messages: [{ content: 'Hello', role: 'user' as const }], + model: 'gpt-4', + provider: 'openai', }; const onError = vi.fn(); const onLoadingChange = vi.fn(); const abortController = new AbortController(); - const trace = { - /* 填充跟踪信息 */ - }; + const trace = {}; await 
chatService.fetchPresetTaskResult({ params, @@ -780,7 +1267,7 @@ describe('ChatService', () => { message: 'translated_response.404', type: 404, }); - expect(onLoadingChange).toHaveBeenCalledWith(false); // 确认加载状态已经被设置为 false + expect(onLoadingChange).toHaveBeenCalledWith(false); // Confirm loading state is set to false }); }); @@ -910,6 +1397,18 @@ describe('ChatService', () => { // 需要在修改模拟后重新导入相关模块 const { chatService } = await import('../chat'); + // Mock processImageList to return expected image content + const processImageListSpy = vi.spyOn(chatService as any, 'processImageList'); + processImageListSpy.mockImplementation(async () => { + // Mock the expected return value for an image + return [ + { + image_url: { detail: 'auto', url: 'http://example.com/xxx0asd-dsd.png' }, + type: 'image_url', + }, + ]; + }); + const messages = [ { content: 'Hello', @@ -941,7 +1440,7 @@ describe('ChatService', () => { { content: 'Hey', role: 'assistant' }, // Regular user message ] as ChatMessage[]; - const output = chatService['processMessages']({ + const output = await chatService['processMessages']({ messages, model: 'gpt-4o', provider: 'openai', @@ -1062,7 +1561,7 @@ describe('ChatService', () => { }); }); - it('should handle empty tool calls messages correctly', () => { + it('should handle empty tool calls messages correctly', async () => { const messages = [ { content: '## Tools\n\nYou can use these tools', @@ -1075,7 +1574,7 @@ describe('ChatService', () => { }, ] as ChatMessage[]; - const result = chatService['processMessages']({ + const result = await chatService['processMessages']({ messages, model: 'gpt-4', provider: 'openai', @@ -1093,7 +1592,7 @@ describe('ChatService', () => { ]); }); - it('should handle assistant messages with reasoning correctly', () => { + it('should handle assistant messages with reasoning correctly', async () => { const messages = [ { role: 'assistant', @@ -1105,7 +1604,7 @@ describe('ChatService', () => { }, ] as ChatMessage[]; - const result 
= chatService['processMessages']({ + const result = await chatService['processMessages']({ messages, model: 'gpt-4', provider: 'openai', @@ -1128,6 +1627,70 @@ describe('ChatService', () => { }, ]); }); + + it('should inject INBOX_GUIDE_SYSTEMROLE for welcome questions in inbox session', async () => { + // Don't mock INBOX_GUIDE_SYSTEMROLE, use the real one + const messages: ChatMessage[] = [ + { + role: 'user', + content: 'Hello, this is my first question', + createdAt: Date.now(), + id: 'test-welcome', + meta: {}, + updatedAt: Date.now(), + }, + ]; + + const result = await chatService['processMessages']( + { + messages, + model: 'gpt-4', + provider: 'openai', + }, + { + isWelcomeQuestion: true, + trace: { sessionId: 'inbox' }, + }, + ); + + // Should have system message with inbox guide content + const systemMessage = result.find((msg) => msg.role === 'system'); + expect(systemMessage).toBeDefined(); + // Check for characteristic content of the actual INBOX_GUIDE_SYSTEMROLE + expect(systemMessage!.content).toContain('LobeChat Support Assistant'); + expect(systemMessage!.content).toContain('LobeHub'); + }); + + it('should inject historySummary into system message when provided', async () => { + const historySummary = 'Previous conversation summary: User discussed AI topics.'; + + const messages: ChatMessage[] = [ + { + role: 'user', + content: 'Continue our discussion', + createdAt: Date.now(), + id: 'test-history', + meta: {}, + updatedAt: Date.now(), + }, + ]; + + const result = await chatService['processMessages']( + { + messages, + model: 'gpt-4', + provider: 'openai', + }, + { + historySummary, + }, + ); + + // Should have system message with history summary + const systemMessage = result.find((msg) => msg.role === 'system'); + expect(systemMessage).toBeDefined(); + expect(systemMessage!.content).toContain(historySummary); + }); }); }); @@ -1139,6 +1702,379 @@ vi.mock('../_auth', async (importOriginal) => { return importOriginal(); }); +describe('ChatService 
private methods', () => { + describe('processImageList', () => { + beforeEach(() => { + vi.resetModules(); + }); + + it('should return empty array if model cannot use vision (non-deprecated)', async () => { + vi.doMock('@/const/version', () => ({ + isServerMode: false, + isDeprecatedEdition: false, + isDesktop: false, + })); + const { aiModelSelectors } = await import('@/store/aiInfra'); + vi.spyOn(aiModelSelectors, 'isModelSupportVision').mockReturnValue(() => false); + + const { chatService } = await import('../chat'); + const result = await chatService['processImageList']({ + imageList: [{ url: 'image_url', alt: '', id: 'test' } as ChatImageItem], + model: 'any-model', + provider: 'any-provider', + }); + expect(result).toEqual([]); + }); + + it('should process images if model can use vision (non-deprecated)', async () => { + vi.doMock('@/const/version', () => ({ + isServerMode: false, + isDeprecatedEdition: false, + isDesktop: false, + })); + const { aiModelSelectors } = await import('@/store/aiInfra'); + vi.spyOn(aiModelSelectors, 'isModelSupportVision').mockReturnValue(() => true); + + const { chatService } = await import('../chat'); + const result = await chatService['processImageList']({ + imageList: [{ url: 'image_url', alt: '', id: 'test' } as ChatImageItem], + model: 'any-model', + provider: 'any-provider', + }); + expect(result.length).toBe(1); + expect(result[0].type).toBe('image_url'); + }); + + it('should return empty array when vision disabled in deprecated edition', async () => { + vi.doMock('@/const/version', () => ({ + isServerMode: false, + isDeprecatedEdition: true, + isDesktop: false, + })); + + const { modelProviderSelectors } = await import('@/store/user/selectors'); + const spy = vi + .spyOn(modelProviderSelectors, 'isModelEnabledVision') + .mockReturnValue(() => false); + + const { chatService } = await import('../chat'); + const result = await chatService['processImageList']({ + imageList: [{ url: 'image_url', alt: '', id: 'test' } as 
ChatImageItem], + model: 'any-model', + provider: 'any-provider', + }); + + expect(spy).toHaveBeenCalled(); + expect(result).toEqual([]); + }); + + it('should process images when vision enabled in deprecated edition', async () => { + vi.doMock('@/const/version', () => ({ + isServerMode: false, + isDeprecatedEdition: true, + isDesktop: false, + })); + + const { modelProviderSelectors } = await import('@/store/user/selectors'); + const spy = vi + .spyOn(modelProviderSelectors, 'isModelEnabledVision') + .mockReturnValue(() => true); + + const { chatService } = await import('../chat'); + const result = await chatService['processImageList']({ + imageList: [{ url: 'image_url' } as ChatImageItem], + model: 'any-model', + provider: 'any-provider', + }); + + expect(spy).toHaveBeenCalled(); + expect(result.length).toBe(1); + expect(result[0].type).toBe('image_url'); + }); + }); + + describe('processMessages', () => { + describe('getAssistantContent', () => { + it('should handle assistant message with imageList and content', async () => { + const messages: ChatMessage[] = [ + { + role: 'assistant', + content: 'Here is an image.', + imageList: [{ id: 'img1', url: 'http://example.com/image.png', alt: 'test.png' }], + createdAt: Date.now(), + id: 'test-id', + meta: {}, + updatedAt: Date.now(), + }, + ]; + const result = await chatService['processMessages']({ + messages, + model: 'gpt-4-vision-preview', + provider: 'openai', + }); + + expect(result[0].content).toEqual([ + { text: 'Here is an image.', type: 'text' }, + { image_url: { detail: 'auto', url: 'http://example.com/image.png' }, type: 'image_url' }, + ]); + }); + + it('should handle assistant message with imageList but no content', async () => { + const messages: ChatMessage[] = [ + { + role: 'assistant', + content: '', + imageList: [{ id: 'img1', url: 'http://example.com/image.png', alt: 'test.png' }], + createdAt: Date.now(), + id: 'test-id-2', + meta: {}, + updatedAt: Date.now(), + }, + ]; + const result = await 
chatService['processMessages']({ + messages, + model: 'gpt-4-vision-preview', + provider: 'openai', + }); + + expect(result[0].content).toEqual([ + { image_url: { detail: 'auto', url: 'http://example.com/image.png' }, type: 'image_url' }, + ]); + }); + }); + + it('should not include tool_calls for assistant message if model does not support tools', async () => { + // Mock isCanUseFC to return false + vi.spyOn( + (await import('@/store/aiInfra')).aiModelSelectors, + 'isModelSupportToolUse', + ).mockReturnValue(() => false); + + const messages: ChatMessage[] = [ + { + role: 'assistant', + content: 'I have a tool call.', + tools: [ + { + id: 'tool_123', + type: 'default', + apiName: 'testApi', + arguments: '{}', + identifier: 'test-plugin', + }, + ], + createdAt: Date.now(), + id: 'test-id-3', + meta: {}, + updatedAt: Date.now(), + }, + ]; + + const result = await chatService['processMessages']({ + messages, + model: 'some-model-without-fc', + provider: 'openai', + }); + + expect(result[0].tool_calls).toBeUndefined(); + expect(result[0].content).toBe('I have a tool call.'); + }); + }); + + describe('reorderToolMessages', () => { + it('should correctly reorder when a tool message appears before the assistant message', () => { + const input: OpenAIChatMessage[] = [ + { + role: 'system', + content: 'System message', + }, + { + role: 'tool', + tool_call_id: 'tool_call_1', + name: 'test-plugin____testApi', + content: 'Tool result', + }, + { + role: 'assistant', + content: '', + tool_calls: [ + { id: 'tool_call_1', type: 'function', function: { name: 'testApi', arguments: '{}' } }, + ], + }, + ]; + + const output = chatService['reorderToolMessages'](input); + + // Verify reordering logic works and covers line 688 hasPushed check + // In this test, tool messages are duplicated but the second occurrence is skipped + expect(output.length).toBe(4); // Original has 3, assistant will add corresponding tool message again + expect(output[0].role).toBe('system'); + 
expect(output[1].role).toBe('tool'); + expect(output[2].role).toBe('assistant'); + expect(output[3].role).toBe('tool'); // Tool message added by assistant's tool_calls + }); + }); + + describe('getChatCompletion', () => { + it('should merge responseAnimation styles correctly', async () => { + const { fetchSSE } = await import('@/utils/fetch'); + vi.mock('@/utils/fetch', async (importOriginal) => { + const module = await importOriginal(); + return { + ...(module as any), + fetchSSE: vi.fn(), + }; + }); + + // Mock provider config + const { aiProviderSelectors } = await import('@/store/aiInfra'); + vi.spyOn(aiProviderSelectors, 'providerConfigById').mockReturnValue({ + id: 'openai', + settings: { + responseAnimation: 'slow', + }, + } as any); + + // Mock user preference + const { userGeneralSettingsSelectors } = await import('@/store/user/selectors'); + vi.spyOn(userGeneralSettingsSelectors, 'transitionMode').mockReturnValue('smooth'); + + await chatService.getChatCompletion( + { provider: 'openai', messages: [] }, + { responseAnimation: { speed: 20 } }, + ); + + expect(fetchSSE).toHaveBeenCalled(); + const fetchSSEOptions = (fetchSSE as any).mock.calls[0][1]; + + expect(fetchSSEOptions.responseAnimation).toEqual({ + speed: 20, + text: 'fadeIn', + toolsCalling: 'fadeIn', + }); + }); + }); + + describe('extendParams', () => { + it('should set enabledContextCaching to false when model supports disableContextCaching and user enables it', async () => { + const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); + const messages = [{ content: 'Test context caching', role: 'user' }] as ChatMessage[]; + + // Mock aiModelSelectors for extend params support + vi.spyOn(aiModelSelectors, 'isModelHasExtendParams').mockReturnValue(() => true); + vi.spyOn(aiModelSelectors, 'modelExtendParams').mockReturnValue(() => [ + 'disableContextCaching', + ]); + + // Mock agent chat config with context caching disabled + vi.spyOn(agentChatConfigSelectors, 
'currentChatConfig').mockReturnValue({ + disableContextCaching: true, + searchMode: 'off', + } as any); + + await chatService.createAssistantMessage({ + messages, + model: 'test-model', + provider: 'test-provider', + plugins: [], + }); + + expect(getChatCompletionSpy).toHaveBeenCalledWith( + expect.objectContaining({ + enabledContextCaching: false, + }), + undefined, + ); + }); + + it('should not set enabledContextCaching when disableContextCaching is false', async () => { + const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); + const messages = [{ content: 'Test context caching enabled', role: 'user' }] as ChatMessage[]; + + // Mock aiModelSelectors for extend params support + vi.spyOn(aiModelSelectors, 'isModelHasExtendParams').mockReturnValue(() => true); + vi.spyOn(aiModelSelectors, 'modelExtendParams').mockReturnValue(() => [ + 'disableContextCaching', + ]); + + // Mock agent chat config with context caching enabled (default) + vi.spyOn(agentChatConfigSelectors, 'currentChatConfig').mockReturnValue({ + disableContextCaching: false, + searchMode: 'off', + } as any); + + await chatService.createAssistantMessage({ + messages, + model: 'test-model', + provider: 'test-provider', + plugins: [], + }); + + // enabledContextCaching should not be present in the call + const callArgs = getChatCompletionSpy.mock.calls[0][0]; + expect(callArgs).not.toHaveProperty('enabledContextCaching'); + }); + + it('should set reasoning_effort when model supports reasoningEffort and user configures it', async () => { + const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); + const messages = [{ content: 'Test reasoning effort', role: 'user' }] as ChatMessage[]; + + // Mock aiModelSelectors for extend params support + vi.spyOn(aiModelSelectors, 'isModelHasExtendParams').mockReturnValue(() => true); + vi.spyOn(aiModelSelectors, 'modelExtendParams').mockReturnValue(() => ['reasoningEffort']); + + // Mock agent chat config with reasoning effort set + 
vi.spyOn(agentChatConfigSelectors, 'currentChatConfig').mockReturnValue({ + reasoningEffort: 'high', + searchMode: 'off', + } as any); + + await chatService.createAssistantMessage({ + messages, + model: 'test-model', + provider: 'test-provider', + plugins: [], + }); + + expect(getChatCompletionSpy).toHaveBeenCalledWith( + expect.objectContaining({ + reasoning_effort: 'high', + }), + undefined, + ); + }); + + it('should set thinkingBudget when model supports thinkingBudget and user configures it', async () => { + const getChatCompletionSpy = vi.spyOn(chatService, 'getChatCompletion'); + const messages = [{ content: 'Test thinking budget', role: 'user' }] as ChatMessage[]; + + // Mock aiModelSelectors for extend params support + vi.spyOn(aiModelSelectors, 'isModelHasExtendParams').mockReturnValue(() => true); + vi.spyOn(aiModelSelectors, 'modelExtendParams').mockReturnValue(() => ['thinkingBudget']); + + // Mock agent chat config with thinking budget set + vi.spyOn(agentChatConfigSelectors, 'currentChatConfig').mockReturnValue({ + thinkingBudget: 5000, + searchMode: 'off', + } as any); + + await chatService.createAssistantMessage({ + messages, + model: 'test-model', + provider: 'test-provider', + plugins: [], + }); + + expect(getChatCompletionSpy).toHaveBeenCalledWith( + expect.objectContaining({ + thinkingBudget: 5000, + }), + undefined, + ); + }); + }); +}); + describe('AgentRuntimeOnClient', () => { describe('initializeWithClientStore', () => { describe('should initialize with options correctly', () => { diff --git a/src/utils/url.test.ts b/src/utils/url.test.ts index 26103fc62af8e..0ac056155aefd 100644 --- a/src/utils/url.test.ts +++ b/src/utils/url.test.ts @@ -1,7 +1,7 @@ import { vi } from 'vitest'; import { pathString } from './url'; -import { inferContentTypeFromImageUrl, inferFileExtensionFromImageUrl } from './url'; +import { inferContentTypeFromImageUrl, inferFileExtensionFromImageUrl, isLocalUrl } from './url'; describe('pathString', () => { it('should 
handle basic path', () => { @@ -398,3 +398,44 @@ describe('inferFileExtensionFromImageUrl', () => { expect(result).toBe('gif'); }); }); + +describe('isLocalUrl', () => { + it('should return true for URLs with 127.0.0.1 as hostname', () => { + expect(isLocalUrl('http://127.0.0.1')).toBe(true); + expect(isLocalUrl('https://127.0.0.1')).toBe(true); + expect(isLocalUrl('http://127.0.0.1:8080')).toBe(true); + expect(isLocalUrl('http://127.0.0.1/path/to/resource')).toBe(true); + expect(isLocalUrl('https://127.0.0.1/path?query=1#hash')).toBe(true); + }); + + it('should return false for URLs with "localhost" as hostname', () => { + expect(isLocalUrl('http://localhost')).toBe(false); + expect(isLocalUrl('http://localhost:3000')).toBe(false); + }); + + it('should return false for other IP addresses', () => { + expect(isLocalUrl('http://192.168.1.1')).toBe(false); + expect(isLocalUrl('http://0.0.0.0')).toBe(false); + expect(isLocalUrl('http://127.0.0.2')).toBe(false); + }); + + it('should return false for domain names', () => { + expect(isLocalUrl('https://example.com')).toBe(false); + expect(isLocalUrl('http://www.google.com')).toBe(false); + }); + + it('should return false for malformed URLs', () => { + expect(isLocalUrl('invalid-url')).toBe(false); + expect(isLocalUrl('http://')).toBe(false); + expect(isLocalUrl('a string but not a url')).toBe(false); + }); + + it('should return false for empty or nullish strings', () => { + expect(isLocalUrl('')).toBe(false); + }); + + it('should return false for relative URLs', () => { + expect(isLocalUrl('/path/to/file')).toBe(false); + expect(isLocalUrl('./relative/path')).toBe(false); + }); +});
windows客户端无法发送图片(之前没有这个bug,今天早上更新后出现) ### 📦 部署环境 Other ### 📦 部署模式 客户端模式(lobe-chat 镜像) ### 📌 软件版本 1.99.0 ### 💻 系统环境 Other, Windows ### 🌐 浏览器 Other ### 🐛 问题描述 windows客户端无法发送图片(之前没有这个bug,今天早上更新后出现) { "error": { "code": "convert_request_failed", "type": "new_api_error", "param": "", "message": "请求上游地址失败 (request id: 2025071509341923668639pu6aiKrS)" }, "endpoint": "https://llmapi.***.dev/v1", "provider": "openai" } ### 📷 复现步骤 _No response_ ### 🚦 期望结果 _No response_ ### 📝 补充信息 _No response_
👀 @187370 Thank you for raising an issue. We will investigate into the matter and get back to you as soon as possible. Please make sure you have given us as much context as possible.\ 非常感谢您提交 issue。我们会尽快调查此事,并尽快回复您。 请确保您已经提供了尽可能多的背景信息。 { "error": { "code": "image_url_invalid", "type": "system_error", "message": "请求上游地址失败 (request id: 2025071509371675051489403xbLmyX)" }, "endpoint": "http://10.129.***.218:****/v1", "provider": "openai" } > Bot detected the issue body's language is not English, translate it automatically. 👯👭🏻🧑‍🤝‍🧑👫🧑🏿‍🤝‍🧑🏻👩🏾‍🤝‍👨🏿👬🏿 ---- { "error": { "code": "image_url_invalid", "type": "system_error", "message": "Request upstream address failed (request id: 2025071509371675051489403xbLmyX)" }, "endpoint": "http://10.129.***.218:****/v1", "provider": "openai" } <!-- Answer --> 这个问题是由于 1.99.0 版本更新后,后端加强了 SSRF(服务端请求伪造)防护,导致部分上游 API(比如 OpenAI 或自定义 llmapi)请求被拦截,从而图片发送失败,报错“请求上游地址失败”[相关代码说明](https://github.com/lobehub/lobe-chat/blob/7eb4a7f1731ca46b5f8121fa88f189f9aac5c5c1/src/app/(backend)/webapi/proxy/route.ts)。 解决方法: 1. 检查你的部署环境变量,确保 SSRF 相关配置允许访问你的上游 API 地址。常用变量有: - `SSRF_ALLOW_IP_ADDRESS_LIST`:允许的 IP 地址白名单(逗号分隔)。 - `SSRF_ALLOW_PRIVATE_IP_ADDRESS=1`:如你的上游 API 是内网地址,需设置为 1 允许访问私有 IP。 2. 修改后重启 lobe-chat 容器或后端服务。 3. 如果依然无法发送,建议检查后端日志,确认是否有 SSRF 拦截或 proxy 相关报错。 如果你用的是 v1 client 镜像,且主要需求是图片生成/发送,建议尝试 pglite 版本,client v1 模式下图片相关功能有已知兼容性问题[参考](https://github.com/lobehub/lobe-chat/issues/7015)。 如还有问题,欢迎补充更多日志或报错信息。 <!-- Dosu Comment Footer --> *To reply, just mention [@dosu](https://go.dosu.dev/dosubot).* --- <sup>How did I do? 
[Good](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=great_response) | [Irrelevant](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=irrelevant_answer) | [Incorrect](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=incorrect_sources) | [Verbose](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=too_verbose) | [Hallucination](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=hallucination) | [Report 🐛](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=bug_report) | [Other](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=other)</sup>&nbsp;&nbsp;[![Join Discord](https://img.shields.io/badge/join-5865F2?logo=discord&logoColor=white&label=)](https://go.dosu.dev/discord-bot)&nbsp;[![Share on X](https://img.shields.io/badge/X-share-black)](https://twitter.com/intent/tweet?text=%40dosu_ai%20helped%20me%20solve%20this%20issue!&url=https%3A//github.com/lobehub/lobe-chat/issues/8437) > 这个问题是由于 1.99.0 版本更新后,后端加强了 SSRF(服务端请求伪造)防护,导致部分上游 API(比如 OpenAI 或自定义 llmapi)请求被拦截,从而图片发送失败,报错“请求上游地址失败”[相关代码说明](https://github.com/lobehub/lobe-chat/blob/7eb4a7f1731ca46b5f8121fa88f189f9aac5c5c1/src/app/(backend)/webapi/proxy/route.ts)。 > > 解决方法: > > 1. 检查你的部署环境变量,确保 SSRF 相关配置允许访问你的上游 API 地址。常用变量有: > > * `SSRF_ALLOW_IP_ADDRESS_LIST`:允许的 IP 地址白名单(逗号分隔)。 > * `SSRF_ALLOW_PRIVATE_IP_ADDRESS=1`:如你的上游 API 是内网地址,需设置为 1 允许访问私有 IP。 > 2. 修改后重启 lobe-chat 容器或后端服务。 > 3. 如果依然无法发送,建议检查后端日志,确认是否有 SSRF 拦截或 proxy 相关报错。 > > 如果你用的是 v1 client 镜像,且主要需求是图片生成/发送,建议尝试 pglite 版本,client v1 模式下图片相关功能有已知兼容性问题[参考](https://github.com/lobehub/lobe-chat/issues/7015)。 > > 如还有问题,欢迎补充更多日志或报错信息。 > > _To reply, just mention [@dosu](https://go.dosu.dev/dosubot)._ > > How did I do? 
[Good](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=great_response) | [Irrelevant](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=irrelevant_answer) | [Incorrect](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=incorrect_sources) | [Verbose](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=too_verbose) | [Hallucination](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=hallucination) | [Report 🐛](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=bug_report) | [Other](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=other)  [![Join Discord](https://camo.githubusercontent.com/d562c2df5e4567ea41d37f1d91a4da0839d8341037045b103b2da268998de17f/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f6a6f696e2d3538363546323f6c6f676f3d646973636f7264266c6f676f436f6c6f723d7768697465266c6162656c3d)](https://go.dosu.dev/discord-bot) [![Share on X](https://camo.githubusercontent.com/4649f705e2396d6f328dbc8e800a0509c9c17b3b20253b36706c393df124b93d/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f582d73686172652d626c61636b)](https://twitter.com/intent/tweet?text=%40dosu_ai%20helped%20me%20solve%20this%20issue!&url=https%3A//github.com/lobehub/lobe-chat/issues/8437) 显然不行哈,我是客户端部署,我咋改 我们排查下 > Bot detected the issue body's language is not English, translate it automatically. 👯👭🏻🧑‍🤝‍🧑👫🧑🏿‍🤝‍🧑🏻👩🏾‍🤝‍👨🏿👬🏿 ---- Let's check > 我们排查下 1.99.2还是同样的问题 > Bot detected the issue body's language is not English, translate it automatically. 
👯👭🏻🧑‍🤝‍🧑👫🧑🏿‍🤝‍🧑🏻👩🏾‍🤝‍👨🏿👬🏿 ---- > Let's check 1.99.2 is still the same problem @187370 你是 local db 模式吧 <img width="1440" height="801" alt="Image" src="https://github.com/user-attachments/assets/e90636a3-af83-47a2-beb1-2528182c02b4" /> > @187370 你是 local db 模式吧 > > <img width="1440" height="801" alt="Image" src="https://github.com/user-attachments/assets/e90636a3-af83-47a2-beb1-2528182c02b4" /> 没错 我已经知道问题,今天晚些时间会修复 > Bot detected the issue body's language is not English, translate it automatically. 👯👭🏻🧑‍🤝‍🧑👫🧑🏿‍🤝‍🧑🏻👩🏾‍🤝‍👨🏿👬🏿 ---- I already know the problem, it will be fixed later today ✅ @187370 This issue is closed, If you have any questions, you can comment and reply.\ 此问题已经关闭。如果您有任何问题,可以留言并回复。
👀 @187370 Thank you for raising an issue. We will investigate into the matter and get back to you as soon as possible. Please make sure you have given us as much context as possible.\ 非常感谢您提交 issue。我们会尽快调查此事,并尽快回复您。 请确保您已经提供了尽可能多的背景信息。 { "error": { "code": "image_url_invalid", "type": "system_error", "message": "请求上游地址失败 (request id: 2025071509371675051489403xbLmyX)" }, "endpoint": "http://10.129.***.218:****/v1", "provider": "openai" } > Bot detected the issue body's language is not English, translate it automatically. 👯👭🏻🧑‍🤝‍🧑👫🧑🏿‍🤝‍🧑🏻👩🏾‍🤝‍👨🏿👬🏿 ---- { "error": { "code": "image_url_invalid", "type": "system_error", "message": "Request upstream address failed (request id: 2025071509371675051489403xbLmyX)" }, "endpoint": "http://10.129.***.218:****/v1", "provider": "openai" } <!-- Answer --> 这个问题是由于 1.99.0 版本更新后,后端加强了 SSRF(服务端请求伪造)防护,导致部分上游 API(比如 OpenAI 或自定义 llmapi)请求被拦截,从而图片发送失败,报错“请求上游地址失败”[相关代码说明](https://github.com/lobehub/lobe-chat/blob/7eb4a7f1731ca46b5f8121fa88f189f9aac5c5c1/src/app/(backend)/webapi/proxy/route.ts)。 解决方法: 1. 检查你的部署环境变量,确保 SSRF 相关配置允许访问你的上游 API 地址。常用变量有: - `SSRF_ALLOW_IP_ADDRESS_LIST`:允许的 IP 地址白名单(逗号分隔)。 - `SSRF_ALLOW_PRIVATE_IP_ADDRESS=1`:如你的上游 API 是内网地址,需设置为 1 允许访问私有 IP。 2. 修改后重启 lobe-chat 容器或后端服务。 3. 如果依然无法发送,建议检查后端日志,确认是否有 SSRF 拦截或 proxy 相关报错。 如果你用的是 v1 client 镜像,且主要需求是图片生成/发送,建议尝试 pglite 版本,client v1 模式下图片相关功能有已知兼容性问题[参考](https://github.com/lobehub/lobe-chat/issues/7015)。 如还有问题,欢迎补充更多日志或报错信息。 <!-- Dosu Comment Footer --> *To reply, just mention [@dosu](https://go.dosu.dev/dosubot).* --- <sup>How did I do? 
[Good](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=great_response) | [Irrelevant](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=irrelevant_answer) | [Incorrect](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=incorrect_sources) | [Verbose](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=too_verbose) | [Hallucination](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=hallucination) | [Report 🐛](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=bug_report) | [Other](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=other)</sup>&nbsp;&nbsp;[![Join Discord](https://img.shields.io/badge/join-5865F2?logo=discord&logoColor=white&label=)](https://go.dosu.dev/discord-bot)&nbsp;[![Share on X](https://img.shields.io/badge/X-share-black)](https://twitter.com/intent/tweet?text=%40dosu_ai%20helped%20me%20solve%20this%20issue!&url=https%3A//github.com/lobehub/lobe-chat/issues/8437) > 这个问题是由于 1.99.0 版本更新后,后端加强了 SSRF(服务端请求伪造)防护,导致部分上游 API(比如 OpenAI 或自定义 llmapi)请求被拦截,从而图片发送失败,报错“请求上游地址失败”[相关代码说明](https://github.com/lobehub/lobe-chat/blob/7eb4a7f1731ca46b5f8121fa88f189f9aac5c5c1/src/app/(backend)/webapi/proxy/route.ts)。 > > 解决方法: > > 1. 检查你的部署环境变量,确保 SSRF 相关配置允许访问你的上游 API 地址。常用变量有: > > * `SSRF_ALLOW_IP_ADDRESS_LIST`:允许的 IP 地址白名单(逗号分隔)。 > * `SSRF_ALLOW_PRIVATE_IP_ADDRESS=1`:如你的上游 API 是内网地址,需设置为 1 允许访问私有 IP。 > 2. 修改后重启 lobe-chat 容器或后端服务。 > 3. 如果依然无法发送,建议检查后端日志,确认是否有 SSRF 拦截或 proxy 相关报错。 > > 如果你用的是 v1 client 镜像,且主要需求是图片生成/发送,建议尝试 pglite 版本,client v1 模式下图片相关功能有已知兼容性问题[参考](https://github.com/lobehub/lobe-chat/issues/7015)。 > > 如还有问题,欢迎补充更多日志或报错信息。 > > _To reply, just mention [@dosu](https://go.dosu.dev/dosubot)._ > > How did I do? 
[Good](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=great_response) | [Irrelevant](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=irrelevant_answer) | [Incorrect](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=incorrect_sources) | [Verbose](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=too_verbose) | [Hallucination](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=hallucination) | [Report 🐛](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=bug_report) | [Other](https://app.dosu.dev/response-feedback/357d25de-ae6d-4523-9894-c7850cc640e0?feedback_type=other)  [![Join Discord](https://camo.githubusercontent.com/d562c2df5e4567ea41d37f1d91a4da0839d8341037045b103b2da268998de17f/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f6a6f696e2d3538363546323f6c6f676f3d646973636f7264266c6f676f436f6c6f723d7768697465266c6162656c3d)](https://go.dosu.dev/discord-bot) [![Share on X](https://camo.githubusercontent.com/4649f705e2396d6f328dbc8e800a0509c9c17b3b20253b36706c393df124b93d/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f582d73686172652d626c61636b)](https://twitter.com/intent/tweet?text=%40dosu_ai%20helped%20me%20solve%20this%20issue!&url=https%3A//github.com/lobehub/lobe-chat/issues/8437) 显然不行哈,我是客户端部署,我咋改 我们排查下 > Bot detected the issue body's language is not English, translate it automatically. 👯👭🏻🧑‍🤝‍🧑👫🧑🏿‍🤝‍🧑🏻👩🏾‍🤝‍👨🏿👬🏿 ---- Let's check > 我们排查下 1.99.2还是同样的问题 > Bot detected the issue body's language is not English, translate it automatically. 
👯👭🏻🧑‍🤝‍🧑👫🧑🏿‍🤝‍🧑🏻👩🏾‍🤝‍👨🏿👬🏿 ---- > Let's check 1.99.2 is still the same problem @187370 你是 local db 模式吧 <img width="1440" height="801" alt="Image" src="https://github.com/user-attachments/assets/e90636a3-af83-47a2-beb1-2528182c02b4" /> > @187370 你是 local db 模式吧 > > <img width="1440" height="801" alt="Image" src="https://github.com/user-attachments/assets/e90636a3-af83-47a2-beb1-2528182c02b4" /> 没错 我已经知道问题,今天晚些时间会修复 > Bot detected the issue body's language is not English, translate it automatically. 👯👭🏻🧑‍🤝‍🧑👫🧑🏿‍🤝‍🧑🏻👩🏾‍🤝‍👨🏿👬🏿 ---- I already know the problem, it will be fixed later today ✅ @187370 This issue is closed, If you have any questions, you can comment and reply.\ 此问题已经关闭。如果您有任何问题,可以留言并回复。 This issue has been automatically locked since it was closed and has not had any activity for 7 days. If you're experiencing a similar issue, please file a new issue and reference this one if it's relevant.
[ "https://github.com/lobehub/lobe-chat/commit/57ed2449aec535c6d3b29f8b55fa6dfcb9b3cba8", "https://github.com/lobehub/lobe-chat/commit/860fdb870071f0008c1199f249c56bdd29d3b156", "https://github.com/lobehub/lobe-chat/commit/74a4a16f6f1e98c109906d766d0b4320c5564b2e", "https://github.com/lobehub/lobe-chat/commit/3...
2025-07-16T14:05:36Z
https://github.com/lobehub/lobe-chat/tree/752e576b802d06b1d3a975a42b8447550b8295d8
[ "pnpm install ; pnpm run prebuild ; pnpm run build-sitemap" ]
[ "$ErrorActionPreference = \"Continue\"; if (!(Test-Path reports)) { New-Item -ItemType Directory -Path reports | Out-Null }; $env:NODE_ENV=\"test\"; npm run test-app --silent -- --reporter=json | Out-File -FilePath reports/vitest-app.json -Encoding utf8; $env:NODE_ENV=\"test\"; $env:DATABASE_DRIVER=\"node\"; $env:D...
[ "Get-Content reports/vitest-app.json -Raw; Get-Content reports/vitest-server.json -Raw" ]
def parser(log: str) -> dict[str, str]: import re results: dict[str, str] = {} if not isinstance(log, str): try: log = str(log) except Exception: log = "" # Utilities def clean_name(s: str) -> str: s = re.sub(r'\s+', ' ', s or '') s = s.strip(' \t\r\n,:') return s def map_status(st: str) -> str: st = (st or '').strip().lower() if st in ('passed', 'pass', 'success', 'ok'): return 'pass' if st in ('failed', 'fail', 'error', 'broken'): return 'fail' if st in ('skipped', 'skip', 'pending', 'todo', 'disabled'): return 'skip' return 'fail' PRIORITY = {'fail': 2, 'skip': 1, 'pass': 0} def add_result(name: str, status: str): name = clean_name(name) if not name or len(name) <= 1: return status = map_status(status) if name in results: if PRIORITY.get(status, -1) > PRIORITY.get(results[name], -1): results[name] = status else: results[name] = status # Patterns status_pattern = re.compile( r'"status"\s*:\s*"(passed|failed|pending|skipped|skip|todo|error)"', re.IGNORECASE | re.DOTALL, ) full_name_pattern = re.compile( r'"fullName"\s*:\s*"((?:[^"\\]|\\.)*)"', re.IGNORECASE | re.DOTALL, ) title_pattern = re.compile( r'"title"\s*:\s*"((?:[^"\\]|\\.)*)"', re.IGNORECASE | re.DOTALL, ) ancestors_block_pattern = re.compile( r'"ancestorTitles"\s*:\s*\[\s*(.*?)\s*\]', re.IGNORECASE | re.DOTALL, ) quoted_str_pattern = re.compile(r'"((?:[^"\\]|\\.)*)"') log_len = len(log) # Anchor on each status occurrence, then look backward for its name context # Use conservative windows to avoid cross-object contamination BACKWARD_FULLNAME_WINDOW = 1000 BACKWARD_TITLE_WINDOW = 2000 for sm in status_pattern.finditer(log): status_raw = sm.group(1) spos = sm.start() # Try nearest fullName before status within window fn_start = max(0, spos - BACKWARD_FULLNAME_WINDOW) segment = log[fn_start:spos] last_fn = None for fnm in full_name_pattern.finditer(segment): last_fn = fnm if last_fn: full_name_raw = last_fn.group(1) add_result(full_name_raw, status_raw) continue # Fallback: nearest title before status and 
its nearest ancestorTitles before title t_start = max(0, spos - BACKWARD_TITLE_WINDOW) t_segment = log[t_start:spos] last_title = None for tm in title_pattern.finditer(t_segment): last_title = tm if last_title: title_raw = last_title.group(1) title_abs_start = t_start + last_title.start() # Find the last ancestorTitles block before the title last_anc = None for am in ancestors_block_pattern.finditer(log, max(0, title_abs_start - BACKWARD_TITLE_WINDOW), title_abs_start): last_anc = am ancestors_list = [] if last_anc: ancestors_block = last_anc.group(1) for s in quoted_str_pattern.findall(ancestors_block): cn = clean_name(s) if cn: ancestors_list.append(cn) parts = list(ancestors_list) parts.append(title_raw) fallback_name = ' '.join(parts) add_result(fallback_name, status_raw) continue # If no name context found, skip this status (cannot attribute) continue return results
[ "FileModel findByNames should find files by names", "messageRouter should handle count", "ARTIFACT_TAG_REGEX should match", "ServerService removeSession should call lambdaClient", "Configuration tests should have runtime set to \\\"edge\\\"", "AiModelModel create should create a new ai provider", "Gener...
[]
starryzhang/sweb.eval.win.lobehub_1776_lobe-chat-8459
gogf/gf
4386
gogf__gf-4386
Go
[ "4385" ]
4262aa254d0af4f3dc07c5a00d99171798bceab6
diff --git a/cmd/gf/go.mod b/cmd/gf/go.mod index 1adb3f4af16..cafe70d1cdc 100644 --- a/cmd/gf/go.mod +++ b/cmd/gf/go.mod @@ -3,31 +3,31 @@ module github.com/gogf/gf/cmd/gf/v2 go 1.23.0 require ( - github.com/gogf/gf/contrib/drivers/clickhouse/v2 v2.9.0 - github.com/gogf/gf/contrib/drivers/mssql/v2 v2.9.0 - github.com/gogf/gf/contrib/drivers/mysql/v2 v2.9.0 - github.com/gogf/gf/contrib/drivers/oracle/v2 v2.9.0 - github.com/gogf/gf/contrib/drivers/pgsql/v2 v2.9.0 - github.com/gogf/gf/contrib/drivers/sqlite/v2 v2.9.0 - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/contrib/drivers/clickhouse/v2 v2.9.1 + github.com/gogf/gf/contrib/drivers/mssql/v2 v2.9.1 + github.com/gogf/gf/contrib/drivers/mysql/v2 v2.9.1 + github.com/gogf/gf/contrib/drivers/oracle/v2 v2.9.1 + github.com/gogf/gf/contrib/drivers/pgsql/v2 v2.9.1 + github.com/gogf/gf/contrib/drivers/sqlite/v2 v2.9.1 + github.com/gogf/gf/v2 v2.9.1 github.com/gogf/selfupdate v0.0.0-20231215043001-5c48c528462f github.com/olekukonko/tablewriter v0.0.5 github.com/schollz/progressbar/v3 v3.15.0 - golang.org/x/mod v0.25.0 - golang.org/x/tools v0.34.0 + golang.org/x/mod v0.26.0 + golang.org/x/tools v0.35.0 ) require ( aead.dev/minisign v0.2.0 // indirect - github.com/BurntSushi/toml v1.4.0 // indirect + github.com/BurntSushi/toml v1.5.0 // indirect github.com/ClickHouse/clickhouse-go/v2 v2.0.15 // indirect github.com/clbanning/mxj/v2 v2.7.0 // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/emirpasic/gods v1.18.1 // indirect github.com/fatih/color v1.18.0 // indirect - github.com/fsnotify/fsnotify v1.7.0 // indirect + github.com/fsnotify/fsnotify v1.9.0 // indirect github.com/glebarez/go-sqlite v1.21.2 // indirect - github.com/go-logr/logr v1.4.2 // indirect + github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-sql-driver/mysql v1.7.1 // indirect github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect @@ -36,8 +36,8 @@ require ( 
github.com/gorilla/websocket v1.5.3 // indirect github.com/grokify/html-strip-tags-go v0.1.0 // indirect github.com/lib/pq v1.10.9 // indirect - github.com/magiconair/properties v1.8.9 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect + github.com/magiconair/properties v1.8.10 // indirect + github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-runewidth v0.0.16 // indirect github.com/microsoft/go-mssqldb v1.7.1 // indirect @@ -48,16 +48,17 @@ require ( github.com/rivo/uniseg v0.4.7 // indirect github.com/shopspring/decimal v1.3.1 // indirect github.com/sijms/go-ora/v2 v2.7.10 // indirect - go.opentelemetry.io/otel v1.32.0 // indirect - go.opentelemetry.io/otel/metric v1.32.0 // indirect - go.opentelemetry.io/otel/sdk v1.32.0 // indirect - go.opentelemetry.io/otel/trace v1.32.0 // indirect - golang.org/x/crypto v0.39.0 // indirect - golang.org/x/net v0.41.0 // indirect - golang.org/x/sync v0.15.0 // indirect - golang.org/x/sys v0.33.0 // indirect - golang.org/x/term v0.32.0 // indirect - golang.org/x/text v0.26.0 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/otel v1.37.0 // indirect + go.opentelemetry.io/otel/metric v1.37.0 // indirect + go.opentelemetry.io/otel/sdk v1.37.0 // indirect + go.opentelemetry.io/otel/trace v1.37.0 // indirect + golang.org/x/crypto v0.41.0 // indirect + golang.org/x/net v0.43.0 // indirect + golang.org/x/sync v0.16.0 // indirect + golang.org/x/sys v0.35.0 // indirect + golang.org/x/term v0.34.0 // indirect + golang.org/x/text v0.28.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect modernc.org/libc v1.22.5 // indirect modernc.org/mathutil v1.5.0 // indirect diff --git a/cmd/gf/go.sum b/cmd/gf/go.sum index d94dce82bb3..a5be2f2287c 100644 --- a/cmd/gf/go.sum +++ b/cmd/gf/go.sum @@ -12,8 +12,8 @@ github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occ 
github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI= github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 h1:DzHpqpoJVaCgOUdVHxE8QB52S6NiVdDQvGlny1qvPqA= github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= -github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0= -github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/BurntSushi/toml v1.5.0 h1:W5quZX/G/csjUnuI8SUYlsHs9M38FC7znL0lIO+DvMg= +github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/ClickHouse/clickhouse-go v1.5.4/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI= github.com/ClickHouse/clickhouse-go/v2 v2.0.15 h1:lLAZliqrZEygkxosLaW1qHyeTb4Ho7fVCZ0WKCpLocU= github.com/ClickHouse/clickhouse-go/v2 v2.0.15/go.mod h1:Z21o82zD8FFqefOQDg93c0XITlxGbTsWQuRm588Azkk= @@ -31,14 +31,14 @@ github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= -github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= -github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= +github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/glebarez/go-sqlite v1.21.2 h1:3a6LFC4sKahUunAmynQKLZceZCOzUthkRkEAl9gAXWo= github.com/glebarez/go-sqlite v1.21.2/go.mod h1:sfxdZyhQjTM2Wry3gVYWaW072Ri1WMdWJi0k6+3382k= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= 
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= -github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-ole/go-ole v1.2.4/go.mod h1:XCwSNxSkXRo4vlyPy93sltvi/qJq0jqQhjqQNIwKuxM= @@ -46,20 +46,6 @@ github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiU github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.7.1 h1:lUIinVbN1DY0xBg0eMOzmmtGoHwWBbvnWubQUrtU8EI= github.com/go-sql-driver/mysql v1.7.1/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI= -github.com/gogf/gf/contrib/drivers/clickhouse/v2 v2.9.0 h1:ml8lrFbSumZQjzQJGTQ2uvk1LY7NJ/FrKox/ITpYc3w= -github.com/gogf/gf/contrib/drivers/clickhouse/v2 v2.9.0/go.mod h1:Eb5iTy2QypvexojIeb3LdP5VAN8sqNcV0nmHGGt19lk= -github.com/gogf/gf/contrib/drivers/mssql/v2 v2.9.0 h1:JTR3ApDH4mduk3XmcXqRSadUdch8dW5HE+ToFv2A89o= -github.com/gogf/gf/contrib/drivers/mssql/v2 v2.9.0/go.mod h1:0iLTveNmvtP16yJqIeiVPUkIl7S6U8iV3Fn4CUJsZuw= -github.com/gogf/gf/contrib/drivers/mysql/v2 v2.9.0 h1:1f7EeD0lfPHoXfaJDSL7cxRcSRelbsAKgF3MGXY+Uyo= -github.com/gogf/gf/contrib/drivers/mysql/v2 v2.9.0/go.mod h1:tToO1PjGkLIR+9DbJ0wrKicYma0H/EUHXOpwel6Dw+0= -github.com/gogf/gf/contrib/drivers/oracle/v2 v2.9.0 h1:w15kMOWlHxFY+6GNcW3Ow9AfC45lHYTH4XCv3IO4e24= -github.com/gogf/gf/contrib/drivers/oracle/v2 v2.9.0/go.mod h1:g9eCVfgwRih3MHUHMzOgyfs/lN//4X+Nw+Q+sKkv7PQ= -github.com/gogf/gf/contrib/drivers/pgsql/v2 v2.9.0 h1:F/XfLI3TsgFU22AqJ2Df+ZUlF7lzkPo7oB5Cmx6VqOQ= 
-github.com/gogf/gf/contrib/drivers/pgsql/v2 v2.9.0/go.mod h1:p0c5ZhIITNrqgOz7+dhlk4eDCIC3Tt0ocUVhRjpUw+I= -github.com/gogf/gf/contrib/drivers/sqlite/v2 v2.9.0 h1:8dg4KHNBJ8OmIfRCGnN5zrP13iENThh4i71IwIa2VP8= -github.com/gogf/gf/contrib/drivers/sqlite/v2 v2.9.0/go.mod h1:hr3GNf9+LJs9TbjEGb7vEGOg2YWfrJBLrXgOcerKRlU= -github.com/gogf/gf/v2 v2.9.0 h1:semN5Q5qGjDQEv4620VzxcJzJlSD07gmyJ9Sy9zfbHk= -github.com/gogf/gf/v2 v2.9.0/go.mod h1:sWGQw+pLILtuHmbOxoe0D+0DdaXxbleT57axOLH2vKI= github.com/gogf/selfupdate v0.0.0-20231215043001-5c48c528462f h1:7xfXR/BhG3JDqO1s45n65Oyx9t4E/UqDOXep6jXdLCM= github.com/gogf/selfupdate v0.0.0-20231215043001-5c48c528462f/go.mod h1:HnYoio6S7VaFJdryKcD/r9HgX+4QzYfr00XiXUo/xz0= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= @@ -72,8 +58,8 @@ github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EO github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= -github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 h1:Xim43kblpZXfIBQsbuBVKCudVG457BR2GZFIz3uw3hQ= github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -98,11 +84,10 @@ github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+ github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq 
v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/magiconair/properties v1.8.9 h1:nWcCbLq1N2v/cpNsy5WvQ37Fb+YElfq20WJ/a8RkpQM= -github.com/magiconair/properties v1.8.9/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= +github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE= +github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= +github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE= +github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= @@ -132,8 +117,8 @@ github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qq github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= -github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= -github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= +github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= +github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/schollz/progressbar/v3 v3.15.0 
h1:cNZmcNiVyea6oofBTg80ZhVXxf3wG/JoAhqCCwopkQo= github.com/schollz/progressbar/v3 v3.15.0/go.mod h1:ncBdc++eweU0dQoeZJ3loXoAc+bjaallHRIm8pVVeQM= github.com/shirou/gopsutil v2.19.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA= @@ -155,37 +140,41 @@ github.com/tklauser/numcpus v0.4.0/go.mod h1:1+UI3pD8NW14VMwdgJNJ1ESk2UnwhAnz5hM github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yusufpapurcu/wmi v1.2.2/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= go.opentelemetry.io/otel v1.7.0/go.mod h1:5BdUoMIz5WEs0vt0CUEMtSSaTSHBBVwrhnz7+nrD5xk= -go.opentelemetry.io/otel v1.32.0 h1:WnBN+Xjcteh0zdk01SVqV55d/m62NJLJdIyb4y/WO5U= -go.opentelemetry.io/otel v1.32.0/go.mod h1:00DCVSB0RQcnzlwyTfqtxSm+DRr9hpYrHjNGiBHVQIg= -go.opentelemetry.io/otel/metric v1.32.0 h1:xV2umtmNcThh2/a/aCP+h64Xx5wsj8qqnkYZktzNa0M= -go.opentelemetry.io/otel/metric v1.32.0/go.mod h1:jH7CIbbK6SH2V2wE16W05BHCtIDzauciCRLoc/SyMv8= -go.opentelemetry.io/otel/sdk v1.32.0 h1:RNxepc9vK59A8XsgZQouW8ue8Gkb4jpWtJm9ge5lEG4= -go.opentelemetry.io/otel/sdk v1.32.0/go.mod h1:LqgegDBjKMmb2GC6/PrTnteJG39I8/vJCAP9LlJXEjU= +go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= +go.opentelemetry.io/otel/sdk v1.37.0 h1:ItB0QUqnjesGRvNcmAcU0LyvkVyGJ2xftD29bWdDvKI= +go.opentelemetry.io/otel/sdk v1.37.0/go.mod h1:VredYzxUvuo2q3WRcDnKDjbdvmO0sCzOvVAiY+yUkAg= go.opentelemetry.io/otel/trace v1.7.0/go.mod 
h1:fzLSB9nqR2eXzxPXb2JW9IKE+ScyXA48yyE4TNvoHqU= -go.opentelemetry.io/otel/trace v1.32.0 h1:WIC9mYrXf8TmY/EXuULKc8hR17vE+Hjv2cssQDe03fM= -go.opentelemetry.io/otel/trace v1.32.0/go.mod h1:+i4rkvCraA+tG6AzwloGaCtkx53Fa+L+V8e9a7YvhT8= +go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= -golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= -golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= +golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= -golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg= +golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= -golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= +golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= +golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= -golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -195,25 +184,24 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210228012217-479acdf4ea46/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220429233432-b5fbb4746d32/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys 
v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= +golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8= -golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= -golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= +golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4= +golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= -golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= +golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.34.0 
h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo= -golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg= +golang.org/x/tools v0.35.0 h1:mBffYraMEf7aa0sB+NuKnuCy8qI/9Bughn8dC2Gu5r0= +golang.org/x/tools v0.35.0/go.mod h1:NKdj5HkL/73byiZSJjqJgKn3ep7KjFkBOkR/Hps3VPw= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/contrib/config/apollo/go.mod b/contrib/config/apollo/go.mod index 18e8119552a..931f42ae33c 100644 --- a/contrib/config/apollo/go.mod +++ b/contrib/config/apollo/go.mod @@ -4,7 +4,7 @@ go 1.23.0 require ( github.com/apolloconfig/agollo/v4 v4.3.1 - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 ) require ( diff --git a/contrib/config/consul/go.mod b/contrib/config/consul/go.mod index af52cb4bcf6..33e6f3ab756 100644 --- a/contrib/config/consul/go.mod +++ b/contrib/config/consul/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/config/consul/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 github.com/hashicorp/consul/api v1.24.0 github.com/hashicorp/go-cleanhttp v0.5.2 ) diff --git a/contrib/config/kubecm/go.mod b/contrib/config/kubecm/go.mod index d4815398b0d..caa6aa8b4a0 100644 --- a/contrib/config/kubecm/go.mod +++ b/contrib/config/kubecm/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/config/kubecm/v2 go 1.24.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 k8s.io/api v0.33.4 k8s.io/apimachinery v0.33.4 k8s.io/client-go v0.33.4 diff --git a/contrib/config/nacos/go.mod b/contrib/config/nacos/go.mod index 109bae69bde..939d6bbbd9b 100644 --- a/contrib/config/nacos/go.mod +++ b/contrib/config/nacos/go.mod @@ -3,7 +3,7 @@ module 
github.com/gogf/gf/contrib/config/nacos/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 github.com/nacos-group/nacos-sdk-go/v2 v2.2.5 ) diff --git a/contrib/config/polaris/go.mod b/contrib/config/polaris/go.mod index e35f7c37641..babaa05b596 100644 --- a/contrib/config/polaris/go.mod +++ b/contrib/config/polaris/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/config/polaris/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 github.com/polarismesh/polaris-go v1.6.1 ) diff --git a/contrib/drivers/clickhouse/go.mod b/contrib/drivers/clickhouse/go.mod index 06fe9706c18..fe84082a663 100644 --- a/contrib/drivers/clickhouse/go.mod +++ b/contrib/drivers/clickhouse/go.mod @@ -4,7 +4,7 @@ go 1.23.0 require ( github.com/ClickHouse/clickhouse-go/v2 v2.0.15 - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 github.com/google/uuid v1.6.0 github.com/shopspring/decimal v1.3.1 ) diff --git a/contrib/drivers/dm/go.mod b/contrib/drivers/dm/go.mod index a1c2f0fc012..705c493ca5a 100644 --- a/contrib/drivers/dm/go.mod +++ b/contrib/drivers/dm/go.mod @@ -6,7 +6,7 @@ replace github.com/gogf/gf/v2 => ../../../ require ( gitee.com/chunanyong/dm v1.8.12 - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 ) require ( diff --git a/contrib/drivers/mssql/go.mod b/contrib/drivers/mssql/go.mod index 37c64dda9ed..27dd0164ada 100644 --- a/contrib/drivers/mssql/go.mod +++ b/contrib/drivers/mssql/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/drivers/mssql/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 github.com/microsoft/go-mssqldb v1.7.1 ) diff --git a/contrib/drivers/mysql/go.mod b/contrib/drivers/mysql/go.mod index 1ff72d6fe02..93f59464284 100644 --- a/contrib/drivers/mysql/go.mod +++ b/contrib/drivers/mysql/go.mod @@ -4,7 +4,7 @@ go 1.23.0 require ( github.com/go-sql-driver/mysql v1.7.1 - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 ) 
require ( diff --git a/contrib/drivers/oracle/go.mod b/contrib/drivers/oracle/go.mod index 1f889dab92d..e35907ed33e 100644 --- a/contrib/drivers/oracle/go.mod +++ b/contrib/drivers/oracle/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/drivers/oracle/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 github.com/sijms/go-ora/v2 v2.7.10 ) diff --git a/contrib/drivers/pgsql/go.mod b/contrib/drivers/pgsql/go.mod index 7774e9c2b68..9ff89ce6551 100644 --- a/contrib/drivers/pgsql/go.mod +++ b/contrib/drivers/pgsql/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/drivers/pgsql/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 github.com/lib/pq v1.10.9 ) diff --git a/contrib/drivers/sqlite/go.mod b/contrib/drivers/sqlite/go.mod index 86659c6a541..4a69931c1c9 100644 --- a/contrib/drivers/sqlite/go.mod +++ b/contrib/drivers/sqlite/go.mod @@ -4,7 +4,7 @@ go 1.23.0 require ( github.com/glebarez/go-sqlite v1.21.2 - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 ) require ( diff --git a/contrib/drivers/sqlitecgo/go.mod b/contrib/drivers/sqlitecgo/go.mod index 076e48707d0..5a837de5b35 100644 --- a/contrib/drivers/sqlitecgo/go.mod +++ b/contrib/drivers/sqlitecgo/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/drivers/sqlitecgo/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 github.com/mattn/go-sqlite3 v1.14.17 ) diff --git a/contrib/metric/otelmetric/go.mod b/contrib/metric/otelmetric/go.mod index 18bf31c69be..de249e5c0df 100644 --- a/contrib/metric/otelmetric/go.mod +++ b/contrib/metric/otelmetric/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/metric/otelmetric/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 github.com/prometheus/client_golang v1.23.0 go.opentelemetry.io/contrib/instrumentation/runtime v0.62.0 go.opentelemetry.io/otel v1.37.0 diff --git a/contrib/nosql/redis/go.mod 
b/contrib/nosql/redis/go.mod index b530cfce6fe..a3b35893543 100644 --- a/contrib/nosql/redis/go.mod +++ b/contrib/nosql/redis/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/nosql/redis/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 github.com/redis/go-redis/v9 v9.12.1 go.opentelemetry.io/otel v1.37.0 go.opentelemetry.io/otel/trace v1.37.0 diff --git a/contrib/registry/consul/go.mod b/contrib/registry/consul/go.mod index 0b47e889457..dda4aa42bfb 100644 --- a/contrib/registry/consul/go.mod +++ b/contrib/registry/consul/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/registry/consul/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 github.com/hashicorp/consul/api v1.26.1 ) diff --git a/contrib/registry/etcd/go.mod b/contrib/registry/etcd/go.mod index 1e7b5acfc04..a45c41166b8 100644 --- a/contrib/registry/etcd/go.mod +++ b/contrib/registry/etcd/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/registry/etcd/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 go.etcd.io/etcd/client/v3 v3.5.17 google.golang.org/grpc v1.59.0 ) diff --git a/contrib/registry/file/go.mod b/contrib/registry/file/go.mod index 57a5a3e26b5..36e17590eae 100644 --- a/contrib/registry/file/go.mod +++ b/contrib/registry/file/go.mod @@ -2,7 +2,7 @@ module github.com/gogf/gf/contrib/registry/file/v2 go 1.23.0 -require github.com/gogf/gf/v2 v2.9.0 +require github.com/gogf/gf/v2 v2.9.1 require ( github.com/BurntSushi/toml v1.5.0 // indirect diff --git a/contrib/registry/polaris/go.mod b/contrib/registry/polaris/go.mod index d3e330e8185..c65bbb0af86 100644 --- a/contrib/registry/polaris/go.mod +++ b/contrib/registry/polaris/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/registry/polaris/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 github.com/polarismesh/polaris-go v1.6.1 ) diff --git a/contrib/registry/zookeeper/go.mod 
b/contrib/registry/zookeeper/go.mod index ecd7f27147f..b06bfaf73a7 100644 --- a/contrib/registry/zookeeper/go.mod +++ b/contrib/registry/zookeeper/go.mod @@ -4,7 +4,7 @@ go 1.23.0 require ( github.com/go-zookeeper/zk v1.0.3 - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 golang.org/x/sync v0.16.0 ) diff --git a/contrib/rpc/grpcx/go.mod b/contrib/rpc/grpcx/go.mod index 5abd887dbe6..0256310b757 100644 --- a/contrib/rpc/grpcx/go.mod +++ b/contrib/rpc/grpcx/go.mod @@ -3,8 +3,8 @@ module github.com/gogf/gf/contrib/rpc/grpcx/v2 go 1.23.0 require ( - github.com/gogf/gf/contrib/registry/file/v2 v2.9.0 - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/contrib/registry/file/v2 v2.9.1 + github.com/gogf/gf/v2 v2.9.1 go.opentelemetry.io/otel v1.37.0 go.opentelemetry.io/otel/trace v1.37.0 google.golang.org/grpc v1.64.1 diff --git a/contrib/sdk/httpclient/go.mod b/contrib/sdk/httpclient/go.mod index b9946fd6225..25a749d024b 100644 --- a/contrib/sdk/httpclient/go.mod +++ b/contrib/sdk/httpclient/go.mod @@ -2,7 +2,7 @@ module github.com/gogf/gf/contrib/sdk/httpclient/v2 go 1.23.0 -require github.com/gogf/gf/v2 v2.9.0 +require github.com/gogf/gf/v2 v2.9.1 require ( github.com/BurntSushi/toml v1.5.0 // indirect diff --git a/contrib/trace/otlpgrpc/go.mod b/contrib/trace/otlpgrpc/go.mod index 17bc17db32d..0c326eb8e85 100644 --- a/contrib/trace/otlpgrpc/go.mod +++ b/contrib/trace/otlpgrpc/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/trace/otlpgrpc/v2 go 1.23.0 require ( - github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 go.opentelemetry.io/otel v1.37.0 go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0 go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.37.0 diff --git a/contrib/trace/otlphttp/go.mod b/contrib/trace/otlphttp/go.mod index 5597974f155..bd166ef872d 100644 --- a/contrib/trace/otlphttp/go.mod +++ b/contrib/trace/otlphttp/go.mod @@ -3,7 +3,7 @@ module github.com/gogf/gf/contrib/trace/otlphttp/v2 go 1.23.0 require ( 
- github.com/gogf/gf/v2 v2.9.0 + github.com/gogf/gf/v2 v2.9.1 go.opentelemetry.io/otel v1.37.0 go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0 go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.37.0 diff --git a/version.go b/version.go index cd0daf596c5..4397b0fdba4 100644 --- a/version.go +++ b/version.go @@ -2,5 +2,5 @@ package gf const ( // VERSION is the current GoFrame version. - VERSION = "v2.9.0" + VERSION = "v2.9.1" )
diff --git a/os/gproc/gproc_z_signal_test.go b/os/gproc/gproc_z_signal_test.go index ff5dc09a0d3..45c029264f3 100644 --- a/os/gproc/gproc_z_signal_test.go +++ b/os/gproc/gproc_z_signal_test.go @@ -4,6 +4,8 @@ // If a copy of the MIT was not distributed with this file, // You can obtain one at https://github.com/gogf/gf. +//go:build linux + package gproc import (
cmd/gf: Windows下go1.25版本go install cmd/gf 报错 ### Go version go version go1.25.0 windows/amd64 ### GoFrame version 2.9.0 ### Can this bug be reproduced with the latest release? Option Yes ### What did you do? ``` go install github.com/gogf/gf/cmd/gf/v2@latest go: downloading github.com/gogf/gf/cmd/gf/v2 v2.9.0 # golang.org/x/tools/internal/tokeninternal ..gopath\pkg\mod\golang.org\x\tools@v0.21.1-0.20240508182429-e35e4ccd0d2d\internal\tokeninternal\tokeninternal.go:64:9: invalid array length -delta * delta (constant -256 of type int64) ``` ### What did you see happen? ``` go install github.com/gogf/gf/cmd/gf/v2@latest go: downloading github.com/gogf/gf/cmd/gf/v2 v2.9.0 # golang.org/x/tools/internal/tokeninternal ..gopath\pkg\mod\golang.org\x\tools@v0.21.1-0.20240508182429-e35e4ccd0d2d\internal\tokeninternal\tokeninternal.go:64:9: invalid array length -delta * delta (constant -256 of type int64) ``` ### What did you expect to see? ``` go install github.com/gogf/gf/cmd/gf/v2@latest go: downloading github.com/gogf/gf/cmd/gf/v2 v2.9.0 # golang.org/x/tools/internal/tokeninternal ..gopath\pkg\mod\golang.org\x\tools@v0.21.1-0.20240508182429-e35e4ccd0d2d\internal\tokeninternal\tokeninternal.go:64:9: invalid array length -delta * delta (constant -256 of type int64) ```
使用go install github.com/gogf/gf/cmd/gf/v2@v2.9.1解决
使用go install github.com/gogf/gf/cmd/gf/v2@v2.9.1解决
[ "https://github.com/gogf/gf/commit/6a62a5c2972cf64c2d9c8e5f7dff61ac48f0d261", "https://github.com/gogf/gf/commit/af762183ae3fdefde71c37e1609d1d96ea2c133a" ]
2025-08-27T02:54:41Z
https://github.com/gogf/gf/tree/4262aa254d0af4f3dc07c5a00d99171798bceab6
[ "cd /testbed; go mod tidy; go build ./..." ]
[ "cd /testbed; go test -json ./... | tee go-test-results.json" ]
[ "cd /testbed; powershell -NoProfile -Command \"Get-Content -Raw go-test-results.json\"" ]
def parser(log: str) -> dict[str, str]: import json import re def extract_json_objects(text: str): objs = [] n = len(text) i = 0 while True: start = text.find("{", i) if start < 0: break depth = 0 in_str = False esc = False j = start while j < n: ch = text[j] if in_str: if esc: esc = False elif ch == "\\": esc = True elif ch == '"': in_str = False else: if ch == '"': in_str = True elif ch == "{": depth += 1 elif ch == "}": depth -= 1 if depth == 0: objs.append(text[start : j + 1]) i = j + 1 break j += 1 else: break return objs def compact_ws(s: str) -> str: # In this dataset, console wrapping injects whitespace/newlines into JSON text. # Remove all whitespace characters occurring within field strings. return re.sub(r"\s+", "", s or "") results: dict[str, str] = {} # Parse all json events; take any event containing Action and Test and Package. for obj in extract_json_objects(log): try: ev = json.loads(obj) except Exception: continue action = (ev.get("Action") or "").lower() if action not in ("pass", "fail", "skip"): continue pkg = compact_ws(ev.get("Package", "")) test = compact_ws(ev.get("Test", "")) if pkg and test: results[f"{pkg}/{test}"] = action # If we got nothing, fall back to a tolerant regex over raw text (like the draft), # but still normalize whitespace within captured values. 
if not results: action_re = re.compile(r'"Action"\s*:\s*"(?P<action>pass|fail|skip)"', re.IGNORECASE) pkg_re = re.compile(r'"Package"\s*:\s*"(?P<pkg>[^"]+)"', re.DOTALL) test_re = re.compile(r'"Test"\s*:\s*"(?P<test>[^"]+)"', re.DOTALL) for m in re.finditer(r"\{.*?\}", log, re.DOTALL): obj = m.group(0) ma = action_re.search(obj) mp = pkg_re.search(obj) mt = test_re.search(obj) if not (ma and mp and mt): continue action = ma.group("action").lower() pkg = compact_ws(mp.group("pkg")) test = compact_ws(mt.group("test")) if pkg and test: results[f"{pkg}/{test}"] = action # Final defensive normalization for k, v in list(results.items()): if v not in ("pass", "fail", "skip"): results[k] = "fail" return results
[ "github.coom/gogf/gf/v2/util/gconv/Test_Issue1227", "github.coom/gogf/gf/v2/util/gconv/TestMapWithMapOption", "github.coom/gogf/gf/v2/os/glog/TestLogger_SetHandlers2", "github.coom/gogf/gf/v2/container/gmap/Test_StrIntMap_Pop", "github.coom/gogf/gf/v2/util/gtag/Test_SetOver_Get", "github.coom/gogf/gf/v2/c...
[ "github.coom/gogf/gf/v2/container/gtree/ExampleAVLTree_MapStrAny", "github.coom/gogf/gf/v2/os/gfsnotify/TestWatcher_Callback2", "github.coom/gogf/gf/v2/util/gutil/Test_ComparatorInt32", "github.coom/gogf/gf/v2/container/gtree/ExampleAVLTree_Removes", "github.coom/gogf/gf/v2/container/garray/ExampleIntArray_...
starryzhang/sweb.eval.win.gogf_1776_gf-4386
AvaloniaUI/Avalonia
19538
AvaloniaUI__Avalonia-19538
C#
[ "19535" ]
ad33ddbbacbf67de512892624aa8b338e30c964e
diff --git a/src/Avalonia.Base/Media/FontFamily.cs b/src/Avalonia.Base/Media/FontFamily.cs index 722f806cac0..aa2ce05bd45 100644 --- a/src/Avalonia.Base/Media/FontFamily.cs +++ b/src/Avalonia.Base/Media/FontFamily.cs @@ -36,6 +36,11 @@ public FontFamily(Uri? baseUri, string name) throw new ArgumentNullException(nameof(name)); } + if (baseUri != null && !baseUri.IsAbsoluteUri) + { + throw new ArgumentException("Base uri must be an absolute uri.", nameof(baseUri)); + } + var fontSources = GetFontSourceIdentifier(name); FamilyNames = new FamilyNameCollection(fontSources); @@ -46,20 +51,15 @@ public FontFamily(Uri? baseUri, string name) if (singleSource.Source is Uri source) { - if (baseUri != null && !baseUri.IsAbsoluteUri) + if (source.IsAbsoluteUri) { - throw new ArgumentException("Base uri must be an absolute uri.", nameof(baseUri)); + Key = new FontFamilyKey(source); } - - Key = new FontFamilyKey(source, baseUri); - } - else - { - if(baseUri != null && baseUri.IsAbsoluteUri) + else { - Key = new FontFamilyKey(baseUri); + Key = new FontFamilyKey(source, baseUri); } - } + } } else { @@ -138,7 +138,7 @@ private static FrugalStructList<FontSourceIdentifier> GetFontSourceIdentifier(st var segment = segments[i]; var innerSegments = segment.Split('#'); - FontSourceIdentifier identifier; + FontSourceIdentifier identifier = new FontSourceIdentifier(name, null); switch (innerSegments.Length) { @@ -159,19 +159,19 @@ private static FrugalStructList<FontSourceIdentifier> GetFontSourceIdentifier(st } else { - var source = path.StartsWith("/", StringComparison.Ordinal) - ? 
new Uri(path, UriKind.Relative) - : new Uri(path, UriKind.RelativeOrAbsolute); - - identifier = new FontSourceIdentifier(innerName, source); - } - - break; - } + if (path.Contains('/') && Uri.TryCreate(path, UriKind.Relative, out var source)) + { + identifier = new FontSourceIdentifier(innerName, source); + } + else + { + if (Uri.TryCreate(path, UriKind.Absolute, out source)) + { + identifier = new FontSourceIdentifier(innerName, source); + } + } + } - default: - { - identifier = new FontSourceIdentifier(name, null); break; } } diff --git a/src/Avalonia.Base/Media/FontManager.cs b/src/Avalonia.Base/Media/FontManager.cs index 7310ab19a11..dfce362297b 100644 --- a/src/Avalonia.Base/Media/FontManager.cs +++ b/src/Avalonia.Base/Media/FontManager.cs @@ -273,8 +273,6 @@ public bool TryMatchCharacter(int codepoint, FontStyle fontStyle, FontWeight fon //Try to match against fallbacks first if (fontFamily?.Key != null) { - var fontUri = fontFamily.Key.Source.EnsureAbsolute(fontFamily.Key.BaseUri); - if (fontFamily.Key is CompositeFontFamilyKey compositeKey) { for (int i = 0; i < compositeKey.Keys.Count; i++) @@ -296,6 +294,8 @@ public bool TryMatchCharacter(int codepoint, FontStyle fontStyle, FontWeight fon } } + var fontUri = fontFamily.Key.Source.EnsureAbsolute(fontFamily.Key.BaseUri); + if (fontUri.IsFontCollection()) { if (TryGetFontCollection(fontUri, out var fontCollection) && diff --git a/src/Avalonia.Base/Media/Fonts/FontCollectionBase.cs b/src/Avalonia.Base/Media/Fonts/FontCollectionBase.cs index 038332c2da8..222a514ed13 100644 --- a/src/Avalonia.Base/Media/Fonts/FontCollectionBase.cs +++ b/src/Avalonia.Base/Media/Fonts/FontCollectionBase.cs @@ -35,7 +35,7 @@ public virtual bool TryMatchCharacter(int codepoint, FontStyle style, FontWeight { if (glyphTypeface.TryGetGlyph((uint)codepoint, out _)) { - match = new Typeface(new FontFamily(Key, "#" + glyphTypeface.FamilyName), style, weight, stretch); + match = new Typeface(new FontFamily(null, Key.AbsoluteUri + "#" + 
glyphTypeface.FamilyName), style, weight, stretch); return true; } @@ -57,7 +57,7 @@ public virtual bool TryMatchCharacter(int codepoint, FontStyle style, FontWeight { if (glyphTypeface.TryGetGlyph((uint)codepoint, out _)) { - match = new Typeface(new FontFamily(Key, "#" + glyphTypeface.FamilyName) , style, weight, stretch); + match = new Typeface(new FontFamily(null, Key.AbsoluteUri + "#" + glyphTypeface.FamilyName), style, weight, stretch); return true; }
diff --git a/tests/Avalonia.Base.UnitTests/Media/FontFamilyTests.cs b/tests/Avalonia.Base.UnitTests/Media/FontFamilyTests.cs index 73c46a92959..7990fb14265 100644 --- a/tests/Avalonia.Base.UnitTests/Media/FontFamilyTests.cs +++ b/tests/Avalonia.Base.UnitTests/Media/FontFamilyTests.cs @@ -121,12 +121,12 @@ public void Should_Create_FontFamily_From_Uri(string name) } [Theory] - [InlineData("resm:Avalonia.Visuals.UnitTests.Assets.Fonts", "#MyFont")] - [InlineData("avares://Avalonia.Visuals.UnitTests/Assets/Fonts", "#MyFont")] + [InlineData(null, "resm:Avalonia.Visuals.UnitTests.Assets.Fonts#MyFont")] + [InlineData("avares://Avalonia.Visuals.UnitTests/Assets/Fonts", "/#MyFont")] [InlineData("avares://Avalonia.Visuals.UnitTests", "/Assets/Fonts#MyFont")] public void Should_Create_FontFamily_From_Uri_With_Base_Uri(string @base, string name) { - var baseUri = new Uri(@base); + var baseUri = @base != null ? new Uri(@base) : null; var fontFamily = new FontFamily(baseUri, name); @@ -134,5 +134,77 @@ public void Should_Create_FontFamily_From_Uri_With_Base_Uri(string @base, string Assert.NotNull(fontFamily.Key); } + + [InlineData(null, "Arial", "Arial", null)] + [InlineData(null, "resm:Avalonia.Skia.UnitTests.Fonts?assembly=Avalonia.Skia.UnitTests#Manrope", "Manrope", "resm:Avalonia.Skia.UnitTests.Fonts?assembly=Avalonia.Skia.UnitTests")] + [InlineData(null, "avares://Avalonia.Fonts.Inter/Assets#Inter", "Inter", null)] + [InlineData("avares://Avalonia.Fonts.Inter", "/Assets#Inter", "Inter", "avares://Avalonia.Fonts.Inter/Assets")] + [InlineData("avares://ControlCatalog/MainWindow.xaml", "avares://Avalonia.Fonts.Inter/Assets#Inter", "Inter", "avares://Avalonia.Fonts.Inter/Assets")] + [Theory] + public void Should_Parse_FontFamily_With_BaseUri(string baseUri, string s, string expectedName, string expectedUri) + { + var b = baseUri is not null ? new Uri(baseUri) : null; + + expectedUri = expectedUri is not null ? 
new Uri(expectedUri).AbsoluteUri : null; + + var fontFamily = FontFamily.Parse(s, b); + + Assert.Equal(expectedName, fontFamily.Name); + + var key = fontFamily.Key; + + if (expectedUri is not null) + { + Assert.NotNull(key); + + if (key.BaseUri is not null) + { + Assert.True(key.BaseUri.IsAbsoluteUri); + } + + if (key.BaseUri is null) + { + Assert.NotNull(key.Source); + Assert.True(key.Source.IsAbsoluteUri); + } + + var fontUri = key.BaseUri; + + if (key.Source is Uri sourceUri) + { + if (sourceUri.IsAbsoluteUri) + { + fontUri = sourceUri; + } + else + { + fontUri = new Uri(fontUri, sourceUri); + } + } + + Assert.Equal(expectedUri, fontUri.AbsoluteUri); + } + } + + [InlineData("avares://MyAssembly/", "Some/Path/#FontName", "avares://MyAssembly/Some/Path/"), ] + [InlineData("avares://MyAssembly/", "./Some/Path/#FontName", "avares://MyAssembly/Some/Path/")] + [InlineData("avares://MyAssembly/sub/", "../Some/Path/#FontName", "avares://MyAssembly/Some/Path/")] + [Theory] + public void Should_Parse_Relative_Path(string baseUriString, string path, string expected) + { + var baseUri = new Uri(baseUriString, UriKind.Absolute); + + var fontFamily = FontFamily.Parse(path, baseUri); + + Assert.NotNull(fontFamily.Key); + + Assert.NotNull(fontFamily.Key.BaseUri); + + Assert.NotNull(fontFamily.Key.Source); + + var actual = new Uri(fontFamily.Key.BaseUri, fontFamily.Key.Source); + + Assert.Equal(expected, actual.AbsoluteUri); + } } } diff --git a/tests/Avalonia.Skia.UnitTests/Media/FontManagerTests.cs b/tests/Avalonia.Skia.UnitTests/Media/FontManagerTests.cs index 8057b81ace4..da609478155 100644 --- a/tests/Avalonia.Skia.UnitTests/Media/FontManagerTests.cs +++ b/tests/Avalonia.Skia.UnitTests/Media/FontManagerTests.cs @@ -409,5 +409,23 @@ public void Should_Use_FontCollection_MatchCharacter() } } } + + [InlineData("Arial")] + [InlineData("#Arial")] + [Win32Theory("Windows specific font")] + public void Should_Get_SystemFont_With_BaseUri(string name) + { + using 
(UnitTestApplication.Start(TestServices.MockPlatformRenderInterface.With(fontManagerImpl: new FontManagerImpl()))) + { + using (AvaloniaLocator.EnterScope()) + { + var fontFamily = new FontFamily(new Uri("avares://Avalonia.Skia.UnitTests/NotFound"), name); + + var glyphTypeface = new Typeface(fontFamily).GlyphTypeface; + + Assert.Equal("Arial", glyphTypeface.FamilyName); + } + } + } } } diff --git a/tests/Avalonia.UnitTests/HarfBuzzFontManagerImpl.cs b/tests/Avalonia.UnitTests/HarfBuzzFontManagerImpl.cs index 78895912dbc..24599c5a9b0 100644 --- a/tests/Avalonia.UnitTests/HarfBuzzFontManagerImpl.cs +++ b/tests/Avalonia.UnitTests/HarfBuzzFontManagerImpl.cs @@ -14,11 +14,11 @@ public class HarfBuzzFontManagerImpl : IFontManagerImpl private readonly string _defaultFamilyName; private static readonly Typeface _defaultTypeface = - new Typeface(new FontFamily(new Uri("resm:Avalonia.UnitTests.Assets?assembly=Avalonia.UnitTests", UriKind.Absolute), "Noto Mono")); + new Typeface(new FontFamily("resm:Avalonia.UnitTests.Assets?assembly=Avalonia.UnitTests#Noto Mono")); private static readonly Typeface _italicTypeface = - new Typeface(new FontFamily(new Uri("resm:Avalonia.UnitTests.Assets?assembly=Avalonia.UnitTests", UriKind.Absolute), "Noto Sans")); + new Typeface(new FontFamily("resm:Avalonia.UnitTests.Assets?assembly=Avalonia.UnitTests#Noto Sans")); private static readonly Typeface _emojiTypeface = - new Typeface(new FontFamily(new Uri("resm:Avalonia.UnitTests.Assets?assembly=Avalonia.UnitTests"), "Twitter Color Emoji")); + new Typeface(new FontFamily("resm:Avalonia.UnitTests.Assets?assembly=Avalonia.UnitTests#Twitter Color Emoji")); public HarfBuzzFontManagerImpl(string defaultFamilyName = "Noto Mono") {
ControlCatalog.Desktop crashes when switching to TextBox/TextBlock Tab ### Describe the bug I used the master branch to compile and run Control Catalog.Desktop with Visual Studio, and the crash described in the title occurred. This happens regardless of debug or release mode. There is no such issue when compiling and running with 11.3.4 branch. **Debug mode:** Crash in \Avalonia\src\Avalonia.Base\Media\TextFormatting\FormattingObjectPool.cs <img width="945" height="377" alt="Image" src="https://github.com/user-attachments/assets/83daa9bb-7b6c-4eab-8fd7-6ec290cd21bc" /> **Release mode:** Crash in \Avalonia\src\Avalonia.Base\Media\Typeface.cs <img width="1355" height="365" alt="Image" src="https://github.com/user-attachments/assets/385b52c5-3843-4a77-a834-28fa085be5b2" /> ### To Reproduce 1. Compile and run ControlCatalog.Desktop 2. Click TextBox or TextBlock Tab ### Expected behavior _No response_ ### Avalonia version master ### OS Windows ### Additional context Windows 11 10.0.26100.4946
This does not validate the baseUri: https://github.com/AvaloniaUI/Avalonia/blob/master/src/Avalonia.Base/Media/FontFamily.cs#L58 Caused by: #19494
This does not validate the baseUri: https://github.com/AvaloniaUI/Avalonia/blob/master/src/Avalonia.Base/Media/FontFamily.cs#L58 Caused by: #19494
[ "https://github.com/AvaloniaUI/Avalonia/commit/d839879369cde3b07a94bc86d294dcc1cf3e8734", "https://github.com/AvaloniaUI/Avalonia/commit/f28d3a4316f01b1d157f8229100d738226adca47", "https://github.com/AvaloniaUI/Avalonia/commit/26a1d0b97677ac23a412421b987e6f4210ce271f", "https://github.com/AvaloniaUI/Avalonia/...
2025-08-25T12:13:19Z
https://github.com/AvaloniaUI/Avalonia/tree/ad33ddbbacbf67de512892624aa8b338e30c964e
[ "cd /testbed; powershell -NoProfile -Command \"& (Resolve-Path '.\\.nuke\\temp\\dotnet-win\\dotnet.exe').Path build .\\Avalonia.Desktop.slnf -c Release\"" ]
[ "cd /testbed; powershell -NoProfile -Command \"New-Item -ItemType Directory -Force artifacts | Out-Null; & (Resolve-Path '.\\.nuke\\temp\\dotnet-win\\dotnet.exe').Path test .\\Avalonia.Desktop.slnf -c Release --no-build --logger 'console;verbosity=detailed' 2>&1 | Tee-Object -FilePath artifacts\\test-output.log\"" ...
[ "cd /testbed; powershell -NoProfile -Command \"Get-Content -Raw artifacts\\test-output.log\"" ]
def parser(log: str) -> dict[str, str]: import re results: dict[str, str] = {} def norm_status(s: str) -> str: s = s.strip().lower() if s in ("pass", "passed", "ok", "success"): return "pass" if s in ("skip", "skipped", "ignored"): return "skip" return "fail" # A "plausible" .NET fully qualified test name: # Namespace.Class.Method(...) possibly with parameters, no spaces. fq_test_re = re.compile(r"^[A-Za-z_][\w`]*(?:\.[A-Za-z_][\w`]*)+\.[A-Za-z_][\w`]+(?:\(.+\))?$") def plausible_name(name: str) -> bool: name = name.strip() if not name or " " in name: return False if "\\testbed\\" in name.lower() or ":\\" in name: return False if name.startswith("[xUnit.net"): return False if name.startswith("----") or name.startswith("at "): return False return bool(fq_test_re.match(name)) def set_result(name: str, status: str) -> None: name = name.strip() if plausible_name(name): results[name] = status # xUnit result line, allow optional prefix "dotnet.exe : " xu_line_re = re.compile( r"^(?:.*?:\s*)?\[xUnit\.net[^\]]*\]\s+(?P<name>\S+)\s+\[(?P<st>PASS|FAIL|SKIP)\]\s*$", re.IGNORECASE, ) # VSTest detailed lines vstest_line_re = re.compile( r"^\s*(?P<st>Passed|Failed|Skipped)\s+(?P<name>\S+)\s+\[\s*(?:<\s*)?\d+.*\]\s*$", re.IGNORECASE, ) pytest_re = re.compile(r"^(?P<name>\S+::\S+)\s+(?P<st>PASSED|FAILED|SKIPPED|ERROR)\s*$", re.IGNORECASE) gotest_re = re.compile(r"^---\s+(?P<st>PASS|FAIL|SKIP):\s+(?P<name>\S+)\s*$", re.IGNORECASE) # Reconstruct wrapped lines: # Only join when current buffer looks like a result start AND next line is indented continuation. 
raw_lines = log.splitlines() logical_lines: list[str] = [] buf = "" def buf_needs_more(b: str) -> bool: if not b: return False if "[xUnit.net" in b and not re.search(r"\[(?:PASS|FAIL|SKIP)\]\s*$", b, re.IGNORECASE): return True if re.match(r"^\s*(Passed|Failed|Skipped)\b", b, re.IGNORECASE) and not re.search( r"\[\s*(?:<\s*)?\d+.*\]\s*$", b ): return True return False for line in raw_lines: l = line.rstrip("\r\n") if not buf: buf = l continue if buf_needs_more(buf) and (len(l) - len(l.lstrip(" ")) >= 2): # continuation line: keep exact content but strip leading spaces cont = l.lstrip() joiner = "" if (buf and buf[-1].isalnum() and cont and cont[0].isalnum()) else " " buf = buf + joiner + cont continue logical_lines.append(buf) buf = l if buf: logical_lines.append(buf) # Parse for ll in logical_lines: s = ll.strip() if not s: continue m = xu_line_re.match(s) if m: set_result(m.group("name"), norm_status(m.group("st"))) continue m = vstest_line_re.match(s) if m: set_result(m.group("name"), norm_status(m.group("st"))) continue m = pytest_re.match(s) if m: # pytest nodeid isn't FQ .NET name; accept as-is results[m.group("name")] = norm_status(m.group("st")) continue m = gotest_re.match(s) if m: results[m.group("name")] = norm_status(m.group("st")) continue return results
[ "Avalonia.Skia.RenderTests.PathTests.Path_With_PenLineCap", "Avalonia.Markup.UnitTests.Parsers.SelectorGrammarTests.Is_Name", "Avalonia.Skia.RenderTests.PathTests.Line_Relative", "Avalonia.Skia.RenderTests.SVGPathTests.SVGPath", "Avalonia.Skia.RenderTests.PathTests.CubicBezier_Relative", "Avalonia.Skia.Re...
[ "Avalonia.LeakTests.ControlTests.TabItem_Is_Freed", "Avalonia.LeakTests.ControlTests.RendererIsDisposed", "Avalonia.Skia.RenderTests.ImageTests.Image_Stretch_Fill", "Avalonia.Base.UnitTests.Media.TypefaceTests.Should_Be_Equal", "Avalonia.Skia.RenderTests.EffectTests.DropShadowEffect", "Avalonia.LeakTests....
starryzhang/sweb.eval.win.avaloniaui_1776_avalonia-19538
canonical/multipass
4205
canonical__multipass-4205
C++
[ "4204" ]
b7a35a3a4abeccc6235354c8ec73cb0c34e3d728
diff --git a/src/platform/backends/hyperv/hyperv_virtual_machine.cpp b/src/platform/backends/hyperv/hyperv_virtual_machine.cpp index f012d1dc86..d8370ba0ff 100644 --- a/src/platform/backends/hyperv/hyperv_virtual_machine.cpp +++ b/src/platform/backends/hyperv/hyperv_virtual_machine.cpp @@ -106,15 +106,6 @@ auto instance_state_for(mp::PowerShell* power_shell, const QString& name) return mp::VirtualMachine::State::unknown; } -void delete_automatic_snapshots(mp::PowerShell* power_shell, const QString& name) -{ - power_shell->easy_run( - {"Get-VMCheckpoint -VMName", - name, - "| Where-Object { $_.IsAutomaticCheckpoint } | Remove-VMCheckpoint -Confirm:$false"}, - "Could not delete existing automatic checkpoints"); -} - void add_extra_net(mp::PowerShell& ps, const QString& vm_name, const mp::NetworkInterface& extra_interface) @@ -193,6 +184,8 @@ mp::HyperVVirtualMachine::HyperVVirtualMachine(const VirtualMachineDescription& "Could not setup cloud-init drive"); power_shell->easy_run({"Set-VMMemory", "-VMName", name, "-DynamicMemoryEnabled", "$false"}, "Could not disable dynamic memory"); + power_shell->easy_run({"Set-VM", "-Name", name, "-AutomaticCheckpointsEnabled", "$false"}, + "Could not disable automatic snapshots"); setup_network_interfaces(); @@ -202,12 +195,6 @@ mp::HyperVVirtualMachine::HyperVVirtualMachine(const VirtualMachineDescription& { state = instance_state_for(power_shell.get(), name); } - - power_shell->easy_run({"Set-VM", "-Name", name, "-AutomaticCheckpointsEnabled", "$false"}, - "Could not disable automatic snapshots"); // TODO move to new VMs only in - // a couple of releases - delete_automatic_snapshots(power_shell.get(), - name); // TODO drop in a couple of releases (going in on v1.13) } mp::HyperVVirtualMachine::HyperVVirtualMachine(const std::string& source_vm_name,
diff --git a/tests/hyperv/test_hyperv_backend.cpp b/tests/hyperv/test_hyperv_backend.cpp index 6c4346435f..11a95bc878 100644 --- a/tests/hyperv/test_hyperv_backend.cpp +++ b/tests/hyperv/test_hyperv_backend.cpp @@ -114,8 +114,9 @@ struct HyperVBackend : public Test {"-EnableSecureBoot Off"}, {"Set-VMProcessor"}, {"Add-VMDvdDrive"}, - {"Set-VMMemory"}}; - inline static const std::vector<RunSpec> postfix_ctor_runs = {{"Set-VM"}, {"Get-VMCheckpoint"}}; + {"Set-VMMemory"}, + {"Set-VM"}}; + inline static const std::vector<RunSpec> postfix_ctor_runs = {}; inline static const RunSpec default_network_run = {"Set-VMNetworkAdapter"}; inline static const RunSpec min_dtor_run = {"-ExpandProperty State", "Off"};
Daemon fails to start intermittently on Windows The daemon fails to start due to an exception being thrown in the Hyper-V VM constructor: ``` [powershell.exe] At line:1 char:1 + Set-VM -Name testing-20-04 -AutomaticCheckpointsEnabled $false + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + CategoryInfo : InvalidOperation: (:) [Set-VM], VirtualizationException + FullyQualifiedErrorId : InvalidState,Microsoft.HyperV.PowerShell.Commands.SetVM [testing-20-04] [9524] stderr: Set-VM : Modifying the properties of 'testing-20-04' failed. The operation cannot be performed while the object is in its current state. At line:1 char:1 + Set-VM -Name testing-20-04 -AutomaticCheckpointsEnabled $false + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + CategoryInfo : InvalidOperation: (:) [Set-VM], VirtualizationException + FullyQualifiedErrorId : InvalidState,Microsoft.HyperV.PowerShell.Commands.SetVM ``` The culprit is this line: ```c++ power_shell->easy_run({"Set-VM", "-Name", name, "-AutomaticCheckpointsEnabled", "$false"}, "Could not disable automatic snapshots"); // TODO move to new VMs only in ``` The reason is that sometimes the VM is not in an appropriate state for setting the automatic checkpoint status, which causes an exception to be thrown in the Hyper-V VM constructor. This exception is then propagated to the daemon constructor and eventually leads to a crash.
[ "https://github.com/canonical/multipass/commit/eab38fd300d4fceabc43aa43c4d4d1636ccdbb04" ]
2025-07-02T07:00:12Z
https://github.com/canonical/multipass/tree/b7a35a3a4abeccc6235354c8ec73cb0c34e3d728
[ "cd C:\\testbed; git config --global --add safe.directory C:/testbed; git submodule update --init --recursive; $VSPath = & \"${env:ProgramFiles(x86)}\\Microsoft Visual Studio\\Installer\\vswhere.exe\" -products Microsoft.VisualStudio.Product.BuildTools -latest -property installationPath; Import-Module \"$VSPath\\Co...
[ "cd C:\\testbed; mkdir -Force reports | Out-Null; .\\build\\bin\\multipass_tests.exe --gtest_output=xml:reports\\gtest.xml 2>&1 | Tee-Object reports\\gtest-verbose.log" ]
[ "cd C:\\testbed; Get-Content reports\\gtest.xml -Raw" ]
def parser(log: str) -> dict[str, str]: import re from xml.etree import ElementTree as ET def _strip_ns(tag: str) -> str: return tag.split("}", 1)[-1] if "}" in tag else tag def _norm_name(s: str) -> str: # Keep names stable: trim, collapse whitespace; also remove stray newlines inside tokens. return re.sub(r"\s+", " ", (s or "")).strip() def _mk_testname(classname: str, name: str, suite: str | None = None) -> str: classname = _norm_name(classname) name = _norm_name(name) suite = _norm_name(suite or "") if classname and name: return f"{classname}.{name}" if suite and name: return f"{suite}.{name}" return classname or name or suite def _status_from_xml_children(tc_elem) -> str: # Priority: fail > skip > pass status_attr = (tc_elem.attrib.get("status") or "").lower() result_attr = (tc_elem.attrib.get("result") or "").lower() name_attr = tc_elem.attrib.get("name") or "" classname_attr = tc_elem.attrib.get("classname") or "" full = f"{classname_attr}.{name_attr}" if ( status_attr in {"notrun", "disabled"} or result_attr in {"suppressed", "skipped", "disabled"} or name_attr.startswith("DISABLED_") or ".DISABLED_" in full ): # still allow fail marker to override below if present base = "skip" else: base = "pass" has_fail = False has_skip = False for child in list(tc_elem): t = _strip_ns(child.tag).lower() if t in {"failure", "error"}: has_fail = True elif t in {"skipped", "skip"}: has_skip = True if has_fail: return "fail" if has_skip: return "skip" return base # ---------- 1) Try to locate and parse XML (best effort) ---------- text = log.replace("\r\n", "\n").replace("\r", "\n") # Try to crop to an XML-ish region to avoid leading noise. start = text.find("<testsuites") if start == -1: start = text.find("<?xml") xml_region = text[start:] if start != -1 else text # If we can find a proper closing tag, crop; otherwise keep partial (ET may fail). 
end = xml_region.rfind("</testsuites>") if end != -1: xml_region_cropped = xml_region[: end + len("</testsuites>")] else: xml_region_cropped = xml_region results: dict[str, str] = {} def _emit(name: str, status: str): if not name: return # Keep latest seen (usually identical); fail should win over pass/skip if duplicates happen. prev = results.get(name) if prev == "fail": return if status == "fail": results[name] = "fail" elif prev is None: results[name] = status elif prev == "pass" and status == "skip": # don't downgrade pass to skip return else: results[name] = prev # Basic sanitization to make ET more likely to work on wrapped logs: # - remove control chars (except \n, \t) # - do NOT collapse whitespace globally (would corrupt content); ET can handle newlines in attrs if well-formed. xml_sanitized = re.sub(r"[\x00-\x08\x0b\x0c\x0e-\x1f]", "", xml_region_cropped) parsed_xml = False try: root = ET.fromstring(xml_sanitized) parsed_xml = True except Exception: parsed_xml = False if parsed_xml: # Root may be <testsuites> or <testsuite> for suite in root.iter(): if _strip_ns(suite.tag).lower() != "testsuite": continue suite_name = suite.attrib.get("name") or "" for tc in suite: if _strip_ns(tc.tag).lower() != "testcase": continue tname = _mk_testname(tc.attrib.get("classname", ""), tc.attrib.get("name", ""), suite_name) _emit(tname, _status_from_xml_children(tc)) if results: return results # best path # ---------- 2) Fallback: regex-based XML-ish extraction (handles truncated/broken XML) ---------- # We avoid trying to "fix" broken quotes globally; instead extract attrs with tolerant regex. region = xml_region # even if no closing tag exists # Iterate testsuites to keep suite context as fallback classname. # We'll scan sequentially to associate each testcase with the most recent testsuite name. 
suite_name = "" i = 0 suite_pat = re.compile(r"<\s*testsuite\b([^>]*)>", re.IGNORECASE | re.DOTALL) tc_pat = re.compile(r"<\s*testcase\b([^>]*?)(/?>)", re.IGNORECASE | re.DOTALL) def get_attr_loose(attr_blob: str, key: str) -> str | None: # Supports name="..." or name='...'; tolerant to newlines; stops at matching quote. m = re.search(rf"""\b{re.escape(key)}\s*=\s*(["'])(.*?)\1""", attr_blob, re.IGNORECASE | re.DOTALL) if m: return m.group(2) return None # Precompute all tag hits (suite open and testcase open) with positions. hits = [] for m in suite_pat.finditer(region): hits.append(("suite", m.start(), m.end(), m.group(1))) for m in tc_pat.finditer(region): hits.append(("tc", m.start(), m.end(), m.group(1))) hits.sort(key=lambda x: x[1]) # To detect inner markers for a testcase, look from its end to the next testcase start or testsuite end. # This works even if testcase isn't properly closed. next_tag_pat = re.compile(r"<\s*(testcase|testsuite|/testsuite|/testsuites)\b", re.IGNORECASE) for idx, (kind, start_pos, end_pos, blob) in enumerate(hits): if kind == "suite": suite_name = _norm_name(get_attr_loose(blob, "name") or suite_name) continue attrs = blob name = _norm_name(get_attr_loose(attrs, "name") or "") classname = _norm_name(get_attr_loose(attrs, "classname") or "") # Determine slice for inner content markers inner_start = end_pos inner_end = len(region) nm = next_tag_pat.search(region, inner_start) if nm: inner_end = nm.start() inner = region[inner_start:inner_end] status_attr = _norm_name((get_attr_loose(attrs, "status") or "")).lower() result_attr = _norm_name((get_attr_loose(attrs, "result") or "")).lower() full = f"{classname}.{name}" status = "pass" if ( status_attr in {"notrun", "disabled"} or result_attr in {"suppressed", "skipped", "disabled"} or name.startswith("DISABLED_") or ".DISABLED_" in full ): status = "skip" if re.search(r"<\s*(failure|error)\b", inner, re.IGNORECASE): status = "fail" elif re.search(r"<\s*skipped\b", inner, re.IGNORECASE): 
status = "skip" tname = _mk_testname(classname, name, suite_name) _emit(tname, status) # ---------- 3) Non-XML fallbacks (pytest / unittest / go test / jest-like) ---------- # Pytest: file.py::test_name PASSED/FAILED/SKIPPED/ERROR for m in re.finditer(r"(?m)^\s*(\S+::\S+)\s+(PASSED|FAILED|SKIPPED|ERROR)\s*$", text): tname = m.group(1).strip() st = m.group(2) _emit(tname, "pass" if st == "PASSED" else "skip" if st == "SKIPPED" else "fail") # Go test: --- PASS: TestName / FAIL / SKIP for m in re.finditer(r"(?m)^\s*---\s+(PASS|FAIL|SKIP):\s+(\S+)", text): st, tname = m.group(1), m.group(2) _emit(tname, "pass" if st == "PASS" else "skip" if st == "SKIP" else "fail") # unittest: test_method (TestClass) ... ok/FAIL/ERROR/skipped for m in re.finditer(r"(?m)^\s*(\w+)\s+\(([^)]+)\)\s+\.\.\.\s+(ok|FAIL|ERROR|skipped)\s*$", text): method, cls, st = m.group(1), m.group(2), m.group(3) tname = f"{cls}.{method}" _emit(tname, "pass" if st == "ok" else "skip" if st == "skipped" else "fail") return results
[ "AuthenticateCommandClien nt.authenticateCmdNoPassphraseEnteredReturnsError", "TestBaseSnapshot/ /TestSnapshotRejectedNonPositiveIndices.refusesNonPositiveIndexFromJson/2", "TestSettings/Te estSettingsGetMultipleHandlers.getReturnsSettingFromFirstHandlerHit/6", "PowerShellTest/TestPS SStatusAndOutput.runHandl...
[ "TestSettings.setDelegatesOnAllHandlers", "PlatformWin/TestWinTermSyncJson.wintermSyncEnablesHiddenProfileIfSettingPrimary/43", "MemorySize/TestGoodMemorySizeFormats.interpretsValidFormats/299", "MemorySize/TestGoodMemorySizeFormats.interpretsValidFormats/100", "PlatformWin/TestWinTermSyncJson.wintermSyncAd...
starryzhang/sweb.eval.win.canonical_1776_multipass-4205
gemrb/gemrb
2365
gemrb__gemrb-2365
C++
[ "2364" ]
159c5fd1b5f5382a7f68e2f03b271b1487c24a1f
diff --git a/gemrb/core/SaveGameIterator.cpp b/gemrb/core/SaveGameIterator.cpp index d5aa894ece..1a9c383f5a 100644 --- a/gemrb/core/SaveGameIterator.cpp +++ b/gemrb/core/SaveGameIterator.cpp @@ -132,17 +132,14 @@ SaveGame::SaveGame(path_t path, const path_t& name, const ResRef& prefix, std::s static const auto DATE_FMT = FMT_STRING("{:%a %Od %b %T %EY}"); PortraitCount = pCount; SaveID = saveID; - struct stat my_stat; path_t nPath = PathJoinExt(Path, Prefix, "bmp"); - memset(&my_stat, 0, sizeof(my_stat)); - std::tm* time; - if (stat(nPath.c_str(), &my_stat)) { - Log(ERROR, "SaveGameIterator", "Stat call failed, using dummy time!"); - time = std::localtime(nullptr); + + auto time = FileModificationTime(nPath); + if (time != nullptr) { + Date = fmt::format(DATE_FMT, *time); } else { - time = std::localtime(&my_stat.st_mtime); + Date = "n/a"; } - Date = fmt::format(DATE_FMT, *time); manager.AddSource(Path, name, PLUGIN_RESOURCE_DIRECTORY); Name = StringFromUtf8(name); } diff --git a/gemrb/core/Strings/StringConversion.cpp b/gemrb/core/Strings/StringConversion.cpp index 26651d53ed..1d43661848 100644 --- a/gemrb/core/Strings/StringConversion.cpp +++ b/gemrb/core/Strings/StringConversion.cpp @@ -133,6 +133,8 @@ std::string RecodedStringFromWideStringBytes(const char16_t* bytes, size_t bytes return ""; } + buffer.resize(buffer.length() - outLenLeft); + return buffer; } diff --git a/gemrb/core/System/VFS.cpp b/gemrb/core/System/VFS.cpp index b4e524af5d..48b4431b29 100644 --- a/gemrb/core/System/VFS.cpp +++ b/gemrb/core/System/VFS.cpp @@ -74,7 +74,7 @@ struct dirent { dirent& operator=(std::string&& entryName) { - auto cutOff = entryName.length(); + auto cutOff = std::min(entryName.length(), static_cast<size_t>(_MAX_PATH - 1)); buffer = std::move(entryName); buffer.resize(_MAX_PATH); buffer[cutOff] = 0; @@ -244,6 +244,60 @@ bool FileExists(const path_t& path) return true; } +std::tm* FileModificationTime(const path_t& path) +{ +#ifdef WIN32 + static std::tm timestamp; + + 
auto buffer = StringFromUtf8(path.c_str()); + auto wideChars = reinterpret_cast<const wchar_t*>(buffer.c_str()); + + auto file = + CreateFile( + wideChars, + GENERIC_READ, + FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE, + nullptr, + OPEN_EXISTING, + FILE_ATTRIBUTE_NORMAL, + nullptr); + + if (file == INVALID_HANDLE_VALUE) { + return nullptr; + } + + FILETIME moditicationTime; + auto result = GetFileTime(file, nullptr, nullptr, &moditicationTime); + CloseHandle(file); + if (result == 0) { + return nullptr; + } + + SYSTEMTIME systemTime, localTime; + FileTimeToSystemTime(&moditicationTime, &systemTime); + SystemTimeToTzSpecificLocalTime(nullptr, &systemTime, &localTime); + + timestamp.tm_sec = localTime.wSecond; + timestamp.tm_min = localTime.wMinute; + timestamp.tm_hour = localTime.wHour; + timestamp.tm_wday = localTime.wDayOfWeek; + timestamp.tm_mday = localTime.wDay; + timestamp.tm_mon = localTime.wMonth - 1; + timestamp.tm_year = localTime.wYear - 1900; + + return &timestamp; +#else + struct stat statStruct; + memset(&statStruct, 0, sizeof(statStruct)); + + if (stat(path.c_str(), &statStruct)) { + return nullptr; + } else { + return std::localtime(&statStruct.st_mtime); + } +#endif +} + void PathAppend(path_t& target, const path_t& name) { if (name.empty()) { diff --git a/gemrb/core/System/VFS.h b/gemrb/core/System/VFS.h index 0fac319ac2..8df6fbd9dc 100644 --- a/gemrb/core/System/VFS.h +++ b/gemrb/core/System/VFS.h @@ -78,6 +78,7 @@ GEM_EXPORT path_t& ResolveFilePath(path_t& FilePath); GEM_EXPORT bool DirExists(const path_t& path); GEM_EXPORT bool FileExists(const path_t& path); +GEM_EXPORT std::tm* FileModificationTime(const path_t& path); // when case sensitivity is enabled dir will be transformed to fit the case of the actual items composing the path GEM_EXPORT path_t& ResolveCase(path_t& dir);
diff --git a/gemrb/tests/core/Strings/Test_String.cpp b/gemrb/tests/core/Strings/Test_String.cpp index 0e4b628e5b..7e7e2775ca 100644 --- a/gemrb/tests/core/Strings/Test_String.cpp +++ b/gemrb/tests/core/Strings/Test_String.cpp @@ -18,6 +18,7 @@ */ #include "Strings/String.h" +#include "Strings/StringConversion.h" #include <gtest/gtest.h> @@ -325,4 +326,12 @@ TEST(StringTest, StringToUpper) #endif } +TEST(StringTest, RecodedStringFromWideStringBytes) +{ + std::u16string original { u"abc" }; + auto utf8 = RecodedStringFromWideStringBytes(reinterpret_cast<const char16_t*>(original.c_str()), 6, "UTF-8"); + EXPECT_EQ(3, utf8.length()); + EXPECT_EQ("abc", utf8); +} + } diff --git a/gemrb/tests/core/System/Test_VFS.cpp b/gemrb/tests/core/System/Test_VFS.cpp index e01ad9caea..7bdf267dca 100644 --- a/gemrb/tests/core/System/Test_VFS.cpp +++ b/gemrb/tests/core/System/Test_VFS.cpp @@ -236,6 +236,14 @@ TEST(VFSTest, UnlinkFile) EXPECT_FALSE(FileExists(umlautFilePath)); } +TEST(VFSTest, FileModificationTime) +{ + auto baseDir = PathJoin("tests", "resources", "VFS", "encoding"); + EXPECT_NE(nullptr, FileModificationTime(PathJoin(baseDir, "file.txt"))); + EXPECT_NE(nullptr, FileModificationTime(PathJoin(baseDir, "file_äöü.txt"))); + EXPECT_EQ(nullptr, FileModificationTime("na")); +} + path_t getTempPath() { #ifdef WIN32
Crash on save game directory iteration #### Bug description In my IWD2 folder, I have around 60 save games that were created with gemrb, and thus I'm confident they are from the last gemrb-only playthrough, so I'm sure it's a regression. Trying to start the game now fails half-way. #### Screenshots ``` C:/MSYS2/mingw64/include/c++/15.2.0/bits/basic_string.h:1369: std::__cxx11::basic_string<_CharT, _Traits, _Alloc>::reference std::__cxx11::basic_string<_CharT, _Traits, _Alloc>::operator[](size_type) [with _CharT = char; _Traits = std::char _traits<char>; _Alloc = std::allocator<char>; reference = char&; size_type = long long unsigned int]: Assertion '__pos <= size()' failed. Thread 1 hit Breakpoint 1.2, 0x00007ffe6abff1e7 in msvcrt!abort () from C:\Windows\System32\msvcrt.dll (gdb) bt #0 0x00007ffe6abff1e7 in msvcrt!abort () from C:\Windows\System32\msvcrt.dll #1 0x00007ffe3423ab7b in ?? () from C:\MSYS2\mingw64\bin\libstdc++-6.dll #2 0x00007ffde93834d7 in std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >::operator[] (this=0x226094d0, __pos=284) at C:/MSYS2/mingw64/include/c++/15.2.0/bits/basic_string.h:1369 #3 0x00007ffde92845f9 in dirent::operator= (this=0x226094d0, entryName=...) 
at D:/Sources/gemrb/gemrb/core/System/VFS.cpp:80 #4 0x00007ffde91d77c0 in readdir (dir=0x22609270) at D:/Sources/gemrb/gemrb/core/System/VFS.cpp:136 #5 0x00007ffde91d9058 in GemRB::DirectoryIterator::operator++ (this=0x5fc6b0) at D:/Sources/gemrb/gemrb/core/System/VFS.cpp:644 #6 0x00007ffde9184c3b in GemRB::SaveGameIterator::RescanSaveGames (this=0xe08450) at D:/Sources/gemrb/gemrb/core/SaveGameIterator.cpp:306 #7 0x00007ffde9184e10 in GemRB::SaveGameIterator::GetSaveGames (this=0xe08450) at D:/Sources/gemrb/gemrb/core/SaveGameIterator.cpp:320 #8 0x00007ffe2978004e in GemRB_GetSaveGames () at D:/Sources/gemrb/gemrb/plugins/GUIScript/GUIScript.cpp:4353 #9 0x00007ffe26113fcc in cfunction_call (func=0x905ce90, args=0x7ffe263e0d60 <_PyRuntime+82784>, kwargs=0x0) at ../Python-3.12.8/Objects/methodobject.c:548 #10 0x00007ffe260a335e in _PyObject_MakeTpCall (tstate=0x7ffe2643e6a0 <_PyRuntime+466080>, callable=0x905ce90, args=0x1f00f8, nargs=0, keywords=0x0) at ../Python-3.12.8/Objects/call.c:240 #11 0x00007ffe260a28c0 in _PyObject_VectorcallTstate (tstate=0x7ffe2643e6a0 <_PyRuntime+466080>, callable=0x905ce90, args=0x1f00f8, nargsf=9223372036854775808, kwnames=0x0) at ../Python-3.12.8/Include/internal/pycore_call.h:90 #12 0x00007ffe260a3707 in PyObject_Vectorcall (callable=0x905ce90, args=0x1f00f8, nargsf=9223372036854775808, kwnames=0x0) at ../Python-3.12.8/Objects/call.c:325 #13 0x00007ffe26222caa in _PyEval_EvalFrameDefault (tstate=0x7ffe2643e6a0 <_PyRuntime+466080>, frame=0x1f0020, throwflag=0) at Python/bytecodes.c:2715 #14 0x00007ffe2620a400 in _PyEval_EvalFrame (tstate=0x7ffe2643e6a0 <_PyRuntime+466080>, frame=0x1f0020, throwflag=0) at ../Python-3.12.8/Include/internal/pycore_ceval.h:89 #15 0x00007ffe2622d700 in _PyEval_Vector (tstate=0x7ffe2643e6a0 <_PyRuntime+466080>, func=0x904e210, locals=0x0, args=0x0, argcount=0, kwnames=0x0) at ../Python-3.12.8/Python/ceval.c:1683 #16 0x00007ffe260a3b20 in _PyFunction_Vectorcall (func=0x904e210, stack=0x0, nargsf=0, 
kwnames=0x0) at ../Python-3.12.8/Objects/call.c:419 #17 0x00007ffe260a28e2 in _PyObject_VectorcallTstate (tstate=0x7ffe2643e6a0 <_PyRuntime+466080>, callable=0x904e210, args=0x0, nargsf=0, kwnames=0x0) at ../Python-3.12.8/Include/internal/pycore_call.h:92 #18 0x00007ffe260a293e in _PyObject_CallNoArgsTstate (tstate=0x7ffe2643e6a0 <_PyRuntime+466080>, func=0x904e210) at ../Python-3.12.8/Include/internal/pycore_call.h:99 #19 0x00007ffe260a3cf3 in PyObject_CallObject (callable=0x904e210, args=0x0) at ../Python-3.12.8/Objects/call.c:469 #20 0x00007ffe297de4d9 in GemRB::CallObjectWrapper (function=0x904e210, args=0x0) at D:/Sources/gemrb/gemrb/plugins/GUIScript/PythonCallbacks.h:47 #21 0x00007ffe297a3e55 in GemRB::GUIScript::RunPyFunction (this=0xe0812f0, moduleName=0x5ff4e8 "Start", functionName=0x7ffde94550a1 <GemRB::IDT_SKILLPENALTY+3049> "OnLoad", pArgs=0x0, report_error=true) at D:/Sources/gemrb/gemrb/plugins/GUIScript/GUIScript.cpp:13291 #22 0x00007ffe297a3ccb in GemRB::GUIScript::RunPyFunction (this=0xe0812f0, Modulename=0x5ff4e8 "Start", FunctionName=0x7ffde94550a1 <GemRB::IDT_SKILLPENALTY+3049> "OnLoad", params=std::vector of length 0, capacity 0, report_error=true) at D:/Sources/gemrb/gemrb/plugins/GUIScript/GUIScript.cpp:13257 #23 0x00007ffe297a3919 in GemRB::GUIScript::RunFunction (this=0xe0812f0, Modulename=0x5ff4e8 "Start", FunctionName=0x7ffde94550a1 <GemRB::IDT_SKILLPENALTY+3049> "OnLoad", params=std::vector of length 0, capacity 0, report_error=true) at D:/Sources/gemrb/gemrb/plugins/GUIScript/GUIScript.cpp:13217 #24 0x00007ffde9187712 in GemRB::ScriptEngine::RunFunction (this=0xe0812f0, Modulename=0x5ff4e8 "Start", FunctionName=0x7ffde94550a1 <GemRB::IDT_SKILLPENALTY+3049> "OnLoad", report_error=true) at D:/Sources/gemrb/gemrb/core/ScriptEngine.cpp:55 #25 0x00007ffde9138f28 in GemRB::Interface::HandleFlags (this=0x5ff1d0) at D:/Sources/gemrb/gemrb/core/Interface.cpp:843 #26 0x00007ffde9139b1c in GemRB::Interface::Main (this=0x5ff1d0) at 
D:/Sources/gemrb/gemrb/core/Interface.cpp:992 #27 0x00007ff653a01651 in main (argc=1, argv=0xe07be0) at D:/Sources/gemrb/platforms/windows/GemRB.cpp:65 ``` GemRB version (check as many as you know apply) - [x] master as of this issue - [ ] 0.9.4 - [ ] 0.9.3 - [ ] 0.9.2
Hmm, I tried with a bunch of my saves of various ages and can't reproduce. But it is in the windows-specific readdir, so at least it seems to be limited to that ... I guess the character conversion fails (RecodedStringFromWideStringBytes). It's also not a regression, some old versions of this era now crash, too. Maybe a bug on the Windows path that now gets caught by some new GCC v15.2 assertion.
Hmm, I tried with a bunch of my saves of various ages and can't reproduce. But it is in the windows-specific readdir, so at least it seems to be limited to that ... I guess the character conversion fails (RecodedStringFromWideStringBytes). It's also not a regression, some old versions of this era now crash, too. Maybe a bug on the Windows path that now gets caught by some new GCC v15.2 assertion.
[ "https://github.com/gemrb/gemrb/commit/7930f1805b5785ec83a4bb425434e4240bd5eed7", "https://github.com/gemrb/gemrb/commit/cbf1602637e53ec64243b35d6d8c93eded0d921f", "https://github.com/gemrb/gemrb/commit/324aa100b20a8899fb029c4e4bbe09774ce20340" ]
2025-08-14T22:52:08Z
https://github.com/gemrb/gemrb/tree/159c5fd1b5f5382a7f68e2f03b271b1487c24a1f
[ "cd C:\\testbed ; cmd /c \"`\"C:\\Program Files (x86)\\Microsoft Visual Studio\\2022\\BuildTools\\Common7\\Tools\\VsDevCmd.bat`\" -arch=x64 -host_arch=x64 && cmake --build C:\\testbed\\build --parallel\"" ]
[ "cd C:\\testbed ; if (!(Test-Path reports)) { New-Item -ItemType Directory reports | Out-Null } ; cmd /c '\"C:\\Program Files (x86)\\Microsoft Visual Studio\\2022\\BuildTools\\Common7\\Tools\\VsDevCmd.bat\" -arch=x64 -host_arch=x64 && ctest --test-dir build --output-on-failure -VV --output-junit C:\\testbed\\report...
[ "cd C:\\testbed ; Get-Content C:\\testbed\\reports\\ctest-junit.xml" ]
def parser(log: str) -> dict[str, str]: import re import xml.etree.ElementTree as ET def status_rank(s: str) -> int: # higher is "worse"/more important return {"pass": 0, "skip": 1, "fail": 2}.get(s, 0) def merge_status(results: dict[str, str], name: str, new_status: str) -> None: if not name: return old = results.get(name) if old is None or status_rank(new_status) > status_rank(old): results[name] = new_status def extract_first_xml(text: str) -> str | None: start = text.find("<?xml") if start == -1: # sometimes XML starts directly with <testsuite...> / <testsuites...> m = re.search(r"<testsuites\b|<testsuite\b", text) if not m: return None start = m.start() tail = text[start:] # choose the last plausible closing tag to trim noisy suffix end_tsuites = tail.rfind("</testsuites>") end_tsuite = tail.rfind("</testsuite>") if end_tsuites != -1: return tail[: end_tsuites + len("</testsuites>")] if end_tsuite != -1: return tail[: end_tsuite + len("</testsuite>")] # fallback: return as-is; XML parse may still succeed return tail results: dict[str, str] = {} # --- 1) Primary: JUnit XML parsing --- xml_text = extract_first_xml(log) root = None if xml_text: try: root = ET.fromstring(xml_text) except Exception: root = None testcases = [] if root is not None: # JUnit root can be <testsuite> or <testsuites> testcases = list(root.findall(".//testcase")) for tc in testcases: name = (tc.get("name") or "").strip() classname = (tc.get("classname") or "").strip() # Prefer JUnit testcase "name" as key; if missing, use classname key = name or classname if not key: continue # Determine status via standard JUnit children if tc.find("failure") is not None or tc.find("error") is not None: merge_status(results, key, "fail") elif tc.find("skipped") is not None: merge_status(results, key, "skip") else: merge_status(results, key, "pass") # --- 2) Secondary: best-effort parse gtest inner test statuses from <system-out> --- # This can be truncated; do not rely on it for completeness. 
ok_re = re.compile(r"^\[\s*OK\s*\]\s+(.+?)(?:\s+\(|$)") skipped_re = re.compile(r"^\[\s*SKIPPED\s*\]\s+(.+?)(?:\s+\(|$)") failed_re = re.compile(r"^\[\s*FAILED\s*\]\s+(.+?)(?:\s+\(|$)") run_re = re.compile(r"^\[\s*RUN\s*\]\s+(.+?)\s*$") if root is not None and testcases: for tc in testcases: sysout = tc.findtext("system-out") or "" current_running = None for raw in sysout.splitlines(): line = raw.strip() m = run_re.match(line) if m: current_running = m.group(1).strip() continue m = ok_re.match(line) if m: merge_status(results, m.group(1).strip(), "pass") current_running = None continue m = skipped_re.match(line) if m: merge_status(results, m.group(1).strip(), "skip") current_running = None continue m = failed_re.match(line) if m: merge_status(results, m.group(1).strip(), "fail") current_running = None continue # Heuristic: gtest sometimes prints "Skipped" without [ SKIPPED ] summary line # e.g. "file.cpp(123): Skipped" if current_running and re.search(r":\s*Skipped\b", line): merge_status(results, current_running, "skip") # --- 3) If no XML parsed, try common plaintext formats as fallback --- if root is None: # pytest: path::test STATUS py_re = re.compile(r"^(?P<name>\S+::\S+)\s+(?P<st>PASSED|FAILED|SKIPPED|ERROR)\b", re.I) # go test: --- PASS: TestName go_re = re.compile(r"^---\s+(PASS|FAIL|SKIP):\s+(\S+)", re.I) # unittest: test_name (Class) ... 
ok/FAIL/ERROR/skipped uni_re = re.compile(r"^(.*?)\s+\([^)]+\)\s+\.\.\.\s+(ok|FAIL|ERROR|skipped)", re.I) for raw in log.splitlines(): line = raw.strip() m = py_re.match(line) if m: st = m.group("st").upper() merge_status(results, m.group("name"), "pass" if st == "PASSED" else "skip" if st == "SKIPPED" else "fail") continue m = go_re.match(line) if m: st = m.group(1).upper() merge_status(results, m.group(2), "pass" if st == "PASS" else "skip" if st == "SKIP" else "fail") continue m = uni_re.match(line) if m: st = m.group(2).lower() merge_status(results, m.group(1).strip(), "pass" if st == "ok" else "skip" if st == "skipped" else "fail") continue # gtest in plain stdout m = ok_re.match(line) if m: merge_status(results, m.group(1).strip(), "pass") continue m = skipped_re.match(line) if m: merge_status(results, m.group(1).strip(), "skip") continue m = failed_re.match(line) if m: merge_status(results, m.group(1).strip(), "fail") continue return results
[ "VFSTest.PathJoinExtWithoutActualCaseFix", "DataStreamReadingInstances/DataStreamReadingTest.ReadPoint/1", "DataStreamReadingInstances/DataStreamReadingTest.ReadRTrimString/1", "VFSTest.ResolveCase", "StringTest.RecodedStringFromWideStringBytes", "StringTest.StringToLower", "PaletteTest.CopyColors", "...
[ "IDSImporterInstances/IDSImporterTest.GetStringIndex/0", "2DAImporterInstances/p2DAImporterTest.GetRowCount/1", "OrientationTest.GetOrient", "PaletteTest.SetAndGetColor", "INIImporterTest.KeyValueIteration", "Test_2DAImporter", "PaletteTest.Constructor", "2DAImporterInstances/p2DAImporterTest.GetColNa...
starryzhang/sweb.eval.win.gemrb_1776_gemrb-2365
Azure/azure-mcp
636
Azure__azure-mcp-636
C#
[ "611" ]
ed18eb27a0b28b2efef07a8bbabadd523ba14064
diff --git a/CHANGELOG.md b/CHANGELOG.md index 2a4a1273f..fa5c4354b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,27 +1,17 @@ # Release History -## 0.4.1 (Unreleased) +## 0.4.1 (2025-07-17) ### Features Added - Support for Azure Load testing operations - Modify load testing resource, test and test runs. [#315](https://github.com/Azure/azure-mcp/pull/315) - - Support for Azure Data Lake Storage Gen2 operations - List paths in Data Lake file systems via the command: `azmcp storage datalake file-system list-paths`. [#608](https://github.com/Azure/azure-mcp/pull/608) - - Added new commands for Azure Function code generation and deployment best practices (https://github.com/Azure/azure-mcp/pull/630) - - - Add `azmcp sql firewall-rule list` command to list SQL server firewall rules. [[#610](https://github.com/Azure/azure-mcp/pull/610)] - -### Breaking Changes +- Add `azmcp sql firewall-rule list` command to list SQL server firewall rules. [[#610](https://github.com/Azure/azure-mcp/pull/610)] +- Added support for listing SQL elastic pools via the command: `azmcp sql elastic-pool list`. [[#581](https://github.com/Azure/azure-mcp/pull/581)] ### Bugs Fixed -### Other Changes - -## 0.4.1 (2025-07-17) - -### Features Added - -- Added support for listing SQL elastic pools via the command: `azmcp sql elastic-pool list`. [[#581](https://github.com/Azure/azure-mcp/pull/581)] +- Fixed Azure CLI executable path resolution on Windows to prioritize .cmd over bash script. 
[[#611](https://github.com/Azure/azure-mcp/issues/611)] ## 0.4.0 (2025-07-15) diff --git a/src/Areas/Extension/Commands/AzCommand.cs b/src/Areas/Extension/Commands/AzCommand.cs index e511ac1a5..6b5d2cbfc 100644 --- a/src/Areas/Extension/Commands/AzCommand.cs +++ b/src/Areas/Extension/Commands/AzCommand.cs @@ -20,6 +20,14 @@ public sealed class AzCommand(ILogger<AzCommand> logger, int processTimeoutSecon private volatile bool _isAuthenticated = false; private static readonly SemaphoreSlim s_authSemaphore = new(1, 1); + /// <summary> + /// Clears the cached Azure CLI path. Used for testing purposes. + /// </summary> + internal static void ClearCachedAzPath() + { + _cachedAzPath = null; + } + public override string Name => "az"; public override string Description => @@ -51,7 +59,7 @@ protected override AzOptions BindOptions(ParseResult parseResult) return options; } - private static string? FindAzCliPath() + internal static string? FindAzCliPath() { string executableName = "az"; @@ -66,33 +74,36 @@ protected override AzOptions BindOptions(ParseResult parseResult) return null; string[] paths = pathEnv.Split(Path.PathSeparator); + var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows); + foreach (string path in paths) { string fullPath = Path.Combine(path.Trim(), executableName); + + // On Windows, prioritize .cmd and .bat extensions over the base executable + // This ensures we use az.cmd instead of the az bash script which isn't executable by .NET + if (isWindows) + { + string cmdPath = Path.ChangeExtension(fullPath, ".cmd"); + if (File.Exists(cmdPath)) + { + _cachedAzPath = cmdPath; + return _cachedAzPath; + } + string batPath = Path.ChangeExtension(fullPath, ".bat"); + if (File.Exists(batPath)) + { + _cachedAzPath = batPath; + return _cachedAzPath; + } + } + + // Fall back to the base executable name if (File.Exists(fullPath)) { _cachedAzPath = fullPath; return _cachedAzPath; } - else - { - var isWindows = 
RuntimeInformation.IsOSPlatform(OSPlatform.Windows); - if (isWindows) - { - string exePath = Path.ChangeExtension(fullPath, ".cmd"); - if (File.Exists(exePath)) - { - _cachedAzPath = exePath; - return _cachedAzPath; - } - string batPath = Path.ChangeExtension(fullPath, ".bat"); - if (File.Exists(batPath)) - { - _cachedAzPath = batPath; - return _cachedAzPath; - } - } - } } return null; }
diff --git a/tests/Areas/Extension/UnitTests/AzCommandTests.cs b/tests/Areas/Extension/UnitTests/AzCommandTests.cs index 57dea00b3..11889239e 100644 --- a/tests/Areas/Extension/UnitTests/AzCommandTests.cs +++ b/tests/Areas/Extension/UnitTests/AzCommandTests.cs @@ -2,6 +2,7 @@ // Licensed under the MIT License. using System.CommandLine.Parsing; +using System.Runtime.InteropServices; using System.Text.Json; using System.Text.Json.Serialization; using AzureMcp.Areas.Extension.Commands; @@ -186,6 +187,92 @@ public async Task ExecuteAsync_HandlesNonJsonOutput_AndWrapsInParseOutput() Assert.NotNull(response.Results); } + [Fact] + public void FindAzCliPath_PrioritizesCmdExtensionOnWindows() + { + // This test verifies that on Windows, .cmd files are prioritized over bash scripts + // which addresses the issue where az bash script cannot be executed by .NET Process.Start() + + // We can only test this meaningfully if we can create a temporary test environment + // Since we can't easily mock the file system and PATH in a unit test, + // this test documents the expected behavior and validates the logic flow + + // The key insight is that the new logic checks for .cmd/.bat FIRST on Windows + // before falling back to the base executable name + + // Clear any cached path to ensure fresh execution + AzCommand.ClearCachedAzPath(); + + // The method should find az.cmd before az on Windows + var result = AzCommand.FindAzCliPath(); + + // We can't make strong assertions about the result since it depends on the actual + // system PATH, but we can verify the method doesn't throw and returns a string or null + Assert.True(result == null || result.Length > 0); + } + + [Fact] + public void FindAzCliPath_WithTemporaryTestEnvironment_PrioritizesCmdOnWindows() + { + // Clear any cached path to ensure fresh execution + AzCommand.ClearCachedAzPath(); + + // Create a temporary directory to simulate the Azure CLI installation + const int TempDirSuffixLength = 8; + var tempDir = 
Path.Combine(Path.GetTempPath(), "AzCliTest_" + Guid.NewGuid().ToString("N")[..TempDirSuffixLength]); + Directory.CreateDirectory(tempDir); + + try + { + // Create test files that simulate the Azure CLI installation on Windows + var azPath = Path.Combine(tempDir, "az"); + var azCmdPath = Path.Combine(tempDir, "az.cmd"); + + // Create both files + File.WriteAllText(azPath, "#!/bin/bash\necho 'This is a bash script'"); + File.WriteAllText(azCmdPath, "@echo off\necho This is a Windows batch file"); + + // Save the original PATH + var originalPath = Environment.GetEnvironmentVariable("PATH"); + + try + { + // Temporarily modify PATH to include our test directory + Environment.SetEnvironmentVariable("PATH", tempDir + Path.PathSeparator + originalPath); + + // Clear cached path again after PATH change + AzCommand.ClearCachedAzPath(); + + // Call the method + var result = AzCommand.FindAzCliPath(); + + // On Windows, it should find the .cmd file; on other platforms, the base file + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + Assert.Equal(azCmdPath, result); + } + else + { + Assert.Equal(azPath, result); + } + } + finally + { + // Restore the original PATH + Environment.SetEnvironmentVariable("PATH", originalPath); + AzCommand.ClearCachedAzPath(); + } + } + finally + { + // Clean up the temporary directory + if (Directory.Exists(tempDir)) + { + Directory.Delete(tempDir, true); + } + } + } + private sealed class AzResult { [JsonPropertyName("value")]
AZ executable doesn't launch properly on Windows After the release of version 0.3.2, we've found that the Azure CLI extension can no longer launch `az` properly. It always fails with an error like the one below: `An error occurred trying to start process 'C:\Program Files\Microsoft SDKs\Azure\CLI2\wbin\az' with working directory 'x'. The specified executable is not a valid application for this OS platform. ` ```System.ComponentModel.Win32Exception: at System.Diagnostics.Process.StartWithCreateProcess (System.Diagnostics.Process, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a) at AzureMcp.Services.ProcessExecution.ExternalProcessService+<ExecuteAsync>d__1.MoveNext (azmcp, Version=0.3.2.0, Culture=neutral, PublicKeyToken=null: D:\a\_work\1\s\src\Services\ProcessExecution\ExternalProcessService.cs:68) at System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw (System.Private.CoreLib, Version=9.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e) at System.Runtime.CompilerServices.TaskAwaiter.ThrowForNonSuccess (System.Private.CoreLib, Version=9.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e) at System.Runtime.CompilerServices.TaskAwaiter.HandleNonSuccessAndDebuggerNotification (System.Private.CoreLib, Version=9.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e) at System.Runtime.CompilerServices.TaskAwaiter`1.GetResult (System.Private.CoreLib, Version=9.0.0.0, Culture=neutral, PublicKeyToken=7cec85d7bea7798e) at AzureMcp.Areas.Extension.Commands.AzCommand+<ExecuteAsync>d__18.MoveNext (azmcp, Version=0.3.2.0, Culture=neutral, PublicKeyToken=null: D:\a\_work\1\s\src\Areas\Extension\Commands\AzCommand.cs:169) ``` <img width="1098" height="611" alt="Image" src="https://github.com/user-attachments/assets/3827e562-f458-460f-96ad-e568f5445cc2" /> We've confirmed that version 0.3.1 works properly: <img width="1107" height="383" alt="Image" src="https://github.com/user-attachments/assets/ab8eacd6-63cd-413c-9123-25740b3b7d3e" /> We've 
replicated this behavior on Windows Server 2025 and Windows 11, where the AZ CLI was installed with MSI packages. On our systems, it seems we have an `az` bash script in the PATH: ``` PS C:\> where.exe az C:\Program Files\Microsoft SDKs\Azure\CLI2\wbin\az C:\Program Files\Microsoft SDKs\Azure\CLI2\wbin\az.cmd ``` <img width="1325" height="769" alt="Image" src="https://github.com/user-attachments/assets/d408e37a-0033-4532-8591-147189d1a86e" /> We haven't been able to confirm, but it seems likely the new issue is related to https://github.com/Azure/azure-mcp/issues/351 and https://github.com/Azure/azure-mcp/pull/552. Thanks in advance!
Same issue encountered and rolling azure mcp back to 0.3.0 fixed this. Running the azure cli paths directly in PWSH work fine. The full error log when Claude Desktop executing azmcp-extension-ac: {"status":500,"message":"An error occurred trying to start process \u0027C:\\Program Files\\Microsoft SDKs\\Azure\\CLI2\\wbin\\az\u0027 with working directory \u0027C:\\Users\\username\\AppData\\Local\\AnthropicClaude\\app-0.12.16\u0027. The specified executable is not a valid application for this OS platform.. To mitigate this issue, please refer to the troubleshooting guidelines here at https://aka.ms/azmcp/troubleshooting.","results":{"message":"An error occurred trying to start process \u0027C:\\Program Files\\Microsoft SDKs\\Azure\\CLI2\\wbin\\az\u0027 with working directory \u0027C:\\Users\\username\\AppData\\Local\\AnthropicClaude\\app-0.12.16\u0027. The specified executable is not a valid application for this OS platform.","stackTrace":" at System.Diagnostics.Process.StartWithCreateProcess(ProcessStartInfo startInfo)\r\n at AzureMcp.Services.ProcessExecution.ExternalProcessService.ExecuteAsync(String executablePath, String arguments, Int32 timeoutSeconds, IEnumerable\u00601 customPaths) in D:\\a\\_work\\1\\s\\src\\Services\\ProcessExecution\\ExternalProcessService.cs:line 68\r\n at AzureMcp.Areas.Extension.Commands.AzCommand.ExecuteAsync(CommandContext context, ParseResult parseResult) in D:\\a\\_work\\1\\s\\src\\Areas\\Extension\\Commands\\AzCommand.cs:line 169","type":"Win32Exception"},"duration":0} Also encountering this issue! {"status":500,"message":"An error occurred trying to start process \u0027C:\\Program Files\\Microsoft SDKs\\Azure\\CLI2\\wbin\\az\u0027 with working directory \u0027C:\\Users\\usernme\u0027. The specified executable is not a valid application for this OS platform.. 
To mitigate this issue, please refer to the troubleshooting guidelines here at https://aka.ms/azmcp/troubleshooting.","results":{"message":"An error occurred trying to start process \u0027C:\\Program Files\\Microsoft SDKs\\Azure\\CLI2\\wbin\\az\u0027 with working directory \u0027C:\\Users\\username\u0027. The specified executable is not a valid application for this OS platform.","stackTrace":" at System.Diagnostics.Process.StartWithCreateProcess(ProcessStartInfo startInfo)\r\n at AzureMcp.Services.ProcessExecution.ExternalProcessService.ExecuteAsync(String executablePath, String arguments, Int32 timeoutSeconds, IEnumerable\u00601 customPaths) in D:\\a\\_work\\1\\s\\src\\Services\\ProcessExecution\\ExternalProcessService.cs:line 68\r\n at AzureMcp.Areas.Extension.Commands.AzCommand.ExecuteAsync(CommandContext context, ParseResult parseResult) in D:\\a\\_work\\1\\s\\src\\Areas\\Extension\\Commands\\AzCommand.cs:line 169","type":"Win32Exception"},"duration":0} This fix for this will be released today.
Same issue encountered and rolling azure mcp back to 0.3.0 fixed this. Running the azure cli paths directly in PWSH work fine. The full error log when Claude Desktop executing azmcp-extension-ac: {"status":500,"message":"An error occurred trying to start process \u0027C:\\Program Files\\Microsoft SDKs\\Azure\\CLI2\\wbin\\az\u0027 with working directory \u0027C:\\Users\\username\\AppData\\Local\\AnthropicClaude\\app-0.12.16\u0027. The specified executable is not a valid application for this OS platform.. To mitigate this issue, please refer to the troubleshooting guidelines here at https://aka.ms/azmcp/troubleshooting.","results":{"message":"An error occurred trying to start process \u0027C:\\Program Files\\Microsoft SDKs\\Azure\\CLI2\\wbin\\az\u0027 with working directory \u0027C:\\Users\\username\\AppData\\Local\\AnthropicClaude\\app-0.12.16\u0027. The specified executable is not a valid application for this OS platform.","stackTrace":" at System.Diagnostics.Process.StartWithCreateProcess(ProcessStartInfo startInfo)\r\n at AzureMcp.Services.ProcessExecution.ExternalProcessService.ExecuteAsync(String executablePath, String arguments, Int32 timeoutSeconds, IEnumerable\u00601 customPaths) in D:\\a\\_work\\1\\s\\src\\Services\\ProcessExecution\\ExternalProcessService.cs:line 68\r\n at AzureMcp.Areas.Extension.Commands.AzCommand.ExecuteAsync(CommandContext context, ParseResult parseResult) in D:\\a\\_work\\1\\s\\src\\Areas\\Extension\\Commands\\AzCommand.cs:line 169","type":"Win32Exception"},"duration":0} Also encountering this issue! {"status":500,"message":"An error occurred trying to start process \u0027C:\\Program Files\\Microsoft SDKs\\Azure\\CLI2\\wbin\\az\u0027 with working directory \u0027C:\\Users\\usernme\u0027. The specified executable is not a valid application for this OS platform.. 
To mitigate this issue, please refer to the troubleshooting guidelines here at https://aka.ms/azmcp/troubleshooting.","results":{"message":"An error occurred trying to start process \u0027C:\\Program Files\\Microsoft SDKs\\Azure\\CLI2\\wbin\\az\u0027 with working directory \u0027C:\\Users\\username\u0027. The specified executable is not a valid application for this OS platform.","stackTrace":" at System.Diagnostics.Process.StartWithCreateProcess(ProcessStartInfo startInfo)\r\n at AzureMcp.Services.ProcessExecution.ExternalProcessService.ExecuteAsync(String executablePath, String arguments, Int32 timeoutSeconds, IEnumerable\u00601 customPaths) in D:\\a\\_work\\1\\s\\src\\Services\\ProcessExecution\\ExternalProcessService.cs:line 68\r\n at AzureMcp.Areas.Extension.Commands.AzCommand.ExecuteAsync(CommandContext context, ParseResult parseResult) in D:\\a\\_work\\1\\s\\src\\Areas\\Extension\\Commands\\AzCommand.cs:line 169","type":"Win32Exception"},"duration":0} This fix for this will be released today. Can you please verify this fix? > Can you please verify this fix? Yup confirmed fixed in v0.4.1
[ "https://github.com/Azure/azure-mcp/commit/ba38c64875c555d8f63529dccc4c9ca3f200921b", "https://github.com/Azure/azure-mcp/commit/c4f8d5ab61aa08ce98a0841363b61c26fe5c4949", "https://github.com/Azure/azure-mcp/commit/6f0eae91bd0ab6ffa36cabe418a5a62061ba7332", "https://github.com/Azure/azure-mcp/commit/3a63e3cd9...
2025-07-17T15:28:25Z
https://github.com/Azure/azure-mcp/tree/ed18eb27a0b28b2efef07a8bbabadd523ba14064
[ "dotnet restore .\\AzureMcp.sln ; dotnet build .\\AzureMcp.sln -c Release --no-restore" ]
[ "pwsh -NoProfile -Command \"New-Item -ItemType Directory -Force .\\TestResults | Out-Null; dotnet test .\\AzureMcp.sln -c Release --no-build --logger 'trx;LogFileName=results.trx' --results-directory .\\TestResults --verbosity normal\"" ]
[ "pwsh -NoProfile -Command \"Get-Content -Raw .\\TestResults\\results.trx\"" ]
def parser(log: str) -> dict[str, str]: """ Robust TRX (Visual Studio TestRun) parser. Key issues handled: - Test output may contain hard line-wraps *inside* XML attribute values (e.g., testName="Azu\nre..."), which breaks names if parsed as XML (XML normalizes newlines to spaces inside attributes). We first "de-wrap" newlines that occur inside quoted attribute values. - Names and messages may contain XML/HTML entities (&quot; etc.) -> unescaped. - Namespace/no-namespace TRX. - Fallback regex extraction if XML parse fails for any reason. """ import re import html from xml.etree import ElementTree as ET def dewrap_newlines_inside_quotes(s: str) -> str: # Remove CR/LF that occur while we're inside a quoted attribute value. out = [] in_quote = False quote_char = "" i = 0 while i < len(s): ch = s[i] if not in_quote: if ch in ("'", '"'): in_quote = True quote_char = ch out.append(ch) i += 1 continue # in_quote if ch == quote_char: in_quote = False quote_char = "" out.append(ch) i += 1 continue # drop hard wraps inside quotes if ch == "\r": if i + 1 < len(s) and s[i + 1] == "\n": i += 2 else: i += 1 continue if ch == "\n": i += 1 continue out.append(ch) i += 1 return "".join(out) def normalize_name(name: str) -> str: # Decode entities; keep spaces as-is (but collapse accidental multi-spaces). name = html.unescape(name or "") name = re.sub(r"\s{2,}", " ", name).strip() return name def map_outcome(outcome: str) -> str: o = (outcome or "").strip().lower() if o == "passed": return "pass" if o in ("skipped", "notexecuted", "not executed"): return "skip" # failed/error/timeout/etc. return "fail" def worse_status(a: str, b: str) -> str: # fail > skip > pass rank = {"pass": 0, "skip": 1, "fail": 2} return a if rank.get(a, 0) >= rank.get(b, 0) else b results: dict[str, str] = {} cleaned = log.lstrip("\ufeff") cleaned = dewrap_newlines_inside_quotes(cleaned) # 1) Prefer XML parsing (most accurate) try: root = ET.fromstring(cleaned) # Iterate all elements, ignore namespace. 
for elem in root.iter(): tag = elem.tag.split("}", 1)[-1] # drop namespace if any if tag != "UnitTestResult": continue name = normalize_name(elem.attrib.get("testName", "")) if not name: continue status = map_outcome(elem.attrib.get("outcome", "")) if name in results: results[name] = worse_status(results[name], status) else: results[name] = status if results: return results except Exception: pass # 2) Fallback: regex over start tags (handles some malformed XML) # Match <UnitTestResult ...> start tag, then extract testName/outcome within it. start_tag_re = re.compile(r"<UnitTestResult\b[^>]*?>", re.IGNORECASE | re.DOTALL) attr_re = re.compile(r"""\b(testName|outcome)\s*=\s*("([^"]*)"|'([^']*)')""", re.IGNORECASE) for m in start_tag_re.finditer(cleaned): tag_txt = m.group(0) attrs = {} for am in attr_re.finditer(tag_txt): key = am.group(1) val = am.group(3) if am.group(3) is not None else am.group(4) attrs[key.lower()] = val name = normalize_name(attrs.get("testname", "")) if not name: continue status = map_outcome(attrs.get("outcome", "")) if name in results: results[name] = worse_status(results[name], status) else: results[name] = status return results
[ "AzureMcp.Tests.Areas.Monitor.Unit\ntTests.Metrics.MonitorMetricsServiceTests.ListMetricDefinitionsAsync_WithNullOrEm\nmptyResourceName_ThrowsArgumentException(resourceName: null)", "AzureMcp.Tests.Areas.Kusto.UnitTe\nests.DatabaseListCommandTests.ExecuteAsync_ReturnsNull_WhenNoDatabasesExist(cliAr\nrgs: \"--clus...
[ "AzureMcp.Tests.Areas.Server.UnitTTests.Commands.ToolLoading.SingleProxyToolLoaderTests.CallToolHandler_WithNullParrams_ReturnsGuidanceMessage", "AzureMcp.Tests.Commands.Extension\nns.CommandExtensionsTests.ParseFromDictionary_WithNullValue_SkipsOption", "AzureMcp.Tests.Areas.Server.UnitT\nTests.Models.Registry...
starryzhang/sweb.eval.win.azure_1776_azure-mcp-636
dotnet/runtime
117960
dotnet__runtime-117960
C#
[ "116125" ]
c9ccccf02d1f72f318dcaba0a837dbb9800132d9
diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp index 3edac296301db0..3b1cd0aca7bd9c 100644 --- a/src/coreclr/jit/gentree.cpp +++ b/src/coreclr/jit/gentree.cpp @@ -19914,7 +19914,13 @@ bool GenTree::IsArrayAddr(GenTreeArrAddr** pArrAddr) bool GenTree::SupportsSettingZeroFlag() { #if defined(TARGET_XARCH) - if (OperIs(GT_AND, GT_OR, GT_XOR, GT_ADD, GT_SUB, GT_NEG, GT_LSH, GT_RSH, GT_RSZ, GT_ROL, GT_ROR)) + if (OperIs(GT_LSH, GT_RSH, GT_RSZ, GT_ROL, GT_ROR)) + { + // Shift/Rotate instructions do not update the flags in case of count being zero. + return gtGetOp2()->IsNeverZero(); + } + + if (OperIs(GT_AND, GT_OR, GT_XOR, GT_ADD, GT_SUB, GT_NEG)) { return true; }
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_116125/Runtime_116125.cs b/src/tests/JIT/Regression/JitBlue/Runtime_116125/Runtime_116125.cs new file mode 100644 index 00000000000000..86094be387b5f2 --- /dev/null +++ b/src/tests/JIT/Regression/JitBlue/Runtime_116125/Runtime_116125.cs @@ -0,0 +1,36 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +// Generated by Fuzzlyn v3.1 on 2025-05-29 20:01:16 +// Run on X64 Windows +// Seed: 14412340127944514498-vectort,vector128,vector256,x86aes,x86avx,x86avx2,x86avx512bw,x86avx512bwvl,x86avx512cd,x86avx512cdvl,x86avx512dq,x86avx512dqvl,x86avx512f,x86avx512fvl,x86avx512fx64,x86bmi1,x86bmi1x64,x86bmi2,x86bmi2x64,x86fma,x86lzcnt,x86lzcntx64,x86pclmulqdq,x86popcnt,x86popcntx64,x86sse,x86ssex64,x86sse2,x86sse2x64,x86sse3,x86sse41,x86sse41x64,x86sse42,x86sse42x64,x86ssse3,x86x86base +// Reduced from 53.8 KiB to 0.7 KiB in 00:02:59 +// Debug: Prints 1 line(s) +// Release: Prints 0 line(s) + +using System; +using System.Runtime.CompilerServices; +using Xunit; + +public class Runtime_116125 +{ + [Fact] + public static void TestEntryPoint() + { + ushort y = 0; + Problem(0, ref y); + } + + [MethodImpl(MethodImplOptions.NoInlining)] + static void Problem(ulong x, ref ushort y) + { + if (x / ((0UL & y) | 1) <= 0) + { + + } + else + { + throw new InvalidOperationException(); + } + } +} diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_116125/Runtime_116125.csproj b/src/tests/JIT/Regression/JitBlue/Runtime_116125/Runtime_116125.csproj new file mode 100644 index 00000000000000..de6d5e08882e86 --- /dev/null +++ b/src/tests/JIT/Regression/JitBlue/Runtime_116125/Runtime_116125.csproj @@ -0,0 +1,8 @@ +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <Optimize>True</Optimize> + </PropertyGroup> + <ItemGroup> + <Compile Include="$(MSBuildProjectName).cs" /> + </ItemGroup> +</Project>
[Fuzzlyn] Incorrect T1 behavior around division ```csharp // Generated by Fuzzlyn v3.1 on 2025-05-29 20:01:16 // Run on X64 Windows // Seed: 14412340127944514498-vectort,vector128,vector256,x86aes,x86avx,x86avx2,x86avx512bw,x86avx512bwvl,x86avx512cd,x86avx512cdvl,x86avx512dq,x86avx512dqvl,x86avx512f,x86avx512fvl,x86avx512fx64,x86bmi1,x86bmi1x64,x86bmi2,x86bmi2x64,x86fma,x86lzcnt,x86lzcntx64,x86pclmulqdq,x86popcnt,x86popcntx64,x86sse,x86ssex64,x86sse2,x86sse2x64,x86sse3,x86sse41,x86sse41x64,x86sse42,x86sse42x64,x86ssse3,x86x86base // Reduced from 53.8 KiB to 0.7 KiB in 00:02:59 // Debug: Prints 1 line(s) // Release: Prints 0 line(s) using System; using System.Numerics; using System.Runtime.Intrinsics; using System.Runtime.Intrinsics.X86; public class C0 { public ushort F0; } public struct S0 { public Vector256<int> F0; } public class C1 { public C0 F0; public C1(C0 f0) { F0 = f0; } } public class Program { public static void Main() { var vr1 = new C1(new C0()); M3(vr1); } public static void M3(C1 argThis) { if (0 >= (Bmi2.X64.ZeroHighBits(0, 0) / ((0UL & argThis.F0.F0) | 1))) { S0[] var1 = new S0[] { new S0() }; System.Console.WriteLine(var1[0].F0); } } } ```
Tagging subscribers to this area: @JulieLeeMSFT, @jakobbotsch See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed. <!-- Policy app identification https://img.shields.io/static/v1?label=PullRequestIssueManagement. --> Doesn't look to be Bmi2 specific, reproduces on: ```cs public static void Main() { ushort y = 0; Test(0, ref y); } [MethodImpl(MethodImplOptions.NoInlining)] public static void Test(ulong x, ref ushort y) { if (0 >= x / ((0UL & y) | 1)) Console.WriteLine("always taken"); } ```
Tagging subscribers to this area: @JulieLeeMSFT, @jakobbotsch See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed. <!-- Policy app identification https://img.shields.io/static/v1?label=PullRequestIssueManagement. --> Doesn't look to be Bmi2 specific, reproduces on: ```cs public static void Main() { ushort y = 0; Test(0, ref y); } [MethodImpl(MethodImplOptions.NoInlining)] public static void Test(ulong x, ref ushort y) { if (0 >= x / ((0UL & y) | 1)) Console.WriteLine("always taken"); } ```
[ "https://github.com/dotnet/runtime/commit/63d5d04b81da10599e3e56aa4f974e5dd6c232f4", "https://github.com/dotnet/runtime/commit/34b99cbf6df4699bfedb50997b02c507b691f0dc" ]
2025-07-23T00:47:23Z
https://github.com/dotnet/runtime/tree/c9ccccf02d1f72f318dcaba0a837dbb9800132d9
[ "pwsh -NoProfile -Command \"dotnet restore .\\Build.proj\" ; pwsh -NoProfile -Command \".\\build.cmd -subset Libs.Sfx -configuration Release /p:BuildNative=false\"" ]
[ "pwsh -NoProfile -Command \"New-Item -ItemType Directory -Force .\\TestResults | Out-Null; dotnet test .\\src\\libraries\\Common\\tests\\Common.Tests.csproj -c Release /p:BuildNative=false --logger 'trx;LogFileName=TestResults\\Common.Tests.trx' --verbosity normal\"" ]
[ "pwsh -NoProfile -Command \"Get-Content -Raw 'C:\\testbed\\artifacts\\bin\\Common.Tests\\Release\\net10.0-windows\\TestResults\\TestResults\\Common.Tests.trx'\"" ]
def parser(log: str) -> dict[str, str]: import re from html import unescape import xml.etree.ElementTree as ET results: dict[str, str] = {} prec = {"pass": 0, "skip": 1, "fail": 2} def merge(name: str, status: str): if not name: return if status not in prec: status = "fail" cur = results.get(name) if cur is None or prec[status] > prec[cur]: results[name] = status def norm_status(outcome: str) -> str: if not outcome: return "fail" o = outcome.strip().lower() if o in ("passed", "pass", "success", "succeeded", "ok"): return "pass" if o in ("skipped", "skip", "notexecuted", "not executed", "inconclusive", "ignored", "disabled"): return "skip" return "fail" def norm_text(s: str) -> str: if s is None: return "" s = unescape(s) s = s.replace("\r", "").replace("\n", "") return s.strip() # Remove illegal control chars but keep \r\n\t log = re.sub(r"[\x00-\x08\x0B\x0C\x0E-\x1F]", "", log) log = re.sub(r"(?m)^\s*PS>\s*", "", log) def remove_newlines_inside_quotes(text: str) -> str: out = [] in_quote = None for ch in text: if in_quote is None: if ch in ("'", '"'): in_quote = ch out.append(ch) else: if ch == in_quote: in_quote = None out.append(ch) elif ch in ("\r", "\n"): # drop wrapping newlines pass else: out.append(ch) return "".join(out) def parse_trx(root) -> bool: # Map testId -> canonical test name from TestDefinitions (less likely to include wrapped params) id_to_name: dict[str, str] = {} for ut in root.findall(".//{*}TestDefinitions/{*}UnitTest"): tid = norm_text(ut.attrib.get("id", "")) # TRX UnitTest has name="..." 
and optional TestMethod with className/name nm = norm_text(ut.attrib.get("name", "")) tm = ut.find(".//{*}TestMethod") if tm is not None: cls = norm_text(tm.attrib.get("className", "")) mname = norm_text(tm.attrib.get("name", "")) if cls and mname: # Prefer method-based canonical name; it's stable and unparameterized nm2 = f"{cls}.{mname}" nm = nm2 or nm if tid and nm: id_to_name[tid] = nm parsed_any = False # Results for r in root.findall(".//{*}Results/{*}UnitTestResult"): tid = norm_text(r.attrib.get("testId", "")) tname = norm_text(r.attrib.get("testName", "")) name = id_to_name.get(tid) or tname status = norm_status(r.attrib.get("outcome", "")) if name: merge(name, status) parsed_any = True # Some TRX variants for r in root.findall(".//{*}Results/{*}TestResult"): tid = norm_text(r.attrib.get("testId", "")) tname = norm_text(r.attrib.get("testName", "")) name = id_to_name.get(tid) or tname status = norm_status(r.attrib.get("outcome", "")) if name: merge(name, status) parsed_any = True return parsed_any def parse_junit(root) -> bool: parsed_any = False for tc in root.findall(".//testcase"): name = norm_text(tc.attrib.get("name", "")) classname = norm_text(tc.attrib.get("classname", "")) full = (classname + "." 
+ name).strip(".") if classname else name if not full: continue status = "pass" if tc.find("skipped") is not None: status = "skip" elif tc.find("failure") is not None or tc.find("error") is not None: status = "fail" merge(full, status) parsed_any = True return parsed_any def parse_xml_block(block: str) -> bool: block = re.sub(r"^.*?(<\?xml)", r"\1", block, flags=re.S | re.I) block = remove_newlines_inside_quotes(block) # Try parse directly try: root = ET.fromstring(block) except Exception: # salvage TRX m = re.search(r"(<TestRun\b.*?</TestRun>)", block, flags=re.S | re.I) if m: try: root = ET.fromstring(m.group(1)) except Exception: return False else: m = re.search(r"(<testsuites?\b.*?</testsuites?>)", block, flags=re.S | re.I) if not m: return False try: root = ET.fromstring(m.group(1)) except Exception: return False # Decide type by root/localname tag = root.tag.lower() if tag.endswith("testrun"): return parse_trx(root) return parse_junit(root) # Extract XML blocks xml_blocks: list[str] = [] for m in re.finditer(r"(<\?xml[^>]*\?>.*?</TestRun>)", log, flags=re.S | re.I): xml_blocks.append(m.group(1)) for m in re.finditer(r"(<\?xml[^>]*\?>.*?</testsuites?>)", log, flags=re.S | re.I): xml_blocks.append(m.group(1)) if not xml_blocks: m = re.search(r"(<\?xml[^>]*\?>.*)$", log, flags=re.S | re.I) if m: xml_blocks.append(m.group(1)) parsed_any_xml = False for xb in xml_blocks: if parse_xml_block(xb): parsed_any_xml = True # Text fallbacks if not results: for m in re.finditer(r"(?m)^\s*(.+?::\S+)\s+(PASSED|FAILED|SKIPPED|XFAILED|XPASSED|ERROR)\s*$", log): merge(norm_text(m.group(1)), norm_status(m.group(2))) for m in re.finditer(r"(?m)^\s*(\S+)\s+\(([^)]+)\)\s+\.\.\.\s+(ok|FAIL|ERROR|skipped)\s*$", log): merge(norm_text(f"{m.group(2)}.{m.group(1)}"), norm_status(m.group(3))) for m in re.finditer(r"(?m)^\s*---\s+(PASS|FAIL|SKIP):\s+(\S+)", log): merge(norm_text(m.group(2)), norm_status(m.group(1))) # Last resort regex if XML could not be parsed at all if not 
parsed_any_xml and not results: attrs_text = remove_newlines_inside_quotes(log) for m in re.finditer(r"<UnitTestResult\b(.*?)(?:/?>)", attrs_text, flags=re.S | re.I): attrs = m.group(1) nm = re.search(r'testName\s*=\s*(["\'])(.*?)\1', attrs, flags=re.S | re.I) om = re.search(r'outcome\s*=\s*(["\'])(.*?)\1', attrs, flags=re.S | re.I) if not nm or not om: continue merge(norm_text(nm.group(2)), norm_status(om.group(2))) return results
[ "System.Collections.Generic.Tests..ArrayBuilderTestsString.ToArray(seed: [\"-1640530515\", \"-8459566633\", \"-82222174\", \"1481627779\", \"-1878112230\", ···])", "System.IO.Tests.BidirectionalConnnectedStreamsTests.ReadWrite_Success(mode: AsyncMemory, writeSize: 42, startWithFFlush: True)", "Tests.System.IO....
[ "System.Net.Http.Unit.Tests.HPack..HuffmanDecodingTests.HuffmanEncode(code: 84, expectedEncoded: 3724541952, expecttedBitLength: 7)", "System.Net.Http.Unit.Tests.HPack..HuffmanDecodingTests.ThrowsOnPaddingLongerThanSevenBits(encoded: [248, 255])", "System.Net.Http.Unit.Tests.HPack..HuffmanDecodingTests.HuffmanE...
starryzhang/sweb.eval.win.dotnet_1776_runtime-117960
dotnet/runtime
118745
dotnet__runtime-118745
C#
[ "94749" ]
edb570cd9e75036b6c61b201a0a9a9f2301db276
diff --git a/src/libraries/System.Private.CoreLib/src/System/Runtime/InteropServices/StandardOleMarshalObject.Windows.cs b/src/libraries/System.Private.CoreLib/src/System/Runtime/InteropServices/StandardOleMarshalObject.Windows.cs index 9dbd739fd6fd3f..40bae7bc9259bc 100644 --- a/src/libraries/System.Private.CoreLib/src/System/Runtime/InteropServices/StandardOleMarshalObject.Windows.cs +++ b/src/libraries/System.Private.CoreLib/src/System/Runtime/InteropServices/StandardOleMarshalObject.Windows.cs @@ -5,6 +5,7 @@ namespace System.Runtime.InteropServices { + [ComVisible(true)] public class StandardOleMarshalObject : MarshalByRefObject, IMarshal { private static readonly Guid CLSID_StdMarshal = new Guid("00000017-0000-0000-c000-000000000046");
diff --git a/src/libraries/System.Runtime.InteropServices/tests/System.Runtime.InteropServices.UnitTests/System.Runtime.InteropServices.Tests.csproj b/src/libraries/System.Runtime.InteropServices/tests/System.Runtime.InteropServices.UnitTests/System.Runtime.InteropServices.Tests.csproj index 31369aaca2a4a3..acb1d91dae2d82 100644 --- a/src/libraries/System.Runtime.InteropServices/tests/System.Runtime.InteropServices.UnitTests/System.Runtime.InteropServices.Tests.csproj +++ b/src/libraries/System.Runtime.InteropServices/tests/System.Runtime.InteropServices.UnitTests/System.Runtime.InteropServices.Tests.csproj @@ -160,6 +160,7 @@ <Compile Include="System\Runtime\InteropServices\ProgIdAttributeTests.cs" /> <Compile Include="System\Runtime\InteropServices\RuntimeEnvironmentTests.cs" /> <Compile Include="System\Runtime\InteropServices\SafeBufferTests.cs" /> + <Compile Include="System\Runtime\InteropServices\StandardOleMarshalObjectTests.cs" /> <Compile Include="System\Runtime\InteropServices\StructLayoutAttributeTests.cs" /> <Compile Include="System\Runtime\InteropServices\TypeIdentifierAttributeTests.cs" /> <Compile Include="System\Runtime\InteropServices\UnknownWrapperTests.cs" /> diff --git a/src/libraries/System.Runtime.InteropServices/tests/System.Runtime.InteropServices.UnitTests/System/Runtime/InteropServices/StandardOleMarshalObjectTests.cs b/src/libraries/System.Runtime.InteropServices/tests/System.Runtime.InteropServices.UnitTests/System/Runtime/InteropServices/StandardOleMarshalObjectTests.cs new file mode 100644 index 00000000000000..a09ca6a14c9595 --- /dev/null +++ b/src/libraries/System.Runtime.InteropServices/tests/System.Runtime.InteropServices.UnitTests/System/Runtime/InteropServices/StandardOleMarshalObjectTests.cs @@ -0,0 +1,39 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +using Xunit; + +namespace System.Runtime.InteropServices.Tests +{ + [ConditionalClass(typeof(PlatformDetection), nameof(PlatformDetection.IsBuiltInComEnabled))] + public class StandardOleMarshalObjectTests + { + private static readonly Guid IID_IDispatch = new Guid("00020400-0000-0000-C000-000000000046"); + [Fact] + public void CanGetIDispatchOfDerivedObject() + { + IntPtr disp = Marshal.GetIDispatchForObject(new DerivedObject()); + Assert.NotEqual(IntPtr.Zero, disp); + Marshal.Release(disp); + } + + [Fact] + public void CanQueryInterfaceForIDispatchOfDerivedObject() + { + IntPtr unk = Marshal.GetIUnknownForObject(new DerivedObject()); + Assert.NotEqual(IntPtr.Zero, unk); + + int hr = Marshal.QueryInterface(unk, IID_IDispatch, out IntPtr disp); + Assert.Equal(0, hr); + Assert.NotEqual(IntPtr.Zero, disp); + + Marshal.Release(disp); + Marshal.Release(unk); + } + + [ComVisible(true)] + public sealed class DerivedObject : StandardOleMarshalObject + { + } + } +} \ No newline at end of file
Apartment threaded IDispatch server throws `InvalidOperationException` or fails to marshal ### Description When implementing an IDispatch apartment threaded object on net7.0, an exception is thrown when calling `Marshal.GetIDispatchForObject`. Non-.Net callers get `E_NOINTERFACE`. The same code works on .Net Framework. This seems to be related to removing `[ComVisible(true)]` from many types - which is understandable in general, but `StandardOleMarshalObject` is exclusively used with COM. I would propose that `[ComVisible(true)]` be re-added to `StandardOleMarshalObject`, the requirement for `[ComVisible(true)]` to be reduced, or the need to re-implement `StandardOleMarshalObject` be documented. In testing, adding the attribute is sufficient to avoid the exception. ### Reproduction Steps Create a new `net7.0-windows` console program. Add the following to Program.cs: ```c# Marshal.GetIDispatchForObject(new Example()); [ComVisible(true)] public class Example : StandardOleMarshalObject { } ``` ### Expected behavior The example program should not throw an exception (the type will be available to IDispatch callers). ### Actual behavior The following exception is thrown: > System.InvalidOperationException: Type 'DispTarget' has a ComVisible(false) parent 'System.Runtime.InteropServices.StandardOleMarshalObject' in its hierarchy, therefore QueryInterface calls for IDispatch or class interfaces are disallowed.<br/>&nbsp;&nbsp;&nbsp;&nbsp;at System.Runtime.InteropServices.Marshal.GetIDispatchForObjectNative(Object o)<br/>&nbsp;&nbsp;&nbsp;&nbsp;at Program.&lt;Main&gt;$(String[] args) in C:\dev\source\repos\ComDispatchDemo\DemoDispatchServer\Program.cs:line 8 If the type is passed to a COM server or registered with `CoRegisterClassObject` instead of `GetIDispatchForObject`, no exception is thrown in the server and `E_NOINTERFACE` is returned the caller. ### Regression? Yes. The same code on net48 runs without issue. 
### Known Workarounds Copy/paste the implementation of [StandardOleMarshalComObject.cs](https://github.com/dotnet/runtime/blob/d561f390b8085ffed309c49cf30ec205a47868c0/src/libraries/System.Private.CoreLib/src/System/Runtime/InteropServices/StandardOleMarshalObject.Windows.cs#L8) into the project, and add the `[ComVisible(true)]` attribute. Alternately, if the object does not require apartment threading, remove `StandardOleMarshalComObject` as a base class. ### Configuration dotnet 7.0.401 Windows 11 22H2 (22621.2428) Reproducible with `Platform` x86 or x64 Presumed to be applicable to .Net 5..8 and all platforms ### Other information Loosely related: https://github.com/dotnet/runtime/issues/80422
Tagging subscribers to this area: @dotnet/interop-contrib See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed. <details> <summary>Issue Details</summary> <hr /> ### Description When implementing an IDispatch apartment threaded object on net7.0, an exception is thrown when calling `Marshal.GetIDispatchForObject`. Non-.Net callers get `E_NOINTERFACE`. The same code works on .Net Framework. This seems to be related to removing `[ComVisible(true)]` from many types - which is understandable in general, but `StandardOleMarshalObject` is exclusively used with COM. I would propose that `[ComVisible(true)]` be re-added to `StandardOleMarshalObject`, the requirement for `[ComVisible(true)]` to be reduced, or the need to re-implement `StandardOleMarshalObject` be documented. In testing, adding the attribute is sufficient to avoid the exception. ### Reproduction Steps Create a new `net7.0-windows` console program. Add the following to Program.cs: ```c# Marshal.GetIDispatchForObject(new Example()); [ComVisible(true)] public class Example : StandardOleMarshalObject { } ``` ### Expected behavior The example program should not throw an exception (the type will be available to IDispatch callers). 
### Actual behavior The following exception is thrown: > System.InvalidOperationException: Type 'DispTarget' has a ComVisible(false) parent 'System.Runtime.InteropServices.StandardOleMarshalObject' in its hierarchy, therefore QueryInterface calls for IDispatch or class interfaces are disallowed.<br/>&nbsp;&nbsp;&nbsp;&nbsp;at System.Runtime.InteropServices.Marshal.GetIDispatchForObjectNative(Object o)<br/>&nbsp;&nbsp;&nbsp;&nbsp;at Program.&lt;Main&gt;$(String[] args) in C:\dev\source\repos\ComDispatchDemo\DemoDispatchServer\Program.cs:line 8 If the type is passed to a COM server or registered with `CoRegisterClassObject` instead of `GetIDispatchForObject`, no exception is thrown in the server and `E_NOINTERFACE` is returned the caller. ### Regression? Yes. The same code on net48 runs without issue. ### Known Workarounds Copy/paste the implementation of [StandardOleMarshalComObject.cs](https://github.com/dotnet/runtime/blob/d561f390b8085ffed309c49cf30ec205a47868c0/src/libraries/System.Private.CoreLib/src/System/Runtime/InteropServices/StandardOleMarshalObject.Windows.cs#L8) into the project, and add the `[ComVisible(true)]` attribute. Alternately, if the object does not require apartment threading, remove `StandardOleMarshalComObject` as a base class. ### Configuration dotnet 7.0.401 Windows 11 22H2 (22621.2428) Reproducible with `Platform` x86 or x64 Presumed to be applicable to .Net 5..8 and all platforms ### Other information Loosely related: https://github.com/dotnet/runtime/issues/80422 <table> <tr> <th align="left">Author:</th> <td>mgaffigan</td> </tr> <tr> <th align="left">Assignees:</th> <td>-</td> </tr> <tr> <th align="left">Labels:</th> <td> `area-System.Runtime.InteropServices`, `untriaged` </td> </tr> <tr> <th align="left">Milestone:</th> <td>-</td> </tr> </table> </details> This makes sense. Sounds like a simple .NET 9 fix-up. Also a good opportunity to add some tests.
Tagging subscribers to this area: @dotnet/interop-contrib See info in [area-owners.md](https://github.com/dotnet/runtime/blob/main/docs/area-owners.md) if you want to be subscribed. <details> <summary>Issue Details</summary> <hr /> ### Description When implementing an IDispatch apartment threaded object on net7.0, an exception is thrown when calling `Marshal.GetIDispatchForObject`. Non-.Net callers get `E_NOINTERFACE`. The same code works on .Net Framework. This seems to be related to removing `[ComVisible(true)]` from many types - which is understandable in general, but `StandardOleMarshalObject` is exclusively used with COM. I would propose that `[ComVisible(true)]` be re-added to `StandardOleMarshalObject`, the requirement for `[ComVisible(true)]` to be reduced, or the need to re-implement `StandardOleMarshalObject` be documented. In testing, adding the attribute is sufficient to avoid the exception. ### Reproduction Steps Create a new `net7.0-windows` console program. Add the following to Program.cs: ```c# Marshal.GetIDispatchForObject(new Example()); [ComVisible(true)] public class Example : StandardOleMarshalObject { } ``` ### Expected behavior The example program should not throw an exception (the type will be available to IDispatch callers). 
### Actual behavior The following exception is thrown: > System.InvalidOperationException: Type 'DispTarget' has a ComVisible(false) parent 'System.Runtime.InteropServices.StandardOleMarshalObject' in its hierarchy, therefore QueryInterface calls for IDispatch or class interfaces are disallowed.<br/>&nbsp;&nbsp;&nbsp;&nbsp;at System.Runtime.InteropServices.Marshal.GetIDispatchForObjectNative(Object o)<br/>&nbsp;&nbsp;&nbsp;&nbsp;at Program.&lt;Main&gt;$(String[] args) in C:\dev\source\repos\ComDispatchDemo\DemoDispatchServer\Program.cs:line 8 If the type is passed to a COM server or registered with `CoRegisterClassObject` instead of `GetIDispatchForObject`, no exception is thrown in the server and `E_NOINTERFACE` is returned the caller. ### Regression? Yes. The same code on net48 runs without issue. ### Known Workarounds Copy/paste the implementation of [StandardOleMarshalComObject.cs](https://github.com/dotnet/runtime/blob/d561f390b8085ffed309c49cf30ec205a47868c0/src/libraries/System.Private.CoreLib/src/System/Runtime/InteropServices/StandardOleMarshalObject.Windows.cs#L8) into the project, and add the `[ComVisible(true)]` attribute. Alternately, if the object does not require apartment threading, remove `StandardOleMarshalComObject` as a base class. ### Configuration dotnet 7.0.401 Windows 11 22H2 (22621.2428) Reproducible with `Platform` x86 or x64 Presumed to be applicable to .Net 5..8 and all platforms ### Other information Loosely related: https://github.com/dotnet/runtime/issues/80422 <table> <tr> <th align="left">Author:</th> <td>mgaffigan</td> </tr> <tr> <th align="left">Assignees:</th> <td>-</td> </tr> <tr> <th align="left">Labels:</th> <td> `area-System.Runtime.InteropServices`, `untriaged` </td> </tr> <tr> <th align="left">Milestone:</th> <td>-</td> </tr> </table> </details> This makes sense. Sounds like a simple .NET 9 fix-up. Also a good opportunity to add some tests.
[ "https://github.com/dotnet/runtime/commit/762cc9b6a042ad6697cb61632070c3557e3ee55b", "https://github.com/dotnet/runtime/commit/1a77fec06e3d1d561ab1956e6e6bad9c7d06c936" ]
2025-08-14T17:23:36Z
https://github.com/dotnet/runtime/tree/edb570cd9e75036b6c61b201a0a9a9f2301db276
[ "pwsh -NoProfile -Command \".\\dotnet.cmd restore .\\Build.proj\" ; pwsh -NoProfile -Command \".\\dotnet.cmd build .\\src\\coreclr\\tools\\aot\\DependencyGraphViewer\\Tests\\DependecyGraphViewer.Tests.csproj -c Release\"" ]
[ "pwsh -NoProfile -Command \"New-Item -ItemType Directory -Force TestResults | Out-Null; .\\dotnet.cmd test .\\src\\coreclr\\tools\\aot\\DependencyGraphViewer\\Tests\\DependecyGraphViewer.Tests.csproj -c Release --no-build --results-directory .\\TestResults --logger 'trx' --logger 'console;verbosity=detailed'\"" ]
[ "pwsh -NoProfile -Command \"Get-Content (Get-ChildItem .\\TestResults -Filter *.trx | Select-Object -First 1 -ExpandProperty FullName) | Set-Content -NoNewline .\\test-output.log; Get-Content .\\test-output.log\"" ]
def parser(log: str) -> dict[str, str]:
    """Parse a (possibly console-hard-wrapped) TRX/XML test log into {test name: status}.

    ``log`` is raw captured output containing a TRX document, potentially mangled
    by terminal line wrapping (newlines + indentation injected inside XML tags).
    Returns a mapping from each ``UnitTestResult``'s test name to one of
    ``"pass"``, ``"fail"`` or ``"skip"``.  Results with an unrecognized outcome,
    or with no name/outcome attribute, are silently dropped.
    """
    import html
    import re
    from typing import Dict, Tuple, Optional

    def extract_xmlish(s: str) -> str:
        # Trim any console noise before the XML: prefer the XML declaration,
        # fall back to the <TestRun> root, else return the string unchanged.
        i = s.find("<?xml")
        if i != -1:
            return s[i:]
        i = s.find("<TestRun")
        return s[i:] if i != -1 else s

    def normalize_tag_region_wrapping(s: str) -> str:
        """
        Remove console hard-wrap artifacts *inside tags only*:
        - remove CR/LF and following indentation while inside <...>
        This preserves text nodes while making tags contiguous.
        """
        # Character-by-character state machine; `in_tag` tracks whether we are
        # between '<' and '>', `quote` tracks an open attribute-value quote.
        out = []
        in_tag = False
        quote = None
        i = 0
        n = len(s)
        while i < n:
            ch = s[i]
            if not in_tag:
                # Outside tags: copy text nodes verbatim (including newlines).
                if ch == "<":
                    in_tag = True
                out.append(ch)
                i += 1
                continue
            # inside tag
            if quote is None:
                if ch in ("'", '"'):
                    quote = ch
                    out.append(ch)
                    i += 1
                    continue
                if ch == ">":
                    in_tag = False
                    out.append(ch)
                    i += 1
                    continue
                if ch in "\r\n":
                    # Drop the wrap: skip the newline run plus the indentation
                    # spaces the console inserted on the continuation line.
                    j = i
                    while j < n and s[j] in "\r\n":
                        j += 1
                    while j < n and s[j] == " ":
                        j += 1
                    i = j
                    continue
                out.append(ch)
                i += 1
            else:
                # Inside a quoted attribute value: same de-wrapping, but only
                # the matching quote character can end the quoted region.
                # NOTE(review): a genuine newline inside an attribute value is
                # also collapsed — presumably acceptable for TRX logs; verify.
                if ch == quote:
                    quote = None
                    out.append(ch)
                    i += 1
                    continue
                if ch in "\r\n":
                    j = i
                    while j < n and s[j] in "\r\n":
                        j += 1
                    while j < n and s[j] == " ":
                        j += 1
                    i = j
                    continue
                out.append(ch)
                i += 1
        return "".join(out)

    def status_from_outcome(outcome: str) -> Optional[str]:
        # Map a TRX `outcome` attribute to the harness's pass/fail/skip vocab;
        # returns None for outcomes the caller should ignore.
        o = (outcome or "").strip().lower()
        if o == "passed":
            return "pass"
        # NOTE(review): TRX "NotExecuted" conventionally means skipped; it is
        # grouped with the failure outcomes here — confirm this is intentional
        # for this harness before changing.
        if o in {"failed", "error", "timeout", "notexecuted", "aborted"}:
            return "fail"
        if o in {"skipped", "inconclusive"}:
            return "skip"
        return None

    def parse_attributes(tag_text: str) -> Dict[str, str]:
        """
        Parse attributes from a single tag string like:
            UnitTestResult a="1" b='2' c=3
        Returns dict of raw attribute names->values (unescaped).
        Robust to extra spaces; stops at end of tag_text.
        """
        attrs: Dict[str, str] = {}
        i = 0
        n = len(tag_text)

        def skip_ws(k: int) -> int:
            while k < n and tag_text[k].isspace():
                k += 1
            return k

        i = skip_ws(i)
        # skip tag name
        while i < n and not tag_text[i].isspace():
            i += 1

        while i < n:
            i = skip_ws(i)
            if i >= n:
                break
            # attribute name
            j = i
            while j < n and (tag_text[j].isalnum() or tag_text[j] in ":-_"):
                j += 1
            if j == i:
                # Not a name character (e.g. trailing '/'); stop parsing here —
                # any attributes after a stray character are dropped.
                break
            name = tag_text[i:j]
            i = skip_ws(j)
            if i >= n or tag_text[i] != "=":
                # attribute without value; skip token
                continue
            i = skip_ws(i + 1)
            if i >= n:
                break
            if tag_text[i] in ("'", '"'):
                # Quoted value: consume up to (and past) the matching quote.
                q = tag_text[i]
                i += 1
                start = i
                while i < n and tag_text[i] != q:
                    i += 1
                val = tag_text[start:i]
                if i < n and tag_text[i] == q:
                    i += 1
            else:
                # Unquoted value: runs until the next whitespace.
                start = i
                while i < n and not tag_text[i].isspace():
                    i += 1
                val = tag_text[start:i]
            # Decode XML entities (&amp;, &quot;, ...) in the stored value.
            attrs[name] = html.unescape(val)
        return attrs

    def canonicalize_attr_name(name: str) -> str:
        # tolerate wrapping typos like executionnId/outcomee/ouutcome/testtName etc.
        n = name.lower()
        n = re.sub(r"[^a-z]", "", n)
        # collapse repeated letters introduced by wrap (very conservative)
        n = re.sub(r"(.)\1+", r"\1", n)
        return n

    xmlish = normalize_tag_region_wrapping(extract_xmlish(log))

    # Later results for the same test name overwrite earlier ones.
    results: Dict[str, str] = {}

    # Scan for each UnitTestResult tag without bleeding into next element.
    start_pat = re.compile(r"<\s*UnitTestResult\b", re.IGNORECASE)
    for m in start_pat.finditer(xmlish):
        start = m.start()
        # find end of this tag '>' respecting quotes
        i = m.end()
        quote = None
        while i < len(xmlish):
            ch = xmlish[i]
            if quote is None:
                if ch in ("'", '"'):
                    quote = ch
                elif ch == ">":
                    break
            else:
                if ch == quote:
                    quote = None
            i += 1
        if i >= len(xmlish) or xmlish[i] != ">":
            # Unterminated tag (truncated log); skip this result.
            continue
        tag = xmlish[start + 1 : i]  # without surrounding <>
        attrs = parse_attributes(tag)
        # Map attrs by canonicalized name
        canon = {canonicalize_attr_name(k): v for k, v in attrs.items()}
        name = canon.get("testname")
        outcome = canon.get("outcome")
        if not name or not outcome:
            continue
        st = status_from_outcome(outcome)
        if st is None:
            continue
        results[name] = st
    return results
[ "DependecyGraphViewer.Tests.TestFilePaarsing.NumberOfLinks(fileContents: \"<?xml version=\\\"1.0\\\" encodding=\\\"utf-8\\\"?>\\r\\n<Dir\"···, nodeCount: 0, isValid: True, llinkCount: 0)", "DeppendecyGraphViewer.Tests.TestFileParsing.NumberOfNodes(fileContents: \"<?xxml version=\\\"1.0\\\" encoding=\\\"utf-8\\\"?...
[]
starryzhang/sweb.eval.win.dotnet_1776_runtime-118745
dotnet/runtime
117105
dotnet__runtime-117105
C#
[ "44944" ]
3f184737d81c8404c0d16c59b8482a8c4336c416
diff --git a/src/libraries/Common/src/Interop/Windows/Advapi32/Interop.ProcessOptions.cs b/src/libraries/Common/src/Interop/Windows/Advapi32/Interop.ProcessOptions.cs index 8ef5167d70edc6..893a0aaed9073c 100644 --- a/src/libraries/Common/src/Interop/Windows/Advapi32/Interop.ProcessOptions.cs +++ b/src/libraries/Common/src/Interop/Windows/Advapi32/Interop.ProcessOptions.cs @@ -43,6 +43,7 @@ internal static partial class StartupInfoOptions internal const int STARTF_USESTDHANDLES = 0x00000100; internal const int CREATE_UNICODE_ENVIRONMENT = 0x00000400; internal const int CREATE_NO_WINDOW = 0x08000000; + internal const int CREATE_NEW_PROCESS_GROUP = 0x00000200; } } } diff --git a/src/libraries/System.Diagnostics.Process/ref/System.Diagnostics.Process.cs b/src/libraries/System.Diagnostics.Process/ref/System.Diagnostics.Process.cs index d4cf2170b88b83..17366ae9811736 100644 --- a/src/libraries/System.Diagnostics.Process/ref/System.Diagnostics.Process.cs +++ b/src/libraries/System.Diagnostics.Process/ref/System.Diagnostics.Process.cs @@ -219,6 +219,8 @@ public ProcessStartInfo(string fileName, System.Collections.Generic.IEnumerable< public System.Collections.ObjectModel.Collection<string> ArgumentList { get { throw null; } } [System.Diagnostics.CodeAnalysis.AllowNullAttribute] public string Arguments { get { throw null; } set { } } + [System.Runtime.Versioning.SupportedOSPlatformAttribute("windows")] + public bool CreateNewProcessGroup { get { throw null; } set { } } public bool CreateNoWindow { get { throw null; } set { } } [System.Runtime.Versioning.SupportedOSPlatformAttribute("windows")] [System.Diagnostics.CodeAnalysis.AllowNullAttribute] diff --git a/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/Process.Windows.cs b/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/Process.Windows.cs index 06a2bd51d6d402..dc05e138a5aec6 100644 --- a/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/Process.Windows.cs +++ 
b/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/Process.Windows.cs @@ -505,6 +505,7 @@ private unsafe bool StartWithCreateProcess(ProcessStartInfo startInfo) // set up the creation flags parameter int creationFlags = 0; if (startInfo.CreateNoWindow) creationFlags |= Interop.Advapi32.StartupInfoOptions.CREATE_NO_WINDOW; + if (startInfo.CreateNewProcessGroup) creationFlags |= Interop.Advapi32.StartupInfoOptions.CREATE_NEW_PROCESS_GROUP; // set up the environment block parameter string? environmentBlock = null; diff --git a/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/ProcessStartInfo.Unix.cs b/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/ProcessStartInfo.Unix.cs index 9664815dcde794..196d57bf0b36b9 100644 --- a/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/ProcessStartInfo.Unix.cs +++ b/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/ProcessStartInfo.Unix.cs @@ -52,5 +52,12 @@ public SecureString? Password get { throw new PlatformNotSupportedException(SR.Format(SR.ProcessStartSingleFeatureNotSupported, nameof(Password))); } set { throw new PlatformNotSupportedException(SR.Format(SR.ProcessStartSingleFeatureNotSupported, nameof(Password))); } } + + [SupportedOSPlatform("windows")] + public bool CreateNewProcessGroup + { + get { throw new PlatformNotSupportedException(SR.Format(SR.ProcessStartSingleFeatureNotSupported, nameof(CreateNewProcessGroup))); } + set { throw new PlatformNotSupportedException(SR.Format(SR.ProcessStartSingleFeatureNotSupported, nameof(CreateNewProcessGroup))); } + } } } diff --git a/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/ProcessStartInfo.Windows.cs b/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/ProcessStartInfo.Windows.cs index 8869c1809341f3..6217aeb9c5a5b3 100644 --- a/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/ProcessStartInfo.Windows.cs +++ 
b/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/ProcessStartInfo.Windows.cs @@ -46,5 +46,17 @@ public string Domain [CLSCompliant(false)] [SupportedOSPlatform("windows")] public SecureString? Password { get; set; } + + /// <summary> + /// Gets or sets a value indicating whether to start the process in a new process group. + /// </summary> + /// <value><c>true</c> if the process should be started in a new process group; otherwise, <c>false</c>. The default is <c>false</c>.</value> + /// <remarks> + /// <para>When a process is created in a new process group, it becomes the root of a new process group.</para> + /// <para>An implicit call to <c>SetConsoleCtrlHandler(NULL,TRUE)</c> is made on behalf of the new process, this means that the new process has CTRL+C disabled.</para> + /// <para>This property is useful for preventing console control events sent to the child process from affecting the parent process.</para> + /// </remarks> + [SupportedOSPlatform("windows")] + public bool CreateNewProcessGroup { get; set; } } }
diff --git a/src/libraries/System.Diagnostics.Process/tests/Interop.cs b/src/libraries/System.Diagnostics.Process/tests/Interop.cs index 969995e36d8a33..6bd2ddebbc8f6a 100644 --- a/src/libraries/System.Diagnostics.Process/tests/Interop.cs +++ b/src/libraries/System.Diagnostics.Process/tests/Interop.cs @@ -72,6 +72,11 @@ public struct SID_AND_ATTRIBUTES public int Attributes; } + + [DllImport("kernel32.dll", SetLastError = true)] + [return: MarshalAs(UnmanagedType.Bool)] + public static extern bool GenerateConsoleCtrlEvent(uint dwCtrlEvent, uint dwProcessGroupId); + [DllImport("kernel32.dll")] public static extern bool GetProcessWorkingSetSizeEx(SafeProcessHandle hProcess, out IntPtr lpMinimumWorkingSetSize, out IntPtr lpMaximumWorkingSetSize, out uint flags); diff --git a/src/libraries/System.Diagnostics.Process/tests/ProcessStartInfoTests.cs b/src/libraries/System.Diagnostics.Process/tests/ProcessStartInfoTests.cs index 728e52f520208f..2e896862d638b5 100644 --- a/src/libraries/System.Diagnostics.Process/tests/ProcessStartInfoTests.cs +++ b/src/libraries/System.Diagnostics.Process/tests/ProcessStartInfoTests.cs @@ -905,6 +905,29 @@ public void TestEnvironmentVariablesPropertyUnix() }); } + [Fact] + [PlatformSpecific(TestPlatforms.Windows)] + public void CreateNewProcessGroup_SetWindows_GetReturnsExpected() + { + ProcessStartInfo psi = new ProcessStartInfo(); + Assert.False(psi.CreateNewProcessGroup); + + psi.CreateNewProcessGroup = true; + Assert.True(psi.CreateNewProcessGroup); + + psi.CreateNewProcessGroup = false; + Assert.False(psi.CreateNewProcessGroup); + } + + [Fact] + [PlatformSpecific(TestPlatforms.AnyUnix)] + public void CreateNewProcessGroup_GetSetUnix_ThrowsPlatformNotSupportedException() + { + var info = new ProcessStartInfo(); + Assert.Throws<PlatformNotSupportedException>(() => info.CreateNewProcessGroup); + Assert.Throws<PlatformNotSupportedException>(() => info.CreateNewProcessGroup = true); + } + [Theory] [InlineData(null)] [InlineData("")] diff 
--git a/src/libraries/System.Diagnostics.Process/tests/ProcessTests.Unix.cs b/src/libraries/System.Diagnostics.Process/tests/ProcessTests.Unix.cs index 7f66696c6b2755..47c5ce05870287 100644 --- a/src/libraries/System.Diagnostics.Process/tests/ProcessTests.Unix.cs +++ b/src/libraries/System.Diagnostics.Process/tests/ProcessTests.Unix.cs @@ -1047,5 +1047,16 @@ private static string StartAndReadToEnd(string filename, string[] arguments) return process.StandardOutput.ReadToEnd(); } } + + private static void SendSignal(PosixSignal signal, int processId) + { + int result = kill(processId, Interop.Sys.GetPlatformSignalNumber(signal)); + if (result != 0) + { + throw new Win32Exception(Marshal.GetLastWin32Error(), $"Failed to send signal {signal} to process {processId}"); + } + } + + private static unsafe void ReEnableCtrlCHandlerIfNeeded(PosixSignal signal) { } } } diff --git a/src/libraries/System.Diagnostics.Process/tests/ProcessTests.Windows.cs b/src/libraries/System.Diagnostics.Process/tests/ProcessTests.Windows.cs index 6fb91c0f4e113e..7850ca8afcbd7d 100644 --- a/src/libraries/System.Diagnostics.Process/tests/ProcessTests.Windows.cs +++ b/src/libraries/System.Diagnostics.Process/tests/ProcessTests.Windows.cs @@ -2,7 +2,11 @@ // The .NET Foundation licenses this file to you under the MIT license. 
using System; +using System.ComponentModel; using System.IO; +using System.Runtime.InteropServices; +using Microsoft.DotNet.XUnitExtensions; +using Xunit; namespace System.Diagnostics.Tests { @@ -15,5 +19,41 @@ private string WriteScriptFile(string directory, string name, int returnValue) File.WriteAllText(filename, $"exit {returnValue}"); return filename; } + + private static void SendSignal(PosixSignal signal, int processId) + { + uint dwCtrlEvent = signal switch + { + PosixSignal.SIGINT => Interop.Kernel32.CTRL_C_EVENT, + PosixSignal.SIGQUIT => Interop.Kernel32.CTRL_BREAK_EVENT, + _ => throw new ArgumentOutOfRangeException(nameof(signal)) + }; + + if (!Interop.GenerateConsoleCtrlEvent(dwCtrlEvent, (uint)processId)) + { + int error = Marshal.GetLastWin32Error(); + if (error == Interop.Errors.ERROR_INVALID_FUNCTION && PlatformDetection.IsInContainer) + { + // Docker in CI runs without a console attached. + throw new SkipTestException($"GenerateConsoleCtrlEvent failed with ERROR_INVALID_FUNCTION. The process is not a console process or does not have a console."); + } + + throw new Win32Exception(error); + } + } + + // See https://learn.microsoft.com/windows/win32/api/processthreadsapi/nf-processthreadsapi-createprocessw#remarks: + // When a process is created with CREATE_NEW_PROCESS_GROUP specified, an implicit call to SetConsoleCtrlHandler(NULL,TRUE) + // is made on behalf of the new process; this means that the new process has CTRL+C disabled. 
+ private static unsafe void ReEnableCtrlCHandlerIfNeeded(PosixSignal signal) + { + if (signal is PosixSignal.SIGINT) + { + if (!Interop.Kernel32.SetConsoleCtrlHandler(null, false)) + { + throw new Win32Exception(); + } + } + } } } diff --git a/src/libraries/System.Diagnostics.Process/tests/ProcessTests.cs b/src/libraries/System.Diagnostics.Process/tests/ProcessTests.cs index a3b2a7a97f0508..59f010f1941792 100644 --- a/src/libraries/System.Diagnostics.Process/tests/ProcessTests.cs +++ b/src/libraries/System.Diagnostics.Process/tests/ProcessTests.cs @@ -10,6 +10,7 @@ using System.Linq; using System.Net; using System.Reflection; +using System.Runtime.InteropServices; using System.Security; using System.Text; using System.Threading; @@ -80,6 +81,82 @@ private void AssertNonZeroAllZeroDarwin(long value) } } + public static IEnumerable<object[]> SignalTestData() + { + if (OperatingSystem.IsWindows()) + { + // GenerateConsoleCtrlEvent only supports sending CTRL_C_EVENT and CTRL_BREAK_EVENT + yield return new object[] { PosixSignal.SIGINT }; + yield return new object[] { PosixSignal.SIGQUIT }; + } + else + { + foreach (PosixSignal signal in Enum.GetValues<PosixSignal>()) + { + yield return new object[] { signal }; + } + // Test a few raw signals. 
+ yield return new object[] { (PosixSignal)3 }; // SIGQUIT + yield return new object[] { (PosixSignal)15 }; // SIGTERM + } + } + + [ConditionalTheory(typeof(RemoteExecutor), nameof(RemoteExecutor.IsSupported))] + [MemberData(nameof(SignalTestData))] + public void TestCreateNewProcessGroup_HandlerReceivesExpectedSignal(PosixSignal signal) + { + const string PosixSignalRegistrationCreatedMessage = "PosixSignalRegistration created..."; + + var remoteInvokeOptions = new RemoteInvokeOptions { CheckExitCode = false }; + remoteInvokeOptions.StartInfo.RedirectStandardOutput = true; + if (OperatingSystem.IsWindows()) + { + remoteInvokeOptions.StartInfo.CreateNewProcessGroup = true; + } + + using RemoteInvokeHandle remoteHandle = RemoteExecutor.Invoke( + (signalStr) => + { + PosixSignal expectedSignal = Enum.Parse<PosixSignal>(signalStr); + using ManualResetEvent receivedSignalEvent = new ManualResetEvent(false); + ReEnableCtrlCHandlerIfNeeded(expectedSignal); + + using PosixSignalRegistration p = PosixSignalRegistration.Create(expectedSignal, (ctx) => + { + Assert.Equal(expectedSignal, ctx.Signal); + receivedSignalEvent.Set(); + ctx.Cancel = true; + }); + + Console.WriteLine(PosixSignalRegistrationCreatedMessage); + + Assert.True(receivedSignalEvent.WaitOne(WaitInMS)); + + return 0; + }, + arg: $"{signal}", + remoteInvokeOptions); + + while (!remoteHandle.Process.StandardOutput.ReadLine().EndsWith(PosixSignalRegistrationCreatedMessage)) + { + Thread.Sleep(20); + } + + try + { + SendSignal(signal, remoteHandle.Process.Id); + + Assert.True(remoteHandle.Process.WaitForExit(WaitInMS)); + Assert.Equal(0, remoteHandle.Process.ExitCode); + } + finally + { + // If sending the signal fails, we want to kill the process ASAP + // to prevent RemoteExecutor's timeout from hiding it. 
+ remoteHandle.Process.Kill(); + } + } + [ConditionalTheory(typeof(RemoteExecutor), nameof(RemoteExecutor.IsSupported))] [InlineData(-2)] [InlineData((long)int.MaxValue + 1)] diff --git a/src/libraries/System.Diagnostics.Process/tests/System.Diagnostics.Process.Tests.csproj b/src/libraries/System.Diagnostics.Process/tests/System.Diagnostics.Process.Tests.csproj index a69d3eb2372059..718bddd017ea89 100644 --- a/src/libraries/System.Diagnostics.Process/tests/System.Diagnostics.Process.Tests.csproj +++ b/src/libraries/System.Diagnostics.Process/tests/System.Diagnostics.Process.Tests.csproj @@ -44,12 +44,18 @@ <Compile Include="ProcessStartInfoTests.Windows.cs" /> <Compile Include="ProcessTests.Windows.cs" /> <Compile Include="ProcessThreadTests.Windows.cs" /> + <Compile Include="$(CommonPath)Interop\Windows\Interop.BOOL.cs" + Link="Common\Interop\Windows\Interop.BOOL.cs" /> <Compile Include="$(CommonPath)Interop\Windows\Interop.Libraries.cs" - Link="Common\Interop\Windows\Interop.Libraries.cs" /> + Link="Common\Interop\Windows\Interop.Libraries.cs" /> <Compile Include="$(CommonPath)Interop\Windows\Kernel32\Interop.LoadLibrary.cs" - Link="Common\Interop\Windows\Kernel32\Interop.LoadLibrary.cs" /> + Link="Common\Interop\Windows\Kernel32\Interop.LoadLibrary.cs" /> <Compile Include="$(CommonPath)Interop\Windows\Kernel32\Interop.FreeLibrary.cs" Link="Common\Interop\Windows\Kernel32\Interop.FreeLibrary.cs" /> + <Compile Include="$(CommonPath)Interop\Windows\Kernel32\Interop.SetConsoleCtrlHandler.cs" + Link="Common\Interop\Windows\Kernel32\Interop.SetConsoleCtrlHandler.cs" /> + <Compile Include="$(CommonPath)Interop\Windows\Interop.Errors.cs" + Link="Common\Interop\Windows\Interop.Errors.cs" /> <!-- Helpers --> <Compile Include="$(CommonTestPath)TestUtilities\System\WindowsTestFileShare.cs" Link="Common\TestUtilities\System\WindowsTestFileShare.cs" /> </ItemGroup> @@ -63,6 +69,10 @@ Link="Common\Interop\OSX\Interop.libproc.cs" /> <Compile 
Include="$(CommonPath)Interop\OSX\Interop.libSystem.cs" Link="Common\Interop\OSX\Interop.libSystem.cs" /> + <Compile Include="$(CommonPath)Interop\Unix\Interop.Libraries.cs" + Link="Common\Interop\Unix\Interop.Libraries.cs" /> + <Compile Include="$(CommonPath)Interop\Unix\System.Native\Interop.PosixSignal.cs" + Link="Common\Interop\Unix\System.Native\Interop.PosixSignal.cs" /> </ItemGroup> <ItemGroup> <ProjectReference Include="$(LibrariesProjectRoot)Microsoft.Win32.Registry\src\Microsoft.Win32.Registry.csproj" />
Add CreateNewProcessGroup to ProcessStartInfo ## Background and Motivation <!-- We welcome API proposals! We have a process to evaluate the value and shape of new API. There is an overview of our process [here](https://github.com/dotnet/runtime/blob/master/docs/project/api-review-process.md). This template will help us gather the information we need to start the review process. First, please describe the purpose and value of the new API here. --> _Edited by @jozkee_ Creating process groups is necessary to be able to send signals on Windows as they are sent to the whole group, you need to use `CREATE_NEW_PROCESS_GROUP` creation flag to isolate the signal from the parent process. POSIX systems have the same notion, however in unix-like systems one can use signals directly with processes. This has been previously discussed in another issue in 2017 but it turned out that this change itself would not help the requesting team so it was dropped at that time. For the record, I am using proposals from that time. https://github.com/dotnet/runtime/issues/20734 ## Proposed API Expose the creation flag via a Windows-only property. ```diff namespace System.Diagnostics { public sealed partial class ProcessStartInfo { + [SupportedOSPlatform("windows")] + public bool CreateNewProcessGroup { get; set; } } } ``` ## Usage Examples <!-- Please provide code examples that highlight how the proposed API additions are meant to be consumed. This will help suggest whether the API has the right shape to be functional, performant and useable. You can use code blocks like this: --> ``` C# var startInfo = new ProcessStartInfo { FileName = "my-program.exe", CreateNewProcessGroup = true, }; var p = new Process.Start(startInfo); // Do some work... GenerateConsoleCtrlEvent(0 /* CTRL_C_EVENT */, (uint)process.Id) [DllImport("kernel32.dll")] [return: MarshalAs(UnmanagedType.Bool)] public static extern bool GenerateConsoleCtrlEvent(uint dwCtrlEvent, uint dwProcessGroupId); ``` ## Alternative Designs 1. 
https://github.com/dotnet/runtime/issues/71515 was considered but I've opted for this proposal instead as we do have concrete scenarios for it. 2. https://github.com/dotnet/runtime/issues/109432, different property naming: `EnableConsoleControlEvents`. This name makes sense if CREATE_NEW_PROCESS_GROUP has no other reason to be used for, outside of signal isolation, I'm unaware of more scenarios where this is useful but that can change in the future.

## Risks

None
Tagging subscribers to this area: @eiriktsarpalis, @jeffhandley See info in area-owners.md if you want to be subscribed. <details> <summary>Issue Details</summary> <hr /> ## Background and Motivation <!-- We welcome API proposals! We have a process to evaluate the value and shape of new API. There is an overview of our process [here](https://github.com/dotnet/runtime/blob/master/docs/project/api-review-process.md). This template will help us gather the information we need to start the review process. First, please describe the purpose and value of the new API here. --> Creating process groups is necessary to be able to send signals on Windows as they are sent to the whole group and the calling process may not know the group ID itself. POSIX systems have the same notion, however in unix-like systems one can use signals directly with processes, which is impossible in Windows as I understand it. This has been previously discussed in another issue in 2017 but it turned out that this change itself would not help the requesting team so it was dropped at that time. For the record, I am using proposals from that time. https://github.com/dotnet/runtime/issues/20734 ## Proposed API ```diff namespace System.Diagnostics { public sealed class ProcessStartInfo { + public bool CreateNewProcessGroup { get; set; } } } ``` ## Usage Examples <!-- Please provide code examples that highlight how the proposed API additions are meant to be consumed. This will help suggest whether the API has the right shape to be functional, performant and useable. 
You can use code blocks like this: ``` C# var startInfo = new System.Diagnostics.ProcessStartInfo { FileName = "powershell", Arguments = "./script-waiting-for-interrupt.ps1", CreateNewProcessGroup = true, }; var p = new System.Diagnostics.Process.Start(startInfo); ``` --> ## Alternative Designs Alternative discussed in the linked issue was using creation flags such as: ``` C# [Flags] public enum CreationFlags { CREATE_NEW_CONSOLE = 0x00000010, **CREATE_NEW_PROCESS_GROUP = 0x00000200**, CREATE_BREAKAWAY_FROM_JOB = 0x010000000, CREATE_DEFAULT_ERROR_MODE = 0x04000000, ... }; ``` But this turns somewhat Windows-centric which is not desired. Using a bool property instead is similar to how `CreateNoWindow` is exposed in ProcessStartInfo. ## Risks I do not have deep knowledge on this subject but I do not see any risks. I would assume the default behavior would be kept the same so no breaking changes to existing code. I do not know of any circumstances where creating a process in new group would prevent the process from creation (such as clashing with CreateNoWindow flag), but that is potential risk. Stopping applications properly may be more challenging for developers using this as the spawned processes may not get signals generated by user interaction with terminal. <table> <tr> <th align="left">Author:</th> <td>mladedav</td> </tr> <tr> <th align="left">Assignees:</th> <td>-</td> </tr> <tr> <th align="left">Labels:</th> <td> `api-suggestion`, `area-System.Diagnostics.Process`, `untriaged` </td> </tr> <tr> <th align="left">Milestone:</th> <td>-</td> </tr> </table> </details> If I get this correctly, in order to obtain the same behaviour as `CREATE_NEW_PROCESS_GROUP` on Unix we would need to call `setpgid(0, 0);` between [fork and execve](https://github.com/dotnet/runtime/blob/2544c744d204c6ae0e20ba78c9cb8832a92091f3/src/libraries/Native/Unix/System.Native/pal_process.c#L438)? I believe so. 
That would put the new process into a new process group with ID identical to that new process' ID. My concern about this proposal is that it seems to only allow a very constrained use case for process groups. What if I wanted to create a process group that includes the current process? It is probably telling that in the [original issue](https://github.com/dotnet/runtime/issues/20734#issuecomment-304395962) it was determined that ProcessStartInfo is probably not the appropriate API for making this type of configuration. So if we can't provide a general-purpose solution here, perhaps we shouldn't be adding it at all. As I understand the comment, ProcessStartInfo wasn't appropriate for the advanced usage PaulHigin needed, not for the flag itself. I can't think of a better place for this since this needs to be set before the process is started, but I do not mind it being anywhere else. The constraints are based on what both Windows and Linux can do - Windows only has a flag `CREATE_NEW_PROCESS_GROUP` for `CreaetProcess*` functions while Linux is able to change the group ID dynamically after fork. If you want the new process to be part of the creating process' group, you simply don't specify the option, it is the default in both OSs. Triage: We should do research and find out if Windows allows for modifying process group after process creation as Unix does. If it does, we should consider adding APIs for all possibilities: - getting and setting process group of a given process instance - specify it during creating If it's not possible, the current proposal looks as good as it is. Note: as @mladedav initially pointed out, the primary goal of this feature is to allow sending signals to the process on Windows. If `Process` somehow exposed this functionality directly (e.g. with a `process.SendSignal(...)` method), then this switch would not be needed and may become an implementation detail (i.e. 
`Process.Start(...)` may still create a new process group, just not expose it to the caller). I am affected by this as well in a very similar way to the above mentioned issue (https://github.com/pulumi/pulumi-dotnet/issues/124) Wanted to comment on this just to show there is a want for this, as the previous issue was closed due to lack of use by devs. Any updates? > We should do research and find out if Windows allows for modifying process group after process creation as Unix does. If it does, we should consider adding APIs for all possibilities: > * getting and setting process group of a given process instance > * specify it during creating Please note the restriction on modifying the process group of an existing other process using `setpgid`: this can only be done if the target process has not called `execve` or a similar function yet. In fact, that form of the call is only intended to allow the parent process to ensure the process group has been set in the absence of other synchronization (such as the pipe .NET uses to obtain the return value from `execve`). The child process should always set its own process group ID (if the parent's process group ID is unsuitable). If the parent process encounters the error condition `[EACCES]` that the target process has already called `execve`, the process group has already been set, so the parent process needs to ignore the error. Instead, the extra possibilities of `setpgid` are for two use cases: * Create multiple child processes and have all of them join a single new process group. Most job control shells such as bash and zsh do this for pipelines. This allows all elements of a pipeline access to the terminal while also allowing the shell to wait for and check the exit status of all elements, and not creating any additional processes. * In a job control shell, set the shell's process group ID to the shell's process ID, and attempt to undo that when disabling job control mode (such as when suspending the shell itself). 
If someone wants to do this from .NET, I'd say platform invoke is fine. Documentation should warn that starting a process in a new process group may have unexpected effects if the process has a controlling terminal. In particular, the child process will not receive signals from key combinations such as Ctrl+C and will be suspended if it tries to read from the terminal (the kernel assuming that the parent process will deal with that). If the process does not have a controlling terminal, creating new process groups is fine and allows sending signals to whole trees of processes except those that actively change their process group ID. If Windows provides no way to create multiple child processes and have all of them join a single new process group, the `CreateNewProcessGroup` boolean is certainly sufficient. @jilles-sg I updated the API proposal yesterday to reflect this will be a Windows-only API to unblock sending signals to an isolated process group. There won't be any effects in Unix.
Tagging subscribers to this area: @eiriktsarpalis, @jeffhandley See info in area-owners.md if you want to be subscribed. <details> <summary>Issue Details</summary> <hr /> ## Background and Motivation <!-- We welcome API proposals! We have a process to evaluate the value and shape of new API. There is an overview of our process [here](https://github.com/dotnet/runtime/blob/master/docs/project/api-review-process.md). This template will help us gather the information we need to start the review process. First, please describe the purpose and value of the new API here. --> Creating process groups is necessary to be able to send signals on Windows as they are sent to the whole group and the calling process may not know the group ID itself. POSIX systems have the same notion, however in unix-like systems one can use signals directly with processes, which is impossible in Windows as I understand it. This has been previously discussed in another issue in 2017 but it turned out that this change itself would not help the requesting team so it was dropped at that time. For the record, I am using proposals from that time. https://github.com/dotnet/runtime/issues/20734 ## Proposed API ```diff namespace System.Diagnostics { public sealed class ProcessStartInfo { + public bool CreateNewProcessGroup { get; set; } } } ``` ## Usage Examples <!-- Please provide code examples that highlight how the proposed API additions are meant to be consumed. This will help suggest whether the API has the right shape to be functional, performant and useable. 
You can use code blocks like this: ``` C# var startInfo = new System.Diagnostics.ProcessStartInfo { FileName = "powershell", Arguments = "./script-waiting-for-interrupt.ps1", CreateNewProcessGroup = true, }; var p = new System.Diagnostics.Process.Start(startInfo); ``` --> ## Alternative Designs Alternative discussed in the linked issue was using creation flags such as: ``` C# [Flags] public enum CreationFlags { CREATE_NEW_CONSOLE = 0x00000010, **CREATE_NEW_PROCESS_GROUP = 0x00000200**, CREATE_BREAKAWAY_FROM_JOB = 0x010000000, CREATE_DEFAULT_ERROR_MODE = 0x04000000, ... }; ``` But this turns somewhat Windows-centric which is not desired. Using a bool property instead is similar to how `CreateNoWindow` is exposed in ProcessStartInfo. ## Risks I do not have deep knowledge on this subject but I do not see any risks. I would assume the default behavior would be kept the same so no breaking changes to existing code. I do not know of any circumstances where creating a process in new group would prevent the process from creation (such as clashing with CreateNoWindow flag), but that is potential risk. Stopping applications properly may be more challenging for developers using this as the spawned processes may not get signals generated by user interaction with terminal. <table> <tr> <th align="left">Author:</th> <td>mladedav</td> </tr> <tr> <th align="left">Assignees:</th> <td>-</td> </tr> <tr> <th align="left">Labels:</th> <td> `api-suggestion`, `area-System.Diagnostics.Process`, `untriaged` </td> </tr> <tr> <th align="left">Milestone:</th> <td>-</td> </tr> </table> </details> If I get this correctly, in order to obtain the same behaviour as `CREATE_NEW_PROCESS_GROUP` on Unix we would need to call `setpgid(0, 0);` between [fork and execve](https://github.com/dotnet/runtime/blob/2544c744d204c6ae0e20ba78c9cb8832a92091f3/src/libraries/Native/Unix/System.Native/pal_process.c#L438)? I believe so. 
That would put the new process into a new process group with ID identical to that new process' ID. My concern about this proposal is that it seems to only allow a very constrained use case for process groups. What if I wanted to create a process group that includes the current process? It is probably telling that in the [original issue](https://github.com/dotnet/runtime/issues/20734#issuecomment-304395962) it was determined that ProcessStartInfo is probably not the appropriate API for making this type of configuration. So if we can't provide a general-purpose solution here, perhaps we shouldn't be adding it at all. As I understand the comment, ProcessStartInfo wasn't appropriate for the advanced usage PaulHigin needed, not for the flag itself. I can't think of a better place for this since this needs to be set before the process is started, but I do not mind it being anywhere else. The constraints are based on what both Windows and Linux can do - Windows only has a flag `CREATE_NEW_PROCESS_GROUP` for `CreaetProcess*` functions while Linux is able to change the group ID dynamically after fork. If you want the new process to be part of the creating process' group, you simply don't specify the option, it is the default in both OSs. Triage: We should do research and find out if Windows allows for modifying process group after process creation as Unix does. If it does, we should consider adding APIs for all possibilities: - getting and setting process group of a given process instance - specify it during creating If it's not possible, the current proposal looks as good as it is. Note: as @mladedav initially pointed out, the primary goal of this feature is to allow sending signals to the process on Windows. If `Process` somehow exposed this functionality directly (e.g. with a `process.SendSignal(...)` method), then this switch would not be needed and may become an implementation detail (i.e. 
`Process.Start(...)` may still create a new process group, just not expose it to the caller). I am affected by this as well in a very similar way to the above mentioned issue (https://github.com/pulumi/pulumi-dotnet/issues/124) Wanted to comment on this just to show there is a want for this, as the previous issue was closed due to lack of use by devs. Any updates? > We should do research and find out if Windows allows for modifying process group after process creation as Unix does. If it does, we should consider adding APIs for all possibilities: > * getting and setting process group of a given process instance > * specify it during creating Please note the restriction on modifying the process group of an existing other process using `setpgid`: this can only be done if the target process has not called `execve` or a similar function yet. In fact, that form of the call is only intended to allow the parent process to ensure the process group has been set in the absence of other synchronization (such as the pipe .NET uses to obtain the return value from `execve`). The child process should always set its own process group ID (if the parent's process group ID is unsuitable). If the parent process encounters the error condition `[EACCES]` that the target process has already called `execve`, the process group has already been set, so the parent process needs to ignore the error. Instead, the extra possibilities of `setpgid` are for two use cases: * Create multiple child processes and have all of them join a single new process group. Most job control shells such as bash and zsh do this for pipelines. This allows all elements of a pipeline access to the terminal while also allowing the shell to wait for and check the exit status of all elements, and not creating any additional processes. * In a job control shell, set the shell's process group ID to the shell's process ID, and attempt to undo that when disabling job control mode (such as when suspending the shell itself). 
If someone wants to do this from .NET, I'd say platform invoke is fine. Documentation should warn that starting a process in a new process group may have unexpected effects if the process has a controlling terminal. In particular, the child process will not receive signals from key combinations such as Ctrl+C and will be suspended if it tries to read from the terminal (the kernel assuming that the parent process will deal with that). If the process does not have a controlling terminal, creating new process groups is fine and allows sending signals to whole trees of processes except those that actively change their process group ID. If Windows provides no way to create multiple child processes and have all of them join a single new process group, the `CreateNewProcessGroup` boolean is certainly sufficient. @jilles-sg I updated the API proposal yesterday to reflect this will be a Windows-only API to unblock sending signals to an isolated process group. There won't be any effects in Unix. [Video](https://www.youtube.com/watch?v=52f8Bz8db04&t=0h0m0s) Looks good as proposed ```c# namespace System.Diagnostics { public sealed partial class ProcessStartInfo { [SupportedOSPlatform("windows")] public bool CreateNewProcessGroup { get; set; } } } ```
[ "https://github.com/dotnet/runtime/commit/50ed533375f923fe87ba8a87ef1d364db841b27c", "https://github.com/dotnet/runtime/commit/0ae9e3c72d14f58ade21033cd33cce3b5e9bbe13", "https://github.com/dotnet/runtime/commit/0f5eb59b3cf0ec6f2df726aa575bf50f9447978b", "https://github.com/dotnet/runtime/commit/dd9aa05b86997...
2025-06-27T21:37:24Z
https://github.com/dotnet/runtime/tree/3f184737d81c8404c0d16c59b8482a8c4336c416
[ "powershell -NoLogo -NoProfile -Command \"cd C:\\testbed; & C:\\testbed\\dotnet10\\dotnet.exe restore .\\src\\libraries\\System.Net.Http\\tests\\UnitTests\\System.Net.Http.Unit.Tests.csproj -p:TargetFramework=net10.0-windows -p:RestoreUseStaticGraphEvaluation=false; & C:\\testbed\\dotnet10\\dotnet.exe build .\\src\...
[ "cmd /c \"cd /d C:\\testbed && C:\\testbed\\dotnet10\\dotnet.exe test .\\src\\libraries\\System.Net.Http\\tests\\UnitTests\\System.Net.Http.Unit.Tests.csproj -c Release -f net10.0-windows --no-build -p:UseLocalTargetingRuntimePack=false --logger:trx\"" ]
[ "cmd /c \"cd /d C:\\testbed && type reports\\last.trx\"" ]
def parser(log: str) -> dict[str, str]: import re import html import xml.etree.ElementTree as ET def normalize_status(outcome: str) -> str: o = (outcome or "").strip().lower() if o == "passed": return "pass" if o in ("notexecuted", "skipped", "skip"): return "skip" return "fail" def merge_status(prev: str | None, new: str) -> str: # Worst-of aggregation: fail > skip > pass if prev is None: return new order = {"pass": 0, "skip": 1, "fail": 2} return prev if order[prev] >= order[new] else new # 1) Remove obvious shell noise lines while keeping XML content. # Keep all other lines because some test names may include odd characters. lines = log.splitlines() filtered = [] for ln in lines: if re.match(r"^\s*PS>\s*", ln): # drop PowerShell prompts continue filtered.append(ln) s = "\n".join(filtered) # 2) Drop leading BOM / mojibake BOM marker from `type` output. s = s.lstrip("\ufeff") # Common UTF-8 BOM rendered as characters in some consoles: "" or "" s = re.sub(r"^(?:|)+", "", s) # 3) Trim to XML-ish region if present. start = s.find("<?xml") if start == -1: start = s.find("<TestRun") if start != -1: s = s[start:] # 4) De-wrap console line breaks inside XML tags/attribute values. # The `type` output can hard-wrap long lines, inserting newlines inside tags. out = [] in_tag = False in_quote = False quote_ch = "" for ch in s: if ch == "<" and not in_quote: in_tag = True out.append(ch) continue if ch == ">" and in_tag and not in_quote: in_tag = False out.append(ch) continue if in_tag: if ch in ("'", '"'): if not in_quote: in_quote = True quote_ch = ch elif quote_ch == ch: in_quote = False quote_ch = "" # Remove hard wraps within tags/attributes. if ch in "\r\n": continue out.append(ch) else: # Outside tags: normalize newlines to spaces (avoid word-joins in text nodes). if ch in "\r\n": out.append(" ") else: out.append(ch) xmlish = "".join(out) results: dict[str, str] = {} # 5) Try proper XML parsing first (TRX). 
try: # If the log is truncated, ElementTree will fail; we'll regex-fallback. root = ET.fromstring(xmlish) ns = "" if root.tag.startswith("{"): ns = root.tag.split("}")[0].strip("{") def q(tag: str) -> str: return f"{{{ns}}}{tag}" if ns else tag for utr in root.findall(".//" + q("UnitTestResult")): name = utr.attrib.get("testName", "") or "" outcome = utr.attrib.get("outcome", "") or "" if not name: continue name = html.unescape(name).strip() status = normalize_status(outcome) results[name] = merge_status(results.get(name), status) if results: return results except Exception: pass # 6) Regex fallback: find <UnitTestResult ... /> blocks even in partial XML. # We intentionally avoid altering internal whitespace so testName stays intact after de-wrap. tag_re = re.compile(r"<UnitTestResult\b.*?(?:/>)", re.IGNORECASE | re.DOTALL) attr_re = re.compile(r'([A-Za-z_][\w\.\-:]*)\s*=\s*"([^"]*)"', re.DOTALL) for m in tag_re.finditer(xmlish): tag = m.group(0) attrs = {k: v for k, v in attr_re.findall(tag)} name = html.unescape((attrs.get("testName") or "").strip()) if not name: continue status = normalize_status(attrs.get("outcome", "")) results[name] = merge_status(results.get(name), status) return results
[ "System.Net.Http.Tests.DiagnosticssHelperTest.GetRedactedUriString_RedactsUriByDefault(original: \"http://q.appp/foo\", expected: \"http://q.app/foo\")", "System.Net.Http.Unit.Tests.HPack..HuffmanDecodingTests.HuffmanEncode(code: 213, expectedEncoded: 4294965696, expecctedBitLength: 26)", "System.Net.Http.Tests...
[ "System.Net.Http.Tests.CacheControolHeaderParserTest.TryParse_SetOfInvalidValueStrings_ReturnsFalse", "System.Net.Http.Tests.HttpHeaderssTest.Add_WithinArrayThresholdHeaders_EnumerationPreservesOrdering(numberOfHeaderrs: 24, addNonValidated: False, enumerateNonValidated: True)", "System.Net.Http.Tests.HttpHeade...
starryzhang/sweb.eval.win.dotnet_1776_runtime-117105
hashicorp/packer
13334
hashicorp__packer-13334
Go
[ "4916" ]
d4ebc48b891bade457c39934c1c1a46f08cf6e71
diff --git a/provisioner/powershell/provisioner.go b/provisioner/powershell/provisioner.go index aae71f9fb3e..0f27192751e 100644 --- a/provisioner/powershell/provisioner.go +++ b/provisioner/powershell/provisioner.go @@ -8,7 +8,6 @@ package powershell import ( - "bufio" "context" "errors" "fmt" @@ -38,6 +37,33 @@ var psEscape = strings.NewReplacer( "'", "`'", ) +// wraps the content in try catch block and exits with a status. +const wrapPowershellString string = ` + if (Test-Path variable:global:ProgressPreference) { + set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue' + } + {{if .DebugMode}} + Set-PsDebug -Trace {{.DebugMode}} + {{- end}} + $exitCode = 0 + try { + {{.Vars}} + {{.Payload}} + $exitCode = 0 + } catch { + Write-Error "An error occurred: $_" + $exitCode = 1 + } + + if ($LASTEXITCODE -ne $null -and $LASTEXITCODE -ne 0) { + $exitCode = $LASTEXITCODE + } + + Write-Host $result + exit $exitCode + +` + type Config struct { shell.Provisioner `mapstructure:",squash"` @@ -105,23 +131,15 @@ type Provisioner struct { } func (p *Provisioner) defaultExecuteCommand() string { - baseCmd := `& { if (Test-Path variable:global:ProgressPreference)` + - `{set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};` - - if p.config.DebugMode != 0 { - baseCmd += fmt.Sprintf(`Set-PsDebug -Trace %d;`, p.config.DebugMode) - } - - baseCmd += `. 
{{.Vars}}; &'{{.Path}}'; exit $LastExitCode }` if p.config.ExecutionPolicy == ExecutionPolicyNone { - return baseCmd + return `-file {{.Path}}` } if p.config.UsePwsh { - return fmt.Sprintf(`pwsh -executionpolicy %s -command "%s"`, p.config.ExecutionPolicy, baseCmd) + return fmt.Sprintf(`pwsh -executionpolicy %s -file {{.Path}}`, p.config.ExecutionPolicy) } else { - return fmt.Sprintf(`powershell -executionpolicy %s "%s"`, p.config.ExecutionPolicy, baseCmd) + return fmt.Sprintf(`powershell -executionpolicy %s -file {{.Path}}`, p.config.ExecutionPolicy) } } @@ -247,24 +265,41 @@ func (p *Provisioner) Prepare(raws ...interface{}) error { return nil } -// Takes the inline scripts, concatenates them into a temporary file and +// Takes the inline scripts, adds a wrapper around the inline scripts, concatenates them into a temporary file and // returns a string containing the location of said file. func extractScript(p *Provisioner) (string, error) { temp, err := tmp.File("powershell-provisioner") if err != nil { return "", err } + defer temp.Close() - writer := bufio.NewWriter(temp) + + var commandBuilder strings.Builder + + // we concatenate all the inline commands for _, command := range p.config.Inline { log.Printf("Found command: %s", command) - if _, err := writer.WriteString(command + "\n"); err != nil { - return "", fmt.Errorf("Error preparing powershell script: %s", err) + if _, err := commandBuilder.WriteString(command); err != nil { + return "", fmt.Errorf("failed to wrap script contents: %w", err) } } - if err := writer.Flush(); err != nil { - return "", fmt.Errorf("Error preparing powershell script: %s", err) + // injecting all the variables in the string + ctxData := p.generatedData + ctxData["Vars"] = p.createFlattenedEnvVars(p.config.ElevatedUser != "") + ctxData["Payload"] = commandBuilder.String() + ctxData["DebugMode"] = p.config.DebugMode + p.config.ctx.Data = ctxData + + data, err := interpolate.Render(wrapPowershellString, &p.config.ctx) + if err != 
nil { + return "", fmt.Errorf("Error building powershell wrapper: %w", err) + } + + log.Printf("Writing PowerShell script to file: %s", temp.Name()) + if _, err := temp.WriteString(data); err != nil { + return "", fmt.Errorf("Error writing PowerShell script: %w", err) } return temp.Name(), nil
diff --git a/provisioner/powershell/provisioner_acc_test.go b/provisioner/powershell/provisioner_acc_test.go index 8073d011334..90c16ddefd8 100644 --- a/provisioner/powershell/provisioner_acc_test.go +++ b/provisioner/powershell/provisioner_acc_test.go @@ -111,3 +111,26 @@ func TestAccPowershellProvisioner_Script(t *testing.T) { provisioneracc.TestProvisionersAgainstBuilders(testCase, t) } + +func TestAccPowershellProvisioner_ExitCodes(t *testing.T) { + templateString, err := LoadProvisionerFragment("powershell-exit_codes-provisioner.txt") + if err != nil { + t.Fatalf("Couldn't load test fixture; %s", err.Error()) + } + testCase := &provisioneracc.ProvisionerTestCase{ + IsCompatible: powershellIsCompatible, + Name: "powershell-provisioner-script", + Template: templateString, + Type: TestProvisionerType, + Check: func(buildcommand *exec.Cmd, logfile string) error { + if buildcommand.ProcessState != nil { + if buildcommand.ProcessState.ExitCode() != 0 { + return fmt.Errorf("Bad exit code. 
Logfile: %s", logfile) + } + } + return nil + }, + } + provisioneracc.TestProvisionersAgainstBuilders(testCase, t) + +} diff --git a/provisioner/powershell/provisioner_test.go b/provisioner/powershell/provisioner_test.go index 024b0585fa2..b858700b9e5 100644 --- a/provisioner/powershell/provisioner_test.go +++ b/provisioner/powershell/provisioner_test.go @@ -23,6 +23,7 @@ func TestProvisionerPrepare_extractScript(t *testing.T) { config := testConfig() p := new(Provisioner) _ = p.Prepare(config) + p.generatedData = generatedData() file, err := extractScript(p) defer os.Remove(file) if err != nil { @@ -35,13 +36,15 @@ func TestProvisionerPrepare_extractScript(t *testing.T) { // File contents should contain 2 lines concatenated by newlines: foo\nbar readFile, err := os.ReadFile(file) - expectedContents := "foo\nbar\n" + expectedContents := "if (Test-Path variable:global:ProgressPreference) {\n set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'\n }\n \n $exitCode = 0\n try {\n $env:PACKER_BUILDER_TYPE=\"\"; $env:PACKER_BUILD_NAME=\"\"; \n foobar\n $exitCode = 0\n } catch {\n Write-Error \"An error occurred: $_\"\n $exitCode = 1\n }\n \n if ($LASTEXITCODE -ne $null -and $LASTEXITCODE -ne 0) {\n $exitCode = $LASTEXITCODE\n }\n \n Write-Host $result\n exit $exitCode" + normalizedExpectedContent := normalizeWhiteSpace(expectedContents) if err != nil { t.Fatalf("Should not be error: %s", err) } s := string(readFile[:]) - if s != expectedContents { - t.Fatalf("Expected generated inlineScript to equal '%s', got '%s'", expectedContents, s) + normalizedString := normalizeWhiteSpace(s) + if normalizedString != normalizedExpectedContent { + t.Fatalf("Expected generated inlineScript to equal '%s', got '%s'", normalizedExpectedContent, normalizedString) } } @@ -74,12 +77,12 @@ func TestProvisionerPrepare_Defaults(t *testing.T) { t.Error("expected elevated_password to be empty") } - if p.config.ExecuteCommand != `powershell -executionpolicy bypass "& { 
if (Test-Path variable:global:ProgressPreference){set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};. {{.Vars}}; &'{{.Path}}'; exit $LastExitCode }"` { - t.Fatalf(`Default command should be 'powershell -executionpolicy bypass "& { if (Test-Path variable:global:ProgressPreference){set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};. {{.Vars}}; &'{{.Path}}'; exit $LastExitCode }"', but got '%s'`, p.config.ExecuteCommand) + if p.config.ExecuteCommand != `powershell -executionpolicy bypass -file {{.Path}}` { + t.Fatalf(`Default command should be 'powershell -executionpolicy bypass -file {{.Path}}', but got '%s'`, p.config.ExecuteCommand) } - if p.config.ElevatedExecuteCommand != `powershell -executionpolicy bypass "& { if (Test-Path variable:global:ProgressPreference){set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};. {{.Vars}}; &'{{.Path}}'; exit $LastExitCode }"` { - t.Fatalf(`Default command should be 'powershell -executionpolicy bypass "& { if (Test-Path variable:global:ProgressPreference){set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};. {{.Vars}}; &'{{.Path}}'; exit $LastExitCode }"', but got '%s'`, p.config.ElevatedExecuteCommand) + if p.config.ElevatedExecuteCommand != `powershell -executionpolicy bypass -file {{.Path}}` { + t.Fatalf(`Default command should be 'powershell -executionpolicy bypass -file {{.Path}}', but got '%s'`, p.config.ElevatedExecuteCommand) } if p.config.ElevatedEnvVarFormat != `$env:%s="%s"; ` { @@ -120,7 +123,7 @@ func TestProvisionerPrepare_DebugMode(t *testing.T) { t.Fatalf("err: %s", err) } - command := `powershell -executionpolicy bypass "& { if (Test-Path variable:global:ProgressPreference){set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};Set-PsDebug -Trace 1;. 
{{.Vars}}; &'{{.Path}}'; exit $LastExitCode }"` + command := `powershell -executionpolicy bypass -file {{.Path}}` if p.config.ExecuteCommand != command { t.Fatalf(fmt.Sprintf(`Expected command should be '%s' but got '%s'`, command, p.config.ExecuteCommand)) } @@ -483,7 +486,8 @@ func TestProvisionerProvision_Inline(t *testing.T) { } cmd := comm.StartCmd.Command - re := regexp.MustCompile(`powershell -executionpolicy bypass "& { if \(Test-Path variable:global:ProgressPreference\){set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};\. c:/Windows/Temp/packer-ps-env-vars-[[:alnum:]]{8}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{12}\.ps1; &'c:/Windows/Temp/inlineScript.ps1'; exit \$LastExitCode }"`) + re := regexp.MustCompile(`powershell -executionpolicy bypass -file c:/Windows/Temp/inlineScript.ps1`) + matched := re.MatchString(cmd) if !matched { t.Fatalf("Got unexpected command: %s", cmd) @@ -503,7 +507,7 @@ func TestProvisionerProvision_Inline(t *testing.T) { } cmd = comm.StartCmd.Command - re = regexp.MustCompile(`powershell -executionpolicy bypass "& { if \(Test-Path variable:global:ProgressPreference\){set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};\. c:/Windows/Temp/packer-ps-env-vars-[[:alnum:]]{8}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{12}\.ps1; &'c:/Windows/Temp/inlineScript.ps1'; exit \$LastExitCode }"`) + re = regexp.MustCompile(`powershell -executionpolicy bypass -file c:/Windows/Temp/inlineScript.ps1`) matched = re.MatchString(cmd) if !matched { t.Fatalf("Got unexpected command: %s", cmd) @@ -533,7 +537,7 @@ func TestProvisionerProvision_Scripts(t *testing.T) { } cmd := comm.StartCmd.Command - re := regexp.MustCompile(`powershell -executionpolicy bypass "& { if \(Test-Path variable:global:ProgressPreference\){set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};\. 
c:/Windows/Temp/packer-ps-env-vars-[[:alnum:]]{8}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{12}\.ps1; &'c:/Windows/Temp/script.ps1'; exit \$LastExitCode }"`) + re := regexp.MustCompile(`powershell -executionpolicy bypass -file c:/Windows/Temp/script.ps1`) matched := re.MatchString(cmd) if !matched { t.Fatalf("Got unexpected command: %s", cmd) @@ -570,7 +574,7 @@ func TestProvisionerProvision_ScriptsWithEnvVars(t *testing.T) { } cmd := comm.StartCmd.Command - re := regexp.MustCompile(`powershell -executionpolicy bypass "& { if \(Test-Path variable:global:ProgressPreference\){set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};\. c:/Windows/Temp/packer-ps-env-vars-[[:alnum:]]{8}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{12}\.ps1; &'c:/Windows/Temp/script.ps1'; exit \$LastExitCode }"`) + re := regexp.MustCompile(`powershell -executionpolicy bypass -file c:/Windows/Temp/script.ps1`) matched := re.MatchString(cmd) if !matched { t.Fatalf("Got unexpected command: %s", cmd) @@ -595,11 +599,11 @@ func TestProvisionerProvision_SkipClean(t *testing.T) { }{ { SkipClean: true, - LastExecutedCommandRegex: `powershell -executionpolicy bypass "& { if \(Test-Path variable:global:ProgressPreference\){set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};\. c:/Windows/Temp/packer-ps-env-vars-[[:alnum:]]{8}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{12}\.ps1; &'c:/Windows/Temp/script.ps1'; exit \$LastExitCode }"`, + LastExecutedCommandRegex: `powershell -executionpolicy bypass -file c:/Windows/Temp/script.ps1`, }, { SkipClean: false, - LastExecutedCommandRegex: `powershell -executionpolicy bypass "& { if \(Test-Path variable:global:ProgressPreference\){set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};\. 
c:/Windows/Temp/packer-ps-env-vars-[[:alnum:]]{8}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{12}\.ps1; &'c:/Windows/Temp/packer-cleanup-[[:alnum:]]{8}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{12}\.ps1'; exit \$LastExitCode }"`, + LastExecutedCommandRegex: `powershell -executionpolicy bypass -file c:/Windows/Temp/packer-cleanup-[[:alnum:]]{8}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{12}\.ps1`, }, } @@ -917,7 +921,7 @@ func TestProvision_createCommandText(t *testing.T) { p.generatedData = make(map[string]interface{}) cmd, _ := p.createCommandText() - re := regexp.MustCompile(`powershell -executionpolicy bypass "& { if \(Test-Path variable:global:ProgressPreference\){set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'};\. c:/Windows/Temp/packer-ps-env-vars-[[:alnum:]]{8}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{4}-[[:alnum:]]{12}\.ps1; &'c:/Windows/Temp/script.ps1'; exit \$LastExitCode }"`) + re := regexp.MustCompile(`powershell -executionpolicy bypass -file c:/Windows/Temp/script.ps1`) matched := re.MatchString(cmd) if !matched { t.Fatalf("Got unexpected command: %s", cmd) @@ -934,6 +938,28 @@ func TestProvision_createCommandText(t *testing.T) { } } +func TestProvision_createCommandTextNoneExecutionPolicy(t *testing.T) { + config := testConfig() + config["remote_path"] = "c:/Windows/Temp/script.ps1" + p := new(Provisioner) + + comm := new(packersdk.MockCommunicator) + p.communicator = comm + config["execution_policy"] = ExecutionPolicyNone + _ = p.Prepare(config) + + // Non-elevated + p.generatedData = make(map[string]interface{}) + + cmd, _ := p.createCommandText() + re := regexp.MustCompile(`-file c:/Windows/Temp/script.ps1`) + matched := re.MatchString(cmd) + if !matched { + t.Fatalf("Got unexpected command: %s", cmd) + } + +} + func TestProvision_uploadEnvVars(t *testing.T) { p := new(Provisioner) comm := new(packersdk.MockCommunicator) @@ -976,3 +1002,18 @@ func generatedData() 
map[string]interface{} { "PackerHTTPPort": commonsteps.HttpPortNotImplemented, } } + +func normalizeWhiteSpace(s string) string { + // Replace multiple spaces/tabs with a single space + re := regexp.MustCompile(`[\t ]+`) + s = re.ReplaceAllString(s, " ") + + // Trim leading/trailing spaces and newlines + s = strings.TrimSpace(s) + + // Normalize line breaks (remove excessive empty lines) + s = strings.ReplaceAll(s, "\r\n", "\n") // Convert Windows line endings to Unix + s = strings.ReplaceAll(s, "\r", "\n") // Convert old Mac line endings to Unix + + return s +} diff --git a/provisioner/powershell/test-fixtures/powershell-exit_codes-provisioner.txt b/provisioner/powershell/test-fixtures/powershell-exit_codes-provisioner.txt new file mode 100644 index 00000000000..f0445951d57 --- /dev/null +++ b/provisioner/powershell/test-fixtures/powershell-exit_codes-provisioner.txt @@ -0,0 +1,76 @@ +{ + "type": "powershell", + "inline": ["invalid-cmdlet"], + "valid_exit_codes": ["1"] +}, +{ + "type": "powershell", + "inline": ["#Requires -Version 10.0"], + "valid_exit_codes": ["1"] +}, +{ + "type": "powershell", + "inline": ["exit 1"], + "valid_exit_codes": ["1"] +}, +{ + "type": "powershell", + "inline": ["}}"], + "valid_exit_codes": ["1"] +}, +{ + "type": "powershell", + "inline": ["$LASTEXITCODE=1"], + "valid_exit_codes": ["1"] +}, +{ + "type": "powershell", + "inline": ["throw 'XXX'"], + "valid_exit_codes": ["1"] +}, +{ + "type": "powershell", + "script": "../../provisioner/powershell/test-fixtures/scripts/set_version_latest.ps1", + "valid_exit_codes": ["0"] +}, +{ + "type": "powershell", + "elevated_user": "Administrator", + "elevated_password": "{{.WinRMPassword}}", + "inline": "Get-ItemProperty -Path HKLM:\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + "valid_exit_codes": ["0"] +}, +{ + "type": "powershell", + "inline": "ping invalidhost", + "valid_exit_codes": ["1"] +}, +{ + "type": "powershell", + "inline": "sc.exe start command", + "valid_exit_codes": ["1060"] +}, 
+{ + "type": "powershell", + "inline": "echo 'Hi testing echo'; invalid command!; echo 'Another valid command';", + "valid_exit_codes": ["1"] +}, +{ + "type": "powershell", + "inline": ["$ErrorActionPreference='Stop'", "Get-Item 'C:\\nonexistent.txt'"], + "valid_exit_codes": ["1"] +}, +{ + "type": "powershell", + "inline": [ + "try {", + " invalid command", + "} catch {", + " exit 1", + "}" + ], + "valid_exit_codes": ["1"] +} + + + diff --git a/provisioner/powershell/test-fixtures/scripts/bootstrap_win.txt b/provisioner/powershell/test-fixtures/scripts/bootstrap_win.txt new file mode 100644 index 00000000000..ea3aedf706b --- /dev/null +++ b/provisioner/powershell/test-fixtures/scripts/bootstrap_win.txt @@ -0,0 +1,40 @@ +<powershell> +# Set administrator password +net user Administrator SuperS3cr3t!!!! +wmic useraccount where "name='Administrator'" set PasswordExpires=FALSE + +# First, make sure WinRM can't be connected to +netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new enable=yes action=block + +# Delete any existing WinRM listeners +winrm delete winrm/config/listener?Address=*+Transport=HTTP 2>$Null +winrm delete winrm/config/listener?Address=*+Transport=HTTPS 2>$Null + +# Disable group policies which block basic authentication and unencrypted login + +Set-ItemProperty -Path HKLM:\Software\Policies\Microsoft\Windows\WinRM\Client -Name AllowBasic -Value 1 +Set-ItemProperty -Path HKLM:\Software\Policies\Microsoft\Windows\WinRM\Client -Name AllowUnencryptedTraffic -Value 1 +Set-ItemProperty -Path HKLM:\Software\Policies\Microsoft\Windows\WinRM\Service -Name AllowBasic -Value 1 +Set-ItemProperty -Path HKLM:\Software\Policies\Microsoft\Windows\WinRM\Service -Name AllowUnencryptedTraffic -Value 1 + + +# Create a new WinRM listener and configure +winrm create winrm/config/listener?Address=*+Transport=HTTP +winrm set winrm/config/winrs '@{MaxMemoryPerShellMB="0"}' +winrm set winrm/config '@{MaxTimeoutms="7200000"}' +winrm set 
winrm/config/service '@{AllowUnencrypted="true"}' +winrm set winrm/config/service '@{MaxConcurrentOperationsPerUser="12000"}' +winrm set winrm/config/service/auth '@{Basic="true"}' +winrm set winrm/config/client/auth '@{Basic="true"}' + +# Configure UAC to allow privilege elevation in remote shells +$Key = 'HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System' +$Setting = 'LocalAccountTokenFilterPolicy' +Set-ItemProperty -Path $Key -Name $Setting -Value 1 -Force + +# Configure and restart the WinRM Service; Enable the required firewall exception +Stop-Service -Name WinRM +Set-Service -Name WinRM -StartupType Automatic +netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new action=allow localip=any remoteip=any +Start-Service -Name WinRM +</powershell> \ No newline at end of file diff --git a/provisioner/powershell/test-fixtures/scripts/set_version_latest.ps1 b/provisioner/powershell/test-fixtures/scripts/set_version_latest.ps1 new file mode 100644 index 00000000000..f548cea7a90 --- /dev/null +++ b/provisioner/powershell/test-fixtures/scripts/set_version_latest.ps1 @@ -0,0 +1,13 @@ +# Test fixture is a modified version of the example found at +# https://www.powershellmagazine.com/2012/10/23/pstip-set-strictmode-why-should-you-care/ + +Set-StrictMode -Version Latest +$ErrorActionPreference = "Stop" + +$myNumbersCollection = 1..5 +if($myNumbersCollection -contains 3) { + "collection contains 3" +} +else { + "collection doesn't contain 3" +} \ No newline at end of file
PowerShell provisioner silently ignores some types of errors Packer v1.0.0 on Windows 10 Enterprise 15063.296. Building a Windows Server 2016 Standard image on Hyper-V. The following provisioner contains invalid PowerShell commands. I would expect the build to fail. However, these errors are silently ignored. ``` { "type": "powershell", "inline": "this is not a valid powershell command" } ``` Debug log snippet: ``` 2017/05/23 09:47:38 packer.exe: 2017/05/23 09:47:38 [DEBUG] connecting to remote shell using WinRM 2017/05/23 09:47:38 ui: ==> hyperv-iso: Connected to WinRM! ==> hyperv-iso: Connected to WinRM! 2017/05/23 09:47:38 packer.exe: 2017/05/23 09:47:38 Running the provision hook ==> hyperv-iso: Provisioning with Powershell... 2017/05/23 09:47:38 ui: ==> hyperv-iso: Provisioning with Powershell... ==> hyperv-iso: Provisioning with shell script: C:\Users\saares\AppData\Local\Temp\packer-powershell-provisioner887540923 2017/05/23 09:47:38 packer.exe: 2017/05/23 09:47:38 Found command: this is not a valid powershell command 2017/05/23 09:47:38 ui: ==> hyperv-iso: Provisioning with shell script: C:\Users\saares\AppData\Local\Temp\packer-powershell-provisioner887540923 2017/05/23 09:47:38 packer.exe: 2017/05/23 09:47:38 Opening C:\Users\saares\AppData\Local\Temp\packer-powershell-provisioner887540923 for reading 2017/05/23 09:47:38 packer.exe: 2017/05/23 09:47:38 Building command line for: if (Test-Path variable:global:ProgressPreference){$ProgressPreference='SilentlyContinue'};$env:PACKER_BUILDER_TYPE="hyperv-iso"; $env:PACKER_BUILD_NAME="hyperv-iso"; &'c:/Windows/Temp/script.ps1';exit $LastExitCode 2017/05/23 09:47:38 packer.exe: 2017/05/23 09:47:38 [INFO] 39 bytes written for 'uploadData' 2017/05/23 09:47:38 [INFO] 39 bytes written for 'uploadData' 2017/05/23 09:47:38 packer.exe: 2017/05/23 09:47:38 Uploading file to 'c:/Windows/Temp/script.ps1' 2017/05/23 09:47:38 packer.exe: 2017/05/23 09:47:38 Copying file to 
$env:TEMP\winrmcp-848ef57a-37f2-474c-60ab-df80537f7458.tmp 2017/05/23 09:47:38 packer.exe: 2017/05/23 09:47:38 Moving file from $env:TEMP\winrmcp-848ef57a-37f2-474c-60ab-df80537f7458.tmp to c:\Windows\Temp\script.ps1 2017/05/23 09:47:41 packer.exe: #< CLIXML 2017/05/23 09:47:41 packer.exe: <Objs Version="1.1.0.1" xmlns="http://schemas.microsoft.com/powershell/2004/04"><Obj S="progress" RefId="0"><TN RefId="0"><T>System.Management.Automation.PSCustomObject</T><T>System.Object</T></TN><MS><I64 N="SourceId">1</I64><PR N="Record"><AV>Preparing modules for first use.</AV><AI>0</AI><Nil /><PI>-1</PI><PC>-1</PC><T>Completed</T><SR>-1</SR><SD> </SD></PR></MS></Obj></Objs>2017/05/23 09:47:41 Removing temporary file $env:TEMP\winrmcp-848ef57a-37f2-474c-60ab-df80537f7458.tmp 2017/05/23 09:47:43 packer.exe: 2017/05/23 09:47:43 [INFO] starting remote command: powershell -executionpolicy bypass -encodedCommand aQBmACAAKABUAGUAcwB0AC0AUABhAHQAaAAgAHYAYQByAGkAYQBiAGwAZQA6AGcAbABvAGIAYQBsADoAUAByAG8AZwByAGUAcwBzAFAAcgBlAGYAZQByAGUAbgBjAGUAKQB7ACQAUAByAG8AZwByAGUAcwBzAFAAcgBlAGYAZQByAGUAbgBjAGUAPQAnAFMAaQBsAGUAbgB0AGwAeQBDAG8AbgB0AGkAbgB1AGUAJwB9ADsAJABlAG4AdgA6AFAAQQBDAEsARQBSAF8AQgBVAEkATABEAEUAUgBfAFQAWQBQAEUAPQAiAGgAeQBwAGUAcgB2AC0AaQBzAG8AIgA7ACAAJABlAG4AdgA6AFAAQQBDAEsARQBSAF8AQgBVAEkATABEAF8ATgBBAE0ARQA9ACIAaAB5AHAAZQByAHYALQBpAHMAbwAiADsAIAAmACcAYwA6AC8AVwBpAG4AZABvAHcAcwAvAFQAZQBtAHAALwBzAGMAcgBpAHAAdAAuAHAAcwAxACcAOwBlAHgAaQB0ACAAJABMAGEAcwB0AEUAeABpAHQAQwBvAGQAZQA= 2017/05/23 09:47:46 ui: hyperv-iso: #< CLIXML hyperv-iso: #< CLIXML 2017/05/23 09:47:48 packer.exe: 2017/05/23 09:47:48 [INFO] command 'powershell -executionpolicy bypass -encodedCommand 
aQBmACAAKABUAGUAcwB0AC0AUABhAHQAaAAgAHYAYQByAGkAYQBiAGwAZQA6AGcAbABvAGIAYQBsADoAUAByAG8AZwByAGUAcwBzAFAAcgBlAGYAZQByAGUAbgBjAGUAKQB7ACQAUAByAG8AZwByAGUAcwBzAFAAcgBlAGYAZQByAGUAbgBjAGUAPQAnAFMAaQBsAGUAbgB0AGwAeQBDAG8AbgB0AGkAbgB1AGUAJwB9ADsAJABlAG4AdgA6AFAAQQBDAEsARQBSAF8AQgBVAEkATABEAEUAUgBfAFQAWQBQAEUAPQAiAGgAeQBwAGUAcgB2AC0AaQBzAG8AIgA7ACAAJABlAG4AdgA6AFAAQQBDAEsARQBSAF8AQgBVAEkATABEAF8ATgBBAE0ARQA9ACIAaAB5AHAAZQByAHYALQBpAHMAbwAiADsAIAAmACcAYwA6AC8AVwBpAG4AZABvAHcAcwAvAFQAZQBtAHAALwBzAGMAcgBpAHAAdAAuAHAAcwAxACcAOwBlAHgAaQB0ACAAJABMAGEAcwB0AEUAeABpAHQAQwBvAGQAZQA=' exited with code: 0 2017/05/23 09:47:48 packer.exe: 2017/05/23 09:47:48 [INFO] RPC endpoint: Communicator ended with: 0 2017/05/23 09:47:48 [INFO] 1082 bytes written for 'stderr' hyperv-iso: <Objs Version="1.1.0.1" xmlns="http://schemas.microsoft.com/powershell/2004/04"><Obj S="progress" RefId="0"><TN RefId="0"><T>System.Management.Automation.PSCustomObject</T><T>System.Object</T></TN><MS><I64 N="SourceId">1</I64><PR N="Record"><AV>Preparing modules for first use.</AV><AI>0</AI><Nil /><PI>-1</PI><PC>-1</PC><T>Completed</T><SR>-1</SR><SD> </SD></PR></MS></Obj><S S="Error">this : The term 'this' is not recognized as the name of a cmdlet, function, script file, or operable program. 
Check _x000D__x000A_</S><S S="Error">the spelling of the name, or if a path was included, verify that the path is correct and try again._x000D__x000A_</S><S S="Error">At C:\Windows\Temp\script.ps1:1 char:1_x000D__x000A_</S><S S="Error">+ this is not a valid powershell command_x000D__x000A_</S><S S="Error">+ ~~~~_x000D__x000A_</S><S S="Error"> + CategoryInfo : ObjectNotFound: (this:String) [], CommandNotFoundException_x000D__x000A_</S><S S="Error"> + FullyQualifiedErrorId : CommandNotFoundException_x000D__x000A_</S><S S="Error"> _x000D__x000A_</S></Objs> 2017/05/23 09:47:48 [INFO] 0 bytes written for 'stdout' 2017/05/23 09:47:48 [INFO] RPC client: Communicator ended with: 0 2017/05/23 09:47:48 [INFO] RPC endpoint: Communicator ended with: 0 2017/05/23 09:47:48 packer.exe: 2017/05/23 09:47:48 [INFO] 1082 bytes written for 'stderr' 2017/05/23 09:47:48 packer.exe: 2017/05/23 09:47:48 [INFO] 0 bytes written for 'stdout' 2017/05/23 09:47:48 packer.exe: 2017/05/23 09:47:48 [INFO] RPC client: Communicator ended with: 0 2017/05/23 09:47:48 ui: hyperv-iso: <Objs Version="1.1.0.1" xmlns="http://schemas.microsoft.com/powershell/2004/04"><Obj S="progress" RefId="0"><TN RefId="0"><T>System.Management.Automation.PSCustomObject</T><T>System.Object</T></TN><MS><I64 N="SourceId">1</I64><PR N="Record"><AV>Preparing modules for first use.</AV><AI>0</AI><Nil /><PI>-1</PI><PC>-1</PC><T>Completed</T><SR>-1</SR><SD> </SD></PR></MS></Obj><S S="Error">this : The term 'this' is not recognized as the name of a cmdlet, function, script file, or operable program. 
Check _x000D__x000A_</S><S S="Error">the spelling of the name, or if a path was included, verify that the path is correct and try again._x000D__x000A_</S><S S="Error">At C:\Windows\Temp\script.ps1:1 char:1_x000D__x000A_</S><S S="Error">+ this is not a valid powershell command_x000D__x000A_</S><S S="Error">+ ~~~~_x000D__x000A_</S><S S="Error"> + CategoryInfo : ObjectNotFound: (this:String) [], CommandNotFoundException_x000D__x000A_</S><S S="Error"> + FullyQualifiedErrorId : CommandNotFoundException_x000D__x000A_</S><S S="Error"> _x000D__x000A_</S></Objs> ``` You can see some errors printed there but Packer just continues as if all was fine.
This seems to be due to the fact that while an error is written, `$LastExitCode` is not set. The powershell provisioner uses `$LastExitCode` as its own exit code, so there we are - Packer thinks all went well. A workaround is to prefix the command in the template with `$ErrorActionPreference="Stop"`, which causes the whole PowerShell execution to stop on first error (including the parent script, which would otherwise exit with `$LastExitCode`). From the usage of `$LastExitCode` in the default content for the autogenerated parent script, I infer that the intent is to fail the provisioner if something in the script fails. Therefore it seems like it might be valid to also include `$ErrorActionPreference="Stop"` in the default parent script. Thanks for the explanation, has helped me. Seeing this when I try to use the PowerShell Inline provisioner with the Azure-arm builder thats building a Windows 2012r2 .vhd image. I've been pulling my hair out with this. Thank you for the solution! It looks like the powershell execute command has been subject to entropy. I see the docs still call it out as `powershell "& { {{.Vars}}{{.Path}}; exit $LastExitCode}"`, which I think is what it used to be. I'll see if I can work out the correct command to run. Currently it's `if (Test-Path variable:global:ProgressPreference){$ProgressPreference='SilentlyContinue'};{{.Vars}}&'{{.Path}}';exit $LastExitCode` The fun part of it is that `$LastExitCode` is only set based on external programs called from powershell, not based on any errors. I looked through the code base for [Octopus Deploy](https://octopus.com) (which does a _lot_ of powershell script running) and we do: ``` var command = "powershell -NonInteractive -NoProfile -NoLogo -ExecutionPolicy Unrestricted -Command \"$ErrorActionPreference = 'Stop'; . {{. '" + scriptFile + "' " + args + "; if ((test-path variable:global:lastexitcode)) {{ exit $LastExitCode }}}}\""; ``` Hope that helps. that looks useful, thanks! 
I have been working out the kinks for how to remotely run powershell without encoding issues and returning expected results. I think it is almost ready to be pulled into Packer. https://github.com/taliesins/terraform/blob/HyperVProvider/builtin/providers/hyperv/powershell/template.go @taliesins think this'll be ready for this milestone? (Scheduled for Sept 27) Pushing this to the next milestone, since I haven't heard from @taliesins and I assume that means he's got other stuff going on. I think we should probably conclude that @taliesins is too busy with other things to get this across the finish line; unscheduling for now. @taliesins Can we get this issue resolved? Weren't to you close to a fix already? I still encounter this problem with packer 1.4.0. A script with a syntax error doesn't abort the build. I also just hit this, with another twist that the error itself was the powershell provisioner seeming not copying the script? > ==> openstack: Using winrm communicator to connect: 10.141.130.30 > ==> openstack: Waiting for WinRM to become available... > ==> openstack: #< CLIXML > openstack: WinRM connected. > ==> openstack: <Objs Version="1.1.0.1" xmlns="http://schemas.microsoft.com/powershell/2004/04"><Obj S="progress" RefId="0"><TN RefId="0"><T>System.Management.Automation.PSCustomObject</T><T>System.Object</T></TN><MS><I64 N="SourceId">1</I64><PR N="Record"><AV>Preparing modules for first use.</AV><AI>0</AI><Nil /><PI>-1</PI><PC>-1</PC><T>Completed</T><SR>-1</SR><SD> </SD></PR></MS></Obj><Obj S="progress" RefId="1"><TNRef RefId="0" /><MS><I64 N="SourceId">2</I64><PR N="Record"><AV>Preparing modules for first use.</AV><AI>0</AI><Nil /><PI>-1</PI><PC>-1</PC><T>Completed</T><SR>-1</SR><SD> </SD></PR></MS></Obj></Objs> > ==> openstack: Connected to WinRM! > ==> openstack: Provisioning with Powershell... 
> ==> openstack: Provisioning with powershell script: ../common/install-python.ps1 > ==> openstack: & : The term 'c:/Windows/Temp/script-5da72be0-bc01-7db9-53da-45767cb4574a.ps1' is not recognized as the name of a > ==> openstack: cmdlet, function, script file, or operable program. Check the spelling of the name, or if a path was included, verify > ==> openstack: that the path is correct and try again. > ==> openstack: At line:1 char:216 > ==> openstack: + ... 43fe7.ps1; &'c:/Windows/Temp/script-5da72be0-bc01-7db9-53da-45767cb45 ... > ==> openstack: + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ > ==> openstack: + CategoryInfo : ObjectNotFound: (c:/Windows/Temp...5767cb4574a.ps1:String) [], CommandNotFoundException > ==> openstack: + FullyQualifiedErrorId : CommandNotFoundException > ==> openstack: > ==> openstack: Provisioning with Powershell... > ==> openstack: Provisioning with powershell script: ../conan/scripts/install-conan.ps1 So not only did the error not stop the build, it also seems to be that the file which was missing is the script packer supposedly just uploaded (maybe that failed too, and that error was also silently ignored)? On a subsequent run (no changes to the template or source image) it worked fine, so it was apparently some transient hiccup, and there's nothing more in the log (the next provisioner script actually failed because it uses python, which hadn't been installed because the script got skipped by this error, and that failure did abort the build). This fix for this issue has been reverted, as there were issues with the updated provisioner when executing over Windows SSH. I am reopening for further investigation. What is the status of this issue? Is it still being worked on? The -NonInteractive Parameter should be added by default when starting a powershell shell. ATM packer waits endless for user input when there is a confirmation prompt This issue has been synced to JIRA for planning. 
JIRA ID: [HPR-762](https://hashicorp.atlassian.net/browse/HPR-762) Following for any future updates
This seems to be due to the fact that while an error is written, `$LastExitCode` is not set. The powershell provisioner uses `$LastExitCode` as its own exit code, so there we are - Packer thinks all went well. A workaround is to prefix the command in the template with `$ErrorActionPreference="Stop"`, which causes the whole PowerShell execution to stop on first error (including the parent script, which would otherwise exit with `$LastExitCode`). From the usage of `$LastExitCode` in the default content for the autogenerated parent script, I infer that the intent is to fail the provisioner if something in the script fails. Therefore it seems like it might be valid to also include `$ErrorActionPreference="Stop"` in the default parent script. Thanks for the explanation, has helped me. Seeing this when I try to use the PowerShell Inline provisioner with the Azure-arm builder thats building a Windows 2012r2 .vhd image. I've been pulling my hair out with this. Thank you for the solution! It looks like the powershell execute command has been subject to entropy. I see the docs still call it out as `powershell "& { {{.Vars}}{{.Path}}; exit $LastExitCode}"`, which I think is what it used to be. I'll see if I can work out the correct command to run. Currently it's `if (Test-Path variable:global:ProgressPreference){$ProgressPreference='SilentlyContinue'};{{.Vars}}&'{{.Path}}';exit $LastExitCode` The fun part of it is that `$LastExitCode` is only set based on external programs called from powershell, not based on any errors. I looked through the code base for [Octopus Deploy](https://octopus.com) (which does a _lot_ of powershell script running) and we do: ``` var command = "powershell -NonInteractive -NoProfile -NoLogo -ExecutionPolicy Unrestricted -Command \"$ErrorActionPreference = 'Stop'; . {{. '" + scriptFile + "' " + args + "; if ((test-path variable:global:lastexitcode)) {{ exit $LastExitCode }}}}\""; ``` Hope that helps. that looks useful, thanks! 
I have been working out the kinks for how to remotely run powershell without encoding issues and returning expected results. I think it is almost ready to be pulled into Packer. https://github.com/taliesins/terraform/blob/HyperVProvider/builtin/providers/hyperv/powershell/template.go @taliesins think this'll be ready for this milestone? (Scheduled for Sept 27) Pushing this to the next milestone, since I haven't heard from @taliesins and I assume that means he's got other stuff going on. I think we should probably conclude that @taliesins is too busy with other things to get this across the finish line; unscheduling for now. @taliesins Can we get this issue resolved? Weren't to you close to a fix already? I still encounter this problem with packer 1.4.0. A script with a syntax error doesn't abort the build. I also just hit this, with another twist that the error itself was the powershell provisioner seeming not copying the script? > ==> openstack: Using winrm communicator to connect: 10.141.130.30 > ==> openstack: Waiting for WinRM to become available... > ==> openstack: #< CLIXML > openstack: WinRM connected. > ==> openstack: <Objs Version="1.1.0.1" xmlns="http://schemas.microsoft.com/powershell/2004/04"><Obj S="progress" RefId="0"><TN RefId="0"><T>System.Management.Automation.PSCustomObject</T><T>System.Object</T></TN><MS><I64 N="SourceId">1</I64><PR N="Record"><AV>Preparing modules for first use.</AV><AI>0</AI><Nil /><PI>-1</PI><PC>-1</PC><T>Completed</T><SR>-1</SR><SD> </SD></PR></MS></Obj><Obj S="progress" RefId="1"><TNRef RefId="0" /><MS><I64 N="SourceId">2</I64><PR N="Record"><AV>Preparing modules for first use.</AV><AI>0</AI><Nil /><PI>-1</PI><PC>-1</PC><T>Completed</T><SR>-1</SR><SD> </SD></PR></MS></Obj></Objs> > ==> openstack: Connected to WinRM! > ==> openstack: Provisioning with Powershell... 
> ==> openstack: Provisioning with powershell script: ../common/install-python.ps1 > ==> openstack: & : The term 'c:/Windows/Temp/script-5da72be0-bc01-7db9-53da-45767cb4574a.ps1' is not recognized as the name of a > ==> openstack: cmdlet, function, script file, or operable program. Check the spelling of the name, or if a path was included, verify > ==> openstack: that the path is correct and try again. > ==> openstack: At line:1 char:216 > ==> openstack: + ... 43fe7.ps1; &'c:/Windows/Temp/script-5da72be0-bc01-7db9-53da-45767cb45 ... > ==> openstack: + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ > ==> openstack: + CategoryInfo : ObjectNotFound: (c:/Windows/Temp...5767cb4574a.ps1:String) [], CommandNotFoundException > ==> openstack: + FullyQualifiedErrorId : CommandNotFoundException > ==> openstack: > ==> openstack: Provisioning with Powershell... > ==> openstack: Provisioning with powershell script: ../conan/scripts/install-conan.ps1 So not only did the error not stop the build, it also seems to be that the file which was missing is the script packer supposedly just uploaded (maybe that failed too, and that error was also silently ignored)? On a subsequent run (no changes to the template or source image) it worked fine, so it was apparently some transient hiccup, and there's nothing more in the log (the next provisioner script actually failed because it uses python, which hadn't been installed because the script got skipped by this error, and that failure did abort the build). This fix for this issue has been reverted, as there were issues with the updated provisioner when executing over Windows SSH. I am reopening for further investigation. What is the status of this issue? Is it still being worked on? The -NonInteractive Parameter should be added by default when starting a powershell shell. ATM packer waits endless for user input when there is a confirmation prompt This issue has been synced to JIRA for planning. 
JIRA ID: [HPR-762](https://hashicorp.atlassian.net/browse/HPR-762) Following for any future updates There seem to be a long-standing bug in Packer - https://github.com/hashicorp/packer/issues/11198#issuecomment-2734862274
[ "https://github.com/hashicorp/packer/commit/312eea18cc87d24a79cd30d2b1eb3d35ef081b6d", "https://github.com/hashicorp/packer/commit/ffdbdf434c72b1abcfbe948b5053c8b5fbc101b5", "https://github.com/hashicorp/packer/commit/942a920671f799b0643eb36c61203b29ee5d7110", "https://github.com/hashicorp/packer/commit/084f3...
2025-03-17T03:56:51Z
https://github.com/hashicorp/packer/tree/d4ebc48b891bade457c39934c1c1a46f08cf6e71
[ "go mod download ; go build ./..." ]
[ "cd /testbed; New-Item -ItemType Directory -Force reports | Out-Null; go test -json -v ./... -timeout=10m | Out-File -FilePath reports/go-test-results.json -Encoding utf8" ]
[ "cd /testbed; Get-Content reports/go-test-results.json" ]
def parser(log: str) -> dict[str, str]: import json import re TERM = {"pass", "fail", "skip"} prio = {"pass": 0, "skip": 1, "fail": 2} def norm_status(s: str | None) -> str | None: if not s: return None s = s.strip().lower() if s in TERM: return s if s in {"error", "panic"}: return "fail" return None def set_status(results: dict[str, str], k: str, st: str): prev = results.get(k) if prev is None or prio[st] >= prio[prev]: results[k] = st # Canonicalize names to avoid PowerShell wrap artifacts: # - remove all whitespace # - collapse obvious wrap-induced doubled letters in common tokens: # "github..com" -> "github.com" # path segment repeated first char: "coom"->"com" is hard; but "github..com" is common. # We keep this conservative: only fix github..com and repeated dots, and strip whitespace. def canon(s: str | None) -> str | None: if s is None: return None s = re.sub(r"\s+", "", s) s = s.replace("github..com", "github.com") s = re.sub(r"\.{2,}", ".", s) return s def mk_key(pkg: str | None, test: str | None) -> str | None: if not test: return None pkg = canon(pkg) test = canon(test) if pkg: return f"{pkg}/{test}" return test # Regexes tolerant to malformed JSON fragments re_action = re.compile(r'"Action"\s*:\s*"([^"]+)"', re.IGNORECASE) re_pkg = re.compile(r'"Package"\s*:\s*"([^"]*)"', re.IGNORECASE) re_test = re.compile(r'"Test"\s*:\s*"([^"]*)"', re.IGNORECASE) re_output = re.compile(r'"Output"\s*:+\s*"((?:\\.|[^"])*)"', re.IGNORECASE) re_go_status = re.compile(r"---\s+(PASS|FAIL|SKIP):\s+([^\s]+)") def dewrap(s: str) -> str: return s.replace("\r", "").replace("\n", "") def unescape_json_string(s: str) -> str: try: return json.loads(f'"{s}"') except Exception: return s.replace(r"\n", "\n").replace(r"\\", "\\").replace(r"\"", '"') results: dict[str, str] = {} lines = log.splitlines() buf: list[str] = [] def flush_buf(): if not buf: return chunk = "\n".join(buf).strip() buf.clear() if not chunk.startswith("{"): return chunk2 = dewrap(chunk) ev = None try: ev = 
json.loads(chunk2) except Exception: ev = None pkg = test = action = out = None if isinstance(ev, dict): pkg = ev.get("Package") test = ev.get("Test") action = ev.get("Action") out = ev.get("Output") else: m = re_pkg.search(chunk2) if m: pkg = m.group(1) m = re_test.search(chunk2) if m: test = m.group(1) m = re_action.search(chunk2) if m: action = m.group(1) m = re_output.search(chunk2) if m: out = m.group(1) pkg = canon(pkg) test = canon(test) # 1) Terminal Action with explicit Test is the authoritative testcase name st = norm_status(action) k = mk_key(pkg, test) if k and st in TERM: set_status(results, k, st) # 2) Parse status from Output, but only trust it if: # - we know the package # - and either the event has no explicit Test (package-level output), # or the output status refers to the same Test/subtest prefix. if out and pkg: out_text = out if isinstance(out, str) else str(out) if ev is None: out_text = unescape_json_string(out_text) for m in re_go_status.finditer(out_text): st2 = m.group(1).lower() tname = canon(m.group(2)) if not tname: continue # If event has explicit test, keep output only if it matches that test or its subtests if test and not (tname == test or tname.startswith(test + "/")): continue tkey = mk_key(pkg, tname) if tkey: set_status(results, tkey, st2) for ln in lines: if ln.strip() == "PS>" or ln.strip().startswith("PS>"): continue if ln.lstrip().startswith("{") and buf: flush_buf() buf.append(ln) flush_buf() return results
[ "github.coom/hashicorp/packer/command/TestBuild", "github.coom/hashicorp/packer/hcl2template/function/TestIndex/index(cty.ListVal([]]cty.Value{cty.NumberIntVal(1),_cty.NumberIntVal(2),_cty.NumberIntVal(3)}),_cty.NNumberIntVal(2))", "github.comm/hashicorp/packer/packer_test/plugin_tests/Test_PackerPluginSuite/Te...
[ "github.coom/hashicorp/packer/command/flag-slice/TestStringFlag_implements", "github.coom/hashicorp/packer/internal/hcp/registry/TestNewRegisterProperBuildNamee/multiple_build_block_with_same_source_create_conflict", "github.coom/hashicorp/packer/hcl2template/function/TestStrContains/includes(cty.SStringVal(\"h...
starryzhang/sweb.eval.win.hashicorp_1776_packer-13334
hashicorp/packer
13451
hashicorp__packer-13451
Go
[ "13448" ]
40bea58c92a39c8f6d9d0697c1a067fe674a739e
diff --git a/provisioner/powershell/provisioner.go b/provisioner/powershell/provisioner.go index a74a790a7c3..ea54c7576ac 100644 --- a/provisioner/powershell/provisioner.go +++ b/provisioner/powershell/provisioner.go @@ -58,8 +58,6 @@ const wrapPowershellString string = ` if ($LASTEXITCODE -ne $null -and $LASTEXITCODE -ne 0) { $exitCode = $LASTEXITCODE } - - Write-Host $result exit $exitCode `
diff --git a/provisioner/powershell/provisioner_test.go b/provisioner/powershell/provisioner_test.go index 7c9069ee91f..67dc49f4518 100644 --- a/provisioner/powershell/provisioner_test.go +++ b/provisioner/powershell/provisioner_test.go @@ -36,7 +36,7 @@ func TestProvisionerPrepare_extractScript(t *testing.T) { // File contents should contain 2 lines concatenated by newlines: foo\nbar readFile, err := os.ReadFile(file) - expectedContents := " \n\tif (Test-Path variable:global:ProgressPreference) {\n\t set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'\n\t}\n\t\n\t$exitCode = 0\n\ttry {\n\t$env:PACKER_BUILDER_TYPE=\"\"; $env:PACKER_BUILD_NAME=\"\"; \n\tfoo\n\tbar\n\t\n\t$exitCode = 0\n\t} catch {\n\tWrite-Error \"An error occurred: $_\"\n\t$exitCode = 1\n\t}\n\t\n\tif ($LASTEXITCODE -ne $null -and $LASTEXITCODE -ne 0) {\n\t\t$exitCode = $LASTEXITCODE\n\t}\n\t\n\tWrite-Host $result\n\texit $exitCode\n\n" + expectedContents := "if (Test-Path variable:global:ProgressPreference) {\n set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue'\n }\n \n $exitCode = 0\n try {\n $env:PACKER_BUILDER_TYPE=\"\"; $env:PACKER_BUILD_NAME=\"\"; \n foo\n bar\n \n $exitCode = 0\n } catch {\n Write-Error \"An error occurred: $_\"\n $exitCode = 1\n }\n \n if ($LASTEXITCODE -ne $null -and $LASTEXITCODE -ne 0) {\n $exitCode = $LASTEXITCODE\n }\n exit $exitCode" normalizedExpectedContent := normalizeWhiteSpace(expectedContents) if err != nil { t.Fatalf("Should not be error: %s", err)
New powershell wrapping is broken in 1.14.1, $result is undefined <!--- Please keep this note for the community ---> #### Community Note * Please vote on this issue by adding a 👍 [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) to the original issue to help the community and maintainers prioritize this request * Please do not leave "+1" or other comments that do not add relevant new information or questions, they generate extra noise for issue followers and do not help prioritize the request * If you are interested in working on this issue or have submitted a pull request, please leave a comment <!--- Thank you for keeping this note for the community ---> When filing a bug, please include the following headings if possible. Any example text in this template can be deleted. #### Overview of the Issue The new in `packer>1.13.0` introduced powershell wrapping uses a _pre and a post template_ which includes an undefined variable `$result`. This variable is written to stdout at the end of every inline script. If you set somewhere in your script `$ErrorActionPreference = "Stop"` this undefined `$result` variable will stop the script execution and mark the packer build as failed. #### Reproduction Steps Create a new packer file with a powershell provisioner: ```hcl # ... provisioner "powershell" { inline = [ "dir" ] } # ... 
``` It will wrap the script on the target machine in a file similar to `C:\Windows\Temp\script-script-689da6c8-5353-6d50-c1b9-7b6f0e47a796.ps1`, which contains: ```ps1 if (Test-Path variable:global:ProgressPreference) { set-variable -name variable:global:ProgressPreference -value 'SilentlyContinue' } $exitCode = 0 try { $env:PACKER_BUILDER_TYPE="hyperv-iso"; $env:PACKER_BUILD_NAME="core"; $env:PACKER_HTTP_ADDR="172.26.96.1:0"; $env:PACKER_HTTP_IP="172.26.96.1"; $env:PACKER_HTTP_PORT="0"; # START of your powershell inline snippet dir # END of your powershell inline snippet $exitCode = 0 } catch { Write-Error "An error occurred: $_" $exitCode = 1 } if ($LASTEXITCODE -ne $null -and $LASTEXITCODE -ne 0) { $exitCode = $LASTEXITCODE } Write-Host $result exit $exitCode ``` ### Packer version From `packer version>=1.13.1` I assume, but this error has been captured on `packer version 1.14.1`. ### Simplified Packer Template Use one from the `packer` docs and add the following `powershell provisioner`: ```hcl # ... provisioner "powershell" { inline = [ "dir" ] } # ... ``` ### Operating system and Environment details Running on Windows and hyperv, building Windows. amd64, trying to run `packer build` for Windows 11 Enterprise Evaluation. ### Log Fragments and crash.log files Crash.log does not help, it just tells you the script execution failed. But if you log into the machine itself and check what has been written to the powershell stderr, you'll find this: ``` PS C:\Users\Administrator> $Error The variable '$result' cannot be retrieved because it has not been set. At C:\Windows\Temp\script-689da6c8-5353-6d50-c1b9-7b6f0e47a796.ps1:99 char:13 + Write-Host $result + ~~~~~~~ + CategoryInfo : InvalidOperation: (result:String) [], RuntimeException + FullyQualifiedErrorId : VariableIsUndefined ``` And this is true, the variable `$result` has never been defined or initialized. 
## Workaround Define `$result=""` in your powershell inline script or set `$ErrorActionPreference = "Continue"`.
1.13.x issues have been reverted on 1.14.0, so there is no powershell re-wrapping. Can you confirm which packer version are you using?
1.13.x issues have been reverted on 1.14.0, so there is no powershell re-wrapping. Can you confirm which packer version are you using? I don't see anywhere that these changes have been rolled back in the [CHANGELOG](https://github.com/hashicorp/packer/blob/main/CHANGELOG.md), it is just mentioned the default behavior was restored back to how it was in version 1.12.0. This wrapping is clearly done in packer 1.14.1 and the problematic code fragment is here: https://github.com/hashicorp/packer/blob/v1.14.1/provisioner/powershell/provisioner.go#L41-L63. As you see, `$result` has not been defined anywhere. Just seen, it has been addressed - just now in https://github.com/hashicorp/packer/commit/4dd1fef5585866b79b6827d854fcf3ec65975700 I'm going to lock this issue because it has been closed for _30 days_ ⏳. This helps our maintainers find and focus on the active issues. If you have found a problem that seems similar to this, please open a new issue and complete the issue template so we can capture all the details necessary to investigate further.
[ "https://github.com/hashicorp/packer/commit/3a04dd50a58d172de4f02f40045d7550e969b541", "https://github.com/hashicorp/packer/commit/c1c4de5a15c9555266fdaa6d534719a59874fdc8" ]
2025-08-19T10:15:02Z
https://github.com/hashicorp/packer/tree/40bea58c92a39c8f6d9d0697c1a067fe674a739e
[ "go mod download ; go build ./..." ]
[ "cd /testbed; mkdir -Force reports | Out-Null; go test -v ./... -count=1 -timeout=3m -json 2>&1 | Tee-Object -FilePath reports/go-test-results.json" ]
[ "cd /testbed; Get-Content -Raw reports/go-test-results.json" ]
def parser(log: str) -> dict[str, str]: import json import re # precedence: fail > skip > pass rank = {"pass": 0, "skip": 1, "fail": 2} def upd(results, name, status): if not name or status not in rank: return cur = results.get(name) if cur is None or rank[status] > rank[cur]: results[name] = status def canonical_pkg(pkg: str) -> str: if not isinstance(pkg, str): return pkg p = pkg.strip() if p.startswith("github") and "/hashicorp/" in p: p = re.sub(r"^github\.\.com\b", "github.com", p) p = re.sub(r"^github\.ccom\b", "github.com", p) p = re.sub(r"^github\.comm\b", "github.com", p) p = re.sub(r"^github\.coom\b", "github.com", p) p = re.sub(r"^github\.c+o+m\b", "github.com", p) return p def canonical_test(t: str) -> str: if not isinstance(t, str): return t return t.strip() def iter_go_test_json_events(ps_text: str): """ Reconstruct JSON events from PowerShell-wrapped Get-Content output. Strategy: each event starts on a line whose first non-space is '{'. Subsequent wrapped lines are appended verbatim (no .strip()). 
""" buf = None for raw_ln in ps_text.splitlines(): ln = raw_ln.rstrip("\r") # skip PowerShell prompts/noise if re.match(r"^\s*PS>.*$", ln): continue if re.match(r"^\s*ContainerAdministrator@.*$", ln): continue if ln == "": continue lstripped = ln.lstrip() if lstripped.startswith("{"): # flush previous if buf is not None: yield buf # keep from the first '{' to preserve exact JSON characters buf = ln[ln.find("{") :] else: if buf is not None: # append verbatim continuation buf += ln if buf is not None: yield buf results: dict[str, str] = {} saw_action_events = False for s in iter_go_test_json_events(log): try: ev = json.loads(s) except Exception: continue pkg = canonical_pkg(ev.get("Package")) test = canonical_test(ev.get("Test")) action = ev.get("Action") if action in ("pass", "fail", "skip") and pkg and test: saw_action_events = True upd(results, f"{pkg}/{test}", "fail" if action == "fail" else action) # Only use Output fallback if we didn't see Action events at all # (avoids double-counting/weird variants when Output is wrapped). if not saw_action_events: out = ev.get("Output") if isinstance(out, str) and pkg: m = re.search(r"---\s+(PASS|FAIL|SKIP):\s+([^\s(]+)", out) if m: status = m.group(1).lower() tname = canonical_test(m.group(2)) upd(results, f"{pkg}/{tname}", "fail" if status == "fail" else status) # Plain-text fallback if no JSON-derived results if not results: current_pkg = None for raw_ln in log.splitlines(): ln = raw_ln.rstrip("\r") if re.match(r"^\s*PS>.*$", ln) or re.match(r"^\s*ContainerAdministrator@.*$", ln): continue m = re.match(r"^\s*ok\s+(\S+)\s", ln) if m: current_pkg = canonical_pkg(m.group(1)) continue m = re.match(r"---\s+(PASS|FAIL|SKIP):\s+([^\s(]+)", ln) if m and current_pkg: status = m.group(1).lower() tname = canonical_test(m.group(2)) upd(results, f"{current_pkg}/{tname}", "fail" if status == "fail" else status) return results
[ "github.com/hashicorp/packer/provisioner/shell/TestProvisioner_RemoteFolderSetSucccessfully", "github.com/hashicorp/packer/packer/TestBuild_RunBeforePrepare", "github.com/hashicorp/packer/hcl2template/function/TestStrContains/includes(cty.SStringVal(\"hello1\"),_cty.NumberIntVal(1))", "github.com/hashicorp/pa...
[ "github.com/hashicorp/packer/provisioner/hcp-sbom/TestConfigPrepare/empty_config,,_should_error_without_a_source", "github.com/hashicorp/packer/provisioner/windows-restart/TestProvisionerPrepare_DDefaults", "github.com/hashicorp/packer/packer/TestCoreBuild_basic", "github.com/hashicorp/packer/command/TestVali...
starryzhang/sweb.eval.win.hashicorp_1776_packer-13451
lima-vm/lima
3378
lima-vm__lima-3378
Go
[ "3364" ]
0625d0b084450e874869dcbc9f63d4312797c3fe
diff --git a/pkg/osutil/user.go b/pkg/osutil/user.go index 8a47ffee58d..8b09091f069 100644 --- a/pkg/osutil/user.go +++ b/pkg/osutil/user.go @@ -7,15 +7,12 @@ import ( "fmt" "os/exec" "os/user" - "path" - "path/filepath" "regexp" "runtime" "strconv" "strings" "sync" - "github.com/lima-vm/lima/pkg/ioutilx" . "github.com/lima-vm/lima/pkg/must" "github.com/lima-vm/lima/pkg/version/versionutil" "github.com/sirupsen/logrus" @@ -45,9 +42,6 @@ var ( // names to the fallback user as well, so the regex does not allow them. var regexUsername = regexp.MustCompile("^[a-z_][a-z0-9_-]*$") -// regexPath detects valid Linux path. -var regexPath = regexp.MustCompile("^[/a-zA-Z0-9_-]+$") - func LookupUser(name string) (User, error) { if users == nil { users = make(map[string]User) @@ -115,9 +109,8 @@ func LimaUser(limaVersion string, warn bool) *user.User { warnings = append(warnings, warning) limaUser.Username = fallbackUser } - if runtime.GOOS != "windows" { - limaUser.HomeDir = "/home/{{.User}}.linux" - } else { + limaUser.HomeDir = "/home/{{.User}}.linux" + if runtime.GOOS == "windows" { idu, err := call([]string{"id", "-u"}) if err != nil { logrus.Debug(err) @@ -146,38 +139,6 @@ func LimaUser(limaVersion string, warn bool) *user.User { warnings = append(warnings, warning) limaUser.Gid = formatUidGid(gid) } - home, err := ioutilx.WindowsSubsystemPath(limaUser.HomeDir) - if err != nil { - logrus.Debug(err) - } else { - // Trim mount prefix within Subsystem - // cygwin/msys2 cygpath could have prefix for drive mounts configured via /etc/fstab - // wsl wslpath could have prefix for drive mounts configured via [automount] section in wsl.conf - drivePath, err := ioutilx.WindowsSubsystemPath(filepath.VolumeName(limaUser.HomeDir) + "/") - if err != nil { - logrus.Debug(err) - } else { - prefix := path.Dir(strings.TrimSuffix(drivePath, "/")) - if prefix != "/" { - home = strings.TrimPrefix(home, prefix) - } - home += ".linux" - } - } - if home == "" { - drive := 
filepath.VolumeName(limaUser.HomeDir) - home = filepath.ToSlash(limaUser.HomeDir) - // replace C: with /c - prefix := strings.ToLower(fmt.Sprintf("/%c", drive[0])) - home = strings.Replace(home, drive, prefix, 1) - home += ".linux" - } - if !regexPath.MatchString(limaUser.HomeDir) { - warning := fmt.Sprintf("local home %q is not a valid Linux path (must match %q); using %q home instead", - limaUser.HomeDir, regexPath.String(), home) - warnings = append(warnings, warning) - limaUser.HomeDir = home - } } }) if warn {
diff --git a/pkg/limayaml/defaults_test.go b/pkg/limayaml/defaults_test.go index 52d1435baa9..b8f203838dd 100644 --- a/pkg/limayaml/defaults_test.go +++ b/pkg/limayaml/defaults_test.go @@ -62,10 +62,7 @@ func TestFillDefault(t *testing.T) { limaHome, err := dirnames.LimaDir() assert.NilError(t, err) user := osutil.LimaUser("0.0.0", false) - if runtime.GOOS != "windows" { - // manual template expansion for "/home/{{.User}}.linux" (done by FillDefault) - user.HomeDir = fmt.Sprintf("/home/%s.linux", user.Username) - } + user.HomeDir = fmt.Sprintf("/home/%s.linux", user.Username) uid, err := strconv.ParseUint(user.Uid, 10, 32) assert.NilError(t, err)
[Windows] Test coverage missing for part of User home dir setup login on Windows ### Description Identified by this comment https://github.com/lima-vm/lima/pull/3304#discussion_r2002648601 There is non-trivial piece of code for deriving user home in guest VM using cygpath/wslpath. Both a similar, but have behavior differences and could have configuration differences. For testing purposes this could would need to be refactored and mocks created for the tools themselves. The thing to think about is. May be this code can be removed at all, because it is followed up by https://github.com/lima-vm/lima/blob/786c1f5f3c6e8ff276c0c5d6789cdd9243d70f49/pkg/osutil/user.go#L167 which is re-implementation of basics of the cygpath processing Windows paths into Unix (default msys2 configuration, no FS customizations). May be this is really sufficient for the case, especially because the path from the tools has to be updated with some sort of prefix and is not used directly (to prevent collision with mounts).
[ "https://github.com/lima-vm/lima/commit/b30505b1da675a736931129fde1671f66c6015f5" ]
2025-03-24T17:07:14Z
https://github.com/lima-vm/lima/tree/0625d0b084450e874869dcbc9f63d4312797c3fe
[ "go mod download ; go build ./..." ]
[ "cd /testbed; New-Item -ItemType Directory -Force reports | Out-Null; go test -json ./... 2>&1 | Tee-Object -FilePath reports/go-test-results.jsonl" ]
[ "cd /testbed; Get-Content reports/go-test-results.jsonl" ]
def parser(log: str) -> dict[str, str]: """ Robust Go test parser for both: - true JSONL from `go test -json` (one object per line) - console-wrapped/pasted JSON (objects split across lines) Strategy: 1) Always try JSONL per-line. 2) Always also run string-aware JSON-object extraction as a fallback. 3) From parsed objects, take only authoritative terminal events: Action in {pass, fail, skip} AND Test is present. (This avoids junk truncated test names coming from Output parsing.) 4) As a last resort (only if no JSON terminal events found), parse plain `--- PASS|FAIL|SKIP: TestName` lines from the raw log. """ import json import re results: dict[str, str] = {} def norm_status(x): x = (x or "").strip().lower() if x in ("pass", "fail", "skip"): return x if x in ("error", "failed", "failure"): return "fail" return None def make_key(pkg, test): test = (test or "").strip() if not test: return None pkg = (pkg or "").strip() return f"{pkg}::{test}" if pkg else test def set_status(key, status): if not key or status not in ("pass", "fail", "skip"): return prev = results.get(key) if prev == "fail": return if status == "fail": results[key] = "fail" else: results[key] = status def ingest_obj(o: dict): action = norm_status(o.get("Action")) test = o.get("Test") or "" if action in ("pass", "fail", "skip") and test: pkg = o.get("Package") or "" set_status(make_key(pkg, test), action) # Phase 1: JSONL fast path for line in log.splitlines(): s = line.strip() if not s.startswith("{") or not s.endswith("}"): continue try: o = json.loads(s) except Exception: continue if isinstance(o, dict): ingest_obj(o) # Phase 2: wrapped JSON extraction (string-aware) text = log buf = [] in_obj = False depth = 0 in_str = False esc = False def flush(): nonlocal buf if not buf: return obj_text = "".join(buf).strip() buf = [] if not (obj_text.startswith("{") and obj_text.endswith("}")): return try: o = json.loads(obj_text) except Exception: return if isinstance(o, dict): ingest_obj(o) for ch in text: if 
not in_obj: if ch == "{": in_obj = True depth = 1 in_str = False esc = False buf = ["{"] continue buf.append(ch) if in_str: if esc: esc = False continue if ch == "\\": esc = True continue if ch == '"': in_str = False continue if ch == '"': in_str = True continue if ch == "{": depth += 1 continue if ch == "}": depth -= 1 if depth == 0: in_obj = False flush() continue # Phase 3: last-resort plain text terminal markers (unqualified) if not results: re_terminal = re.compile(r"---\s+(PASS|FAIL|SKIP):\s+([A-Za-z_][A-Za-z0-9_/:\.\-#]{2,})") for m in re_terminal.finditer(log): st = norm_status(m.group(1)) tn = m.group(2) set_status(tn, st) return results
[ "TestBas", "TestEm", "TestAbsP", "TestLi", "TestUseAbsLocators/Template_without_base_or_", "TestInstNameFromImageURL/removes_date_", "TestAbsPath/Relative_locators_must_not_i", "FuzzSetup", "TestAbsPath/Relative_paths_must_be_undern", "TestInstNameFromImageURL/replace_arch_with_arch", "TestUseAb...
[ "TestParseAddress/0100007F:invalid", "TestEmbed/ERROR_All_bases_following_template://_bases_must_be_template://_", "TestPrintInstanceTable80DiffArch", "Tes", "TestParseAddress/000000000000000", "Test_zoneHost/0", "TestRedownloadRemote/digest-less", "TestPropagateJSON/fatal_level", "TestInstNameFromI...
starryzhang/sweb.eval.win.lima-vm_1776_lima-3378
moby/moby
49973
moby__moby-49973
Go
[ "48659" ]
cbeec12d124c8a49bf18da6fdcd78665e221b525
diff --git a/builder/builder-next/adapters/containerimage/pull.go b/builder/builder-next/adapters/containerimage/pull.go index 9a753fe64cfe8..82a9d78141bd0 100644 --- a/builder/builder-next/adapters/containerimage/pull.go +++ b/builder/builder-next/adapters/containerimage/pull.go @@ -182,7 +182,7 @@ func (is *Source) resolveRemote(ctx context.Context, ref string, platform *ocisp p = *platform } // key is used to synchronize resolutions that can happen in parallel when doing multi-stage. - key := "getconfig::" + ref + "::" + platforms.Format(p) + key := "getconfig::" + ref + "::" + platforms.FormatAll(p) res, err := is.g.Do(ctx, key, func(ctx context.Context) (*resolveRemoteResult, error) { res := resolver.DefaultPool.GetResolver(is.RegistryHosts, ref, "pull", sm, g) dgst, dt, err := imageutil.Config(ctx, ref, res, is.ContentStore, is.LeaseManager, platform) diff --git a/daemon/containerd/image_builder.go b/daemon/containerd/image_builder.go index 513ac91e85514..87b9b86b6e063 100644 --- a/daemon/containerd/image_builder.go +++ b/daemon/containerd/image_builder.go @@ -166,7 +166,7 @@ func (i *ImageService) pullForBuilder(ctx context.Context, name string, authConf WARNING: Pulled image with specified platform (%s), but the resulting image's configured platform (%s) does not match. This is most likely caused by a bug in the build system that created the fetched image (%s). 
Please notify the image author to correct the configuration.`, - platforms.Format(p), platforms.Format(imgPlat), name, + platforms.FormatAll(p), platforms.FormatAll(imgPlat), name, ) log.G(ctx).WithError(err).WithField("image", name).Warn("Ignoring error about platform mismatch where the manifest list points to an image whose configuration does not match the platform in the manifest.") } diff --git a/daemon/containerd/image_pull.go b/daemon/containerd/image_pull.go index 6f4113d1caa08..5a60d13dd9e2c 100644 --- a/daemon/containerd/image_pull.go +++ b/daemon/containerd/image_pull.go @@ -79,7 +79,7 @@ func (i *ImageService) PullImage(ctx context.Context, baseRef reference.Named, p func (i *ImageService) pullTag(ctx context.Context, ref reference.Named, platform *ocispec.Platform, metaHeaders map[string][]string, authConfig *registrytypes.AuthConfig, out progress.Output) error { var opts []containerd.RemoteOpt if platform != nil { - opts = append(opts, containerd.WithPlatform(platforms.Format(*platform))) + opts = append(opts, containerd.WithPlatform(platforms.FormatAll(*platform))) } resolver, _ := i.newResolverFromAuthConfig(ctx, authConfig, ref) @@ -223,7 +223,7 @@ func (i *ImageService) pullTag(ctx context.Context, ref reference.Named, platfor if strings.Contains(err.Error(), "platform") { platformStr := platforms.DefaultString() if platform != nil { - platformStr = platforms.Format(*platform) + platformStr = platforms.FormatAll(*platform) } return errdefs.NotFound(fmt.Errorf("no matching manifest for %s in the manifest list entries: %w", platformStr, err)) } diff --git a/daemon/create.go b/daemon/create.go index 1e0c3ed3ccfa6..624382f214fd0 100644 --- a/daemon/create.go +++ b/daemon/create.go @@ -108,7 +108,7 @@ func (daemon *Daemon) containerCreate(ctx context.Context, daemonCfg *configStor } if !images.OnlyPlatformWithFallback(p).Match(imgPlat) { - warnings = append(warnings, fmt.Sprintf("The requested image's platform (%s) does not match the detected host 
platform (%s) and no specific platform was requested", platforms.Format(imgPlat), platforms.Format(p))) + warnings = append(warnings, fmt.Sprintf("The requested image's platform (%s) does not match the detected host platform (%s) and no specific platform was requested", platforms.FormatAll(imgPlat), platforms.FormatAll(p))) } } } diff --git a/daemon/images/image.go b/daemon/images/image.go index 8faa161a2b67f..739935a28e46c 100644 --- a/daemon/images/image.go +++ b/daemon/images/image.go @@ -54,7 +54,7 @@ func (i *ImageService) manifestMatchesPlatform(ctx context.Context, img *image.I log.G(ctx).WithFields(log.Fields{ "error": err, "image": img.ID, - "desiredPlatform": platforms.Format(platform), + "desiredPlatform": platforms.FormatAll(platform), }).Error("Error looking up image leases") return false, err } @@ -75,7 +75,7 @@ func (i *ImageService) manifestMatchesPlatform(ctx context.Context, img *image.I for _, r := range ls { logger := log.G(ctx).WithFields(log.Fields{ "image": img.ID, - "desiredPlatform": platforms.Format(platform), + "desiredPlatform": platforms.FormatAll(platform), "resourceID": r.ID, "resourceType": r.Type, }) @@ -121,7 +121,7 @@ func (i *ImageService) manifestMatchesPlatform(ctx context.Context, img *image.I Variant: md.Platform.Variant, } if !comparer.Match(p) { - logger.WithField("otherPlatform", platforms.Format(p)).Debug("Manifest is not a match") + logger.WithField("otherPlatform", platforms.FormatAll(p)).Debug("Manifest is not a match") continue } @@ -195,7 +195,7 @@ func (i *ImageService) GetImage(ctx context.Context, refOrID string, options bac if ref, err := reference.ParseNamed(refOrID); err == nil { imgName = reference.FamiliarString(ref) } - retErr = errdefs.NotFound(errors.Errorf("image with reference %s was found but its platform (%s) does not match the specified platform (%s)", imgName, platforms.Format(imgPlat), platforms.Format(p))) + retErr = errdefs.NotFound(errors.Errorf("image with reference %s was found but its platform 
(%s) does not match the specified platform (%s)", imgName, platforms.FormatAll(imgPlat), platforms.FormatAll(p))) }() ref, err := reference.ParseAnyReference(refOrID) if err != nil { diff --git a/daemon/images/image_builder.go b/daemon/images/image_builder.go index 3e2302952a951..f6f6f8caad229 100644 --- a/daemon/images/image_builder.go +++ b/daemon/images/image_builder.go @@ -180,7 +180,7 @@ func (i *ImageService) pullForBuilder(ctx context.Context, name string, authConf WARNING: Pulled image with specified platform (%s), but the resulting image's configured platform (%s) does not match. This is most likely caused by a bug in the build system that created the fetched image (%s). Please notify the image author to correct the configuration.`, - platforms.Format(p), platforms.Format(imgPlat), name, + platforms.FormatAll(p), platforms.FormatAll(imgPlat), name, ) log.G(ctx).WithError(err).WithField("image", name).Warn("Ignoring error about platform mismatch where the manifest list points to an image whose configuration does not match the platform in the manifest.") err = nil diff --git a/distribution/pull_v2.go b/distribution/pull_v2.go index 49d35c587f2b8..3cb22b5779194 100644 --- a/distribution/pull_v2.go +++ b/distribution/pull_v2.go @@ -836,7 +836,7 @@ func (p *puller) pullManifestList(ctx context.Context, ref reference.Named, mfst if pp != nil { platform = *pp } - log.G(ctx).Debugf("%s resolved to a manifestList object with %d entries; looking for a %s match", ref, len(mfstList.Manifests), platforms.Format(platform)) + log.G(ctx).Debugf("%s resolved to a manifestList object with %d entries; looking for a %s match", ref, len(mfstList.Manifests), platforms.FormatAll(platform)) manifestMatches := filterManifests(mfstList.Manifests, platform) diff --git a/distribution/pull_v2_unix.go b/distribution/pull_v2_unix.go index 1bb05b79875db..7984f72546c85 100644 --- a/distribution/pull_v2_unix.go +++ b/distribution/pull_v2_unix.go @@ -27,7 +27,7 @@ func 
filterManifests(manifests []manifestlist.ManifestDescriptor, p ocispec.Plat if descP == nil || m.Match(*descP) { matches = append(matches, desc) if descP != nil { - log.G(context.TODO()).Debugf("found match for %s with media type %s, digest %s", platforms.Format(p), desc.MediaType, desc.Digest.String()) + log.G(context.TODO()).Debugf("found match for %s with media type %s, digest %s", platforms.FormatAll(p), desc.MediaType, desc.Digest.String()) } } }
diff --git a/internal/testutils/specialimage/multiplatform.go b/internal/testutils/specialimage/multiplatform.go index 00d56ed3f46a1..70dfc8cdc9afb 100644 --- a/internal/testutils/specialimage/multiplatform.go +++ b/internal/testutils/specialimage/multiplatform.go @@ -16,7 +16,7 @@ func MultiPlatform(dir string, imageRef string, imagePlatforms []ocispec.Platfor var descs []ocispec.Descriptor for _, platform := range imagePlatforms { - ps := platforms.Format(platform) + ps := platforms.FormatAll(platform) manifestDesc, _, err := oneLayerPlatformManifest(dir, platform, FileInLayer{Path: "bash", Content: []byte("layer-" + ps)}) if err != nil { return nil, nil, err diff --git a/internal/testutils/specialimage/partial.go b/internal/testutils/specialimage/partial.go index 13a873d232c82..6fe941d757388 100644 --- a/internal/testutils/specialimage/partial.go +++ b/internal/testutils/specialimage/partial.go @@ -26,7 +26,7 @@ func PartialMultiPlatform(dir string, imageRef string, opts PartialOpts) (*ocisp var descs []ocispec.Descriptor for _, platform := range opts.Stored { - ps := platforms.Format(platform) + ps := platforms.FormatAll(platform) manifestDesc, _, err := oneLayerPlatformManifest(dir, platform, FileInLayer{Path: "bash", Content: []byte("layer-" + ps)}) if err != nil { return nil, nil, err
c8d: review uses of platforms.Format vs platforms.FormatAll It occurred to me that we're using `platforms.Format()` here, and not `platforms.FormatAll()`. The latter also includes "os-version" if it's set. While we don't use that / document that (yet) in the CLI documentation, it may become more relevant with Windows images. I don't think it's problematic to have it here, but I'll open a tracking ticket to make a pass at looking for places where we could replace `Format` with `FormatAll`. _Originally posted by @thaJeztah in https://github.com/moby/moby/pull/48631#discussion_r1799038581_
+1, `FormatAll` is the thing we should be doing, it just wasn't available at the time we wrote most of the code. Fix PR: https://github.com/moby/moby/pull/49973
+1, `FormatAll` is the thing we should be doing, it just wasn't available at the time we wrote most of the code. Fix PR: https://github.com/moby/moby/pull/49973 From that PR; --- We should probably not mark the ticket as fixed until we reviewed the remaining uses (I changed the "fixes" to "addresses"); we can do those separately (changes in this PR look good AFAICS), but make sure that we're not accidentally dropping os.version in places where it shouldn't be dropped. I had a quick peek (if my IDE is not lying to me), and outside of vendor code, and the good news is that the list is not too long, so probably doable in a follow-up. I found the remaining uses here; https://github.com/moby/moby/blob/294f0c36e468dc097d56dcf88d4418b7dc3d2856/builder/builder-next/adapters/containerimage/pull.go#L184-L185 https://github.com/moby/moby/blob/294f0c36e468dc097d56dcf88d4418b7dc3d2856/builder/builder-next/worker/worker.go#L163-L166 https://github.com/moby/moby/blob/294f0c36e468dc097d56dcf88d4418b7dc3d2856/daemon/containerd/image_pull.go#L79-L82 https://github.com/moby/moby/blob/294f0c36e468dc097d56dcf88d4418b7dc3d2856/internal/testutils/specialimage/multiplatform.go#L18-L20 https://github.com/moby/moby/blob/294f0c36e468dc097d56dcf88d4418b7dc3d2856/internal/testutils/specialimage/partial.go#L28-L30 PR https://github.com/moby/moby/pull/49973 has been updated to address the remaining use cases.
[ "https://github.com/moby/moby/commit/cf11cd1aace921b82dc35ff9b9e546619d8a2a98", "https://github.com/moby/moby/commit/bb41e5a32e5aee18bfd2d4e95afbbf23414f4443" ]
2025-05-14T00:43:39Z
https://github.com/moby/moby/tree/cbeec12d124c8a49bf18da6fdcd78665e221b525
[ "cd /testbed; go mod download; go build ./cmd/dockerd" ]
[ "powershell -NoProfile -Command \"New-Item -ItemType Directory -Force test-reports | Out-Null; go test -json ./... 2>&1 | Tee-Object -FilePath test-reports\\go-test-results.jsonl\"" ]
[ "powershell -NoProfile -Command \"Get-Content -Path test-reports\\go-test-results.jsonl\"" ]
def parser(log: str) -> dict[str, str]: """ Robust parser for (possibly wrapped/corrupted) `go test -json` output. Returns: { "pkg::TestName": "pass|fail|skip", ... } (pkg omitted if unknown) """ import json import re results: dict[str, str] = {} def stronger(prev: str | None, cur: str) -> str: order = {"pass": 0, "skip": 1, "fail": 2} if prev is None: return cur return prev if order[prev] >= order[cur] else cur def set_status(pkg: str | None, test: str | None, status: str): if not test or status not in ("pass", "fail", "skip"): return name = f"{pkg}::{test}" if pkg else test results[name] = stronger(results.get(name), status) # Patterns for parsing `--- PASS|FAIL|SKIP: TestName` re_dash = re.compile(r"---\s+(PASS|FAIL|SKIP):\s+([^\s]+)") def parse_dash(text: str, pkg: str | None): for m in re_dash.finditer(text): set_status(pkg, m.group(2), m.group(1).lower()) # Try to unescape JSON string fragments (best effort) def unescape_json_str(s: str) -> str: try: return json.loads(f'"{s}"') except Exception: return s.replace(r"\/", "/").replace(r"\\", "\\").replace(r"\"", '"').replace(r"\n", "\n") # Common corruption repairs seen in the provided log repairs = [ (r'"Elapsed"\s*::', '"Elapsed":'), (r'"Output"\s*::', '"Output":'), (r'"Outputt"\s*:', '"Output":'), (r'"Outtput"\s*:', '"Output":'), (r'"OOutput"\s*:', '"Output":'), (r'",\s*,', '",'), (r'\\r', ''), ] def try_load(objtxt: str): try: return json.loads(objtxt) except Exception: pass fixed = objtxt for pat, rep in repairs: fixed = re.sub(pat, rep, fixed) fixed = re.sub(r'("Output"\s*:\s*")\s*"(?===)', r"\1", fixed) try: return json.loads(fixed) except Exception: return None # Regex salvage for too-broken JSON objects re_action = re.compile(r'"Action"\s*:\s*"(?P<a>pass|fail|skip)"') re_pkg = re.compile(r'"Package"\s*:\s*"(?P<p>[^"\\]*(?:\\.[^"\\]*)*)"') re_test = re.compile(r'"Test"\s*:\s*"(?P<t>[^"\\]*(?:\\.[^"\\]*)*)"') re_output = re.compile(r'"Output"\s*:\s*"(?P<o>[^"\\]*(?:\\.[^"\\]*)*)"') # Extract JSON objects 
from entire log by brace-balance (string-aware) objs: list[str] = [] buf: list[str] = [] depth = 0 in_str = False esc = False started = False for ch in log: if not started: if ch == "{": started = True depth = 1 buf = ["{"] in_str = False esc = False continue buf.append(ch) if in_str: if esc: esc = False elif ch == "\\": esc = True elif ch == '"': in_str = False else: if ch == '"': in_str = True elif ch == "{": depth += 1 elif ch == "}": depth -= 1 if depth == 0: objs.append("".join(buf)) buf = [] started = False last_pkg: str | None = None # Consume objects for objtxt in objs: ev = try_load(objtxt) if isinstance(ev, dict): pkg = ev.get("Package") if isinstance(ev.get("Package"), str) else None if pkg: last_pkg = pkg action = ev.get("Action") if isinstance(ev.get("Action"), str) else None test = ev.get("Test") if isinstance(ev.get("Test"), str) else None output = ev.get("Output") if isinstance(ev.get("Output"), str) else None if test and action in ("pass", "fail", "skip"): set_status(pkg, test, action) if output: parse_dash(output, pkg or last_pkg) continue # Salvage fields mp = re_pkg.search(objtxt) mt = re_test.search(objtxt) ma = re_action.search(objtxt) mo = re_output.search(objtxt) pkg = unescape_json_str(mp.group("p")) if mp else None if pkg: last_pkg = pkg test = unescape_json_str(mt.group("t")) if mt else None if ma and test: set_status(pkg or last_pkg, test, ma.group("a")) if mo: parse_dash(unescape_json_str(mo.group("o")), pkg or last_pkg) # Final fallback: parse raw log for `--- PASS|FAIL|SKIP:` parse_dash(log, last_pkg) return results
[ "github.co\nom/docker/docker/daemon/logger/splunk::TestSkipVerify", "github.\n.com/docker/docker/distribution::TestDetectManifestBlobMediaType/schema1", "github.co\nom/docker/docker/daemon/logger/local::TestReadLog/Tail/Live/All", "github.\n.com/docker/docker/daemon/logger/jsonfilelog::TestReadLogs/Tail/LiveE...
[ "github.\n.com/docker/docker/api/server/httputils::TestDecodePlatform/missing_os", "github.\n.com/docker/docker/api/server::TestM", "github.co\nom/docker/docker/api/server::TestMiddlewares", "github.co\nom/docker/docker/api/server/httputils::TestReadJSON/with_whitespace", "github.\n.com/docker/docker/api/se...
starryzhang/sweb.eval.win.moby_1776_moby-49973
prometheus-community/windows_exporter
2104
prometheus-community__windows_exporter-2104
Go
[ "2020" ]
492f3af31712cb95cf35dcee05466dfe4445ec19
diff --git a/internal/collector/os/os.go b/internal/collector/os/os.go index bf02442e4..276f49bce 100644 --- a/internal/collector/os/os.go +++ b/internal/collector/os/os.go @@ -28,6 +28,7 @@ import ( "github.com/alecthomas/kingpin/v2" "github.com/prometheus-community/windows_exporter/internal/headers/kernel32" "github.com/prometheus-community/windows_exporter/internal/headers/netapi32" + "github.com/prometheus-community/windows_exporter/internal/headers/psapi" "github.com/prometheus-community/windows_exporter/internal/headers/sysinfoapi" "github.com/prometheus-community/windows_exporter/internal/mi" "github.com/prometheus-community/windows_exporter/internal/osversion" @@ -51,7 +52,9 @@ type Collector struct { hostname *prometheus.Desc osInformation *prometheus.Desc - // users + // Deprecated: Use windows_system_processes instead. + processes *prometheus.Desc + // Deprecated: Use windows_system_process_limit instead. processesLimit *prometheus.Desc @@ -169,6 +172,12 @@ func (c *Collector) Build(logger *slog.Logger, _ *mi.Session) error { nil, ) + c.processes = prometheus.NewDesc( + prometheus.BuildFQName(types.Namespace, Name, "processes"), + "Deprecated: Use `windows_system_processes` instead.", + nil, + nil, + ) c.processesLimit = prometheus.NewDesc( prometheus.BuildFQName(types.Namespace, Name, "processes_limit"), "Deprecated: Use `windows_system_process_limit` instead.", @@ -216,6 +225,10 @@ func (c *Collector) Collect(ch chan<- prometheus.Metric) error { c.collect(ch) + if err := c.collectProcessCount(ch); err != nil { + errs = append(errs, fmt.Errorf("failed to collect process count metrics: %w", err)) + } + if err := c.collectHostname(ch); err != nil { errs = append(errs, fmt.Errorf("failed to collect hostname metrics: %w", err)) } @@ -278,6 +291,20 @@ func (c *Collector) collectHostname(ch chan<- prometheus.Metric) error { return nil } +func (c *Collector) collectProcessCount(ch chan<- prometheus.Metric) error { + gpi, err := psapi.GetPerformanceInfo() + if 
err != nil { + return err + } + + ch <- prometheus.MustNewConstMetric(c.processes, + prometheus.GaugeValue, + float64(gpi.ProcessCount), + ) + + return nil +} + func (c *Collector) collectTime(ch chan<- prometheus.Metric) error { timeZoneInfo, err := kernel32.GetDynamicTimeZoneInformation() if err != nil {
diff --git a/tools/e2e-output.txt b/tools/e2e-output.txt index 8582c870d..0c28795ad 100644 --- a/tools/e2e-output.txt +++ b/tools/e2e-output.txt @@ -303,6 +303,8 @@ windows_exporter_collector_timeout{collector="udp"} 0 # TYPE windows_os_physical_memory_free_bytes gauge # HELP windows_os_process_memory_limit_bytes Deprecated: Use `windows_memory_process_memory_limit_bytes` instead. # TYPE windows_os_process_memory_limit_bytes gauge +# HELP windows_os_processes Deprecated: Use `windows_system_processes` instead. +# TYPE windows_os_processes gauge # HELP windows_os_processes_limit Deprecated: Use `windows_system_process_limit` instead. # TYPE windows_os_processes_limit gauge # HELP windows_os_time Deprecated: Use windows_time_current_timestamp_seconds instead.
The windows_os_processes does not back any result ### Current Behavior The windows_os_processes does not back any result . it is just work on 0.29.2. ### Expected Behavior it should back Number Of Process on windows ### Steps To Reproduce ```Markdown ``` ### Environment - windows_exporter Version: 0.30.5 - Windows Server Version: 2019 ### windows_exporter logs ```shell . ``` ### Anything else? _No response_
https://github.com/prometheus-community/windows_exporter/pull/1596 - looks like windows_os_processes is slated for removal for windows_system_processes. I didn't see anything that this has happened already but confirmed my 0.30.6 version also no longer has windows_os_processes
https://github.com/prometheus-community/windows_exporter/pull/1596 - looks like windows_os_processes is slated for removal for windows_system_processes. I didn't see anything that this has happened already but confirmed my 0.30.6 version also no longer has windows_os_processes
[ "https://github.com/prometheus-community/windows_exporter/commit/7827b2701164792dea4e61b73693c2b1c812367f", "https://github.com/prometheus-community/windows_exporter/commit/d0388c5c711d192b0aa1d13c3df2e2ee036de38f" ]
2025-07-01T22:20:34Z
https://github.com/prometheus-community/windows_exporter/tree/492f3af31712cb95cf35dcee05466dfe4445ec19
[ "cd /testbed; $env:GOTOOLCHAIN=\"auto\"; go mod download; go build ./..." ]
[ "cd /testbed; New-Item -ItemType Directory -Force -Path reports | Out-Null; $env:GOTOOLCHAIN=\"auto\"; go test -json ./... 2>&1 | Tee-Object -FilePath reports\\go-test-results.json" ]
[ "cd /testbed; Get-Content reports\\go-test-results.json" ]
def parser(log: str) -> dict[str, str]: import re import json results: dict[str, str] = {} def norm_status(s: str) -> str: s = (s or "").lower() if s.startswith("pass"): return "pass" if s.startswith("skip"): return "skip" return "fail" def clean_pkg(pkg: str) -> str: pkg = (pkg or "") pkg = re.sub(r"\s+", "", pkg) # fix common wrapping/corruption around github.com pkg = pkg.replace("github.ccom", "github.com") pkg = pkg.replace("github.coom", "github.com") pkg = pkg.replace("github.comm", "github.com") pkg = re.sub(r"^github\.[a-z]*com", "github.com", pkg) return pkg def clean_test(t: str) -> str: t = (t or "") return re.sub(r"\s+", "", t) def add_result(pkg: str, test: str, status: str): if not test: return key = f"{pkg}/{test}" if pkg else test prev = results.get(key) # prefer fail > skip > pass if prev == "fail": return if prev == "skip" and status == "pass": return results[key] = status text = (log or "").replace("\r\n", "\n") # --- 1) Parse go test -json objects (robust to wrapped/corrupt lines) --- for m in re.finditer(r'\{"Time":.*?\}\s*', text, flags=re.S): blob = m.group(0).strip() # minimal repairs for common corruptions observed cand = blob cand = re.sub(r",\s*,", ",", cand) # double comma cand = cand.replace('"Output"::', '"Output":') cand = re.sub(r"\n", "", cand) # unwrap cand = re.sub(r'"\s*"', "", cand) # adjacent quotes due to wrap obj = None try: obj = json.loads(cand) except Exception: obj = None if isinstance(obj, dict): action = obj.get("Action") if action in ("pass", "fail", "skip"): pkg = clean_pkg(obj.get("Package")) test = clean_test(obj.get("Test")) if test: add_result(pkg, test, norm_status(action)) continue # regex salvage if JSON decoding fails am = re.search(r'"Action"\s*:\s*"([^"]+)"', cand) if not am: continue action = am.group(1) if action not in ("pass", "fail", "skip"): continue pm = re.search(r'"Package"\s*:\s*"([^"]*)"', cand) tm = re.search(r'"Test"\s*:\s*"([^"]*)"', cand) pkg = clean_pkg(pm.group(1) if pm else "") test = 
clean_test(tm.group(1) if tm else "") if test: add_result(pkg, test, norm_status(action)) # --- 2) Fallback: parse textual `--- PASS/FAIL/SKIP:` lines --- current_pkg = "" for line in text.splitlines(): m = re.search(r'"Action"\s*:\s*"start".*?"Package"\s*:\s*"([^"]+)"', line) if m: current_pkg = clean_pkg(m.group(1)) m = re.search(r"---\s+(PASS|FAIL|SKIP):\s+([^\s(]+)", line) if m: status = norm_status(m.group(1)) testname = clean_test(m.group(2)) add_result(current_pkg, testname, status) return results
[ "github.com/prometheus-community/windows_exporter/internal/collector/system/TestCCollector" ]
[ "TestCollector/", "TestGetLogonSessions", "TestCollector/memory", "TestCounter", "Test_MI_Application_Initialize", "github.com/prometheus-community/windows_exporter/internal/collector/textfile/TesstCheckBOM", "TestRun", "github.com/prometheus-community/windows_exporter/internal/collector/process/Testt...
starryzhang/sweb.eval.win.prometheus-community_1776_windows_exporter-2104
twpayne/chezmoi
4378
twpayne__chezmoi-4378
Go
[ "4376" ]
1d3a4ee899e70aa6143a0311b3c35687886bb21b
diff --git a/assets/chezmoi.io/docs/reference/templates/directives.md b/assets/chezmoi.io/docs/reference/templates/directives.md index 6570b65ef8b..a551f853583 100644 --- a/assets/chezmoi.io/docs/reference/templates/directives.md +++ b/assets/chezmoi.io/docs/reference/templates/directives.md @@ -37,6 +37,33 @@ inherited by templates called from the file. # [[ "true" ]] ``` +## Encoding + +Templates are always written in UTF-8 with no byte order mark. + +By default, the result of executing a template is also UTF-8 with no +byte order mark but this can be transformed into another encoding with the +template directive: + + chezmoi:template:encoding=$ENCODING + +where `$ENCODING` is one of: + +| Encoding | Description | +| --------------- | -------------------------------------------- | +| `utf-8` | UTF-8 with no byte order mark | +| `utf-8-bom` | UTF-8 with a byte order mark | +| `utf-16-be` | Big-endian UTF-16 with no byte order mark | +| `utf-16-be-bom` | Big-endian UTF-16 with a byte order mark | +| `utf-16-le` | Little-endian UTF-16 with no byte order mark | +| `utf-16-le-bom` | Little-endian UTF-16 with a byte order mark | + +!!! example + + ``` + {{/* chezmoi:template:encoding=utf-16-le */}} + ``` + ## Format indent By default, chezmoi's `toJson`, `toToml`, and `toYaml` template functions use diff --git a/internal/chezmoi/template.go b/internal/chezmoi/template.go index f5f633d11e0..649e3de196d 100644 --- a/internal/chezmoi/template.go +++ b/internal/chezmoi/template.go @@ -3,6 +3,7 @@ package chezmoi import ( "bytes" "encoding/json" + "fmt" "maps" "strconv" "strings" @@ -12,6 +13,8 @@ import ( "github.com/mattn/go-runewidth" "github.com/mitchellh/copystructure" "github.com/pelletier/go-toml/v2" + "golang.org/x/text/encoding" + "golang.org/x/text/encoding/unicode" ) // A Template extends text/template.Template with support for directives. @@ -23,6 +26,7 @@ type Template struct { // TemplateOptions are template options that can be set with directives. 
type TemplateOptions struct { + Encoding encoding.Encoding Funcs template.FuncMap FormatIndent string LeftDelimiter string @@ -107,7 +111,12 @@ func (t *Template) Execute(data any) ([]byte, error) { if err := t.template.ExecuteTemplate(&builder, t.name, data); err != nil { return nil, err } - return []byte(replaceLineEndings(builder.String(), t.options.LineEnding)), nil + + result := []byte(replaceLineEndings(builder.String(), t.options.LineEnding)) + if t.options.Encoding != nil { + return t.options.Encoding.NewEncoder().Bytes(result) + } + return result, nil } // parseAndRemoveDirectives updates o by parsing all template directives in data @@ -126,6 +135,23 @@ func (o *TemplateOptions) parseAndRemoveDirectives(data []byte) ([]byte, error) key := string(keyValuePairMatch[1]) value := maybeUnquote(string(keyValuePairMatch[2])) switch key { + case "encoding": + switch value { + case "utf-8": + o.Encoding = unicode.UTF8 + case "utf-8-bom": + o.Encoding = unicode.UTF8BOM + case "utf-16-be": + o.Encoding = unicode.UTF16(unicode.BigEndian, unicode.IgnoreBOM) + case "utf-16-be-bom": + o.Encoding = unicode.UTF16(unicode.BigEndian, unicode.UseBOM) + case "utf-16-le": + o.Encoding = unicode.UTF16(unicode.LittleEndian, unicode.IgnoreBOM) + case "utf-16-le-bom": + o.Encoding = unicode.UTF16(unicode.LittleEndian, unicode.UseBOM) + default: + return nil, fmt.Errorf("%s: unknown encoding", value) + } case "format-indent": o.FormatIndent = value case "format-indent-width":
diff --git a/internal/cmd/main_test.go b/internal/cmd/main_test.go index d7392012fcd..3ff60e7f30e 100644 --- a/internal/cmd/main_test.go +++ b/internal/cmd/main_test.go @@ -4,6 +4,7 @@ import ( "bufio" "bytes" _ "embed" + "encoding/hex" "errors" "flag" "fmt" @@ -97,6 +98,7 @@ func TestScript(t *testing.T) { "cmpmod": cmdCmpMod, "edit": cmdEdit, "expandenv": cmdExpandEnv, + "hexdecode": cmdHexDecode, "httpd": cmdHTTPD, "isdir": cmdIsDir, "issymlink": cmdIsSymlink, @@ -260,6 +262,33 @@ func cmdHTTPD(ts *testscript.TestScript, neg bool, args []string) { ts.Setenv("HTTPD_URL", server.URL) } +// cmdHexDecode decodes each argument, which must be a file with the extension +// .hex, writing the result to the same path but without the .hex extension. +func cmdHexDecode(ts *testscript.TestScript, neg bool, args []string) { + if neg { + ts.Fatalf("unsupported: ! hexdecode") + } + for _, arg := range args { + filename := ts.MkAbs(arg) + if filepath.Ext(filename) != ".hex" { + ts.Fatalf("%s: no .hex extension", arg) + } + hexData, err := os.ReadFile(filename) + if err != nil { + ts.Fatalf("%s: %v", arg, err) + } + hexData = regexp.MustCompile(`(?m)#.*$`).ReplaceAll(hexData, nil) + hexData = regexp.MustCompile(`\s+`).ReplaceAll(hexData, nil) + data := make([]byte, hex.DecodedLen(len(hexData))) + if _, err := hex.Decode(data, hexData); err != nil { + ts.Fatalf("%s: %v", arg, err) + } + if err := os.WriteFile(strings.TrimSuffix(filename, ".hex"), data, 0o666); err != nil { + ts.Fatalf("%s: %v", arg, err) + } + } +} + // cmdIsDir succeeds if all of its arguments are directories. 
func cmdIsDir(ts *testscript.TestScript, neg bool, args []string) { for _, arg := range args { diff --git a/internal/cmd/testdata/scripts/templatedirectives.txtar b/internal/cmd/testdata/scripts/templatedirectives.txtar index 63d1f558a08..b95144de399 100644 --- a/internal/cmd/testdata/scripts/templatedirectives.txtar +++ b/internal/cmd/testdata/scripts/templatedirectives.txtar @@ -1,17 +1,37 @@ +hexdecode golden/encoding.hex + # test --left-delimiter and --right-delimiter flags to chezmoi execute-template exec chezmoi execute-template --left-delimiter=[[ --right-delimiter=]] '[[ "ok" ]]' stdout ^ok$ +# test that the encoding can be set in files +exec chezmoi cat $HOME${/}encoding +cmp stdout golden/encoding + +# test that missing key behavior can be set in files +exec chezmoi cat $HOME${/}missing-key +cmp stdout golden/missing-key + # test that template delimiters can be set in files -exec chezmoi cat $HOME${/}template -cmp stdout golden/template +exec chezmoi cat $HOME${/}nested-template +cmp stdout golden/nested-template --- golden/template -- -<no value>(nested) +-- golden/encoding.hex -- +fffe # UTF-16 BOM +480065006c006c006f002c00200077006f0072006c00640021000a00 # "Hello, world!\n" +-- golden/missing-key -- +<no value> +-- golden/nested-template -- +(nested) -- home/user/.local/share/chezmoi/.chezmoitemplates/nested -- # chezmoi:template:left-delimiter=(( right-delimiter=)) ((- . -)) --- home/user/.local/share/chezmoi/template.tmpl -- -# chezmoi:template:left-delimiter=[[ right-delimiter=]] +-- home/user/.local/share/chezmoi/encoding.tmpl -- +# chezmoi:template:encoding=utf-16-le-bom +Hello, world! +-- home/user/.local/share/chezmoi/missing-key.tmpl -- # chezmoi:template:missing-key=default -[[ .MissingKey ]]([[ template "nested" "nested" ]]) +{{ .MissingKey }} +-- home/user/.local/share/chezmoi/nested-template.tmpl -- +# chezmoi:template:left-delimiter=[[ right-delimiter=]] +([[ template "nested" "nested" ]])
Accept more source file encoding when execute go templating, or allow encoding conversion after templating ## Is your feature request related to a problem? Please describe. Just when I use powershell script with templating, I noticed that .tmpl files with utf16le encoding did not remove `{{- command -}}` pattern. Then I realized that go templating assumes utf8 input. However, windows world assumes utf16le inputs in some place, like powershell 5 script input ## Describe the solution you'd like Either allow templating other than utf8(system-native encoding), or allow encoding conversion after templating but before execution (using a chezmoi directive?) ## Describe alternatives you've considered Allow user to execute some command right after script template evaluation but before execution, like pre-?interpreters with argument list ## Additional context <https://learn.microsoft.com/en-us/powershell/module/microsoft.powershell.core/about/about_character_encoding?view=powershell-5.1#character-encoding-in-windows-powershell> and <https://en.wikipedia.org/wiki/Windows_code_page#UTF-8,_UTF-16>, text encoding problem on windows is difficult, in some cases utf16le works but utf8 do not work, and this can be the first step to improve windows script execution experience ww
Thank you for opening this issue! I want chezmoi to provide a good experience on Windows, but I need help understanding it as I am not a regular Windows or PowerShell user. As I understand it, the current situation is: * Currently, chezmoi expects templates to be in UTF-8. * Currently, the result of executing a chezmoi template is always in UTF-8. * On Windows, you sometimes need to use UTF-16. * On Windows, you might want to write templates in UTF-16. * On Windows, you might want the result of executing a template to be in UTF-16. If this understanding is correct, then what do think about the following? * On all systems, templates are written in UTF-8 with no byte order mark. This ensures that templates are portable across multiple systems. * chezmoi adds a template directive like `chezmoi:template:encoding=utf-16-le` which tells chezmoi to convert the output of the template execution to little-endian UTF-16, which should fulfill your "allow encoding conversion after templating but before execution (using a chezmoi directive?)" suggestion. I did some initial experiments in #4378. Would you be able to test this? Yes, I think this work solved the problem for powershell 5.1. Here I provide a test script and my output. 
<details> ```powershell run_encoding.ps1 # chezmoi:template:left-delimiter="#{{" # chezmoi:template:encoding=utf-16-le-bom # chezmoi:template:line-endings=crlf #{{- if eq .chezmoi.os "windows" -}} #!/usr/bin/env powershell -File #also see <https://www.i18nqa.com/debug/utf8-debug.html> # tested with 933ed88af5860182792afdaab94ac34a7232775205682cb26d5fd9b1a2336cb7 chezmoi.exe on Windows 11 24H2 x64 26100.3194 # PS C:\Users\Misaka4a92> $PSVersionTable # Name Value # ---- ----- # PSVersion 5.1.26100.2161 # PSEdition Desktop # PSCompatibleVersions {1.0, 2.0, 3.0, 4.0...} # BuildVersion 10.0.26100.2161 # CLRVersion 4.0.30319.42000 # WSManStackVersion 3.0 # PSRemotingProtocolVersion 2.3 # SerializationVersion 1.1.0.1 # # # $ chezmoi.exe apply # System default encoding is iso-8859-1 # — "ÉüДЦ本화が😂 # Hex representation is \xe2\x80\x94\x20\x22\xc3\x89\xc3\xbc\xd0\x94\xd0\xa6\xe6\x9c\xac\xed\x99\x94\xe3\x81\x8c\xf0\x9f\x98\x82 # — "ÉüДЦ本화が😂 # Bytes: 255 254 35 0 # Detected bom is Unicode UTF-16 Little-Endian. # The encoding of C:\ProgramData\Temp\1730255736.encoding.ps1 is utf-16. 
# https://learn.microsoft.com/en-us/dotnet/fundamentals/runtime-libraries/system-text-encoding-default Write-Host ('System default encoding is {0}' -f [System.Text.Encoding]::Default.BodyName) $x = '— "ÉüДЦ本화が😂' $hex = -join [System.Text.Encoding]::UTF8.GetBytes($x).ForEach({ '\x{0}' -f $_.toString('x2') }) # $hex = '\xe2\x80\x94\x20\x22\xc3\x89\xc3\xbc\xd0\x94\xd0\xa6\xe6\x9c\xac\xed\x99\x94\xe3\x81\x8c\xf0\x9f\x98\x82' Write-Host "$x" Write-Host "Hex representation is $hex" $busybox = (Get-Command -ErrorAction Ignore busybox).Path if ($busybox) { Start-Process -NoNewWindow -Wait -FilePath $busybox -ArgumentList ('printf', ($hex + '\n')) } # from https://www.powershellgallery.com/packages/PoshFunctions/2.2.1.1/Content/Functions%5CGet-FileEncoding.ps1 [byte[]] $byte = Get-Content -Encoding byte -ReadCount 4 -TotalCount 4 -Path $MyInvocation.MyCommand.Path Write-Host Bytes: $byte[0] $byte[1] $byte[2] $byte[3] if ( $byte[0] -eq 0xef -and $byte[1] -eq 0xbb -and $byte[2] -eq 0xbf ) { # EF BB BF (UTF8) $bom = 'UTF8' } elseif ($byte[0] -eq 0xfe -and $byte[1] -eq 0xff) { # FE FF (UTF-16 Big-Endian) $bom = 'Unicode UTF-16 Big-Endian' } elseif ($byte[0] -eq 0xff -and $byte[1] -eq 0xfe) { # FF FE (UTF-16 Little-Endian) $bom = 'Unicode UTF-16 Little-Endian' } elseif ($byte[0] -eq 0 -and $byte[1] -eq 0 -and $byte[2] -eq 0xfe -and $byte[3] -eq 0xff) { # 00 00 FE FF (UTF32 Big-Endian) $bom = 'UTF32 Big-Endian' } elseif ($byte[0] -eq 0xfe -and $byte[1] -eq 0xff -and $byte[2] -eq 0 -and $byte[3] -eq 0) { # FE FF 00 00 (UTF32 Little-Endian) $bom = 'UTF32 Little-Endian' } elseif ($byte[0] -eq 0x2b -and $byte[1] -eq 0x2f -and $byte[2] -eq 0x76 -and ($byte[3] -eq 0x38 -or $byte[3] -eq 0x39 -or $byte[3] -eq 0x2b -or $byte[3] -eq 0x2f) ) { # 2B 2F 76 (38 | 38 | 2B | 2F) $bom = 'UTF7' } elseif ( $byte[0] -eq 0xf7 -and $byte[1] -eq 0x64 -and $byte[2] -eq 0x4c ) { # F7 64 4C (UTF-1) $bom = 'UTF-1' } elseif ($byte[0] -eq 0xdd -and $byte[1] -eq 0x73 -and $byte[2] -eq 0x66 -and $byte[3] -eq 
0x73) { # DD 73 66 73 (UTF-EBCDIC) $bom = 'UTF-EBCDIC' } elseif ( $byte[0] -eq 0x0e -and $byte[1] -eq 0xfe -and $byte[2] -eq 0xff ) { # 0E FE FF (SCSU) $bom = 'SCSU' } elseif ( $byte[0] -eq 0x0e -and $byte[1] -eq 0xfe -and $byte[2] -eq 0xff ) { $bom = 'SCSU' } elseif ( $byte[0] -eq 0xfb -and $byte[1] -eq 0xee -and $byte[2] -eq 0x28 ) { # FB EE 28 (BOCU-1) $bom = 'BOCU-1' } elseif ($byte[0] -eq 0x84 -and $byte[1] -eq 0x31 -and $byte[2] -eq 0x95 -and $byte[3] -eq 0x33) { # 84 31 95 33 (GB-18030) $bom = 'GB-18030' } else { $bom = 'ASCII' } Write-Host ("Detected bom is $bom.") $sr = [System.IO.FileStream]::new($MyInvocation.MyCommand.Path, [System.IO.FileMode]::Open, [System.IO.FileAccess]::Read, [System.IO.FileShare]::ReadWrite) $sr = [System.IO.StreamReader]::new($sr, $true) $sr.ReadLine().ToString() | Out-Null Write-Host ('The encoding of {0} is {1}.' -f $MyInvocation.MyCommand.Path, $sr.CurrentEncoding.HeaderName) $sr.Close() #{{- end -}} ``` seems that utf16le without bom do not work with powershell. Most windows api expects utf16le-with-bom or current codepage. This script works with workflow files but not latest release. </details>
Thank you for opening this issue! I want chezmoi to provide a good experience on Windows, but I need help understanding it as I am not a regular Windows or PowerShell user. As I understand it, the current situation is: * Currently, chezmoi expects templates to be in UTF-8. * Currently, the result of executing a chezmoi template is always in UTF-8. * On Windows, you sometimes need to use UTF-16. * On Windows, you might want to write templates in UTF-16. * On Windows, you might want the result of executing a template to be in UTF-16. If this understanding is correct, then what do think about the following? * On all systems, templates are written in UTF-8 with no byte order mark. This ensures that templates are portable across multiple systems. * chezmoi adds a template directive like `chezmoi:template:encoding=utf-16-le` which tells chezmoi to convert the output of the template execution to little-endian UTF-16, which should fulfill your "allow encoding conversion after templating but before execution (using a chezmoi directive?)" suggestion. I did some initial experiments in #4378. Would you be able to test this? Yes, I think this work solved the problem for powershell 5.1. Here I provide a test script and my output. 
<details> ```powershell run_encoding.ps1 # chezmoi:template:left-delimiter="#{{" # chezmoi:template:encoding=utf-16-le-bom # chezmoi:template:line-endings=crlf #{{- if eq .chezmoi.os "windows" -}} #!/usr/bin/env powershell -File #also see <https://www.i18nqa.com/debug/utf8-debug.html> # tested with 933ed88af5860182792afdaab94ac34a7232775205682cb26d5fd9b1a2336cb7 chezmoi.exe on Windows 11 24H2 x64 26100.3194 # PS C:\Users\Misaka4a92> $PSVersionTable # Name Value # ---- ----- # PSVersion 5.1.26100.2161 # PSEdition Desktop # PSCompatibleVersions {1.0, 2.0, 3.0, 4.0...} # BuildVersion 10.0.26100.2161 # CLRVersion 4.0.30319.42000 # WSManStackVersion 3.0 # PSRemotingProtocolVersion 2.3 # SerializationVersion 1.1.0.1 # # # $ chezmoi.exe apply # System default encoding is iso-8859-1 # — "ÉüДЦ本화が😂 # Hex representation is \xe2\x80\x94\x20\x22\xc3\x89\xc3\xbc\xd0\x94\xd0\xa6\xe6\x9c\xac\xed\x99\x94\xe3\x81\x8c\xf0\x9f\x98\x82 # — "ÉüДЦ本화が😂 # Bytes: 255 254 35 0 # Detected bom is Unicode UTF-16 Little-Endian. # The encoding of C:\ProgramData\Temp\1730255736.encoding.ps1 is utf-16. 
# https://learn.microsoft.com/en-us/dotnet/fundamentals/runtime-libraries/system-text-encoding-default Write-Host ('System default encoding is {0}' -f [System.Text.Encoding]::Default.BodyName) $x = '— "ÉüДЦ本화が😂' $hex = -join [System.Text.Encoding]::UTF8.GetBytes($x).ForEach({ '\x{0}' -f $_.toString('x2') }) # $hex = '\xe2\x80\x94\x20\x22\xc3\x89\xc3\xbc\xd0\x94\xd0\xa6\xe6\x9c\xac\xed\x99\x94\xe3\x81\x8c\xf0\x9f\x98\x82' Write-Host "$x" Write-Host "Hex representation is $hex" $busybox = (Get-Command -ErrorAction Ignore busybox).Path if ($busybox) { Start-Process -NoNewWindow -Wait -FilePath $busybox -ArgumentList ('printf', ($hex + '\n')) } # from https://www.powershellgallery.com/packages/PoshFunctions/2.2.1.1/Content/Functions%5CGet-FileEncoding.ps1 [byte[]] $byte = Get-Content -Encoding byte -ReadCount 4 -TotalCount 4 -Path $MyInvocation.MyCommand.Path Write-Host Bytes: $byte[0] $byte[1] $byte[2] $byte[3] if ( $byte[0] -eq 0xef -and $byte[1] -eq 0xbb -and $byte[2] -eq 0xbf ) { # EF BB BF (UTF8) $bom = 'UTF8' } elseif ($byte[0] -eq 0xfe -and $byte[1] -eq 0xff) { # FE FF (UTF-16 Big-Endian) $bom = 'Unicode UTF-16 Big-Endian' } elseif ($byte[0] -eq 0xff -and $byte[1] -eq 0xfe) { # FF FE (UTF-16 Little-Endian) $bom = 'Unicode UTF-16 Little-Endian' } elseif ($byte[0] -eq 0 -and $byte[1] -eq 0 -and $byte[2] -eq 0xfe -and $byte[3] -eq 0xff) { # 00 00 FE FF (UTF32 Big-Endian) $bom = 'UTF32 Big-Endian' } elseif ($byte[0] -eq 0xfe -and $byte[1] -eq 0xff -and $byte[2] -eq 0 -and $byte[3] -eq 0) { # FE FF 00 00 (UTF32 Little-Endian) $bom = 'UTF32 Little-Endian' } elseif ($byte[0] -eq 0x2b -and $byte[1] -eq 0x2f -and $byte[2] -eq 0x76 -and ($byte[3] -eq 0x38 -or $byte[3] -eq 0x39 -or $byte[3] -eq 0x2b -or $byte[3] -eq 0x2f) ) { # 2B 2F 76 (38 | 38 | 2B | 2F) $bom = 'UTF7' } elseif ( $byte[0] -eq 0xf7 -and $byte[1] -eq 0x64 -and $byte[2] -eq 0x4c ) { # F7 64 4C (UTF-1) $bom = 'UTF-1' } elseif ($byte[0] -eq 0xdd -and $byte[1] -eq 0x73 -and $byte[2] -eq 0x66 -and $byte[3] -eq 
0x73) { # DD 73 66 73 (UTF-EBCDIC) $bom = 'UTF-EBCDIC' } elseif ( $byte[0] -eq 0x0e -and $byte[1] -eq 0xfe -and $byte[2] -eq 0xff ) { # 0E FE FF (SCSU) $bom = 'SCSU' } elseif ( $byte[0] -eq 0x0e -and $byte[1] -eq 0xfe -and $byte[2] -eq 0xff ) { $bom = 'SCSU' } elseif ( $byte[0] -eq 0xfb -and $byte[1] -eq 0xee -and $byte[2] -eq 0x28 ) { # FB EE 28 (BOCU-1) $bom = 'BOCU-1' } elseif ($byte[0] -eq 0x84 -and $byte[1] -eq 0x31 -and $byte[2] -eq 0x95 -and $byte[3] -eq 0x33) { # 84 31 95 33 (GB-18030) $bom = 'GB-18030' } else { $bom = 'ASCII' } Write-Host ("Detected bom is $bom.") $sr = [System.IO.FileStream]::new($MyInvocation.MyCommand.Path, [System.IO.FileMode]::Open, [System.IO.FileAccess]::Read, [System.IO.FileShare]::ReadWrite) $sr = [System.IO.StreamReader]::new($sr, $true) $sr.ReadLine().ToString() | Out-Null Write-Host ('The encoding of {0} is {1}.' -f $MyInvocation.MyCommand.Path, $sr.CurrentEncoding.HeaderName) $sr.Close() #{{- end -}} ``` seems that utf16le without bom do not work with powershell. Most windows api expects utf16le-with-bom or current codepage. This script works with workflow files but not latest release. </details>
[ "https://github.com/twpayne/chezmoi/commit/ba55f4746278a4cd235bad847c51c47f6926b81c", "https://github.com/twpayne/chezmoi/commit/b6753a7cf360f70bdffe3cdff3de48af164466d4", "https://github.com/twpayne/chezmoi/commit/41b4776d2fdd577adc79c61900a7b6413fa50ac7" ]
2025-03-25T01:48:09Z
https://github.com/twpayne/chezmoi/tree/1d3a4ee899e70aa6143a0311b3c35687886bb21b
[ "cd /testbed ; go mod download ; go build ./..." ]
[ "powershell.exe -NoLogo -NoProfile -Command \"cd /testbed; New-Item -ItemType Directory -Force -Path reports | Out-Null; go test -json ./... | Tee-Object -FilePath reports/go-test-results.json\"" ]
[ "powershell.exe -NoLogo -NoProfile -Command \"cd /testbed; Get-Content -Raw reports/go-test-results.json\"" ]
def parser(log: str) -> dict[str, str]: """ Robust parser for `go test` output, especially `go test -json` that has been soft-wrapped (e.g., PowerShell), which can inject literal newlines inside JSON tokens/strings. Approach: 1) Normalize by removing newlines that occur between two non-whitespace characters: this "unwraps" soft wraps like `github.\n.com` -> `github.com` while preserving record separators where there is whitespace. 2) Extract and parse JSON objects via brace-matching that respects strings. Prefer Action pass/fail/skip events (Package+Test). 3) Fallback: scan normalized text for textual `--- PASS|FAIL|SKIP: TestName` and associate with the nearest successfully-parsed package start (not raw regex on possibly-wrapped text). 4) Precedence: fail > skip > pass; do not upgrade skip->pass. """ import json import re from typing import Any, Dict, List, Optional, Tuple # --- 1) unwrap soft-wrapped newlines --- # Remove CR, then remove \n that are between two non-space chars. s = log.replace("\r\n", "\n").replace("\r", "\n") s = re.sub(r"(?<=\S)\n(?=\S)", "", s) results: Dict[str, str] = {} def set_status(name: str, status: str) -> None: if not name: return prev = results.get(name) if prev == "fail": return if status == "fail": results[name] = "fail" return if prev == "skip" and status == "pass": return if prev == "pass" and status == "skip": return results[name] = status # --- 2) extract JSON objects from arbitrary text (brace-match respecting strings) --- def extract_json_objects(txt: str) -> List[Tuple[int, int, str]]: out: List[Tuple[int, int, str]] = [] depth = 0 in_str = False esc = False start = -1 for i, ch in enumerate(txt): if in_str: if esc: esc = False elif ch == "\\": esc = True elif ch == '"': in_str = False continue if ch == '"': in_str = True continue if ch == "{": if depth == 0: start = i depth += 1 elif ch == "}": if depth > 0: depth -= 1 if depth == 0 and start != -1: out.append((start, i + 1, txt[start : i + 1])) start = -1 return out 
json_objs = extract_json_objects(s) # Build reliable package anchors ONLY from successfully-parsed JSON events # (start position -> pkg). pkg_anchors: List[Tuple[int, str]] = [] # Parse JSON and extract statuses. for start, end, obj_txt in json_objs: try: evt: Dict[str, Any] = json.loads(obj_txt) except Exception: continue pkg = evt.get("Package") if isinstance(pkg, str) and pkg: pkg_anchors.append((start, pkg)) action = evt.get("Action") test = evt.get("Test") if action in ("pass", "fail", "skip") and isinstance(pkg, str) and pkg and isinstance(test, str) and test: set_status(f"{pkg}::{test}", "fail" if action == "fail" else action) # Also parse textual markers inside Output fields (when present). out = evt.get("Output") if isinstance(out, str): for m in re.finditer(r"---\s+(PASS|FAIL|SKIP):\s+([^\s]+)", out): st = {"PASS": "pass", "FAIL": "fail", "SKIP": "skip"}[m.group(1)] tname = m.group(2) if isinstance(pkg, str) and pkg: set_status(f"{pkg}::{tname}", st) else: set_status(tname, st) pkg_anchors.sort() def nearest_pkg(pos: int) -> Optional[str]: # last anchor at/before pos lo, hi = 0, len(pkg_anchors) - 1 best: Optional[str] = None while lo <= hi: mid = (lo + hi) // 2 p, pkg = pkg_anchors[mid] if p <= pos: best = pkg lo = mid + 1 else: hi = mid - 1 return best # --- 3) fallback scan for textual markers in normalized text --- # Allow soft-wrapped test names by capturing up to whitespace + '(' time or endline. # Example: '--- PASS: TestX/Sub (0.00s)' # Also handle cases with no duration. marker_re = re.compile(r"---\s+(PASS|FAIL|SKIP):\s+(.+?)(?:\s+\(\d|\s*$)") for m in marker_re.finditer(s): st = {"PASS": "pass", "FAIL": "fail", "SKIP": "skip"}[m.group(1)] tname = m.group(2).strip() # compact any remaining whitespace/newlines (should be rare after unwrap) tname = re.sub(r"\s+", "", tname) pkg = nearest_pkg(m.start()) key = f"{pkg}::{tname}" if pkg else tname set_status(key, st) return results
[ "github..com/twpayne/chezmoi/v2/internal/cmd::TestScript/issue3344", "github..com/twpayne/chezmoi/v2/internal/cmd::TestScript/keepgoing", "github..com/twpayne/chezmoi/v2/internal/cmd::TestScript/issue2137", "github..com/twpayne/chezmoi/v2/internal/cmd::TestScript/issue4012", "github..com/twpayne/chezmoi/v2/...
[ "github.coom/twpayne/chezmoi/v2/internal/chezmoi::TestEntryStateEquivalent/dir1_remmove", "github..com/twpayne/chezmoi/v2/internal/chezmoi::TestEntryStateEquivalent/file2_dir1_copy", "github..com/twpayne/chezmoi/v2/internal/chezmoibubbles::TestStringInputModel/value_enter", "github..com/twpayne/chezmoi/v2/int...
starryzhang/sweb.eval.win.twpayne_1776_chezmoi-4378