From 378f1920db0243224fbea8aea4e69ce36cd85173 Mon Sep 17 00:00:00 2001
From: Cody Tapscott <84105208+topolarity@users.noreply.github.com>
Date: Wed, 28 Aug 2024 11:58:17 -0400
Subject: [PATCH] precompile: Ensure CI has inference results available for
 `jl_create_native` (#55528)

`jl_emit_codeinst` expects inference results to be available, so
`jl_ci_cache_lookup` needs to provide a CI that has them.

I noticed this because it was causing missing code when, e.g., compiling
`NetworkOptions.__init__` using `--trim` (#55047).

Unfortunately `jl_emit_codeinst` failures are silently ignored (they just
leave the LLVM module empty), so it was easy to miss that the looked-up
CIs sometimes fail to compile.
---
 src/aotcompile.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/aotcompile.cpp b/src/aotcompile.cpp
index 48afa360c0037..b4c8ef6095a55 100644
--- a/src/aotcompile.cpp
+++ b/src/aotcompile.cpp
@@ -284,7 +284,7 @@ jl_code_instance_t *jl_ci_cache_lookup(const jl_cgparams_t &cgparams, jl_method_
     jl_value_t *ci = cgparams.lookup(mi, world, world);
     JL_GC_PROMISE_ROOTED(ci);
     jl_code_instance_t *codeinst = NULL;
-    if (ci != jl_nothing) {
+    if (ci != jl_nothing && jl_atomic_load_relaxed(&((jl_code_instance_t *)ci)->inferred) != jl_nothing) {
         codeinst = (jl_code_instance_t*)ci;
     }
     else {