Skip to content

staticdata: Accept CodeInstances with addl. code from pkgimages #236

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 2 commits into
base: v1.10.2+RAI
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 3 additions & 6 deletions base/compiler/typeinfer.jl
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
# This file is a part of Julia. License is MIT: https://julialang.org/license

# Tracking of newly-inferred CodeInstances during precompilation
const track_newly_inferred = RefValue{Bool}(false)
const newly_inferred = CodeInstance[]

# build (and start inferring) the inference frame for the top-level MethodInstance
Expand Down Expand Up @@ -404,11 +403,9 @@ function cache_result!(interp::AbstractInterpreter, result::InferenceResult)
if !already_inferred
inferred_result = transform_result_for_cache(interp, linfo, valid_worlds, result)
code_cache(interp)[linfo] = ci = CodeInstance(interp, result, inferred_result, valid_worlds)
if track_newly_inferred[]
m = linfo.def
if isa(m, Method) && m.module != Core
ccall(:jl_push_newly_inferred, Cvoid, (Any,), ci)
end
m = linfo.def
if isa(m, Method) && m.module != Core
ccall(:jl_push_newly_inferred, Cvoid, (Any,), ci)
end
end
unlock_mi_inference(interp, linfo)
Expand Down
4 changes: 2 additions & 2 deletions base/loading.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2225,15 +2225,15 @@ function include_package_for_output(pkg::PkgId, input::String, depot_path::Vecto
end

ccall(:jl_set_newly_inferred, Cvoid, (Any,), Core.Compiler.newly_inferred)
Core.Compiler.track_newly_inferred.x = true
ccall(:jl_track_newly_inferred, Cvoid, (Cint,), 1)
try
Base.include(Base.__toplevel__, input)
catch ex
precompilableerror(ex) || rethrow()
@debug "Aborting `create_expr_cache'" exception=(ErrorException("Declaration of __precompile__(false) not allowed"), catch_backtrace())
exit(125) # we define status = 125 means PrecompileableError
finally
Core.Compiler.track_newly_inferred.x = false
ccall(:jl_track_newly_inferred, Cvoid, (Cint,), 0)
end
end

Expand Down
2 changes: 2 additions & 0 deletions src/gf.c
Original file line number Diff line number Diff line change
Expand Up @@ -2874,6 +2874,8 @@ static void _generate_from_hint(jl_method_instance_t *mi, size_t world)
if (codeinst == jl_nothing) {
(void)jl_type_infer(mi, world, 1);
codeinst = jl_rettype_inferred(mi, world, world);
} else {
jl_push_newly_inferred(codeinst);
}
if (codeinst != jl_nothing) {
if (jl_atomic_load_relaxed(&((jl_code_instance_t*)codeinst)->invoke) == jl_fptr_const_return)
Expand Down
45 changes: 34 additions & 11 deletions src/staticdata.c
Original file line number Diff line number Diff line change
Expand Up @@ -594,8 +594,22 @@ static void jl_load_sysimg_so(void)
static int jl_needs_serialization(jl_serializer_state *s, jl_value_t *v) JL_NOTSAFEPOINT
{
// ignore items that are given a special relocation representation
if (s->incremental && jl_object_in_image(v))
if (s->incremental && jl_object_in_image(v)) {
if (native_functions && jl_is_code_instance(v)) {
// serialize a copy of a CodeInstance, if we have code to add for it
int32_t invokeptr_id = 0;
int32_t specfptr_id = 0;
// see if we generated code for it
jl_get_function_id(
native_functions,
(jl_code_instance_t *)v,
&invokeptr_id,
&specfptr_id
);
return (invokeptr_id != 0) || (specfptr_id != 0);
}
return 0;
}

if (v == NULL || jl_is_symbol(v) || v == jl_nothing) {
return 0;
Expand Down Expand Up @@ -650,7 +664,7 @@ static int needs_recaching(jl_value_t *v) JL_NOTSAFEPOINT

// Return nonzero when `v` must be uniqued (merged with an existing object)
// at deserialization time, i.e. its caching tag is 1.
// Invariant: callers only pass objects that are not already part of a loaded
// image, with one exception introduced by this change: a CodeInstance may be
// re-serialized as a copy of an in-image object in order to attach newly
// generated native code, so CodeInstances are exempt from the assertion.
// (The scraped diff had collapsed the removed and added assert lines into
// two consecutive statements; only the updated assert is kept here.)
static int needs_uniquing(jl_value_t *v) JL_NOTSAFEPOINT
{
    assert(!jl_object_in_image(v) || jl_is_code_instance(v));
    return caching_tag(v) == 1;
}

Expand Down Expand Up @@ -1003,8 +1017,8 @@ static uintptr_t add_external_linkage(jl_serializer_state *s, jl_value_t *v, jl_
// Return the integer `id` for `v`. Generically this is looked up in `serialization_order`,
// but symbols, small integers, and a couple of special items (`nothing` and the root Task)
// have special handling.
#define backref_id(s, v, link_ids) _backref_id(s, (jl_value_t*)(v), link_ids)
static uintptr_t _backref_id(jl_serializer_state *s, jl_value_t *v, jl_array_t *link_ids) JL_NOTSAFEPOINT
#define backref_id(s, v, link_ids) _backref_id(s, (jl_value_t*)(v), link_ids, /* allow_copies */ 1)
static uintptr_t _backref_id(jl_serializer_state *s, jl_value_t *v, jl_array_t *link_ids, int allow_copies) JL_NOTSAFEPOINT
{
assert(v != NULL && "cannot get backref to NULL object");
void *idx = HT_NOTFOUND;
Expand Down Expand Up @@ -1041,15 +1055,24 @@ static uintptr_t _backref_id(jl_serializer_state *s, jl_value_t *v, jl_array_t *
uint8_t u8 = *(uint8_t*)v;
return ((uintptr_t)TagRef << RELOC_TAG_OFFSET) + u8 + 2 + NBOX_C + NBOX_C;
}
if (s->incremental && jl_object_in_image(v)) {
assert(link_ids);
uintptr_t item = add_external_linkage(s, v, link_ids);
assert(item && "no external linkage identified");
return item;
if (!allow_copies) {
if (s->incremental && jl_object_in_image(v)) {
assert(link_ids);
uintptr_t item = add_external_linkage(s, v, link_ids);
assert(item && "no external linkage identified");
return item;
}
}
if (idx == HT_NOTFOUND) {
idx = ptrhash_get(&serialization_order, v);
if (idx == HT_NOTFOUND) {
if (allow_copies && s->incremental && jl_object_in_image(v)) {
assert(link_ids);
uintptr_t item = add_external_linkage(s, v, link_ids);
assert(item && "no external linkage identified");
return item;
}
// something went wrong
jl_(jl_typeof(v));
jl_(v);
}
Expand Down Expand Up @@ -1190,7 +1213,7 @@ static void jl_write_values(jl_serializer_state *s) JL_GC_DISABLED
for (size_t item = 0; item < l; item++) {
jl_value_t *v = (jl_value_t*)serialization_queue.items[item]; // the object
JL_GC_PROMISE_ROOTED(v);
assert(!(s->incremental && jl_object_in_image(v)));
assert(!(s->incremental && jl_object_in_image(v) && !jl_is_code_instance(v)));
jl_datatype_t *t = (jl_datatype_t*)jl_typeof(v);
assert((t->instance == NULL || t->instance == v) && "detected singleton construction corruption");
ios_t *f = s->s;
Expand Down Expand Up @@ -2061,7 +2084,7 @@ static uint32_t write_gvars(jl_serializer_state *s, arraylist_t *globals, arrayl
for (size_t i = 0; i < external_fns->len; i++) {
jl_code_instance_t *ci = (jl_code_instance_t*)external_fns->items[i];
assert(ci && (jl_atomic_load_relaxed(&ci->specsigflags) & 0b001));
uintptr_t item = backref_id(s, (void*)ci, s->link_ids_external_fnvars);
uintptr_t item = _backref_id(s, (jl_value_t*)ci, s->link_ids_external_fnvars, /* allow_copies */ 0);
uintptr_t reloc = get_reloc_for_item(item, 0);
write_reloc_t(s->gvar_record, reloc);
}
Expand Down
30 changes: 25 additions & 5 deletions src/staticdata_utils.c
Original file line number Diff line number Diff line change
Expand Up @@ -83,9 +83,15 @@ static uint64_t jl_worklist_key(jl_array_t *worklist) JL_NOTSAFEPOINT
}

static jl_array_t *newly_inferred JL_GLOBALLY_ROOTED /*FIXME*/;
static _Atomic(int) track_newly_inferred = 0;
// Mutex for newly_inferred
jl_mutex_t newly_inferred_mutex;

// Enable or disable recording of freshly inferred CodeInstances into the
// global `newly_inferred` list (consumed when writing an incremental image).
// `enable` is treated as a boolean flag.
// The release store pairs with the acquire load in jl_push_newly_inferred,
// so a pusher that observes the flag as set also observes any state written
// before it was set. Called via ccall from Base.include_package_for_output
// around the precompile workload (see the loading.jl hunk in this diff).
JL_DLLEXPORT void jl_track_newly_inferred(int enable)
{
jl_atomic_store_release(&track_newly_inferred, enable);
}

// Register array of newly-inferred MethodInstances
// This gets called as the first step of Base.include_package_for_output
JL_DLLEXPORT void jl_set_newly_inferred(jl_value_t* _newly_inferred)
Expand All @@ -97,9 +103,11 @@ JL_DLLEXPORT void jl_set_newly_inferred(jl_value_t* _newly_inferred)
// Append CodeInstance `ci` to the global `newly_inferred` array, but only
// while tracking is enabled (toggled via jl_track_newly_inferred).
// The whole operation is serialized by `newly_inferred_mutex` so concurrent
// inference can record results safely; the tracking flag is read with an
// acquire load to pair with the release store in jl_track_newly_inferred.
// (The scraped diff had collapsed the removed unguarded push and the added
// guarded push into one body, which would have pushed `ci` twice while
// tracking was on; only the guarded version is kept here.)
JL_DLLEXPORT void jl_push_newly_inferred(jl_value_t* ci)
{
    JL_LOCK(&newly_inferred_mutex);
    if (jl_atomic_load_acquire(&track_newly_inferred)) {
        size_t end = jl_array_len(newly_inferred);
        jl_array_grow_end(newly_inferred, 1);
        jl_arrayset(newly_inferred, ci, end);
    }
    JL_UNLOCK(&newly_inferred_mutex);
}

Expand Down Expand Up @@ -230,8 +238,11 @@ static jl_array_t *queue_external_cis(jl_array_t *list)
for (i = n0; i-- > 0; ) {
jl_code_instance_t *ci = (jl_code_instance_t*)jl_array_ptr_ref(list, i);
assert(jl_is_code_instance(ci));
if (!ci->relocatability)
if (!ci->relocatability && !jl_object_in_image((jl_value_t *)ci)) {
// we don't care about re-locatability for duplicated CI's
// since the original copy will provide sufficient roots anyway
continue;
}
jl_method_instance_t *mi = ci->def;
jl_method_t *m = mi->def.method;
if (ci->inferred && jl_is_method(m) && jl_object_in_image((jl_value_t*)m->module)) {
Expand Down Expand Up @@ -1121,6 +1132,10 @@ static void jl_insert_backedges(jl_array_t *edges, jl_array_t *ext_targets, jl_a
jl_method_instance_t *caller = ci->def;
if (ci->inferred && jl_rettype_inferred(caller, minworld, ~(size_t)0) == jl_nothing) {
jl_mi_cache_insert(caller, ci);
} else if (jl_atomic_load_relaxed(&ci->invoke) != NULL &&
jl_method_compiled(caller, ci->min_world) == NULL) {
// new CI has fresh code for us
jl_mi_cache_insert(caller, ci);
}
//jl_static_show((jl_stream*)ios_stderr, (jl_value_t*)caller);
//ios_puts("free\n", ios_stderr);
Expand Down Expand Up @@ -1158,14 +1173,19 @@ static void jl_insert_backedges(jl_array_t *edges, jl_array_t *ext_targets, jl_a
// then enable any methods associated with it
void *ci = ptrhash_get(&visited, (void*)caller);
//assert(ci != HT_NOTFOUND);
if (ci != HT_NOTFOUND) {
if (ci != HT_NOTFOUND && maxvalid != 0) {
// have some new external code to use
assert(jl_is_code_instance(ci));
jl_code_instance_t *codeinst = (jl_code_instance_t*)ci;
assert(codeinst->min_world == minworld && codeinst->inferred);
assert(maxvalid >= minworld);
codeinst->max_world = maxvalid;
if (jl_rettype_inferred(caller, minworld, maxvalid) == jl_nothing) {
jl_mi_cache_insert(caller, codeinst);
} else if (jl_atomic_load_relaxed(&codeinst->invoke) != NULL &&
jl_method_compiled(caller, codeinst->min_world) == NULL) {
// new CI has fresh code for us
jl_mi_cache_insert(caller, codeinst);
}
}
}
Expand Down