Safe Rust code segfaults on Linux when building with `--release`

I’m developing a query planning module and use a nested enum to represent a query plan. My binary segfaults when built in release mode, and I’ve traced the issue to the following code. The variables in `plan` are incorrect, so `sub_plan` points to an invalid address. When I try to access the `QueryPlan` pointed to by `sub_plan`, the program segfaults.

fn create_group_by(
    &self,
    plan_id: PlanId,
    group_by: &GroupBy,
    // ... omit
) -> Result<QueryPlan> {
    // ... omit

    match group_by.tags {
        Some(ref group_tag_keys) => {
            // ... omit
            let mut plan = QueryPlan::GroupByTags(GroupByTagsPlan {
                plan_id,
                field_id,
                series_group_keys,
                sub_plans,
            });

            // This cause segfault
            plan = QueryPlan::Aggregate(AggregatePlan {
                plan_id,
                field_id,
                aggregator,
                sub_plan: Box::new(plan),
            });

            // No segfault if I use let binding instead of assignment
            // let plan = QueryPlan::GroupByTags(GroupByTagsPlan {});
            // let plan = QueryPlan::Aggregate(AggregatePlan {});

            // No segfault if I add a log and print the plan here
            // debug!(
            //     "QueryPlanBuilder::create_group_by create aggregate plan, plan:{:#?}",
            //     plan
            // );

            Ok(plan)
        }
        None => {
            // ... omit
        }
    }
}

// Create other plan
fn another_function(&self) -> Result<QueryPlan> {
    let mut sub_plan = self.create_group_by();
    sub_plan = QueryPlan::AnotherPlan(
        // ... omit
    );
    if condition {
        sub_plan = QueryPlan::AnotherPlan(
            // ... omit
        );
    }
    // ... omit
}

struct AggregatePlan {
    plan_id: PlanId,
    field_id: FieldId,
    aggregator: Aggregator,
    sub_plan: Box<QueryPlan>,
}

enum QueryPlan {
    // ... omit
    ReadColumn(ReadColumnPlan),
    GroupByTags(GroupByTagsPlan),
    Aggregate(AggregatePlan),
    Empty(PlanId),
}

The program has no issue If

  • Print the plan (AggregatePlan)
  • Or use let binding
  • Or build with cargo build (without optimization)

I tried to trace the assembly code of the program with and without the debug print, and found the assembly code that causes the segfault.

Assembly code of binary With debug! print (via objdump -C -S):

15b2ec:   e8 9f 6c 1c 00          callq  321f90 <log::__private_api_log::h56d8671e57922675>

              debug!("QueryPlanBuilder::create_group_by create aggregate plan, plan:{:#?}", plan);
15b2f1:   48 8b 94 24 50 01 00    mov    0x150(%rsp),%rdx
15b2f8:   00
15b2f9:   4c 8b b4 24 58 01 00    mov    0x158(%rsp),%r14
15b300:   00
15b301:   eb 05                   jmp    15b308 
15b303:   ba 05 00 00 00          mov    $0x5,%edx
15b308:   48 89 d6                mov    %rdx,%rsi
15b30b:   48 81 e6 00 ff ff ff    and    $0xffffffffffffff00,%rsi
15b312:   48 89 d7                mov    %rdx,%rdi
15b315:   48 81 e7 00 00 ff ff    and    $0xffffffffffff0000,%rdi
15b31c:   48 8d 84 24 69 01 00    lea    0x169(%rsp),%rax
15b323:   00
15b324:   48 89 c5                mov    %rax,%rbp

// ----------------------------------------------
// Address of plan is in -0x9(%rax)
// Address of %rax is 0x7fffffffd689
// (gdb) x /8xb 0x7fffffffd680
// 0x7fffffffd680: 0x60    0x11    0xa3    0xf6    0xff    0x7f    0x00    0x00
//
// Address of plan: 0x7ffff6a31160 (correct)

15b327:   0f 10 40 f7             movups -0x9(%rax),%xmm0

// ----------------------------------------------

15b32b:   0f 29 84 24 40 02 00    movaps %xmm0,0x240(%rsp)
15b332:   00
15b333:   48 8b 8c 24 70 01 00    mov    0x170(%rsp),%rcx
15b33a:   00
15b33b:   8b 84 24 78 01 00 00    mov    0x178(%rsp),%eax
15b342:   8b 9c 24 7c 01 00 00    mov    0x17c(%rsp),%ebx

Assembly code of binary Without debug! print (Segfault)

                   sub_plan: Box::new(plan),
               });

               // debug!("QueryPlanBuilder::create_group_by create aggregate plan, plan:{:#?}", plan);

              Ok(plan)
// ----------------------------------------------
// Address of plan is in -0x9(%rdx)
// Address of %rdx is 0x7fffffffd669
// Address of %rsp is 0x7fffffffd500
// (gdb) x /8xb 0x7fffffffd660
// 0x7fffffffd660: 0x60    0x11    0xa3    0xf6    0xff    0x7f    0x00    0x00
//
// The source address of plan is correct (0x7ffff6a31160)
//
// Data on stack before `mov    -0x9(%rdx),%eax`
// (gdb) x /8xb 0x7fffffffd500 + 0x20
// 0x7fffffffd520: 0x03    0x00    0x00    0x00    0x88    0x13    0x00    0x00

15c465:   8b 42 f7                mov    -0x9(%rdx),%eax
15c468:   89 44 24 20             mov    %eax,0x20(%rsp)

// Data on stack now (only mov 4 bytes)
// (gdb) x /8xb 0x7fffffffd500 + 0x20
// 0x7fffffffd520: 0x60    0x11    0xa3    0xf6    0x88    0x13    0x00    0x00

15c46c:   8a 42 fb                mov    -0x5(%rdx),%al
15c46f:   88 44 24 24             mov    %al,0x24(%rsp)

// Data on stack now (only mov 1 byte)
// (gdb) x /8xb 0x7fffffffd500 + 0x20
// 0x7fffffffd520: 0x60    0x11    0xa3    0xf6    0xff    0x13    0x00    0x00
// We got wrong address of plan: 0x13fff6a31160
// ----------------------------------------------

15c473:   0f b7 42 04             movzwl 0x4(%rdx),%eax
15c477:   66 89 44 24 2d          mov    %ax,0x2d(%rsp)
15c47c:   8a 42 06                mov    0x6(%rdx),%al
15c47f:   88 44 24 2f             mov    %al,0x2f(%rsp)
15c483:   48 8b 8c 24 70 01 00    mov    0x170(%rsp),%rcx
15c48a:   00
15c48b:   8b 84 24 78 01 00 00    mov    0x178(%rsp),%eax
15c492:   8b ac 24 7c 01 00 00    mov    0x17c(%rsp),%ebp

Segfault backtrace:

Program received signal SIGSEGV, Segmentation fault.

// Notice: plan=0x13fff6a31160
// 0x13fff6a31160 is the address mov incorrectly!!!
engine::query::executor::QueryTaskBuilder::build_field_task::h1620d2a68ef5ed2a (self=0x7fffffffde68, plan=0x13fff6a31160) at src/engine/query/executor.rs:104
104             QueryPlan::ReadColumn(plan) => {
(gdb) bt
#0  engine::query::executor::QueryTaskBuilder::build_field_task::h1620d2a68ef5ed2a (self=0x7fffffffde68, plan=0x13fff6a31160) at src/engine/query/executor.rs:104
#1  0x0000555555698c4f in engine::query::executor::QueryTaskBuilder::build_field_task::h1620d2a68ef5ed2a (self=0x7fffffffde68, plan=0x7ffff6a311c0) at src/engine/query/executor.rs:167
#2  0x00005555556993cd in engine::query::executor::QueryTaskBuilder::build_field_task::h1620d2a68ef5ed2a (self=0x7fffffffde68, plan=0x7ffff6a31228) at src/engine/query/executor.rs:173
#3  0x000055555569873e in engine::query::executor::QueryTaskBuilder::build_task::h6fd96ff8584b896d (self=0x7fffffffde68, plan=0x7fffffffdf20) at src/engine/query/executor.rs:76
#4  0x0000555555692cd2 in crash_test::main::ha5c7b66441774f8a () at src/bin/crash_test.rs:274
#5  0x000055555568c933 in std::rt::lang_start::_$u7b$$u7b$closure$u7d$$u7d$::h1a66f71f196493a2 () at /checkout/src/libstd/rt.rs:74
#6  0x0000555555876ad3 in {{closure}} () at libstd/rt.rs:59
#7  std::panicking::try::do_call::h0208f3354fcc3e87 () at libstd/panicking.rs:310
#8  0x00005555558917ea in __rust_maybe_catch_panic () at libpanic_unwind/lib.rs:105
#9  0x0000555555877c86 in try<i32,closure> () at libstd/panicking.rs:289
#10 catch_unwind<closure,i32> () at libstd/panic.rs:392
#11 std::rt::lang_start_internal::h56481fc9d09c36f4 () at libstd/rt.rs:58
#12 0x00005555556934ea in main ()

I found that only 5 bytes of the address of `plan` are moved, so we end up with a wrong address 0x13fff6a31160. When we try to access it, segfault!

My OS is Ubuntu-14.04 and rustc version is stable rustc 1.29.2 (17a9dc751 2018-10-05). This problem can reproduce on our Linux server with latest stable rustc (rustc 1.30.1 (1433507eb 2018-11-07), stable-x86_64-unknown-linux-gnu)

I think the compiler may be mis-optimizing my code, but I have no idea what the compiler has done

  • I tried to write some similar demo with enum and Box but can’t reproduce this issue :frowning:
  • Segfault is caused by safe rust code
  • No stack overflow

Does anyone have any ideas?

3 Likes

Are you able to share the full source code?

Are you sure there’s no unsafe code anywhere else (for example in a dependent crate) that is introducing a bug?

You can try using rr to see where the error is rooted.

2 Likes

Have you tried to use the sanitizers from unstable?

Maybe if you are not using any unsafe, there could be a problem with one crate.

2 Likes

Thanks! This is not my personal project :frowning: so I can’t share the full source code easily. We do use some unsafe code in other modules and in some dependent crates, but none of it is invoked in my test program or even in the unit test code.

Thanks, I will try it later

What is the actual instruction that is segfaulting?

1 Like

Dereference the address plan=0x13fff6a31160 (Invalid memory address), which should be 0x7ffff6a31160

(gdb) bt
#0  engine::query::executor::QueryTaskBuilder::build_field_task::h1620d2a68ef5ed2a (self=0x7fffffffde68, plan=0x13fff6a31160)

This incorrect address is produced by the following instructions before the function returns

mov    -0x9(%rdx),%eax
mov    %eax,0x20(%rsp)
mov    -0x5(%rdx),%al
mov    %al,0x24(%rsp)

Yes but what is the instruction during the fault? GDB: disas

1 Like
Program received signal SIGSEGV, Segmentation fault.
engine::query::executor::QueryTaskBuilder::build_field_task::h1620d2a68ef5ed2a (self=0x7fffffffde68, plan=0x13fff6a31160) at src/engine/query/executor.rs:104
104             QueryPlan::ReadColumn(plan) => {
(gdb) disas
Dump of assembler code for function engine::query::executor::QueryTaskBuilder::build_field_task::h1620d2a68ef5ed2a:
   0x0000555555698a20 <+0>: push   %rbp
   0x0000555555698a21 <+1>: push   %r15
   0x0000555555698a23 <+3>: push   %r14
   0x0000555555698a25 <+5>: push   %r13
   0x0000555555698a27 <+7>: push   %r12
   0x0000555555698a29 <+9>: push   %rbx
   0x0000555555698a2a <+10>:    sub    $0xa8,%rsp
   0x0000555555698a31 <+17>:    mov    %rdx,%r15
   0x0000555555698a34 <+20>:    mov    %rsi,%r12
   0x0000555555698a37 <+23>:    mov    %rdi,%r13
=> 0x0000555555698a3a <+26>:    mov    (%rdx),%al
   0x0000555555698a3c <+28>:    add    $0xff,%al
   0x0000555555698a3e <+30>:    cmp    $0x8,%al
   0x0000555555698a40 <+32>:    ja     0x555555698ca6 <engine::query::executor::QueryTaskBuilder::build_field_task::h1620d2a68ef5ed2a+646>
(gdb) i r rdx
rdx            0x13fff6a31160 21990075470176

Can you try using C-Reduce to produce a smaller test case? The C-Reduce authors claim it sometimes works on Rust.

2 Likes

Thanks! I will try to reproduce this in a smaller test case, as it’s quite hard to describe the issue I encountered, but that may take some time. Once I find a much simpler test case, I will share it!

Now changing my code in this form is a workaround

let plan = QueryPlan::GroupByTags(GroupByTagsPlan {
    plan_id,
    field_id,
    series_group_keys,
    sub_plans,
});

let plan = QueryPlan::Aggregate(AggregatePlan {
    plan_id,
    field_id,
    aggregator,
    sub_plan: Box::new(plan),
});

Yeah, smells like a compiler bug :frowning:.

Curious - what if you put the Box::new(plan) into its own binding, like so:

 let mut plan = QueryPlan::GroupByTags(GroupByTagsPlan {
    plan_id,
    field_id,
    series_group_keys,
    sub_plans,
});

let sub_plan = Box::new(plan);

plan = QueryPlan::Aggregate(AggregatePlan {
    plan_id,
    field_id,
    aggregator,
    sub_plan,
});

This also works, the pointer address is also correct.

@evenyag I’d go ahead and file a Rust Github issue at this point - this seems like a legit miscompile, and you’ll get more compiler-dev eyes on it there.

8 Likes

This is the source code of the function causes segfault, may provide more details about the issue

fn create_group_by(
    &self,
    plan_id: PlanId,
    group_by: &GroupBy,
    map_result: &mut MapQueryResult,
    field_info: &FieldInfo,
    downsample: Option<Downsample>,
    aggregator: Option<Aggregator>,
    series_ids: &[SeriesId],
) -> Result<QueryPlan> {
    debug!(
        "QueryPlanBuilder::create_group_by, plan_id:{}, field_id:{}, by_series:{}",
        plan_id,
        field_info.id,
        group_by.is_by_series()
    );

    let mut sub_plans = Vec::new();
    for (shard_id, sub_cond) in &mut map_result.sub_conds {
        if let Some(v) = downsample {
            let start_time = sub_cond.start_time / v.interval * v.interval + v.interval;
            if start_time > sub_cond.end_time {
                sub_plans.push((*shard_id, QueryPlan::Empty(plan_id)));
            } else {
                debug!(
                    "QueryPlanBuilder::create_group_by downsample, plan_id:{}, \
                     field_id:{}, interval:{}, old_start:{}, new_start:{}",
                    plan_id, field_info.id, v.interval, sub_cond.start_time, start_time
                );

                sub_cond.start_time = start_time;

                let mut sub_plan = self
                    .create_select_series(plan_id, *shard_id, sub_cond, field_info, series_ids);

                sub_plan = QueryPlan::Downsample(DownsamplePlan {
                    plan_id,
                    field_id: field_info.id,
                    downsample: v,
                    sub_plan: Box::new(sub_plan),
                });

                sub_plans.push((*shard_id, sub_plan));
            }
        } else {
            let sub_plan =
                self.create_select_series(plan_id, *shard_id, sub_cond, field_info, series_ids);

            sub_plans.push((*shard_id, sub_plan));
        }
    }

    match group_by.tags {
        Some(ref group_tag_keys) => {
            let mut series_group_keys = HashMap::new();

            let aggregator = match aggregator {
                Some(v) => v,
                None => {
                    return Err(CommonErr(
                        CommonErrCode::InvalidParam,
                        "Aggregator required".to_owned(),
                    ));
                }
            };

            if group_tag_keys.is_empty() {
                for series_id in series_ids {
                    series_group_keys.insert(*series_id, GroupKey::Tags(BTreeMap::new()));
                }
            } else {
                for series_id in series_ids {
                    let series_key = match self.series_manager.series_key(*series_id) {
                        Ok(Some(v)) => v,
                        Ok(None) => {
                            warn!(
                                "QueryPlanBuilder::create_select series key not found, \
                                 plan_id:{}, series_id:{}",
                                plan_id, series_id
                            );
                            continue;
                        }
                        Err(e) => {
                            error!(
                                "QueryPlanBuilder::create_select find series key failed, \
                                 plan_id:{}, series_id:{}, err:{}",
                                plan_id, series_id, e
                            );
                            continue;
                        }
                    };

                    match series::decode_series_key(&series_key) {
                        Ok((_, tags)) => {
                            let group_key = series_group_key(&group_tag_keys, &tags);

                            series_group_keys.insert(*series_id, group_key);
                        }
                        Err(e) => {
                            error!(
                                "QueryPlanBuilder::create_select decode series key \
                                 failed, plan_id:{}, series_id:{}, err:{}",
                                plan_id, series_id, e
                            );
                            continue;
                        }
                    }
                }
            }

            let field_id = field_info.id;

            let mut plan = QueryPlan::GroupByTags(GroupByTagsPlan {
                plan_id,
                field_id,
                series_group_keys,
                sub_plans,
            });

            // NOTICE No segfault if I remove this debug log
            debug!(
                "QueryPlanBuilder::create_group_by aggregate, plan_id:{}, field_id:{}",
                plan_id, field_id
            );

            plan = QueryPlan::Aggregate(AggregatePlan {
                plan_id,
                field_id,
                aggregator,
                sub_plan: Box::new(plan),
            });

            Ok(plan)
        }
        None => {
            Ok(QueryPlan::GroupBySeries(GroupBySeriesPlan {
                plan_id,
                field_id: field_info.id,
                sub_plans,
            }))
        }
    }
}

Valgrind output:

==15816== Memcheck, a memory error detector
==15816== Copyright (C) 2002-2017, and GNU GPL'd, by Julian Seward et al.
==15816== Using Valgrind-3.13.0 and LibVEX; rerun with -h for copyright info
==15816== Command: ./target/release/crash-test
==15816== 
==15816== Invalid read of size 1
==15816==    at 0x24C85A: engine::query::executor::QueryTaskBuilder::build_field_task (executor.rs:104)
==15816==    by 0x24CA6E: engine::query::executor::QueryTaskBuilder::build_field_task (executor.rs:167)
==15816==    by 0x24D1EC: engine::query::executor::QueryTaskBuilder::build_field_task (executor.rs:173)
==15816==    by 0x24C55D: engine::query::executor::QueryTaskBuilder::build_task (executor.rs:76)
==15816==    by 0x246AF1: crash_test::main (crash_test.rs:274)
==15816==    by 0x240752: std::rt::lang_start::{{closure}} (rt.rs:74)
==15816==    by 0x42A512: {{closure}} (rt.rs:59)
==15816==    by 0x42A512: _ZN3std9panicking3try7do_call17h0208f3354fcc3e87E.llvm.1680211052236547615 (panicking.rs:310)
==15816==    by 0x445229: __rust_maybe_catch_panic (lib.rs:105)
==15816==    by 0x42B6C5: try<i32,closure> (panicking.rs:289)
==15816==    by 0x42B6C5: catch_unwind<closure,i32> (panic.rs:392)
==15816==    by 0x42B6C5: std::rt::lang_start_internal (rt.rs:58)
==15816==    by 0x247309: main (in /home/target/release/crash-test)
==15816==  Address 0xd1d5300064380c0 is not stack'd, malloc'd or (recently) free'd
==15816== 
==15816== 
==15816== Process terminating with default action of signal 11 (SIGSEGV)
==15816==  General Protection Fault
==15816==    at 0x24C85A: engine::query::executor::QueryTaskBuilder::build_field_task (executor.rs:104)
==15816==    by 0x24CA6E: engine::query::executor::QueryTaskBuilder::build_field_task (executor.rs:167)
==15816==    by 0x24D1EC: engine::query::executor::QueryTaskBuilder::build_field_task (executor.rs:173)
==15816==    by 0x24C55D: engine::query::executor::QueryTaskBuilder::build_task (executor.rs:76)
==15816==    by 0x246AF1: crash_test::main (crash_test.rs:274)
==15816==    by 0x240752: std::rt::lang_start::{{closure}} (rt.rs:74)
==15816==    by 0x42A512: {{closure}} (rt.rs:59)
==15816==    by 0x42A512: _ZN3std9panicking3try7do_call17h0208f3354fcc3e87E.llvm.1680211052236547615 (panicking.rs:310)
==15816==    by 0x445229: __rust_maybe_catch_panic (lib.rs:105)
==15816==    by 0x42B6C5: try<i32,closure> (panicking.rs:289)
==15816==    by 0x42B6C5: catch_unwind<closure,i32> (panic.rs:392)
==15816==    by 0x42B6C5: std::rt::lang_start_internal (rt.rs:58)
==15816==    by 0x247309: main (in /home/target/release/crash-test)
==15816== 
==15816== HEAP SUMMARY:
==15816==     in use at exit: 1,847,176 bytes in 26,829 blocks
==15816==   total heap usage: 26,863 allocs, 34 frees, 1,850,200 bytes allocated
==15816== 
==15816== LEAK SUMMARY:
==15816==    definitely lost: 0 bytes in 0 blocks
==15816==    indirectly lost: 0 bytes in 0 blocks
==15816==      possibly lost: 0 bytes in 0 blocks
==15816==    still reachable: 1,847,176 bytes in 26,829 blocks
==15816==                       of which reachable via heuristic:
==15816==                         stdstring          : 1,373,244 bytes in 25,545 blocks
==15816==         suppressed: 0 bytes in 0 blocks
==15816== Rerun with --leak-check=full to see details of leaked memory
==15816== 
==15816== For counts of detected and suppressed errors, rerun with: -v
==15816== ERROR SUMMARY: 2 errors from 1 contexts (suppressed: 0 from 0)

MemorySanitizer output seems helpless

Uninitialized bytes in __interceptor_memchr at offset 0 inside [0x701000000020, 4)
==27899==WARNING: MemorySanitizer: use-of-uninitialized-value
    #0 0x55aee17f4cae in std::sys::unix::memchr::memchr::h2873dd310dd2c781 /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libstd/sys/unix/memchr.rs:18:8
    #1 0x55aee17f4cae in std::memchr::memchr::hbb94735e0354d025 /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libstd/memchr.rs:35
    #2 0x55aee17f4cae in std::ffi::c_str::CString::_new::hd321bebdbdfd8361 /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libstd/ffi/c_str.rs:336
    #3 0x55aee17f4cae in std::ffi::c_str::CString::new::h7d4058a9952e75c8 /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libstd/ffi/c_str.rs:332
    #4 0x55aee17f4cae in std::thread::Thread::new::_$u7b$$u7b$closure$u7d$$u7d$::hae7217d1a4fadad5 /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libstd/thread/mod.rs:1092
    #5 0x55aee17f4cae in _$LT$core..option..Option$LT$T$GT$$GT$::map::h9e724da2d381325a /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libcore/option.rs:424
    #6 0x55aee17f4cae in std::thread::Thread::new::h53d3fd3c1b618a0c /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libstd/thread/mod.rs:1091
    #7 0x55aee17fef6c in std::rt::lang_start_internal::h2d62c467a5145295 /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libstd/rt.rs:50:21
    #8 0x55aee12f586f in main (/home/target/x86_64-unknown-linux-gnu/release/crash-test+0x15386f)
    #9 0x7fb1bd014b96 in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x21b96)
    #10 0x55aee12bef49 in _start (/home/target/x86_64-unknown-linux-gnu/release/crash-test+0x11cf49)

SUMMARY: MemorySanitizer: use-of-uninitialized-value /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libstd/sys/unix/memchr.rs:18:8 in std::sys::unix::memchr::memchr::h2873dd310dd2c781

AddressSanitizer output

==27462==ERROR: AddressSanitizer: odr-violation (0x56012cd1da00):
  [1] size=0 'anon.6bc61fb03c9183515fc1577e46d0a761.25' crash_test.1un2dkmi-cgu.4
  [2] size=0 'anon.90554a891007470ed90328fa01cebd0a.2' crash_test.1un2dkmi-cgu.12
These globals were registered at these points:
  [1]:
    #0 0x56012bb51ace in __asan_register_globals /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libcompiler_builtins/compiler-rt/lib/asan/asan_globals.cc:359:3
    #1 0x56012bb529ab in __asan_register_elf_globals /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libcompiler_builtins/compiler-rt/lib/asan/asan_globals.cc:342:3

  [2]:
    #0 0x56012bb51ace in __asan_register_globals /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libcompiler_builtins/compiler-rt/lib/asan/asan_globals.cc:359:3
    #1 0x56012bb529ab in __asan_register_elf_globals /rustc/9fefb67669f00c25b476e7a80c9c9300a987d517/src/libcompiler_builtins/compiler-rt/lib/asan/asan_globals.cc:342:3

==27462==HINT: if you don't care about these errors you may set ASAN_OPTIONS=detect_odr_violation=0
SUMMARY: AddressSanitizer: odr-violation: global 'anon.6bc61fb03c9183515fc1577e46d0a761.25' at crash_test.1un2dkmi-cgu.4

Unfortunately, when you have external libs that are not compiled with MemorySanitizer, it becomes useless unless you recompile every dependency with msan. Obviously this is generally not doable :frowning:

For the asan: have you tried to use the ASAN_OPTIONS=detect_odr_violation=0 option? I always have this kind of error when I try to use the ASan, I think it is a known issue.