use BTree{Map,Set} to generate the same graphs when a Rust version change alters HashMap iteration order

Fabian 2025-01-17 15:23:48 -06:00
parent a513700039
commit 1f9115626f
2 changed files with 184 additions and 179 deletions

View file

@@ -1,4 +1,5 @@
use std::collections::{HashMap, HashSet};
use std::collections::HashSet;
use std::collections::{BTreeMap, BTreeSet};
use std::iter;
use jit::{BasicBlock, BasicBlockType, MAX_EXTRA_BASIC_BLOCKS};
@@ -6,7 +7,11 @@ use profiler;
const ENTRY_NODE_ID: u32 = 0xffff_ffff;
type Graph = HashMap<u32, HashSet<u32>>;
// this code works fine with either BTree or Hash Maps/Sets
// - HashMap / HashSet: slightly faster
// - BTreeMap / BTreeSet: stable iteration order (graphs don't change between rust versions, required for expect tests)
type Set = BTreeSet<u32>;
type Graph = BTreeMap<u32, Set>;
/// Reverse the direction of all edges in the graph
fn rev_graph_edges(nodes: &Graph) -> Graph {
@@ -15,7 +20,7 @@ fn rev_graph_edges(nodes: &Graph) -> Graph {
for to in tos {
rev_nodes
.entry(*to)
.or_insert_with(|| HashSet::new())
.or_insert_with(|| Set::new())
.insert(*from);
}
}
@@ -24,10 +29,10 @@ fn rev_graph_edges(nodes: &Graph) -> Graph {
pub fn make_graph(basic_blocks: &Vec<BasicBlock>) -> Graph {
let mut nodes = Graph::new();
let mut entry_edges = HashSet::new();
let mut entry_edges = Set::new();
for b in basic_blocks.iter() {
let mut edges = HashSet::new();
let mut edges = Set::new();
match &b.ty {
&BasicBlockType::ConditionalJump {
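
For context, a minimal self-contained sketch of why this swap makes the generated output reproducible (not part of the commit; the demo graph, the main function, and the reconstructed rev_graph_edges body are illustrative only): BTreeMap and BTreeSet iterate in ascending key order, so walking the graph yields the same edge sequence on every Rust release, whereas HashMap and HashSet iteration order is unspecified and has changed between standard-library versions, which in turn changed the emitted code in the expect tests.

use std::collections::{BTreeMap, BTreeSet};

type Set = BTreeSet<u32>;
type Graph = BTreeMap<u32, Set>;

// Reverse the direction of all edges in the graph (same shape as the function
// in the diff above, reconstructed here only for illustration).
fn rev_graph_edges(nodes: &Graph) -> Graph {
    let mut rev_nodes = Graph::new();
    for (from, tos) in nodes {
        for to in tos {
            rev_nodes.entry(*to).or_insert_with(Set::new).insert(*from);
        }
    }
    rev_nodes
}

fn main() {
    // Hypothetical three-node graph, inserted out of key order on purpose.
    let mut nodes = Graph::new();
    nodes.insert(2, Set::from([0, 1]));
    nodes.insert(0, Set::from([1, 2]));

    // A BTreeMap always yields keys in ascending order (0 before 2), regardless
    // of insertion order or toolchain; a HashMap gives no such guarantee.
    for (from, tos) in &nodes {
        println!("{} -> {:?}", from, tos);
    }

    // The reversed graph is equally deterministic, so any code generated by
    // walking it (such as the .wat expect-test output below) stays stable.
    println!("{:?}", rev_graph_edges(&nodes));
}

This matches the tradeoff noted in the new comment: HashMap/HashSet is slightly faster, while BTreeMap/BTreeSet buys reproducible expect-test output.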

View file

@@ -20,11 +20,11 @@
(type $t18 (func (param i32 i64 i32)))
(type $t19 (func (param i32 i64 i32) (result i32)))
(type $t20 (func (param i32 i64 i64 i32) (result i32)))
(import "e" "instr_F4" (func $e.instr_F4 (type $t0)))
(import "e" "trigger_gp_jit" (func $e.trigger_gp_jit (type $t2)))
(import "e" "safe_read32s_slow_jit" (func $e.safe_read32s_slow_jit (type $t7)))
(import "e" "safe_write32_slow_jit" (func $e.safe_write32_slow_jit (type $t16)))
(import "e" "jit_find_cache_entry_in_page" (func $e.jit_find_cache_entry_in_page (type $t16)))
(import "e" "instr_F4" (func $e.instr_F4 (type $t0)))
(import "e" "trigger_fault_end_jit" (func $e.trigger_fault_end_jit (type $t0)))
(import "e" "m" (memory {normalised output}))
(func $f (export "f") (type $t1) (param $p0 i32)
@@ -73,191 +73,191 @@
(i32.add
(get_local $l8)
(i32.const 1)))
(get_local $l0)
(if $I6
(i32.load8_u
(i32.const 727))
(then
(call $e.trigger_gp_jit
(i32.const 0)
(i32.const 0))
(br $B1)))
(i32.load
(i32.const 748))
(i32.add)
(set_local $l9)
(block $B7
(br_if $B7
(i32.store
(i32.const 560)
(i32.or
(i32.and
(i32.eq
(i32.and
(tee_local $l10
(i32.load offset={normalised output}
(i32.shl
(i32.shr_u
(get_local $l9)
(i32.const 12))
(i32.const 2))))
(i32.const 4041))
(i32.const 1))
(i32.le_s
(i32.and
(get_local $l9)
(i32.const 4095))
(i32.const 4092))))
(br_if $B1
(i32.and
(tee_local $l10
(call $e.safe_read32s_slow_jit
(get_local $l9)
(i32.const 0)))
(i32.const 1))))
(set_local $l9
(i32.add
(i32.load align=1
(i32.xor
(i32.and
(get_local $l10)
(i32.const -4096))
(get_local $l9)))
(i32.load
(i32.const 740))))
(set_local $l10
(i32.sub
(i32.or
(i32.and
(i32.load
(i32.const 556))
(i32.const -4096))
(i32.const 2))
(i32.load
(i32.const 740))))
(set_local $l12
(i32.add
(tee_local $l11
(i32.sub
(get_local $l4)
(i32.const 4)))
(i32.load
(i32.const 744))))
(block $B8
(br_if $B8
(i32.and
(i32.eq
(i32.and
(tee_local $l13
(i32.load offset={normalised output}
(i32.shl
(i32.shr_u
(get_local $l12)
(i32.const 12))
(i32.const 2))))
(i32.const 4075))
(i32.const 1))
(i32.le_s
(i32.and
(get_local $l12)
(i32.const 4095))
(i32.const 4092))))
(br_if $B1
(i32.and
(tee_local $l13
(call $e.safe_write32_slow_jit
(get_local $l12)
(get_local $l10)
(i32.const 0)))
(i32.const 1))))
(i32.store align=1
(i32.xor
(i32.and
(get_local $l13)
(i32.load
(i32.const 556))
(i32.const -4096))
(get_local $l12))
(get_local $l10))
(i32.const 2)))
(i32.store
(i32.const 556)
(i32.or
(i32.and
(i32.load
(i32.const 556))
(i32.const -4096))
(i32.const 3)))
(i32.store
(i32.const 64)
(get_local $l0))
(i32.store
(i32.const 68)
(get_local $l1))
(i32.store
(i32.const 72)
(get_local $l2))
(i32.store
(i32.const 76)
(get_local $l3))
(i32.store
(i32.const 80)
(get_local $l4))
(i32.store
(i32.const 84)
(get_local $l5))
(i32.store
(i32.const 88)
(get_local $l6))
(i32.store
(i32.const 92)
(get_local $l7))
(call $e.instr_F4)
(set_local $l0
(i32.load
(i32.const 64)))
(set_local $l1
(i32.load
(i32.const 68)))
(set_local $l2
(i32.load
(i32.const 72)))
(set_local $l3
(i32.load
(i32.const 76)))
(set_local $l4
(get_local $l11))
(i32.store offset=556
(i32.const 0)
(get_local $l9))
(br_if $L2
(i32.ge_s
(tee_local $p0
(call $e.jit_find_cache_entry_in_page
(i32.load
(i32.const 556))
(i32.const 899)
(i32.const 3)))
(i32.const 0)))
(i32.load
(i32.const 80)))
(set_local $l5
(i32.load
(i32.const 84)))
(set_local $l6
(i32.load
(i32.const 88)))
(set_local $l7
(i32.load
(i32.const 92)))
(br $B0))
(set_local $l8
(i32.add
(get_local $l8)
(i32.const 1)))
(i32.store
(i32.const 560)
(i32.or
(get_local $l0)
(if $I6
(i32.load8_u
(i32.const 727))
(then
(call $e.trigger_gp_jit
(i32.const 0)
(i32.const 0))
(br $B1)))
(i32.load
(i32.const 748))
(i32.add)
(set_local $l9)
(block $B7
(br_if $B7
(i32.and
(i32.load
(i32.const 556))
(i32.const -4096))
(i32.const 2)))
(i32.store
(i32.const 556)
(i32.or
(i32.eq
(i32.and
(tee_local $l10
(i32.load offset={normalised output}
(i32.shl
(i32.shr_u
(get_local $l9)
(i32.const 12))
(i32.const 2))))
(i32.const 4041))
(i32.const 1))
(i32.le_s
(i32.and
(get_local $l9)
(i32.const 4095))
(i32.const 4092))))
(br_if $B1
(i32.and
(i32.load
(i32.const 556))
(tee_local $l10
(call $e.safe_read32s_slow_jit
(get_local $l9)
(i32.const 0)))
(i32.const 1))))
(set_local $l9
(i32.add
(i32.load align=1
(i32.xor
(i32.and
(get_local $l10)
(i32.const -4096))
(get_local $l9)))
(i32.load
(i32.const 740))))
(set_local $l10
(i32.sub
(i32.or
(i32.and
(i32.load
(i32.const 556))
(i32.const -4096))
(i32.const 2))
(i32.load
(i32.const 740))))
(set_local $l12
(i32.add
(tee_local $l11
(i32.sub
(get_local $l4)
(i32.const 4)))
(i32.load
(i32.const 744))))
(block $B8
(br_if $B8
(i32.and
(i32.eq
(i32.and
(tee_local $l13
(i32.load offset={normalised output}
(i32.shl
(i32.shr_u
(get_local $l12)
(i32.const 12))
(i32.const 2))))
(i32.const 4075))
(i32.const 1))
(i32.le_s
(i32.and
(get_local $l12)
(i32.const 4095))
(i32.const 4092))))
(br_if $B1
(i32.and
(tee_local $l13
(call $e.safe_write32_slow_jit
(get_local $l12)
(get_local $l10)
(i32.const 0)))
(i32.const 1))))
(i32.store align=1
(i32.xor
(i32.and
(get_local $l13)
(i32.const -4096))
(i32.const 3)))
(i32.store
(i32.const 64)
(get_local $l0))
(i32.store
(i32.const 68)
(get_local $l1))
(i32.store
(i32.const 72)
(get_local $l2))
(i32.store
(i32.const 76)
(get_local $l3))
(i32.store
(i32.const 80)
(get_local $l4))
(i32.store
(i32.const 84)
(get_local $l5))
(i32.store
(i32.const 88)
(get_local $l6))
(i32.store
(i32.const 92)
(get_local $l7))
(call $e.instr_F4)
(set_local $l0
(i32.load
(i32.const 64)))
(set_local $l1
(i32.load
(i32.const 68)))
(set_local $l2
(i32.load
(i32.const 72)))
(set_local $l3
(i32.load
(i32.const 76)))
(get_local $l12))
(get_local $l10))
(set_local $l4
(i32.load
(i32.const 80)))
(set_local $l5
(i32.load
(i32.const 84)))
(set_local $l6
(i32.load
(i32.const 88)))
(set_local $l7
(i32.load
(i32.const 92)))
(get_local $l11))
(i32.store offset=556
(i32.const 0)
(get_local $l9))
(br_if $L2
(i32.ge_s
(tee_local $p0
(call $e.jit_find_cache_entry_in_page
(i32.load
(i32.const 556))
(i32.const 899)
(i32.const 3)))
(i32.const 0)))
(br $B0))
(unreachable)))
(i32.store