@@ -2761,33 +2761,73 @@ class LiftoffCompiler {
     return index;
   }
 
+  bool IndexStaticallyInBounds(const LiftoffAssembler::VarState& index_slot,
+                               int access_size, uintptr_t* offset) {
+    if (!index_slot.is_const()) return false;
+
+    // Potentially zero extend index (which is a 32-bit constant).
+    const uintptr_t index = static_cast<uint32_t>(index_slot.i32_const());
+    const uintptr_t effective_offset = index + *offset;
+
+    if (effective_offset < index  // overflow
+        || !base::IsInBounds<uintptr_t>(effective_offset, access_size,
+                                        env_->min_memory_size)) {
+      return false;
+    }
+
+    *offset = effective_offset;
+    return true;
+  }
+
   void LoadMem(FullDecoder* decoder, LoadType type,
                const MemoryAccessImmediate<validate>& imm,
                const Value& index_val, Value* result) {
     ValueKind kind = type.value_type().kind();
+    RegClass rc = reg_class_for(kind);
     if (!CheckSupportedType(decoder, kind, "load")) return;
-    LiftoffRegister full_index = __ PopToRegister();
-    Register index = BoundsCheckMem(decoder, type.size(), imm.offset,
-                                    full_index, {}, kDontForceCheck);
-    if (index == no_reg) return;
 
     uintptr_t offset = imm.offset;
-    LiftoffRegList pinned = LiftoffRegList::ForRegs(index);
-    index = AddMemoryMasking(index, &offset, &pinned);
-    DEBUG_CODE_COMMENT("load from memory");
-    Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
-    LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize, pinned);
-    RegClass rc = reg_class_for(kind);
-    LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
-    uint32_t protected_load_pc = 0;
-    __ Load(value, addr, index, offset, type, pinned, &protected_load_pc, true);
-    if (env_->use_trap_handler) {
-      AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
-                       protected_load_pc);
+    Register index = no_reg;
+
+    // Only look at the slot, do not pop it yet (will happen in PopToRegister
+    // below, if this is not a statically-in-bounds index).
+    auto& index_slot = __ cache_state()->stack_state.back();
+    if (IndexStaticallyInBounds(index_slot, type.size(), &offset)) {
+      __ cache_state()->stack_state.pop_back();
+      DEBUG_CODE_COMMENT("load from memory (constant offset)");
+      LiftoffRegList pinned;
+      Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
+      LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned);
+      LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
+      __ Load(value, mem, no_reg, offset, type, pinned, nullptr, true);
+      __ PushRegister(kind, value);
+    } else {
+      LiftoffRegister full_index = __ PopToRegister();
+      index = BoundsCheckMem(decoder, type.size(), offset, full_index, {},
+                             kDontForceCheck);
+      if (index == no_reg) return;
+
+      DEBUG_CODE_COMMENT("load from memory");
+      LiftoffRegList pinned = LiftoffRegList::ForRegs(index);
+      index = AddMemoryMasking(index, &offset, &pinned);
+
+      // Load the memory start address only now to reduce register pressure
+      // (important on ia32).
+      Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
+      LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned);
+      LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
+
+      uint32_t protected_load_pc = 0;
+      __ Load(value, mem, index, offset, type, pinned, &protected_load_pc,
+              true);
+      if (env_->use_trap_handler) {
+        AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
+                         protected_load_pc);
+      }
+      __ PushRegister(kind, value);
     }
-    __ PushRegister(kind, value);
 
-    if (FLAG_trace_wasm_memory) {
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       TraceMemoryOperation(false, type.mem_type().representation(), index,
                            offset, decoder->position());
     }
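The new fast path hinges on IndexStaticallyInBounds above. As a rough standalone
sketch (illustrative names, outside the V8 tree; the real helper reads the
constant from the Liftoff value stack and defers to base::IsInBounds against
env_->min_memory_size), the check amounts to:

// Minimal sketch, not part of this commit: fold a constant 32-bit index into
// the static offset and verify that the access fits into the smallest memory
// size the module can have at runtime, so no bounds check needs to be emitted.
#include <cstdint>

bool StaticallyInBounds(uint32_t index, uintptr_t offset,
                        uintptr_t access_size, uintptr_t min_memory_size) {
  const uintptr_t effective_offset = uintptr_t{index} + offset;
  if (effective_offset < index) return false;  // index + offset overflowed
  // All bytes [effective_offset, effective_offset + access_size) must lie
  // below min_memory_size, computed without overflowing.
  return access_size <= min_memory_size &&
         effective_offset <= min_memory_size - access_size;
}

For example, an i32.load with offset=16, a constant index of 8, and a one-page
(65536-byte) minimum memory gives effective_offset = 24 and access_size = 4;
since 24 <= 65536 - 4, the load is emitted with a fixed displacement of 24, no
index register, and no bounds check.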
@@ -2830,7 +2870,7 @@ class LiftoffCompiler {
     }
     __ PushRegister(kS128, value);
 
-    if (FLAG_trace_wasm_memory) {
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       // Again load extend is different.
       MachineRepresentation mem_rep =
           transform == LoadTransformationKind::kExtend
@@ -2872,7 +2912,7 @@ class LiftoffCompiler {
 
     __ PushRegister(kS128, result);
 
-    if (FLAG_trace_wasm_memory) {
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       TraceMemoryOperation(false, type.mem_type().representation(), index,
                            offset, decoder->position());
     }
@@ -2883,29 +2923,45 @@ class LiftoffCompiler {
                 const Value& index_val, const Value& value_val) {
     ValueKind kind = type.value_type().kind();
     if (!CheckSupportedType(decoder, kind, "store")) return;
+
     LiftoffRegList pinned;
     LiftoffRegister value = pinned.set(__ PopToRegister());
-    LiftoffRegister full_index = __ PopToRegister(pinned);
-    Register index = BoundsCheckMem(decoder, type.size(), imm.offset,
-                                    full_index, pinned, kDontForceCheck);
-    if (index == no_reg) return;
 
     uintptr_t offset = imm.offset;
-    pinned.set(index);
-    index = AddMemoryMasking(index, &offset, &pinned);
-    DEBUG_CODE_COMMENT("store to memory");
-    Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
-    LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize, pinned);
-    uint32_t protected_store_pc = 0;
-    LiftoffRegList outer_pinned;
-    if (FLAG_trace_wasm_memory) outer_pinned.set(index);
-    __ Store(addr, index, offset, value, type, outer_pinned,
-             &protected_store_pc, true);
-    if (env_->use_trap_handler) {
-      AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
-                       protected_store_pc);
+    Register index = no_reg;
+
+    auto& index_slot = __ cache_state()->stack_state.back();
+    if (IndexStaticallyInBounds(index_slot, type.size(), &offset)) {
+      __ cache_state()->stack_state.pop_back();
+      DEBUG_CODE_COMMENT("store to memory (constant offset)");
+      Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
+      LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned);
+      __ Store(mem, no_reg, offset, value, type, pinned, nullptr, true);
+    } else {
+      LiftoffRegister full_index = __ PopToRegister(pinned);
+      index = BoundsCheckMem(decoder, type.size(), imm.offset, full_index,
+                             pinned, kDontForceCheck);
+      if (index == no_reg) return;
+
+      pinned.set(index);
+      index = AddMemoryMasking(index, &offset, &pinned);
+      DEBUG_CODE_COMMENT("store to memory");
+      uint32_t protected_store_pc = 0;
+      // Load the memory start address only now to reduce register pressure
+      // (important on ia32).
+      Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
+      LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned);
+      LiftoffRegList outer_pinned;
+      if (V8_UNLIKELY(FLAG_trace_wasm_memory)) outer_pinned.set(index);
+      __ Store(mem, index, offset, value, type, outer_pinned,
+               &protected_store_pc, true);
+      if (env_->use_trap_handler) {
+        AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
+                         protected_store_pc);
+      }
     }
-    if (FLAG_trace_wasm_memory) {
+
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       TraceMemoryOperation(true, type.mem_rep(), index, offset,
                            decoder->position());
     }
@@ -2934,7 +2990,7 @@ class LiftoffCompiler {
       AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds,
                        protected_store_pc);
     }
-    if (FLAG_trace_wasm_memory) {
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       TraceMemoryOperation(true, type.mem_rep(), index, offset,
                            decoder->position());
     }
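The store path mirrors the load path: the value operand is popped first, so the
slot that IndexStaticallyInBounds inspects is again the index on top of the
stack, and on the fast path index stays no_reg while offset carries the folded
constant. The bounds predicate the helper delegates to behaves roughly like the
following simplified stand-in for base::IsInBounds from src/base/bounds.h (an
approximation for illustration, not the exact V8 source):

// Is [index, index + length) contained in [0, max), for unsigned T, without
// any intermediate overflow?
#include <type_traits>

template <typename T>
bool IsInBounds(T index, T length, T max) {
  static_assert(std::is_unsigned<T>::value, "meant for unsigned arithmetic");
  return length <= max && index <= max - length;
}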
@@ -4179,9 +4235,9 @@ class LiftoffCompiler {
     Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
     LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize, pinned);
     LiftoffRegList outer_pinned;
-    if (FLAG_trace_wasm_memory) outer_pinned.set(index);
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) outer_pinned.set(index);
     __ AtomicStore(addr, index, offset, value, type, outer_pinned);
-    if (FLAG_trace_wasm_memory) {
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       TraceMemoryOperation(true, type.mem_rep(), index, offset,
                            decoder->position());
     }
@@ -4207,7 +4263,7 @@ class LiftoffCompiler {
     __ AtomicLoad(value, addr, index, offset, type, pinned);
     __ PushRegister(kind, value);
 
-    if (FLAG_trace_wasm_memory) {
+    if (V8_UNLIKELY(FLAG_trace_wasm_memory)) {
       TraceMemoryOperation(false, type.mem_type().representation(), index,
                            offset, decoder->position());
     }
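The remaining hunks only wrap the FLAG_trace_wasm_memory checks in V8_UNLIKELY.
That macro is a branch-prediction hint; on a GCC/Clang build it expands to
roughly the following (a sketch; see include/v8config.h for the real definition
and the fallback used when __builtin_expect is unavailable):

// Tell the compiler the tracing branch is cold, so the common non-tracing
// load/store path stays straight-line.
#define V8_UNLIKELY(condition) (__builtin_expect(!!(condition), 0))

Memory tracing is a debugging aid that is normally off, so marking these
branches as unlikely keeps the hot paths compact.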