// NOTE(review): garbled extraction — the enclosing function's signature sits
// above this chunk, and several statement lines were dropped mid-call (the
// size arguments / closing parens of the Load/Store pairs, the case 1/8/16
// bodies, the non-IA32 temp-register line, the default: body). Code below is
// left byte-identical; only comments were added. The stray "71xx" tokens are
// original-file line numbers fused in by the extraction, not code.
//
// Purpose (from what is visible): emits an unrolled copy of
// num_elements * element_size_ bytes from src_reg to dest_reg, one mov_size
// chunk per iteration, optionally walking back-to-front when `reversed` is
// set — presumably to handle overlapping ranges; TODO confirm against the
// full file.
7114 {
// Elements larger than 16 bytes are not supported by the unrolled path.
7115 ASSERT(element_size_ <= 16);
7116 const intptr_t num_bytes = num_elements * element_size_;
7117#if defined(TARGET_ARCH_ARM64)
7118
// ARM64 branch moves a full element at a time (up to 16 bytes); the
// comment line that explained this (orig. 7118) was dropped by extraction.
7119 const intptr_t mov_size = element_size_;
7120#else
// Other targets cap each move at the native word size.
7121 const intptr_t mov_size =
7122 Utils::Minimum<intptr_t>(element_size_, compiler::target::kWordSize);
7123#endif
// Number of load/store pairs to emit; the copy must divide evenly.
7124 const intptr_t mov_repeat = num_bytes / mov_size;
7125 ASSERT(num_bytes % mov_size == 0);
7126
7127#if defined(TARGET_ARCH_IA32)
7128
// IA32 has no dedicated scratch register, so a temp is allocated in the
// instruction's location summary.
7129 const Register temp_reg = locs()->temp(0).reg();
7130#else
// NOTE(review): the #else body (orig. 7131, presumably assigning TMP or a
// similar scratch register to temp_reg) was dropped by the extraction.
7132#endif
7133 for (intptr_t i = 0; i < mov_repeat; i++) {
// When reversed, iterate offsets from the highest chunk down to 0.
7134 const intptr_t
offset = (reversed ? (mov_repeat - (i + 1)) : i) * mov_size;
7135 switch (mov_size) {
// NOTE(review): the case-1 load/store body (orig. 7137-7138) is missing.
7136 case 1:
7139 break;
// 2- and 4-byte chunks: load into temp_reg, then store to the destination.
// The trailing size/operand-width argument of each call (orig. 7142, 7144,
// 7148, 7150) was dropped by the extraction.
7140 case 2:
7141 __ LoadFromOffset(temp_reg, src_reg,
offset,
7143 __ StoreToOffset(temp_reg, dest_reg,
offset,
7145 break;
7146 case 4:
7147 __ LoadFromOffset(temp_reg, src_reg,
offset,
7149 __ StoreToOffset(temp_reg, dest_reg,
offset,
7151 break;
// 8-byte chunks: single move on 64-bit targets, presumably a pair of
// 4-byte moves otherwise (bodies orig. 7154-7157 missing).
7152 case 8:
7153#if defined(TARGET_ARCH_IS_64_BIT)
7156#else
7158#endif
7159 break;
// 16-byte chunks: ARM64-only per the mov_size selection above (bodies
// orig. 7162-7169 missing).
7160 case 16: {
7161#if defined(TARGET_ARCH_ARM64)
7168#else
7170#endif
7171 break;
7172 }
// NOTE(review): default body (orig. 7174, presumably UNREACHABLE()) missing.
7173 default:
7175 }
7176 }
7177
7178#if defined(USING_MEMORY_SANITIZER) && defined(TARGET_ARCH_X64)
// Under MSan the freshly written destination bytes must be unpoisoned;
// MsanUnpoison calls out to the runtime, which may clobber volatile
// registers, hence the save/restore around it.
7181 __ PushRegisters(kVolatileRegisterSet);
7182 __ MsanUnpoison(dest_reg, num_bytes);
7183 __ PopRegisters(kVolatileRegisterSet);
7184#endif
7185}
static constexpr RegList kVolatileXmmRegisters
static constexpr intptr_t kVolatileCpuRegisters