diff --git a/crypto/src/crypto/engines/AesEngine_X86.cs b/crypto/src/crypto/engines/AesEngine_X86.cs
index aeaf9317c..32a58f2e3 100644
--- a/crypto/src/crypto/engines/AesEngine_X86.cs
+++ b/crypto/src/crypto/engines/AesEngine_X86.cs
@@ -185,7 +185,7 @@ namespace Org.BouncyCastle.Crypto.Engines
var state = Load128(inBuf.AsSpan(inOff, 16));
ImplRounds(ref state);
- Store128(ref state, outBuf.AsSpan(outOff, 16));
+ Store128(state, outBuf.AsSpan(outOff, 16));
return 16;
}
@@ -196,7 +196,7 @@ namespace Org.BouncyCastle.Crypto.Engines
var state = Load128(input[..16]);
ImplRounds(ref state);
- Store128(ref state, output[..16]);
+ Store128(state, output[..16]);
return 16;
}
@@ -210,10 +210,10 @@ namespace Org.BouncyCastle.Crypto.Engines
var s3 = Load128(input[32..48]);
var s4 = Load128(input[48..64]);
ImplRounds(ref s1, ref s2, ref s3, ref s4);
- Store128(ref s1, output[..16]);
- Store128(ref s2, output[16..32]);
- Store128(ref s3, output[32..48]);
- Store128(ref s4, output[48..64]);
+ Store128(s1, output[..16]);
+ Store128(s2, output[16..32]);
+ Store128(s3, output[32..48]);
+ Store128(s4, output[48..64]);
return 64;
}
@@ -804,7 +804,7 @@ namespace Org.BouncyCastle.Crypto.Engines
}
[MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
- private static void Store128(ref Vector128<byte> s, Span<byte> t)
+ private static void Store128(Vector128<byte> s, Span<byte> t)
{
#if NET7_0_OR_GREATER
Vector128.CopyTo(s, t);
diff --git a/crypto/src/crypto/engines/ChaCha7539Engine.cs b/crypto/src/crypto/engines/ChaCha7539Engine.cs
index a438c0bfb..f6e34b93a 100644
--- a/crypto/src/crypto/engines/ChaCha7539Engine.cs
+++ b/crypto/src/crypto/engines/ChaCha7539Engine.cs
@@ -298,10 +298,10 @@ namespace Org.BouncyCastle.Crypto.Engines
n2 = Avx2.Xor(n2, Load256_Byte(input[0x40..]));
n3 = Avx2.Xor(n3, Load256_Byte(input[0x60..]));
- Store256_Byte(ref n0, output);
- Store256_Byte(ref n1, output[0x20..]);
- Store256_Byte(ref n2, output[0x40..]);
- Store256_Byte(ref n3, output[0x60..]);
+ Store256_Byte(n0, output);
+ Store256_Byte(n1, output[0x20..]);
+ Store256_Byte(n2, output[0x40..]);
+ Store256_Byte(n3, output[0x60..]);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -379,10 +379,10 @@ namespace Org.BouncyCastle.Crypto.Engines
n2 = Sse2.Xor(n2, v2.AsByte());
n3 = Sse2.Xor(n3, v3.AsByte());
- Store128_Byte(ref n0, output);
- Store128_Byte(ref n1, output[0x10..]);
- Store128_Byte(ref n2, output[0x20..]);
- Store128_Byte(ref n3, output[0x30..]);
+ Store128_Byte(n0, output);
+ Store128_Byte(n1, output[0x10..]);
+ Store128_Byte(n2, output[0x20..]);
+ Store128_Byte(n3, output[0x30..]);
x3 = Load128_UInt32(state.AsSpan(12));
++state[12];
@@ -444,10 +444,10 @@ namespace Org.BouncyCastle.Crypto.Engines
n2 = Sse2.Xor(n2, v2.AsByte());
n3 = Sse2.Xor(n3, v3.AsByte());
- Store128_Byte(ref n0, output[0x40..]);
- Store128_Byte(ref n1, output[0x50..]);
- Store128_Byte(ref n2, output[0x60..]);
- Store128_Byte(ref n3, output[0x70..]);
+ Store128_Byte(n0, output[0x40..]);
+ Store128_Byte(n1, output[0x50..]);
+ Store128_Byte(n2, output[0x60..]);
+ Store128_Byte(n3, output[0x70..]);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -481,7 +481,7 @@ namespace Org.BouncyCastle.Crypto.Engines
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
- private static void Store128_Byte(ref Vector128<byte> s, Span<byte> t)
+ private static void Store128_Byte(Vector128<byte> s, Span<byte> t)
{
if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
{
@@ -495,7 +495,7 @@ namespace Org.BouncyCastle.Crypto.Engines
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
- private static void Store256_Byte(ref Vector256<byte> s, Span<byte> t)
+ private static void Store256_Byte(Vector256<byte> s, Span<byte> t)
{
if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector256<byte>>() == 32)
{
diff --git a/crypto/src/crypto/engines/ChaChaEngine.cs b/crypto/src/crypto/engines/ChaChaEngine.cs
index 646a6976c..2683e335a 100644
--- a/crypto/src/crypto/engines/ChaChaEngine.cs
+++ b/crypto/src/crypto/engines/ChaChaEngine.cs
@@ -135,10 +135,10 @@ namespace Org.BouncyCastle.Crypto.Engines
v2 = Sse2.Add(v2, x2);
v3 = Sse2.Add(v3, x3);
- Store128_UInt32(ref v0, output.AsSpan());
- Store128_UInt32(ref v1, output.AsSpan(0x10));
- Store128_UInt32(ref v2, output.AsSpan(0x20));
- Store128_UInt32(ref v3, output.AsSpan(0x30));
+ Store128_UInt32(v0, output.AsSpan());
+ Store128_UInt32(v1, output.AsSpan(0x10));
+ Store128_UInt32(v2, output.AsSpan(0x20));
+ Store128_UInt32(v3, output.AsSpan(0x30));
return;
}
#endif
@@ -222,7 +222,7 @@ namespace Org.BouncyCastle.Crypto.Engines
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
- private static void Store128_UInt32(ref Vector128<uint> s, Span<byte> t)
+ private static void Store128_UInt32(Vector128<uint> s, Span<byte> t)
{
if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<uint>>() == 16)
{
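
Reviewer note (sketch, not part of the patch): the pattern adopted above passes Vector128/Vector256 by value rather than by ref. These are plain value types, so the stores never needed a writable reference, and the JIT can typically keep a by-value vector in an XMM/YMM register across the inlined call. The following is a minimal standalone sketch of the same idea, with a hypothetical helper name; it assumes .NET 7+ for Vector128.CopyTo and falls back to an unaligned write on older targets.

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;

internal static class VectorStoreSketch
{
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    internal static void Store128(Vector128<byte> s, Span<byte> t)
    {
#if NET7_0_OR_GREATER
        // .NET 7+: Vector128.CopyTo takes the vector by value, so no ref parameter is needed.
        Vector128.CopyTo(s, t);
#else
        // Older targets: write the 16 bytes with an unaligned store into the span.
        if (t.Length < 16)
            throw new ArgumentException("destination too short", nameof(t));
        Unsafe.WriteUnaligned(ref MemoryMarshal.GetReference(t), s);
#endif
    }

    internal static void Demo()
    {
        // Fill a 16-byte buffer from a vector of repeated 0xAB bytes.
        Span<byte> buf = stackalloc byte[16];
        Store128(Vector128.Create((byte)0xAB), buf);
    }
}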