| field | value | date |
|---|---|---|
| author | Peter Dettman <peter.dettman@bouncycastle.org> | 2022-10-24 00:02:41 +0700 |
| committer | Peter Dettman <peter.dettman@bouncycastle.org> | 2022-10-24 00:02:41 +0700 |
| commit | 0102c19ed6d0e0a2ce630b3118c40e1f4e9ffe97 (patch) | |
| tree | 019bcceff8c0426593c44df7066d595f19fb3691 | |
| parent | Add Gost2012 algorithms to registries (diff) | |
| download | BouncyCastle.NET-ed25519-0102c19ed6d0e0a2ce630b3118c40e1f4e9ffe97.tar.xz | |
Refactor intrinsics code
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | crypto/src/crypto/digests/Haraka256_X86.cs | 24 |
| -rw-r--r-- | crypto/src/crypto/digests/Haraka512_X86.cs | 32 |
| -rw-r--r-- | crypto/src/crypto/engines/AesEngine_X86.cs | 57 |
| -rw-r--r-- | crypto/src/crypto/engines/ChaCha7539Engine.cs | 61 |
| -rw-r--r-- | crypto/src/crypto/engines/ChaChaEngine.cs | 14 |
| -rw-r--r-- | crypto/src/crypto/modes/GCMBlockCipher.cs | 65 |
| -rw-r--r-- | crypto/src/crypto/modes/gcm/GcmUtilities.cs | 5 |
| -rw-r--r-- | crypto/src/pqc/crypto/sphincsplus/HarakaS_X86.cs | 14 |

8 files changed, 136 insertions(+), 136 deletions(-)
```diff
diff --git a/crypto/src/crypto/digests/Haraka256_X86.cs b/crypto/src/crypto/digests/Haraka256_X86.cs
index ab64f91ae..4c9a798c0 100644
--- a/crypto/src/crypto/digests/Haraka256_X86.cs
+++ b/crypto/src/crypto/digests/Haraka256_X86.cs
@@ -1,6 +1,8 @@
 #if NETCOREAPP3_0_OR_GREATER
 using System;
+using System.Buffers.Binary;
 using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
 using System.Runtime.Intrinsics;
 
 namespace Org.BouncyCastle.Crypto.Digests
@@ -115,33 +117,27 @@ namespace Org.BouncyCastle.Crypto.Digests
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static Vector128<byte> Load128(ReadOnlySpan<byte> t)
         {
-#if NET7_0_OR_GREATER
-            return Vector128.Create<byte>(t);
-#else
             if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
-                return Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AsRef(t[0]));
+                return MemoryMarshal.Read<Vector128<byte>>(t);
 
-            return Vector128.Create(t[0], t[1], t[2], t[3], t[4], t[5], t[6], t[7], t[8], t[9], t[10], t[11], t[12],
-                t[13], t[14], t[15]);
-#endif
+            return Vector128.Create(
+                BinaryPrimitives.ReadUInt64LittleEndian(t[..8]),
+                BinaryPrimitives.ReadUInt64LittleEndian(t[8..])
+            ).AsByte();
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void Store128(Vector128<byte> s, Span<byte> t)
         {
-#if NET7_0_OR_GREATER
-            Vector128.CopyTo(s, t);
-#else
             if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
             {
-                Unsafe.WriteUnaligned(ref t[0], s);
+                MemoryMarshal.Write(t, ref s);
                 return;
             }
 
             var u = s.AsUInt64();
-            Utilities.Pack.UInt64_To_LE(u.GetElement(0), t);
-            Utilities.Pack.UInt64_To_LE(u.GetElement(1), t[8..]);
-#endif
+            BinaryPrimitives.WriteUInt64LittleEndian(t[..8], u.GetElement(0));
+            BinaryPrimitives.WriteUInt64LittleEndian(t[8..], u.GetElement(1));
         }
     }
 }
diff --git a/crypto/src/crypto/digests/Haraka512_X86.cs b/crypto/src/crypto/digests/Haraka512_X86.cs
index 8e67228ae..6dcd3e782 100644
--- a/crypto/src/crypto/digests/Haraka512_X86.cs
+++ b/crypto/src/crypto/digests/Haraka512_X86.cs
@@ -1,6 +1,8 @@
 #if NETCOREAPP3_0_OR_GREATER
 using System;
+using System.Buffers.Binary;
 using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
 using System.Runtime.Intrinsics;
 
 namespace Org.BouncyCastle.Crypto.Digests
@@ -198,50 +200,40 @@ namespace Org.BouncyCastle.Crypto.Digests
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static Vector128<byte> Load128(ReadOnlySpan<byte> t)
         {
-#if NET7_0_OR_GREATER
-            return Vector128.Create<byte>(t);
-#else
             if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
-                return Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AsRef(t[0]));
+                return MemoryMarshal.Read<Vector128<byte>>(t);
 
-            return Vector128.Create(t[0], t[1], t[2], t[3], t[4], t[5], t[6], t[7], t[8], t[9], t[10], t[11], t[12],
-                t[13], t[14], t[15]);
-#endif
+            return Vector128.Create(
+                BinaryPrimitives.ReadUInt64LittleEndian(t[..8]),
+                BinaryPrimitives.ReadUInt64LittleEndian(t[8..])
+            ).AsByte();
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void Store128(Vector128<byte> s, Span<byte> t)
         {
-#if NET7_0_OR_GREATER
-            Vector128.CopyTo(s, t);
-#else
             if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
             {
-                Unsafe.WriteUnaligned(ref t[0], s);
+                MemoryMarshal.Write(t, ref s);
                 return;
             }
 
             var u = s.AsUInt64();
-            Utilities.Pack.UInt64_To_LE(u.GetElement(0), t);
-            Utilities.Pack.UInt64_To_LE(u.GetElement(1), t[8..]);
-#endif
+            BinaryPrimitives.WriteUInt64LittleEndian(t[..8], u.GetElement(0));
+            BinaryPrimitives.WriteUInt64LittleEndian(t[8..], u.GetElement(1));
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void Store64(Vector64<byte> s, Span<byte> t)
        {
-#if NET7_0_OR_GREATER
-            Vector64.CopyTo(s, t);
-#else
             if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector64<byte>>() == 8)
             {
-                Unsafe.WriteUnaligned(ref t[0], s);
+                MemoryMarshal.Write(t, ref s);
                 return;
             }
 
             var u = s.AsUInt64();
-            Utilities.Pack.UInt64_To_LE(u.ToScalar(), t);
-#endif
+            BinaryPrimitives.WriteUInt64LittleEndian(t, u.ToScalar());
         }
     }
 }
diff --git a/crypto/src/crypto/engines/AesEngine_X86.cs b/crypto/src/crypto/engines/AesEngine_X86.cs
index 32a58f2e3..e61deb174 100644
--- a/crypto/src/crypto/engines/AesEngine_X86.cs
+++ b/crypto/src/crypto/engines/AesEngine_X86.cs
@@ -1,6 +1,8 @@
 #if NETCOREAPP3_0_OR_GREATER
 using System;
+using System.Buffers.Binary;
 using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
 using System.Runtime.Intrinsics;
 
 using Org.BouncyCastle.Crypto.Parameters;
@@ -16,7 +18,6 @@ namespace Org.BouncyCastle.Crypto.Engines
     {
         public static bool IsSupported => Aes.IsSupported;
 
-        [MethodImpl(MethodImplOptions.AggressiveOptimization)]
         private static Vector128<byte>[] CreateRoundKeys(byte[] key, bool forEncryption)
         {
             Vector128<byte>[] K;
@@ -217,7 +218,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             return 64;
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private void ImplRounds(ref Vector128<byte> state)
         {
             switch (m_mode)
@@ -232,7 +233,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             }
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private void ImplRounds(
             ref Vector128<byte> s1, ref Vector128<byte> s2, ref Vector128<byte> s3, ref Vector128<byte> s4)
         {
@@ -248,7 +249,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             }
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void Decrypt128(Vector128<byte>[] roundKeys, ref Vector128<byte> state)
         {
             state = Sse2.Xor(state, roundKeys[0]);
@@ -264,7 +265,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             state = Aes.DecryptLast(state, roundKeys[10]);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void Decrypt192(Vector128<byte>[] roundKeys, ref Vector128<byte> state)
         {
             state = Sse2.Xor(state, roundKeys[0]);
@@ -282,7 +283,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             state = Aes.DecryptLast(state, roundKeys[12]);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void Decrypt256(Vector128<byte>[] roundKeys, ref Vector128<byte> state)
         {
             state = Sse2.Xor(state, roundKeys[0]);
@@ -302,7 +303,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             state = Aes.DecryptLast(state, roundKeys[14]);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void DecryptFour128(Vector128<byte>[] rk,
             ref Vector128<byte> s1, ref Vector128<byte> s2, ref Vector128<byte> s3, ref Vector128<byte> s4)
         {
@@ -362,7 +363,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             s4 = Aes.DecryptLast(s4, rk[10]);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void DecryptFour192(Vector128<byte>[] rk,
             ref Vector128<byte> s1, ref Vector128<byte> s2, ref Vector128<byte> s3, ref Vector128<byte> s4)
         {
@@ -432,7 +433,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             s4 = Aes.DecryptLast(s4, rk[12]);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void DecryptFour256(Vector128<byte>[] rk,
             ref Vector128<byte> s1, ref Vector128<byte> s2, ref Vector128<byte> s3, ref Vector128<byte> s4)
         {
@@ -512,7 +513,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             s4 = Aes.DecryptLast(s4, rk[14]);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void Encrypt128(Vector128<byte>[] roundKeys, ref Vector128<byte> state)
         {
             state = Sse2.Xor(state, roundKeys[0]);
@@ -528,7 +529,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             state = Aes.EncryptLast(state, roundKeys[10]);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void Encrypt192(Vector128<byte>[] roundKeys, ref Vector128<byte> state)
         {
             state = Sse2.Xor(state, roundKeys[0]);
@@ -546,7 +547,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             state = Aes.EncryptLast(state, roundKeys[12]);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void Encrypt256(Vector128<byte>[] roundKeys, ref Vector128<byte> state)
         {
             state = Sse2.Xor(state, roundKeys[0]);
@@ -566,7 +567,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             state = Aes.EncryptLast(state, roundKeys[14]);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void EncryptFour128(Vector128<byte>[] rk,
             ref Vector128<byte> s1, ref Vector128<byte> s2, ref Vector128<byte> s3, ref Vector128<byte> s4)
         {
@@ -626,7 +627,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             s4 = Aes.EncryptLast(s4, rk[10]);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void EncryptFour192(Vector128<byte>[] rk,
             ref Vector128<byte> s1, ref Vector128<byte> s2, ref Vector128<byte> s3, ref Vector128<byte> s4)
         {
@@ -696,7 +697,7 @@ namespace Org.BouncyCastle.Crypto.Engines
             s4 = Aes.EncryptLast(s4, rk[12]);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void EncryptFour256(Vector128<byte>[] rk,
             ref Vector128<byte> s1, ref Vector128<byte> s2, ref Vector128<byte> s3, ref Vector128<byte> s4)
         {
@@ -776,34 +777,38 @@ namespace Org.BouncyCastle.Crypto.Engines
             s4 = Aes.EncryptLast(s4, rk[14]);
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static Vector128<byte> Load128(ReadOnlySpan<byte> t)
         {
 #if NET7_0_OR_GREATER
             return Vector128.Create<byte>(t);
 #else
             if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
-                return Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AsRef(t[0]));
+                return MemoryMarshal.Read<Vector128<byte>>(t);
 
-            return Vector128.Create(t[0], t[1], t[2], t[3], t[4], t[5], t[6], t[7], t[8], t[9], t[10], t[11], t[12],
-                t[13], t[14], t[15]);
+            return Vector128.Create(
+                BinaryPrimitives.ReadUInt64LittleEndian(t[..8]),
+                BinaryPrimitives.ReadUInt64LittleEndian(t[8..])
+            ).AsByte();
 #endif
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static Vector64<byte> Load64(ReadOnlySpan<byte> t)
         {
 #if NET7_0_OR_GREATER
             return Vector64.Create<byte>(t);
 #else
             if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector64<byte>>() == 8)
-                return Unsafe.ReadUnaligned<Vector64<byte>>(ref Unsafe.AsRef(t[0]));
+                return MemoryMarshal.Read<Vector64<byte>>(t);
 
-            return Vector64.Create(t[0], t[1], t[2], t[3], t[4], t[5], t[6], t[7]);
+            return Vector64.Create(
+                BinaryPrimitives.ReadUInt64LittleEndian(t[..8])
+            ).AsByte();
 #endif
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining | MethodImplOptions.AggressiveOptimization)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void Store128(Vector128<byte> s, Span<byte> t)
         {
 #if NET7_0_OR_GREATER
@@ -811,13 +816,13 @@ namespace Org.BouncyCastle.Crypto.Engines
 #else
             if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
             {
-                Unsafe.WriteUnaligned(ref t[0], s);
+                MemoryMarshal.Write(t, ref s);
                 return;
             }
 
             var u = s.AsUInt64();
-            Utilities.Pack.UInt64_To_LE(u.GetElement(0), t);
-            Utilities.Pack.UInt64_To_LE(u.GetElement(1), t[8..]);
+            BinaryPrimitives.WriteUInt64LittleEndian(t[..8], u.GetElement(0));
+            BinaryPrimitives.WriteUInt64LittleEndian(t[8..], u.GetElement(1));
 #endif
         }
     }
diff --git a/crypto/src/crypto/engines/ChaCha7539Engine.cs b/crypto/src/crypto/engines/ChaCha7539Engine.cs
index f6e34b93a..e3b645c4b 100644
--- a/crypto/src/crypto/engines/ChaCha7539Engine.cs
+++ b/crypto/src/crypto/engines/ChaCha7539Engine.cs
@@ -4,6 +4,8 @@ using System.Diagnostics;
 using System.Runtime.CompilerServices;
 #endif
 #if NETCOREAPP3_0_OR_GREATER
+using System.Buffers.Binary;
+using System.Runtime.InteropServices;
 using System.Runtime.Intrinsics;
 using System.Runtime.Intrinsics.X86;
 #endif
@@ -453,18 +455,20 @@ namespace Org.BouncyCastle.Crypto.Engines
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static Vector128<byte> Load128_Byte(ReadOnlySpan<byte> t)
         {
-            if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
-                return Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AsRef(t[0]));
+            if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
+                return MemoryMarshal.Read<Vector128<byte>>(t);
 
-            return Vector128.Create(t[0], t[1], t[2], t[3], t[4], t[5], t[6], t[7], t[8], t[9], t[10], t[11], t[12],
-                t[13], t[14], t[15]);
-        }
+            return Vector128.Create(
+                BinaryPrimitives.ReadUInt64LittleEndian(t[..8]),
+                BinaryPrimitives.ReadUInt64LittleEndian(t[8..])
+            ).AsByte();
+        }
 
-        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static Vector128<uint> Load128_UInt32(ReadOnlySpan<uint> t)
         {
             if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<uint>>() == 16)
-                return Unsafe.ReadUnaligned<Vector128<uint>>(ref Unsafe.As<uint, byte>(ref Unsafe.AsRef(t[0])));
+                return MemoryMarshal.Read<Vector128<uint>>(MemoryMarshal.Cast<uint, byte>(t));
 
             return Vector128.Create(t[0], t[1], t[2], t[3]);
         }
@@ -472,42 +476,45 @@ namespace Org.BouncyCastle.Crypto.Engines
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static Vector256<byte> Load256_Byte(ReadOnlySpan<byte> t)
         {
-            if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector256<byte>>() == 32)
-                return Unsafe.ReadUnaligned<Vector256<byte>>(ref Unsafe.AsRef(t[0]));
-
-            return Vector256.Create(t[0], t[1], t[2], t[3], t[4], t[5], t[6], t[7], t[8], t[9], t[10], t[11], t[12],
-                t[13], t[14], t[15], t[16], t[17], t[18], t[19], t[20], t[21], t[22], t[23], t[24], t[25], t[26], t[27],
-                t[28], t[29], t[30], t[31]);
+            if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector256<byte>>() == 32)
+                return MemoryMarshal.Read<Vector256<byte>>(t);
+
+            return Vector256.Create(
+                BinaryPrimitives.ReadUInt64LittleEndian(t[ 0.. 8]),
+                BinaryPrimitives.ReadUInt64LittleEndian(t[ 8..16]),
+                BinaryPrimitives.ReadUInt64LittleEndian(t[16..24]),
+                BinaryPrimitives.ReadUInt64LittleEndian(t[24..32])
+            ).AsByte();
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void Store128_Byte(Vector128<byte> s, Span<byte> t)
         {
-            if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
-            {
-                Unsafe.WriteUnaligned(ref t[0], s);
-                return;
-            }
+            if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
+            {
+                MemoryMarshal.Write(t, ref s);
+                return;
+            }
 
-            var u = s.AsUInt64();
-            Pack.UInt64_To_LE(u.GetElement(0), t);
-            Pack.UInt64_To_LE(u.GetElement(1), t[8..]);
+            var u = s.AsUInt64();
+            BinaryPrimitives.WriteUInt64LittleEndian(t[..8], u.GetElement(0));
+            BinaryPrimitives.WriteUInt64LittleEndian(t[8..], u.GetElement(1));
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static void Store256_Byte(Vector256<byte> s, Span<byte> t)
         {
-            if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector256<byte>>() == 32)
+            if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector256<byte>>() == 32)
             {
-                Unsafe.WriteUnaligned(ref t[0], s);
+                MemoryMarshal.Write(t, ref s);
                 return;
             }
 
             var u = s.AsUInt64();
-            Pack.UInt64_To_LE(u.GetElement(0), t);
-            Pack.UInt64_To_LE(u.GetElement(1), t[8..]);
-            Pack.UInt64_To_LE(u.GetElement(2), t[16..]);
-            Pack.UInt64_To_LE(u.GetElement(3), t[24..]);
+            BinaryPrimitives.WriteUInt64LittleEndian(t[ 0.. 8], u.GetElement(0));
+            BinaryPrimitives.WriteUInt64LittleEndian(t[ 8..16], u.GetElement(1));
+            BinaryPrimitives.WriteUInt64LittleEndian(t[16..24], u.GetElement(2));
+            BinaryPrimitives.WriteUInt64LittleEndian(t[24..32], u.GetElement(3));
         }
 #endif
     }
diff --git a/crypto/src/crypto/engines/ChaChaEngine.cs b/crypto/src/crypto/engines/ChaChaEngine.cs
index 2683e335a..bf2bf6300 100644
--- a/crypto/src/crypto/engines/ChaChaEngine.cs
+++ b/crypto/src/crypto/engines/ChaChaEngine.cs
@@ -1,7 +1,9 @@
 using System;
 using System.Diagnostics;
 #if NETCOREAPP3_0_OR_GREATER
+using System.Buffers.Binary;
 using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
 using System.Runtime.Intrinsics;
 using System.Runtime.Intrinsics.X86;
 #endif
@@ -215,10 +217,10 @@ namespace Org.BouncyCastle.Crypto.Engines
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static Vector128<uint> Load128_UInt32(ReadOnlySpan<uint> t)
         {
-            if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<uint>>() == 16)
-                return Unsafe.ReadUnaligned<Vector128<uint>>(ref Unsafe.As<uint, byte>(ref Unsafe.AsRef(t[0])));
+            if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<uint>>() == 16)
+                return MemoryMarshal.Read<Vector128<uint>>(MemoryMarshal.Cast<uint, byte>(t));
 
-            return Vector128.Create(t[0], t[1], t[2], t[3]);
+            return Vector128.Create(t[0], t[1], t[2], t[3]);
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -226,13 +228,13 @@ namespace Org.BouncyCastle.Crypto.Engines
         {
             if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<uint>>() == 16)
             {
-                Unsafe.WriteUnaligned(ref t[0], s);
+                MemoryMarshal.Write(t, ref s);
                 return;
             }
 
             var u = s.AsUInt64();
-            Pack.UInt64_To_LE(u.GetElement(0), t);
-            Pack.UInt64_To_LE(u.GetElement(1), t[8..]);
+            BinaryPrimitives.WriteUInt64LittleEndian(t[..8], u.GetElement(0));
+            BinaryPrimitives.WriteUInt64LittleEndian(t[8..], u.GetElement(1));
         }
 #endif
     }
diff --git a/crypto/src/crypto/modes/GCMBlockCipher.cs b/crypto/src/crypto/modes/GCMBlockCipher.cs
index 2255b6276..ce5faf91f 100644
--- a/crypto/src/crypto/modes/GCMBlockCipher.cs
+++ b/crypto/src/crypto/modes/GCMBlockCipher.cs
@@ -3,6 +3,7 @@ using System;
 using System.Runtime.CompilerServices;
 #endif
 #if NETCOREAPP3_0_OR_GREATER
+using System.Runtime.InteropServices;
 using System.Runtime.Intrinsics;
 using System.Runtime.Intrinsics.X86;
 #endif
@@ -897,7 +898,6 @@ namespace Org.BouncyCastle.Crypto.Modes
         }
 
 #if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
-        [MethodImpl(MethodImplOptions.AggressiveOptimization)]
         private void DecryptBlock(ReadOnlySpan<byte> input, Span<byte> output)
         {
             Check.OutputLength(output, BlockSize, "output buffer too short");
@@ -913,15 +913,15 @@ namespace Org.BouncyCastle.Crypto.Modes
 #if NETCOREAPP3_0_OR_GREATER
             if (Sse2.IsSupported && Unsafe.SizeOf<Vector128<byte>>() == BlockSize)
             {
-                var t0 = Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AsRef(input[0]));
-                var t1 = Unsafe.ReadUnaligned<Vector128<byte>>(ref ctrBlock[0]);
-                var t2 = Unsafe.ReadUnaligned<Vector128<byte>>(ref S[0]);
+                var t0 = MemoryMarshal.Read<Vector128<byte>>(input);
+                var t1 = MemoryMarshal.Read<Vector128<byte>>(ctrBlock);
+                var t2 = MemoryMarshal.Read<Vector128<byte>>(S.AsSpan());
 
                 t1 = Sse2.Xor(t1, t0);
                 t2 = Sse2.Xor(t2, t0);
 
-                Unsafe.WriteUnaligned(ref output[0], t1);
-                Unsafe.WriteUnaligned(ref S[0], t2);
+                MemoryMarshal.Write(output, ref t1);
+                MemoryMarshal.Write(S.AsSpan(), ref t2);
             }
             else
 #endif
@@ -949,7 +949,6 @@ namespace Org.BouncyCastle.Crypto.Modes
             totalLength += BlockSize;
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveOptimization)]
         private void DecryptBlocks2(ReadOnlySpan<byte> input, Span<byte> output)
         {
             Check.OutputLength(output, BlockSize * 2, "output buffer too short");
@@ -965,15 +964,15 @@ namespace Org.BouncyCastle.Crypto.Modes
 #if NETCOREAPP3_0_OR_GREATER
             if (Sse2.IsSupported && Unsafe.SizeOf<Vector128<byte>>() == BlockSize)
             {
-                var t0 = Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AsRef(input[0]));
-                var t1 = Unsafe.ReadUnaligned<Vector128<byte>>(ref ctrBlock[0]);
-                var t2 = Unsafe.ReadUnaligned<Vector128<byte>>(ref S[0]);
+                var t0 = MemoryMarshal.Read<Vector128<byte>>(input);
+                var t1 = MemoryMarshal.Read<Vector128<byte>>(ctrBlock);
+                var t2 = MemoryMarshal.Read<Vector128<byte>>(S.AsSpan());
 
                 t1 = Sse2.Xor(t1, t0);
                 t2 = Sse2.Xor(t2, t0);
 
-                Unsafe.WriteUnaligned(ref output[0], t1);
-                Unsafe.WriteUnaligned(ref S[0], t2);
+                MemoryMarshal.Write(output, ref t1);
+                MemoryMarshal.Write(S.AsSpan(), ref t2);
             }
             else
 #endif
@@ -1005,15 +1004,15 @@ namespace Org.BouncyCastle.Crypto.Modes
 #if NETCOREAPP3_0_OR_GREATER
             if (Sse2.IsSupported && Unsafe.SizeOf<Vector128<byte>>() == BlockSize)
             {
-                var t0 = Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AsRef(input[0]));
-                var t1 = Unsafe.ReadUnaligned<Vector128<byte>>(ref ctrBlock[0]);
-                var t2 = Unsafe.ReadUnaligned<Vector128<byte>>(ref S[0]);
+                var t0 = MemoryMarshal.Read<Vector128<byte>>(input);
+                var t1 = MemoryMarshal.Read<Vector128<byte>>(ctrBlock);
+                var t2 = MemoryMarshal.Read<Vector128<byte>>(S.AsSpan());
 
                 t1 = Sse2.Xor(t1, t0);
                 t2 = Sse2.Xor(t2, t0);
 
-                Unsafe.WriteUnaligned(ref output[0], t1);
-                Unsafe.WriteUnaligned(ref S[0], t2);
+                MemoryMarshal.Write(output, ref t1);
+                MemoryMarshal.Write(S.AsSpan(), ref t2);
             }
             else
 #endif
@@ -1041,7 +1040,6 @@ namespace Org.BouncyCastle.Crypto.Modes
             totalLength += BlockSize * 2;
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveOptimization)]
         private void EncryptBlock(ReadOnlySpan<byte> input, Span<byte> output)
         {
             Check.OutputLength(output, BlockSize, "output buffer too short");
@@ -1057,15 +1055,15 @@ namespace Org.BouncyCastle.Crypto.Modes
 #if NETCOREAPP3_0_OR_GREATER
             if (Sse2.IsSupported && Unsafe.SizeOf<Vector128<byte>>() == BlockSize)
             {
-                var t0 = Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AsRef(input[0]));
-                var t1 = Unsafe.ReadUnaligned<Vector128<byte>>(ref ctrBlock[0]);
-                var t2 = Unsafe.ReadUnaligned<Vector128<byte>>(ref S[0]);
+                var t0 = MemoryMarshal.Read<Vector128<byte>>(input);
+                var t1 = MemoryMarshal.Read<Vector128<byte>>(ctrBlock);
+                var t2 = MemoryMarshal.Read<Vector128<byte>>(S.AsSpan());
 
                 t1 = Sse2.Xor(t1, t0);
                 t2 = Sse2.Xor(t2, t1);
 
-                Unsafe.WriteUnaligned(ref output[0], t1);
-                Unsafe.WriteUnaligned(ref S[0], t2);
+                MemoryMarshal.Write(output, ref t1);
+                MemoryMarshal.Write(S.AsSpan(), ref t2);
             }
             else
 #endif
@@ -1093,7 +1091,6 @@ namespace Org.BouncyCastle.Crypto.Modes
             totalLength += BlockSize;
         }
 
-        [MethodImpl(MethodImplOptions.AggressiveOptimization)]
         private void EncryptBlocks2(ReadOnlySpan<byte> input, Span<byte> output)
         {
             Check.OutputLength(output, BlockSize * 2, "Output buffer too short");
@@ -1109,15 +1106,15 @@ namespace Org.BouncyCastle.Crypto.Modes
 #if NETCOREAPP3_0_OR_GREATER
             if (Sse2.IsSupported && Unsafe.SizeOf<Vector128<byte>>() == BlockSize)
             {
-                var t0 = Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AsRef(input[0]));
-                var t1 = Unsafe.ReadUnaligned<Vector128<byte>>(ref ctrBlock[0]);
-                var t2 = Unsafe.ReadUnaligned<Vector128<byte>>(ref S[0]);
+                var t0 = MemoryMarshal.Read<Vector128<byte>>(input);
+                var t1 = MemoryMarshal.Read<Vector128<byte>>(ctrBlock);
+                var t2 = MemoryMarshal.Read<Vector128<byte>>(S.AsSpan());
 
                 t1 = Sse2.Xor(t1, t0);
                 t2 = Sse2.Xor(t2, t1);
 
-                Unsafe.WriteUnaligned(ref output[0], t1);
-                Unsafe.WriteUnaligned(ref S[0], t2);
+                MemoryMarshal.Write(output, ref t1);
+                MemoryMarshal.Write(S.AsSpan(), ref t2);
             }
             else
 #endif
@@ -1149,15 +1146,15 @@ namespace Org.BouncyCastle.Crypto.Modes
 #if NETCOREAPP3_0_OR_GREATER
             if (Sse2.IsSupported && Unsafe.SizeOf<Vector128<byte>>() == BlockSize)
             {
-                var t0 = Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AsRef(input[0]));
-                var t1 = Unsafe.ReadUnaligned<Vector128<byte>>(ref ctrBlock[0]);
-                var t2 = Unsafe.ReadUnaligned<Vector128<byte>>(ref S[0]);
+                var t0 = MemoryMarshal.Read<Vector128<byte>>(input);
+                var t1 = MemoryMarshal.Read<Vector128<byte>>(ctrBlock);
+                var t2 = MemoryMarshal.Read<Vector128<byte>>(S.AsSpan());
 
                 t1 = Sse2.Xor(t1, t0);
                 t2 = Sse2.Xor(t2, t1);
 
-                Unsafe.WriteUnaligned(ref output[0], t1);
-                Unsafe.WriteUnaligned(ref S[0], t2);
+                MemoryMarshal.Write(output, ref t1);
+                MemoryMarshal.Write(S.AsSpan(), ref t2);
             }
             else
 #endif
diff --git a/crypto/src/crypto/modes/gcm/GcmUtilities.cs b/crypto/src/crypto/modes/gcm/GcmUtilities.cs
index ab2d81e06..e25092dce 100644
--- a/crypto/src/crypto/modes/gcm/GcmUtilities.cs
+++ b/crypto/src/crypto/modes/gcm/GcmUtilities.cs
@@ -4,6 +4,7 @@ using System.Diagnostics;
 using System.Runtime.CompilerServices;
 #endif
 #if NETCOREAPP3_0_OR_GREATER
+using System.Runtime.InteropServices;
 using System.Runtime.Intrinsics;
 using System.Runtime.Intrinsics.X86;
 #endif
@@ -46,7 +47,7 @@ namespace Org.BouncyCastle.Crypto.Modes.Gcm
                 var X = Vector128.Create(x0, x1).AsByte();
                 // TODO[Arm] System.Runtime.Intrinsics.Arm.AdvSimd.Reverse8
                 var Z = Ssse3.Shuffle(X, EndianMask);
-                Unsafe.WriteUnaligned(ref z[0], Z);
+                MemoryMarshal.Write(z.AsSpan(), ref Z);
                 return;
             }
 #endif
@@ -71,7 +72,7 @@ namespace Org.BouncyCastle.Crypto.Modes.Gcm
 #if NETCOREAPP3_0_OR_GREATER
             if (Ssse3.IsSupported && BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
             {
-                var X = Unsafe.ReadUnaligned<Vector128<byte>>(ref x[0]);
+                var X = MemoryMarshal.Read<Vector128<byte>>(x.AsSpan());
                 var Z = Ssse3.Shuffle(X, EndianMask).AsUInt64();
                 z.n0 = Z.GetElement(0);
                 z.n1 = Z.GetElement(1);
diff --git a/crypto/src/pqc/crypto/sphincsplus/HarakaS_X86.cs b/crypto/src/pqc/crypto/sphincsplus/HarakaS_X86.cs
index 3975f02ff..87681c484 100644
--- a/crypto/src/pqc/crypto/sphincsplus/HarakaS_X86.cs
+++ b/crypto/src/pqc/crypto/sphincsplus/HarakaS_X86.cs
@@ -1,7 +1,9 @@
 #if NETCOREAPP3_0_OR_GREATER
 using System;
+using System.Buffers.Binary;
 using System.Diagnostics;
 using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
 using System.Runtime.Intrinsics;
 
 using Org.BouncyCastle.Crypto;
@@ -185,15 +187,13 @@ namespace Org.BouncyCastle.Pqc.Crypto.SphincsPlus
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
         private static Vector128<byte> Load128(ReadOnlySpan<byte> t)
         {
-#if NET7_0_OR_GREATER
-            return Vector128.Create<byte>(t);
-#else
             if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
-                return Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.AsRef(t[0]));
+                return MemoryMarshal.Read<Vector128<byte>>(t);
 
-            return Vector128.Create(t[0], t[1], t[2], t[3], t[4], t[5], t[6], t[7], t[8], t[9], t[10], t[11], t[12],
-                t[13], t[14], t[15]);
-#endif
+            return Vector128.Create(
+                BinaryPrimitives.ReadUInt64LittleEndian(t[..8]),
+                BinaryPrimitives.ReadUInt64LittleEndian(t[8..])
+            ).AsByte();
         }
 
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
```
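The same load/store pattern recurs in every file touched by this commit: the fast path reinterprets the span with `MemoryMarshal.Read`/`Write` instead of `Unsafe.ReadUnaligned`/`WriteUnaligned`, and the endian-safe fallback goes through `BinaryPrimitives` instead of the internal `Pack` utilities. The sketch below distils that pattern from the "+" side of the diff into one self-contained class; the class name `SpanVector128` is hypothetical and not part of the patch, where these are private helpers inside each engine or digest.

```csharp
// Minimal sketch of the span-based Vector128 load/store helpers used in this refactor,
// assuming .NET Core 3.0+ (System.Runtime.Intrinsics available). SpanVector128 is a
// hypothetical container class, not a type from the BouncyCastle sources.
#if NETCOREAPP3_0_OR_GREATER
using System;
using System.Buffers.Binary;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;

internal static class SpanVector128
{
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    internal static Vector128<byte> Load128(ReadOnlySpan<byte> t)
    {
        // Fast path: reinterpret 16 bytes of the span directly as a vector.
        // MemoryMarshal.Read bounds-checks the whole span, unlike Unsafe.ReadUnaligned.
        if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
            return MemoryMarshal.Read<Vector128<byte>>(t);

        // Fallback: assemble the vector from two explicit little-endian 64-bit reads.
        return Vector128.Create(
            BinaryPrimitives.ReadUInt64LittleEndian(t[..8]),
            BinaryPrimitives.ReadUInt64LittleEndian(t[8..])
        ).AsByte();
    }

    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    internal static void Store128(Vector128<byte> s, Span<byte> t)
    {
        // Fast path: write the vector straight into the destination span.
        if (BitConverter.IsLittleEndian && Unsafe.SizeOf<Vector128<byte>>() == 16)
        {
            MemoryMarshal.Write(t, ref s);
            return;
        }

        // Fallback: two explicit little-endian 64-bit writes.
        var u = s.AsUInt64();
        BinaryPrimitives.WriteUInt64LittleEndian(t[..8], u.GetElement(0));
        BinaryPrimitives.WriteUInt64LittleEndian(t[8..], u.GetElement(1));
    }
}
#endif
```

A caller would use it as `var v = SpanVector128.Load128(block); ... ; SpanVector128.Store128(v, output);` with no `ref byte` plumbing. The behavioural difference from the replaced code is that `MemoryMarshal.Read`/`Write` throw if the span is shorter than 16 bytes, whereas `Unsafe.ReadUnaligned`/`WriteUnaligned` only relied on the bounds check of the single `t[0]` element access.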