/** @file\r
64-bit left rotation for Ia32\r
\r
- Copyright (c) 2006, Intel Corporation<BR>\r
- All rights reserved. This program and the accompanying materials\r
- are licensed and made available under the terms and conditions of the BSD License\r
- which accompanies this distribution. The full text of the license may be found at\r
- http://opensource.org/licenses/bsd-license.php\r
-\r
- THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
- WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+ Copyright (c) 2006 - 2015, Intel Corporation. All rights reserved.<BR>\r
+ SPDX-License-Identifier: BSD-2-Clause-Patent\r
\r
**/\r
\r
-#if _MSC_EXTENSIONS\r
+/**\r
+ Rotates a 64-bit integer left between 0 and 63 bits, filling\r
+ the low bits with the high bits that were rotated.\r
+\r
+ This function rotates the 64-bit value Operand to the left by Count bits. The\r
+ low Count bits are filled with the high Count bits of Operand. The rotated
+ value is returned.\r
+\r
+ @param Operand The 64-bit operand to rotate left.\r
+ @param Count The number of bits to rotate left.\r
\r
+ @return Operand <<< Count\r
+\r
+**/\r
//
// NOTE(review): this chunk is a diff listing ('-'/'+' markers), not plain
// source; the code lines below are preserved verbatim and only comments are
// added.  The setup instructions that load Operand into edx:eax (with a copy
// involving ebx) and Count into cl normally precede the 'ror' below -- they
// appear to be elided from this view; TODO confirm against the complete file.
//
// The 64-bit result is left in edx:eax, which is the MSVC convention for
// returning UINT64; the function intentionally has no C 'return' statement
// (the compiler's C4035 "no return value" warning is expected here).
//
UINT64
EFIAPI
InternalMathLRotU64 (
- IN UINT64 Operand,
- IN UINTN Count
+ IN UINT64 Operand,
+ IN UINTN Count
  )
{
  _asm {
    ror     ebx, cl                     ; presumably ebx holds the bits rotated out of
                                        ; the high dword; realign them for the merge
                                        ; below -- TODO confirm, the loading
                                        ; instructions are outside this view
    shld    eax, ebx, cl                ; low result dword: shift eax left by cl,
                                        ; filling from the high bits of ebx
    test    cl, 32                      ; Count >= 32?
                                        ; shifts operate mod 32, so for Count >= 32
                                        ; the two result dwords must also be swapped.
                                        ; NOTE(review): the jz/mov sequence replacing
                                        ; cmovnz likely avoids CMOVcc because it is a
                                        ; P6+ instruction and this code must run on
                                        ; earlier IA32 CPUs -- confirm with commit log
-    cmovnz  ecx, eax
-    cmovnz  eax, edx
-    cmovnz  edx, ecx
+    jz      L0                         ; Count < 32: no swap needed
+    mov     ecx, eax                   ; swap eax <-> edx using ecx as scratch
+    mov     eax, edx                   ; (cl is no longer needed at this point)
+    mov     edx, ecx
+    L0 :
  }
}
-\r
-#endif\r