#------------------------------------------------------------------------------
#
# SetMem() worker for ARM
#
# This file started out as C code that did 64 bit moves if the buffer was
# 32-bit aligned, else it does a byte copy. It also does a byte copy for
# any trailing bytes. Updated to use VSTM/VLDM to do 128 byte writes.
#
# Copyright (c) 2008-2010 Apple Inc. All rights reserved.<BR>
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
#------------------------------------------------------------------------------
+\r
/**
  Set Buffer to Value for Length bytes.

  @param Buffer  Memory to set.
  @param Length  Number of bytes to set.
  @param Value   Value of the set operation.

  @return Buffer

VOID *
EFIAPI
InternalMemSetMem (
  OUT VOID   *Buffer,
  IN  UINTN  Length,
  IN  UINT8  Value
  )
**/

.text
.align 2
.globl ASM_PFX(InternalMemSetMem)

#------------------------------------------------------------------------------
# VOID * InternalMemSetMem (VOID *Buffer, UINTN Length, UINT8 Value)
#
# In:    r0 = Buffer, r1 = Length, r2 = Value
# Out:   r0 = Buffer (r0 is never modified; all stores go through r12)
#
# Strategy: when Buffer is 4-byte aligned and Length > 127, splat Value
# into q0-q3 and store 128 bytes per loop iteration with two VSTMs; the
# unaligned case and any trailing bytes are set one byte at a time.
#
# Register roles inside the loop:
#   r12 = running destination pointer
#   r1  = bytes remaining
#   lr  = flag: nonzero => take the 128-byte NEON path this iteration
#   r3  = constant 0, moved into lr once fewer than 128 bytes remain
#
# Only caller-saved NEON registers (q0-q3 == d0-d7) are touched, so no
# AAPCS d8-d15 save/restore is required.
#------------------------------------------------------------------------------
ASM_PFX(InternalMemSetMem):
  stmfd   sp!, {lr}            @ lr is used as a scratch flag below; save it
  tst     r0, #3               @ r3 = 1 if Buffer is 4-byte aligned, else 0
  movne   r3, #0
  moveq   r3, #1
  cmp     r1, #127             @ lr = (aligned && Length > 127) ? 1 : 0
  movls   lr, #0
  andhi   lr, r3, #1
  cmp     lr, #0
  mov     r12, r0              @ keep r0 intact: it is the return value
  bne     L31
L32:
  mov     r3, #0               @ loop invariant: r3 == 0 (clears lr when moved in)
  b       L43
L31:
  vdup.8  q0, r2               @ replicate Value into every byte lane
  vmov    q1, q0
  vmov    q2, q0
  vmov    q3, q0
  b       L32
L34:
  cmp     lr, #0               @ lr == 0: byte-at-a-time path
  streqb  r2, [r12], #1
  subeq   r1, r1, #1
  beq     L43
  sub     r1, r1, #128         @ NEON path: account for the 128 bytes stored below
  cmp     r1, #127             @ fewer than 128 left? use the byte path next time
  movls   lr, r3
  vstm    r12!, {d0-d7}        @ first 64 bytes
  vstm    r12!, {d0-d7}        @ second 64 bytes (128 per iteration)
L43:
  cmp     r1, #0
  bne     L34
  ldmfd   sp!, {pc}            @ balanced with the single-register push above