summaryrefslogtreecommitdiffstats
path: root/src/arch/arm64/include
diff options
context:
space:
mode:
authorMichael Brown2016-05-09 17:03:19 +0200
committerMichael Brown2016-05-11 09:15:52 +0200
commit95716ece91a29f1d122741ec3dd307765d96e314 (patch)
treea1667c0153611091b46b5a765d97f4dda9505ba6 /src/arch/arm64/include
parent[libc] Avoid implicit assumptions about potentially-optimised memcpy() (diff)
downloadipxe-95716ece91a29f1d122741ec3dd307765d96e314.tar.gz
ipxe-95716ece91a29f1d122741ec3dd307765d96e314.tar.xz
ipxe-95716ece91a29f1d122741ec3dd307765d96e314.zip
[arm] Add optimised string functions for 64-bit ARM
Signed-off-by: Michael Brown <mcb30@ipxe.org>
Diffstat (limited to 'src/arch/arm64/include')
-rw-r--r--src/arch/arm64/include/bits/string.h106
1 file changed, 106 insertions, 0 deletions
diff --git a/src/arch/arm64/include/bits/string.h b/src/arch/arm64/include/bits/string.h
new file mode 100644
index 000000000..c05fbe346
--- /dev/null
+++ b/src/arch/arm64/include/bits/string.h
@@ -0,0 +1,106 @@
+#ifndef BITS_STRING_H
+#define BITS_STRING_H
+
+FILE_LICENCE ( GPL2_OR_LATER_OR_UBDL );
+
+/** @file
+ *
+ * String functions
+ *
+ */
+
+extern void arm64_bzero ( void *dest, size_t len );
+extern void arm64_memset ( void *dest, size_t len, int character );
+extern void arm64_memcpy ( void *dest, const void *src, size_t len );
+extern void arm64_memmove_forwards ( void *dest, const void *src, size_t len );
+extern void arm64_memmove_backwards ( void *dest, const void *src, size_t len );
+extern void arm64_memmove ( void *dest, const void *src, size_t len );
+
/**
 * Fill memory region
 *
 * @v dest		Destination region
 * @v character		Fill character
 * @v len		Length
 * @ret dest		Destination region
 *
 * Dispatches at compile time: constant zero fills of at most 64
 * bytes are left to gcc (which emits inline "stX xzr" stores),
 * larger or non-constant zero fills go to the optimised zeroing
 * routine, and everything else falls through to the generic
 * variable-length implementation.
 */
static inline __attribute__ (( always_inline )) void *
memset ( void *dest, int character, size_t len ) {

	if ( __builtin_constant_p ( character ) && ( character == 0 ) ) {
		if ( __builtin_constant_p ( len ) && ( len <= 64 ) ) {
			/* Small constant-length zero fill: let gcc
			 * generate inline zero-register stores.
			 */
			__builtin_memset ( dest, 0, len );
		} else {
			/* Larger or variable-length zero fill */
			arm64_bzero ( dest, len );
		}
	} else {
		/* Not known to be zeroing: generic fill (note the
		 * project-specific (dest, len, character) argument
		 * order of arm64_memset).
		 */
		arm64_memset ( dest, len, character );
	}
	return dest;
}
+
/**
 * Copy memory region
 *
 * @v dest		Destination region
 * @v src		Source region
 * @v len		Length
 * @ret dest		Destination region
 *
 * Small constant-length copies are delegated to gcc so it can emit
 * inline "ldX"/"stX" pairs; anything else uses the optimised
 * variable-length copy routine.
 */
static inline __attribute__ (( always_inline )) void *
memcpy ( void *dest, const void *src, size_t len ) {

	if ( __builtin_constant_p ( len ) && ( len <= 64 ) ) {
		/* Length known at build time and small enough to inline */
		__builtin_memcpy ( dest, src, len );
	} else {
		/* Variable or large length: call out-of-line copy */
		arm64_memcpy ( dest, src, len );
	}
	return dest;
}
+
/**
 * Copy (possibly overlapping) memory region
 *
 * @v dest		Destination region
 * @v src		Source region
 * @v len		Length
 * @ret dest		Destination region
 *
 * When the displacement between destination and source is a
 * compile-time constant, the correct single-direction copy is
 * selected directly; otherwise the ambidirectional routine decides
 * at run time.  (The void-pointer subtraction relies on the gcc
 * extension treating sizeof(void) as 1.)
 */
static inline __attribute__ (( always_inline )) void *
memmove ( void *dest, const void *src, size_t len ) {
	ssize_t disp = ( dest - src );

	if ( ! __builtin_constant_p ( disp ) ) {
		/* Direction unknown at build time: runtime dispatch */
		arm64_memmove ( dest, src, len );
	} else if ( disp <= 0 ) {
		/* Destination at or below source: forward copy is safe */
		arm64_memmove_forwards ( dest, src, len );
	} else {
		/* Destination above source: copy backwards to avoid
		 * overwriting not-yet-read source bytes.
		 */
		arm64_memmove_backwards ( dest, src, len );
	}
	return dest;
}
+
+#endif /* BITS_STRING_H */