/*
 * Copyright (C) 1991,1992,1993,1997,1998,2003, 2005 Free Software Foundation, Inc.
 * Copyright (c) 2011 The ChromiumOS Authors.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

/* From glibc-2.14, sysdeps/i386/memset.c */

#include <stdint.h>

#include "string.h"

typedef uint32_t op_t;

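/*
 * Fill bytes until the destination is longword aligned, then store whole
 * 32-bit words with "rep stosl", and finish any remaining tail bytes with
 * "rep stosb".
 */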
void *memset(void *dstpp, int c, size_t len)
{
	int d0;
	unsigned long int dstp = (unsigned long int) dstpp;

	/* This explicit register allocation improves code very much indeed. */
	register op_t x asm("ax");

	x = (unsigned char) c;

	/* Clear the direction flag, so filling will move forward.  */
	asm volatile("cld");

	/* This threshold value is optimal.  */
	if (len >= 12) {
		/* Fill X with four copies of the char we want to fill with. */
		x |= (x << 8);
		x |= (x << 16);

		/* Adjust LEN for the bytes handled in the first loop.  */
		len -= (-dstp) % sizeof(op_t);
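		/*
		 * (-dstp) % sizeof(op_t) is the number of bytes between DSTP
		 * and the next longword boundary (zero if DSTP is already
		 * aligned); the alignment loop below stores exactly that many.
		 */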

		/*
		 * There are at least some bytes to set. No need to test for
		 * LEN == 0 in this alignment loop.
		 */

		/* Fill bytes until DSTP is aligned on a longword boundary. */
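		/*
		 * "=D"/"=c" receive the advanced EDI and the final ECX; the
		 * matching "0"/"1" inputs load the start address and byte
		 * count into those same registers, and "a" supplies the fill
		 * pattern in EAX (stosb stores AL).
		 */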
		asm volatile(
			"rep\n"
			"stosb" /* %0, %2, %3 */ :
			"=D" (dstp), "=c" (d0) :
			"0" (dstp), "1" ((-dstp) % sizeof(op_t)), "a" (x) :
			"memory");

		/* Fill longwords.  */
		asm volatile(
			"rep\n"
			"stosl" /* %0, %2, %3 */ :
			"=D" (dstp), "=c" (d0) :
			"0" (dstp), "1" (len / sizeof(op_t)), "a" (x) :
			"memory");
		len %= sizeof(op_t);
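		/* Only the 0..3 bytes not covered by the longword loop remain. */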
	}

	/* Write the last few bytes. */
	asm volatile(
		"rep\n"
		"stosb" /* %0, %2, %3 */ :
		"=D" (dstp), "=c" (d0) :
		"0" (dstp), "1" (len), "a" (x) :
		"memory");

	return dstpp;
}

void *memcpy(void *dest, const void *src, size_t n)
{
	unsigned long d0, d1, d2;

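	/*
	 * Copy word-sized chunks with "rep movsq" (or "rep movsl" on 32-bit),
	 * then the remaining n % 8 (or n % 4) bytes with "rep movsb".  The
	 * dummy outputs d0-d2 only tell the compiler that ECX, EDI and ESI
	 * are modified.
	 */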
#if CONFIG(LP_ARCH_X86_64)
	asm volatile(
		"rep ; movsq\n\t"
		"mov %4,%%rcx\n\t"
		"rep ; movsb\n\t"
		: "=&c" (d0), "=&D" (d1), "=&S" (d2)
		: "0" (n >> 3), "g" (n & 7), "1" (dest), "2" (src)
		: "memory"
	);
#else
	asm volatile(
		"rep ; movsl\n\t"
		"movl %4,%%ecx\n\t"
		"rep ; movsb\n\t"
		: "=&c" (d0), "=&D" (d1), "=&S" (d2)
		: "0" (n >> 2), "g" (n & 3), "1" (dest), "2" (src)
		: "memory"
	);
#endif

	return dest;
}