/* SPDX-License-Identifier: GPL-2.0-or-later */
/* From glibc-2.14, sysdeps/i386/memset.c */

#include <string.h>
#include <stdint.h>
#include <stdbool.h>
#include <asan.h>

typedef uint32_t op_t;

void *memset(void *dstpp, int c, size_t len)
{
	int d0;
	unsigned long int dstp = (unsigned long int) dstpp;

#if (ENV_ROMSTAGE && CONFIG(ASAN_IN_ROMSTAGE)) || \
	(ENV_RAMSTAGE && CONFIG(ASAN_IN_RAMSTAGE))
	check_memory_region((unsigned long)dstpp, len, true, _RET_IP_);
#endif

	/* This explicit register allocation improves code very much indeed. */
	register op_t x asm("ax");

	x = (unsigned char) c;

	/* Clear the direction flag, so filling will move forward. */
	asm volatile("cld");

	/* This threshold value is optimal. */
	if (len >= 12) {
		/* Fill X with four copies of the char we want to fill with. */
		x |= (x << 8);
		x |= (x << 16);

		/* Adjust LEN for the bytes handled in the first loop. */
		len -= (-dstp) % sizeof(op_t);

		/*
		 * There are at least some bytes to set. No need to test for
		 * LEN == 0 in this alignment loop.
		 */

		/* Fill bytes until DSTP is aligned on a longword boundary. */
		asm volatile(
			"rep\n"
			"stosb" /* %0, %2, %3 */
			: "=D" (dstp), "=c" (d0)
			: "0" (dstp), "1" ((-dstp) % sizeof(op_t)), "a" (x)
			: "memory");

		/* Fill longwords. */
		asm volatile(
			"rep\n"
			"stosl" /* %0, %2, %3 */
			: "=D" (dstp), "=c" (d0)
			: "0" (dstp), "1" (len / sizeof(op_t)), "a" (x)
			: "memory");

		len %= sizeof(op_t);
	}

	/* Write the last few bytes. */
	asm volatile(
		"rep\n"
		"stosb" /* %0, %2, %3 */
		: "=D" (dstp), "=c" (d0)
		: "0" (dstp), "1" (len), "a" (x)
		: "memory");

	return dstpp;
}