PowerPC LE strcpy
http://sourceware.org/ml/libc-alpha/2013-08/msg00100.html

The strcpy changes for little-endian are quite straightforward, just a
matter of rotating the last word differently. I'll note that the powerpc64
version of stpcpy is just begging to be converted to use 64-bit loads and
stores.

	* sysdeps/powerpc/powerpc64/strcpy.S: Add little-endian support:
	* sysdeps/powerpc/powerpc32/strcpy.S: Likewise.
	* sysdeps/powerpc/powerpc64/stpcpy.S: Likewise.
	* sysdeps/powerpc/powerpc32/stpcpy.S: Likewise.
parent 8a7413f9b0
commit 43b8401371
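What "rotating the last word differently" means in practice, as a rough C model (the helper name and the plain-C shifts are mine, not glibc code, and the sketch keys off the same __LITTLE_ENDIAN__ macro the assembly tests, a GCC predefine on powerpcle): once the word-at-a-time loop has loaded the word that contains the terminating NUL, the remaining bytes are stored one at a time. On big-endian the byte at the lowest address sits in the most significant bits of the register; on little-endian it sits in the least significant bits, so the shift/rotate amounts used to peel bytes off run in the opposite order.

#include <stdint.h>

/* Minimal sketch, not glibc source: store the final word of a string
   byte by byte, stopping once the NUL has been stored.  Returns the
   address of that NUL, mirroring what the stpcpy tail leaves in rDEST. */
static char *
copy_last_word (char *dst, uint32_t w)
{
  /* Precondition (as in the assembly): w is known to contain a NUL byte. */
  for (int i = 0; i < 4; i++)
    {
#ifdef __LITTLE_ENDIAN__
      unsigned char b = (w >> (8 * i)) & 0xff;      /* shifts 0, 8, 16, 24 */
#else
      unsigned char b = (w >> (24 - 8 * i)) & 0xff; /* shifts 24, 16, 8, 0 */
#endif
      dst[i] = b;
      if (b == 0)
        return dst + i;   /* address of the terminating NUL */
    }
  return dst + 3;         /* not reached when the precondition holds */
}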
ChangeLog
@@ -1,3 +1,10 @@
+2013-10-04  Alan Modra  <amodra@gmail.com>
+
+	* sysdeps/powerpc/powerpc64/strcpy.S: Add little-endian support:
+	* sysdeps/powerpc/powerpc32/strcpy.S: Likewise.
+	* sysdeps/powerpc/powerpc64/stpcpy.S: Likewise.
+	* sysdeps/powerpc/powerpc32/stpcpy.S: Likewise.
+
 2013-10-04  Alan Modra  <amodra@gmail.com>
 
 	* sysdeps/powerpc/powerpc64/strcmp.S (rTMP2): Define as r0.
sysdeps/powerpc/powerpc32/stpcpy.S
@@ -62,7 +62,22 @@ L(g2):	add	rTMP, rFEFE, rWORD
 
 	mr	rALT, rWORD
 /* We've hit the end of the string. Do the rest byte-by-byte. */
-L(g1):	rlwinm.	rTMP, rALT, 8, 24, 31
+L(g1):
+#ifdef __LITTLE_ENDIAN__
+	rlwinm.	rTMP, rALT, 0, 24, 31
+	stbu	rALT, 4(rDEST)
+	beqlr-
+	rlwinm.	rTMP, rALT, 24, 24, 31
+	stbu	rTMP, 1(rDEST)
+	beqlr-
+	rlwinm.	rTMP, rALT, 16, 24, 31
+	stbu	rTMP, 1(rDEST)
+	beqlr-
+	rlwinm	rTMP, rALT, 8, 24, 31
+	stbu	rTMP, 1(rDEST)
+	blr
+#else
+	rlwinm.	rTMP, rALT, 8, 24, 31
 	stbu	rTMP, 4(rDEST)
 	beqlr-
 	rlwinm.	rTMP, rALT, 16, 24, 31
@@ -73,6 +88,7 @@ L(g1):	rlwinm.	rTMP, rALT, 8, 24, 31
 	beqlr-
 	stbu	rALT, 1(rDEST)
 	blr
+#endif
 
 /* Oh well. In this case, we just do a byte-by-byte copy. */
 	.align 4
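The rotate amounts in the little-endian branch above are just the byte positions of a little-endian word read from the bottom up; the record ('.') forms set CR0 so beqlr- can return as soon as the NUL has been stored, and stbu is the update form, so rDEST is left pointing at the terminating NUL, which is what stpcpy returns. A rough C rendering of that branch (the function name is made up, and the array indices stand in for the 4/1/1/1 stbu offsets):

#include <stdint.h>

/* Sketch of the __LITTLE_ENDIAN__ branch of the stpcpy tail above. */
static char *
stpcpy_tail_le (char *dest, uint32_t w)
{
  dest[0] = w & 0xff;           /* rlwinm. rTMP,rALT,0,24,31  / stbu rALT,4(rDEST) */
  if (dest[0] == 0)
    return dest;                /* beqlr- */
  dest[1] = (w >> 8) & 0xff;    /* rlwinm. rTMP,rALT,24,24,31 / stbu rTMP,1(rDEST) */
  if (dest[1] == 0)
    return dest + 1;
  dest[2] = (w >> 16) & 0xff;   /* rlwinm. rTMP,rALT,16,24,31 / stbu rTMP,1(rDEST) */
  if (dest[2] == 0)
    return dest + 2;
  dest[3] = (w >> 24) & 0xff;   /* rlwinm  rTMP,rALT,8,24,31  / stbu rTMP,1(rDEST) */
  return dest + 3;              /* rDEST now points at the NUL */
}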
sysdeps/powerpc/powerpc32/strcpy.S
@@ -62,7 +62,22 @@ L(g2):	add	rTMP, rFEFE, rWORD
 
 	mr	rALT, rWORD
 /* We've hit the end of the string. Do the rest byte-by-byte. */
-L(g1):	rlwinm.	rTMP, rALT, 8, 24, 31
+L(g1):
+#ifdef __LITTLE_ENDIAN__
+	rlwinm.	rTMP, rALT, 0, 24, 31
+	stb	rALT, 4(rDEST)
+	beqlr-
+	rlwinm.	rTMP, rALT, 24, 24, 31
+	stb	rTMP, 5(rDEST)
+	beqlr-
+	rlwinm.	rTMP, rALT, 16, 24, 31
+	stb	rTMP, 6(rDEST)
+	beqlr-
+	rlwinm	rTMP, rALT, 8, 24, 31
+	stb	rTMP, 7(rDEST)
+	blr
+#else
+	rlwinm.	rTMP, rALT, 8, 24, 31
 	stb	rTMP, 4(rDEST)
 	beqlr-
 	rlwinm.	rTMP, rALT, 16, 24, 31
@@ -73,6 +88,7 @@ L(g1):	rlwinm.	rTMP, rALT, 8, 24, 31
 	beqlr-
 	stb	rALT, 7(rDEST)
 	blr
+#endif
 
 /* Oh well. In this case, we just do a byte-by-byte copy. */
 	.align 4
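The strcpy tail uses the same byte-extraction sequence; the only difference from stpcpy is the stores: plain stb at fixed offsets 4..7, since strcpy returns its original destination and never needs a pointer to the end of the copy, whereas stpcpy's stbu keeps advancing rDEST. The same contrast at the C level, as a small standalone example (not part of the patch):

#define _POSIX_C_SOURCE 200809L
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char buf[16];
  char *end = stpcpy (buf, "ppc");  /* returns &buf[3], the terminating NUL */
  strcpy (end, "64le");             /* returns its first argument, i.e. end */
  printf ("%s\n", buf);             /* prints "ppc64le" */
  return 0;
}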
sysdeps/powerpc/powerpc64/stpcpy.S
@@ -62,7 +62,22 @@ L(g2):	add	rTMP, rFEFE, rWORD
 
 	mr	rALT, rWORD
 /* We've hit the end of the string. Do the rest byte-by-byte. */
-L(g1):	rlwinm.	rTMP, rALT, 8, 24, 31
+L(g1):
+#ifdef __LITTLE_ENDIAN__
+	rlwinm.	rTMP, rALT, 0, 24, 31
+	stbu	rALT, 4(rDEST)
+	beqlr-
+	rlwinm.	rTMP, rALT, 24, 24, 31
+	stbu	rTMP, 1(rDEST)
+	beqlr-
+	rlwinm.	rTMP, rALT, 16, 24, 31
+	stbu	rTMP, 1(rDEST)
+	beqlr-
+	rlwinm	rTMP, rALT, 8, 24, 31
+	stbu	rTMP, 1(rDEST)
+	blr
+#else
+	rlwinm.	rTMP, rALT, 8, 24, 31
 	stbu	rTMP, 4(rDEST)
 	beqlr-
 	rlwinm.	rTMP, rALT, 16, 24, 31
@@ -73,6 +88,7 @@ L(g1):	rlwinm.	rTMP, rALT, 8, 24, 31
 	beqlr-
 	stbu	rALT, 1(rDEST)
 	blr
+#endif
 
 /* Oh well. In this case, we just do a byte-by-byte copy. */
 	.align 4
sysdeps/powerpc/powerpc64/strcpy.S
@@ -68,6 +68,32 @@ L(g2):	add	rTMP, rFEFE, rWORD
 	mr	rALT, rWORD
 /* We've hit the end of the string. Do the rest byte-by-byte. */
 L(g1):
+#ifdef __LITTLE_ENDIAN__
+	extrdi.	rTMP, rALT, 8, 56
+	stb	rALT, 8(rDEST)
+	beqlr-
+	extrdi.	rTMP, rALT, 8, 48
+	stb	rTMP, 9(rDEST)
+	beqlr-
+	extrdi.	rTMP, rALT, 8, 40
+	stb	rTMP, 10(rDEST)
+	beqlr-
+	extrdi.	rTMP, rALT, 8, 32
+	stb	rTMP, 11(rDEST)
+	beqlr-
+	extrdi.	rTMP, rALT, 8, 24
+	stb	rTMP, 12(rDEST)
+	beqlr-
+	extrdi.	rTMP, rALT, 8, 16
+	stb	rTMP, 13(rDEST)
+	beqlr-
+	extrdi.	rTMP, rALT, 8, 8
+	stb	rTMP, 14(rDEST)
+	beqlr-
+	extrdi	rTMP, rALT, 8, 0
+	stb	rTMP, 15(rDEST)
+	blr
+#else
 	extrdi.	rTMP, rALT, 8, 0
 	stb	rTMP, 8(rDEST)
 	beqlr-
@@ -91,6 +117,7 @@ L(g1):
 	beqlr-
 	stb	rALT, 15(rDEST)
 	blr
+#endif
 
 /* Oh well. In this case, we just do a byte-by-byte copy. */
 	.align 4
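In the 64-bit strcpy tail, extrdi. rTMP,rALT,8,b extracts the 8-bit field whose leftmost bit is bit b of the doubleword (bit 0 being the most significant), so the little-endian branch walks b = 56, 48, ..., 0 while the store offsets walk 8..15: the least significant byte of the register is the string's next byte in memory. The same logic as a C sketch (the helper name is illustrative only, not glibc code):

#include <stdint.h>

/* Sketch of the __LITTLE_ENDIAN__ branch of the powerpc64 strcpy tail. */
static void
strcpy_tail_le64 (char *dest, uint64_t w)
{
  for (int i = 0; i < 8; i++)
    {
      /* extrdi rTMP,rALT,8,56-8*i  ==  (w >> (8*i)) & 0xff */
      unsigned char b = (w >> (8 * i)) & 0xff;
      dest[i] = b;                /* stb at 8+i(rDEST) in the assembly */
      if (b == 0)
        return;                   /* beqlr-: stop once the NUL is stored */
    }
}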