author     Duncan Wilkie <antigravityd@gmail.com>    2023-11-18 06:11:09 -0600
committer  Duncan Wilkie <antigravityd@gmail.com>    2023-11-18 06:11:09 -0600
commit     11da511c784eca003deb90c23570f0873954e0de (patch)
tree       e14fdd3d5d6345956d67e79ae771d0633d28362b /gmp-6.3.0/mpn/loongarch/64/rshift.asm
Initial commit.
Diffstat (limited to 'gmp-6.3.0/mpn/loongarch/64/rshift.asm')
-rw-r--r--  gmp-6.3.0/mpn/loongarch/64/rshift.asm  119
1 file changed, 119 insertions, 0 deletions
diff --git a/gmp-6.3.0/mpn/loongarch/64/rshift.asm b/gmp-6.3.0/mpn/loongarch/64/rshift.asm
new file mode 100644
index 0000000..a183576
--- /dev/null
+++ b/gmp-6.3.0/mpn/loongarch/64/rshift.asm
@@ -0,0 +1,119 @@
+dnl Loongarch mpn_rshift
+
+dnl Contributed to the GNU project by Torbjorn Granlund.
+
+dnl Copyright 2023 Free Software Foundation, Inc.
+
+dnl This file is part of the GNU MP Library.
+dnl
+dnl The GNU MP Library is free software; you can redistribute it and/or modify
+dnl it under the terms of either:
+dnl
+dnl * the GNU Lesser General Public License as published by the Free
+dnl Software Foundation; either version 3 of the License, or (at your
+dnl option) any later version.
+dnl
+dnl or
+dnl
+dnl * the GNU General Public License as published by the Free Software
+dnl Foundation; either version 2 of the License, or (at your option) any
+dnl later version.
+dnl
+dnl or both in parallel, as here.
+dnl
+dnl The GNU MP Library is distributed in the hope that it will be useful, but
+dnl WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+dnl or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+dnl for more details.
+dnl
+dnl You should have received copies of the GNU General Public License and the
+dnl GNU Lesser General Public License along with the GNU MP Library. If not,
+dnl see https://www.gnu.org/licenses/.
+
+include(`../config.m4')
+
+C INPUT PARAMETERS
+define(`rp_arg',`$a0')
+define(`ap', `$a1')
+define(`n', `$a2')
+define(`cnt', `$a3')
+
+define(`rp', `$a4')
+define(`tnc', `$t8')
+define(`i', `$a7')
+
+ASM_START()
+PROLOGUE(mpn_rshift)
+ move rp, rp_arg
+ sub.d tnc, $zero, cnt
+ srli.d i, n, 2
+
+ ld.d $t0, ap, 0
+ sll.d $a0, $t0, tnc
+
+ andi $t6, n, 1
+ andi $t7, n, 2
+ bnez $t6, L(bx1)
+
+ srl.d $t3, $t0, cnt
+ ld.d $t0, ap, 8
+ addi.d i, i, -1
+ bnez $t7, L(b10)
+ addi.d rp, rp, -16
+ b L(b0)
+L(b10): addi.d ap, ap, 16
+ bge i, $zero, L(b2)
+L(eq2): sll.d $t4, $t0, tnc
+ srl.d $t2, $t0, cnt
+ or $t4, $t3, $t4
+ st.d $t4, rp, 0
+ st.d $t2, rp, 8
+ jr $r1
+
+L(bx1): srl.d $t2, $t0, cnt
+ bnez $t7, L(b11)
+ bnez i, L(gt1)
+ st.d $t2, rp, 0
+ jr $r1
+L(gt1): ld.d $t0, ap, 8
+ addi.d ap, ap, 8
+ addi.d rp, rp, -8
+ addi.d i, i, -1
+ b L(b1)
+L(b11): ld.d $t0, ap, 8
+ addi.d ap, ap, -8
+ addi.d rp, rp, -24
+ b L(b3)
+
+L(top): addi.d ap, ap, 32
+ addi.d rp, rp, 32
+ addi.d i, i, -1
+L(b2): sll.d $t4, $t0, tnc
+ srl.d $t2, $t0, cnt
+ ld.d $t0, ap, 0
+ or $t4, $t3, $t4
+ st.d $t4, rp, 0
+L(b1): sll.d $t4, $t0, tnc
+ srl.d $t3, $t0, cnt
+ ld.d $t0, ap, 8
+ or $t4, $t2, $t4
+ st.d $t4, rp, 8
+L(b0): sll.d $t4, $t0, tnc
+ srl.d $t2, $t0, cnt
+ ld.d $t0, ap, 16
+ or $t4, $t3, $t4
+ st.d $t4, rp, 16
+L(b3): sll.d $t4, $t0, tnc
+ srl.d $t3, $t0, cnt
+ ld.d $t0, ap, 24
+ or $t4, $t2, $t4
+ st.d $t4, rp, 24
+ bnez i, L(top)
+
+L(end): sll.d $t4, $t0, tnc
+ srl.d $t2, $t0, cnt
+ or $t4, $t3, $t4
+ st.d $t4, rp, 32
+ st.d $t2, rp, 40
+ jr $r1
+EPILOGUE()
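
The routine added above is GMP's mpn_rshift for 64-bit LoongArch: a four-way unrolled shift loop whose entry point (L(b0)..L(b3)) is chosen from n mod 4, with the bits shifted out of the low limb returned in $a0. As a point of reference only, here is a minimal C sketch of that documented behaviour, assuming 64-bit limbs; the name ref_rshift and the standalone types are illustrative and not part of GMP:

#include <stdint.h>
#include <stddef.h>

/* Reference model (illustrative, not GMP code) of mpn_rshift on a
   64-bit limb target: shift the n-limb operand {ap, n} right by cnt
   bits (1 <= cnt <= 63), write the n result limbs at rp, and return
   the bits shifted out of the low limb, left-justified.  Requires n >= 1.  */
uint64_t ref_rshift (uint64_t *rp, const uint64_t *ap, size_t n, unsigned cnt)
{
  unsigned tnc = 64 - cnt;              /* complementary shift count */
  uint64_t retval = ap[0] << tnc;       /* low bits that fall off the end */
  for (size_t i = 0; i + 1 < n; i++)
    rp[i] = (ap[i] >> cnt) | (ap[i + 1] << tnc);
  rp[n - 1] = ap[n - 1] >> cnt;         /* high limb is zero-filled */
  return retval;
}

In the assembly, tnc is computed as 0 - cnt; since LoongArch's sll.d/srl.d appear to take the shift count modulo 64, that is equivalent to the 64 - cnt used in this sketch, and the value built in $a0 before the loop corresponds to retval here.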