arch/sparc/lib/atomic_64.S

/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
 */

#include <asm/asi.h>
#include <asm/backoff.h>

    .text

    /* Two versions of the atomic routines, one that
     * does not return a value and does not perform
     * memory barriers, and a second which returns
     * a value and does the barriers.
     */
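    /* Every routine below has the same shape: load the current
     * counter value, compute the new value, and try to install it
     * with a compare-and-swap (cas/casx).  If another CPU updated
     * the counter in the meantime, the compare-and-swap fails and
     * the operation is retried, spinning via the BACKOFF_* macros
     * from asm/backoff.h to ease contention.
     */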
    .globl atomic_add
    .type atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
    BACKOFF_SETUP(%o2)
1:  lduw [%o1], %g1
    add %g1, %o0, %g7
    cas [%o1], %g1, %g7
    cmp %g1, %g7
    bne,pn %icc, BACKOFF_LABEL(2f, 1b)
     nop
    retl
     nop
2:  BACKOFF_SPIN(%o2, %o3, 1b)
    .size atomic_add, .-atomic_add

    .globl atomic_sub
    .type atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
    BACKOFF_SETUP(%o2)
1:  lduw [%o1], %g1
    sub %g1, %o0, %g7
    cas [%o1], %g1, %g7
    cmp %g1, %g7
    bne,pn %icc, BACKOFF_LABEL(2f, 1b)
     nop
    retl
     nop
2:  BACKOFF_SPIN(%o2, %o3, 1b)
    .size atomic_sub, .-atomic_sub

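    /* The *_ret variants return the post-operation value.  In the
     * 32-bit routines the new value is recomputed into %g1 in the
     * branch delay slot and then sign-extended into the 64-bit
     * return register by "sra %g1, 0, %o0" in the delay slot of
     * retl.
     */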
    .globl atomic_add_ret
    .type atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
    BACKOFF_SETUP(%o2)
1:  lduw [%o1], %g1
    add %g1, %o0, %g7
    cas [%o1], %g1, %g7
    cmp %g1, %g7
    bne,pn %icc, BACKOFF_LABEL(2f, 1b)
     add %g1, %o0, %g1
    retl
     sra %g1, 0, %o0
2:  BACKOFF_SPIN(%o2, %o3, 1b)
    .size atomic_add_ret, .-atomic_add_ret

    .globl atomic_sub_ret
    .type atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
    BACKOFF_SETUP(%o2)
1:  lduw [%o1], %g1
    sub %g1, %o0, %g7
    cas [%o1], %g1, %g7
    cmp %g1, %g7
    bne,pn %icc, BACKOFF_LABEL(2f, 1b)
     sub %g1, %o0, %g1
    retl
     sra %g1, 0, %o0
2:  BACKOFF_SPIN(%o2, %o3, 1b)
    .size atomic_sub_ret, .-atomic_sub_ret

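    /* The atomic64_* routines have the same structure but operate on
     * 64-bit counters: ldx/casx replace lduw/cas, and the retry
     * branch tests the 64-bit condition codes (%xcc) rather than
     * %icc.
     */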
    .globl atomic64_add
    .type atomic64_add,#function
atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
    BACKOFF_SETUP(%o2)
1:  ldx [%o1], %g1
    add %g1, %o0, %g7
    casx [%o1], %g1, %g7
    cmp %g1, %g7
    bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
     nop
    retl
     nop
2:  BACKOFF_SPIN(%o2, %o3, 1b)
    .size atomic64_add, .-atomic64_add

    .globl atomic64_sub
    .type atomic64_sub,#function
atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
    BACKOFF_SETUP(%o2)
1:  ldx [%o1], %g1
    sub %g1, %o0, %g7
    casx [%o1], %g1, %g7
    cmp %g1, %g7
    bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
     nop
    retl
     nop
2:  BACKOFF_SPIN(%o2, %o3, 1b)
    .size atomic64_sub, .-atomic64_sub

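    /* The 64-bit *_ret variants need no sign extension; the new
     * value is formed directly in %o0 in the delay slot of retl.
     */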
    .globl atomic64_add_ret
    .type atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
    BACKOFF_SETUP(%o2)
1:  ldx [%o1], %g1
    add %g1, %o0, %g7
    casx [%o1], %g1, %g7
    cmp %g1, %g7
    bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
     nop
    retl
     add %g1, %o0, %o0
2:  BACKOFF_SPIN(%o2, %o3, 1b)
    .size atomic64_add_ret, .-atomic64_add_ret

    .globl atomic64_sub_ret
    .type atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
    BACKOFF_SETUP(%o2)
1:  ldx [%o1], %g1
    sub %g1, %o0, %g7
    casx [%o1], %g1, %g7
    cmp %g1, %g7
    bne,pn %xcc, BACKOFF_LABEL(2f, 1b)
     nop
    retl
     sub %g1, %o0, %o0
2:  BACKOFF_SPIN(%o2, %o3, 1b)
    .size atomic64_sub_ret, .-atomic64_sub_ret

