/* atomic_64.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007, 2012 David S. Miller (davem@davemloft.net)
 */

#include <linux/linkage.h>
#include <asm/asi.h>
#include <asm/backoff.h>

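	/* asm/backoff.h supplies BACKOFF_SETUP, BACKOFF_LABEL and
	 * BACKOFF_SPIN, which implement the exponential-backoff spin that
	 * the loops below fall back to when a cas/casx fails under
	 * contention.
	 */
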
	.text

	/* Three versions of the atomic routines: one that does not
	 * return a value and does not perform memory barriers, and
	 * two that do return a value (the new and the old value,
	 * respectively) and do perform the barriers.
	 */

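	/* For orientation, a hedged C sketch of the retry loop these
	 * macros generate (illustrative only: the helper name is made up,
	 * and the GCC __sync builtin merely stands in for cas/casx):
	 *
	 *	static inline int sketch_add_return(int i, int *p)
	 *	{
	 *		int old;
	 *
	 *		do {
	 *			old = *p;
	 *		} while (__sync_val_compare_and_swap(p, old, old + i) != old);
	 *		return old + i;
	 *	}
	 *
	 * The plain flavour discards the result, the _return flavour
	 * returns old + i (the new value), and the _fetch flavour returns
	 * old (the previous value).
	 */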
#define ATOMIC_OP(op)                                                   \
ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */              \
        BACKOFF_SETUP(%o2);                                             \
1:      lduw    [%o1], %g1;                                             \
        op      %g1, %o0, %g7;                                          \
        cas     [%o1], %g1, %g7;                                        \
        cmp     %g1, %g7;                                               \
        bne,pn  %icc, BACKOFF_LABEL(2f, 1b);                            \
         nop;                                                           \
        retl;                                                           \
         nop;                                                           \
2:      BACKOFF_SPIN(%o2, %o3, 1b);                                     \
ENDPROC(atomic_##op);

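	/* The _return flavour applies "op" a second time in the branch
	 * delay slot to recompute the new value for the caller; the
	 * "sra %g1, 0, %o0" sign-extends the 32-bit result into the
	 * 64-bit return register.
	 */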
#define ATOMIC_OP_RETURN(op)                                            \
ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */     \
        BACKOFF_SETUP(%o2);                                             \
1:      lduw    [%o1], %g1;                                             \
        op      %g1, %o0, %g7;                                          \
        cas     [%o1], %g1, %g7;                                        \
        cmp     %g1, %g7;                                               \
        bne,pn  %icc, BACKOFF_LABEL(2f, 1b);                            \
         op     %g1, %o0, %g1;                                          \
        retl;                                                           \
         sra    %g1, 0, %o0;                                            \
2:      BACKOFF_SPIN(%o2, %o3, 1b);                                     \
ENDPROC(atomic_##op##_return);

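	/* The _fetch flavour returns the value observed before the
	 * operation: %g1 still holds the old value after a successful
	 * cas, so it is simply sign-extended into %o0.
	 */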
#define ATOMIC_FETCH_OP(op)                                             \
ENTRY(atomic_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */        \
        BACKOFF_SETUP(%o2);                                             \
1:      lduw    [%o1], %g1;                                             \
        op      %g1, %o0, %g7;                                          \
        cas     [%o1], %g1, %g7;                                        \
        cmp     %g1, %g7;                                               \
        bne,pn  %icc, BACKOFF_LABEL(2f, 1b);                            \
         nop;                                                           \
        retl;                                                           \
         sra    %g1, 0, %o0;                                            \
2:      BACKOFF_SPIN(%o2, %o3, 1b);                                     \
ENDPROC(atomic_fetch_##op);

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

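	/* The two instantiations above generate atomic_add,
	 * atomic_add_return and atomic_fetch_add, plus their sub
	 * counterparts.  The redefinition below drops ATOMIC_OP_RETURN:
	 * the bitwise ops get only the plain and _fetch flavours.
	 */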
#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

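	/* The 64-bit variants mirror the 32-bit ones, using ldx/casx and
	 * the 64-bit condition codes (%xcc); no sign extension is needed
	 * since the full 64-bit value is returned.
	 */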
#define ATOMIC64_OP(op)                                                 \
ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */            \
        BACKOFF_SETUP(%o2);                                             \
1:      ldx     [%o1], %g1;                                             \
        op      %g1, %o0, %g7;                                          \
        casx    [%o1], %g1, %g7;                                        \
        cmp     %g1, %g7;                                               \
        bne,pn  %xcc, BACKOFF_LABEL(2f, 1b);                            \
         nop;                                                           \
        retl;                                                           \
         nop;                                                           \
2:      BACKOFF_SPIN(%o2, %o3, 1b);                                     \
ENDPROC(atomic64_##op);

#define ATOMIC64_OP_RETURN(op)                                          \
ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */   \
        BACKOFF_SETUP(%o2);                                             \
1:      ldx     [%o1], %g1;                                             \
        op      %g1, %o0, %g7;                                          \
        casx    [%o1], %g1, %g7;                                        \
        cmp     %g1, %g7;                                               \
        bne,pn  %xcc, BACKOFF_LABEL(2f, 1b);                            \
         nop;                                                           \
        retl;                                                           \
         op     %g1, %o0, %o0;                                          \
2:      BACKOFF_SPIN(%o2, %o3, 1b);                                     \
ENDPROC(atomic64_##op##_return);

#define ATOMIC64_FETCH_OP(op)                                           \
ENTRY(atomic64_fetch_##op) /* %o0 = increment, %o1 = atomic_ptr */      \
        BACKOFF_SETUP(%o2);                                             \
1:      ldx     [%o1], %g1;                                             \
        op      %g1, %o0, %g7;                                          \
        casx    [%o1], %g1, %g7;                                        \
        cmp     %g1, %g7;                                               \
        bne,pn  %xcc, BACKOFF_LABEL(2f, 1b);                            \
         nop;                                                           \
        retl;                                                           \
         mov    %g1, %o0;                                               \
2:      BACKOFF_SPIN(%o2, %o3, 1b);                                     \
ENDPROC(atomic64_fetch_##op);

#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_FETCH_OP(op)

ATOMIC64_OPS(and)
ATOMIC64_OPS(or)
ATOMIC64_OPS(xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

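	/* atomic64_dec_if_positive: decrement *%o0 only when the old
	 * value is positive, and return old - 1 whether or not the store
	 * was performed.  A hedged C sketch of the semantics (illustrative
	 * only: the helper name is made up, and the GCC __sync builtin
	 * stands in for casx):
	 *
	 *	static inline long sketch_dec_if_positive(long *p)
	 *	{
	 *		long old;
	 *
	 *		do {
	 *			old = *p;
	 *			if (old <= 0)
	 *				break;
	 *		} while (__sync_val_compare_and_swap(p, old, old - 1) != old);
	 *		return old - 1;
	 *	}
	 */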
ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
        BACKOFF_SETUP(%o2)
1:      ldx     [%o0], %g1
        brlez,pn %g1, 3f
         sub    %g1, 1, %g7
        casx    [%o0], %g1, %g7
        cmp     %g1, %g7
        bne,pn  %xcc, BACKOFF_LABEL(2f, 1b)
         nop
3:      retl
         sub    %g1, 1, %o0
2:      BACKOFF_SPIN(%o2, %o3, 1b)
ENDPROC(atomic64_dec_if_positive)