/* Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "apr_arch_atomic.h"

#ifdef USE_ATOMICS_S390

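/*
 * Note on the approach used below: each read-modify-write primitive is
 * built on the z/Architecture CS (32-bit compare and swap) instruction, or
 * CSG for 64-bit pointer operands, issued from GCC extended inline
 * assembly.  CS compares the expected value held in a register with the
 * word at *mem; if they match it stores the new value, otherwise it reloads
 * the register with the current contents and sets the condition code, and
 * the "jl" branch retries the loop until the swap succeeds.
 */
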
APR_DECLARE(apr_status_t) apr_atomic_init(apr_pool_t *p)
{
    return APR_SUCCESS;
}

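/*
 * apr_atomic_read32() and apr_atomic_set32() are plain volatile accesses:
 * an aligned 4-byte load or store is already atomic on z/Architecture, so
 * no special instruction is needed for them.
 */
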
APR_DECLARE(apr_uint32_t) apr_atomic_read32(volatile apr_uint32_t *mem)
{
    return *mem;
}

APR_DECLARE(void) apr_atomic_set32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    *mem = val;
}

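/*
 * atomic_add(): load the current value, compute old + val in a scratch
 * register (lr/alr), then attempt the update with cs.  If *mem changed in
 * the meantime, cs reloads the current value into %0 and jl branches back
 * to retry.  The previous value is returned, matching apr_atomic_add32().
 */
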
static APR_INLINE apr_uint32_t atomic_add(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    apr_uint32_t prev = *mem, temp;

    asm volatile ("loop_%=:\n"
                  "    lr  %1,%0\n"
                  "    alr %1,%3\n"
                  "    cs  %0,%1,%2\n"
                  "    jl  loop_%=\n"
                  : "+d" (prev), "+d" (temp), "=Q" (*mem)
                  : "d" (val), "m" (*mem)
                  : "cc", "memory");

    return prev;
}

APR_DECLARE(apr_uint32_t) apr_atomic_add32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    return atomic_add(mem, val);
}

APR_DECLARE(apr_uint32_t) apr_atomic_inc32(volatile apr_uint32_t *mem)
{
    return atomic_add(mem, 1);
}

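/*
 * atomic_sub(): same cs retry loop as atomic_add(), but using slr (subtract
 * logical) and returning the *new* value.  That return value is what lets
 * apr_atomic_dec32() report zero exactly when the counter reaches zero.
 */
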
static APR_INLINE apr_uint32_t atomic_sub(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    apr_uint32_t prev = *mem, temp;

    asm volatile ("loop_%=:\n"
                  "    lr  %1,%0\n"
                  "    slr %1,%3\n"
                  "    cs  %0,%1,%2\n"
                  "    jl  loop_%=\n"
                  : "+d" (prev), "+d" (temp), "=Q" (*mem)
                  : "d" (val), "m" (*mem)
                  : "cc", "memory");

    return temp;
}

APR_DECLARE(void) apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    atomic_sub(mem, val);
}

APR_DECLARE(int) apr_atomic_dec32(volatile apr_uint32_t *mem)
{
    return atomic_sub(mem, 1);
}

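/*
 * apr_atomic_cas32(): a single cs suffices, since the instruction itself
 * either performs the swap or leaves memory untouched; the (possibly
 * reloaded) register is returned, i.e. the value that was in *mem before
 * the call.  Illustrative usage sketch, not part of this file:
 *
 *     if (apr_atomic_cas32(&flag, 1, 0) == 0) {
 *         ... we swapped 0 -> 1, so we own the flag ...
 *     }
 */
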
APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile apr_uint32_t *mem, apr_uint32_t with,
                                           apr_uint32_t cmp)
{
    asm volatile ("    cs  %0,%2,%1\n"
                  : "+d" (cmp), "=Q" (*mem)
                  : "d" (with), "m" (*mem)
                  : "cc", "memory");

    return cmp;
}

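/*
 * apr_atomic_xchg32(): an unconditional exchange expressed as a cs loop
 * with no arithmetic; cs keeps the expected value up to date on failure,
 * so the loop simply retries until the store lands, then returns the old
 * value.
 */
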
APR_DECLARE(apr_uint32_t) apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint32_t val)
{
    apr_uint32_t prev = *mem;

    asm volatile ("loop_%=:\n"
                  "    cs  %0,%2,%1\n"
                  "    jl  loop_%=\n"
                  : "+d" (prev), "=Q" (*mem)
                  : "d" (val), "m" (*mem)
                  : "cc", "memory");

    return prev;
}

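/*
 * Pointer-sized variants: APR_SIZEOF_VOIDP selects between cs (32-bit
 * pointers) and csg (64-bit pointers); the logic is otherwise identical to
 * the 32-bit versions above.
 */
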
APR_DECLARE(void*) apr_atomic_casptr(volatile void **mem, void *with, const void *cmp)
{
    void *prev = (void *) cmp;
#if APR_SIZEOF_VOIDP == 4
    asm volatile ("    cs  %0,%2,%1\n"
                  : "+d" (prev), "=Q" (*mem)
                  : "d" (with), "m" (*mem)
                  : "cc", "memory");
#elif APR_SIZEOF_VOIDP == 8
    asm volatile ("    csg %0,%2,%1\n"
                  : "+d" (prev), "=Q" (*mem)
                  : "d" (with), "m" (*mem)
                  : "cc", "memory");
#else
#error APR_SIZEOF_VOIDP value not supported
#endif
    return prev;
}

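/*
 * apr_atomic_xchgptr(): pointer exchange, again a cs/csg retry loop chosen
 * by APR_SIZEOF_VOIDP, returning the pointer previously stored in *mem.
 */
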
APR_DECLARE(void*) apr_atomic_xchgptr(volatile void **mem, void *with)
{
    void *prev = (void *) *mem;
#if APR_SIZEOF_VOIDP == 4
    asm volatile ("loop_%=:\n"
                  "    cs  %0,%2,%1\n"
                  "    jl  loop_%=\n"
                  : "+d" (prev), "=Q" (*mem)
                  : "d" (with), "m" (*mem)
                  : "cc", "memory");
#elif APR_SIZEOF_VOIDP == 8
    asm volatile ("loop_%=:\n"
                  "    csg %0,%2,%1\n"
                  "    jl  loop_%=\n"
                  : "+d" (prev), "=Q" (*mem)
                  : "d" (with), "m" (*mem)
                  : "cc", "memory");
#else
#error APR_SIZEOF_VOIDP value not supported
#endif
    return prev;
}

#endif /* USE_ATOMICS_S390 */