blob: a8434afbad9d8ecd0070a191b337bd5c313d6ada [file] [log] [blame]
Andrew Geissler82c905d2020-04-13 13:39:40 -05001From 48ddf4fb999931942c359350fb31cd557514e1c6 Mon Sep 17 00:00:00 2001
2From: Chenxi Mao <maochenxi@eswin.com>
3Date: Mon, 20 Apr 2020 15:27:22 +0800
4Subject: [PATCH 1/1] adb: Support riscv64
5
6---
7 include/cutils/atomic-inline.h | 2 +
8 include/cutils/atomic-riscv64.h | 156 ++++++++++++++++++++++++++++++++
9 2 files changed, 158 insertions(+)
10 create mode 100644 include/cutils/atomic-riscv64.h
11
12diff --git a/include/cutils/atomic-inline.h b/include/cutils/atomic-inline.h
13index a31e913579..b5dc38209c 100644
14--- a/include/cutils/atomic-inline.h
15+++ b/include/cutils/atomic-inline.h
16@@ -55,6 +55,8 @@ extern "C" {
17 #include <cutils/atomic-mips64.h>
18 #elif defined(__mips__)
19 #include <cutils/atomic-mips.h>
20+#elif defined(__riscv) && __riscv_xlen == 64
21+#include <cutils/atomic-riscv64.h>
22 #else
23 #error atomic operations are unsupported
24 #endif
25diff --git a/include/cutils/atomic-riscv64.h b/include/cutils/atomic-riscv64.h
26new file mode 100644
27index 0000000000..2664db5a86
28--- /dev/null
29+++ b/include/cutils/atomic-riscv64.h
30@@ -0,0 +1,156 @@
31+/*
32+ * Copyright (C) 2014 The Android Open Source Project
33+ * All rights reserved.
34+ *
35+ * Redistribution and use in source and binary forms, with or without
36+ * modification, are permitted provided that the following conditions
37+ * are met:
38+ * * Redistributions of source code must retain the above copyright
39+ * notice, this list of conditions and the following disclaimer.
40+ * * Redistributions in binary form must reproduce the above copyright
41+ * notice, this list of conditions and the following disclaimer in
42+ * the documentation and/or other materials provided with the
43+ * distribution.
44+ *
45+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
46+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
47+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
48+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
49+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
50+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
51+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
52+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
53+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
54+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
55+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
56+ * SUCH DAMAGE.
57+ */
58+
59+#ifndef ANDROID_CUTILS_ATOMIC_RISCV64_H
60+#define ANDROID_CUTILS_ATOMIC_RISCV64_H
61+
62+#include <stdint.h>
63+
64+#ifndef ANDROID_ATOMIC_INLINE
65+#define ANDROID_ATOMIC_INLINE inline __attribute__((always_inline))
66+#endif
67+
68+/*
 69+ TODO(riscv64): Revisit the below functions and check for potential
70+ optimizations using assembly code or otherwise.
71+*/
72+
73+extern ANDROID_ATOMIC_INLINE
74+void android_compiler_barrier(void)
75+{
76+ __asm__ __volatile__ ("" : : : "memory");
77+}
78+
79+extern ANDROID_ATOMIC_INLINE
80+void android_memory_barrier(void)
81+{
82+ __asm__ __volatile__ ("fence rw,rw" : : : "memory");
83+}
84+
85+extern ANDROID_ATOMIC_INLINE
86+int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
87+{
88+ int32_t value = *ptr;
89+ android_memory_barrier();
90+ return value;
91+}
92+
93+extern ANDROID_ATOMIC_INLINE
94+int32_t android_atomic_release_load(volatile const int32_t *ptr)
95+{
96+ android_memory_barrier();
97+ return *ptr;
98+}
99+
100+extern ANDROID_ATOMIC_INLINE
101+void android_atomic_acquire_store(int32_t value, volatile int32_t *ptr)
102+{
103+ *ptr = value;
104+ android_memory_barrier();
105+}
106+
107+extern ANDROID_ATOMIC_INLINE
108+void android_atomic_release_store(int32_t value, volatile int32_t *ptr)
109+{
110+ android_memory_barrier();
111+ *ptr = value;
112+}
113+
114+extern ANDROID_ATOMIC_INLINE
115+int android_atomic_cas(int32_t old_value, int32_t new_value,
116+ volatile int32_t *ptr)
117+{
118+ return __sync_val_compare_and_swap(ptr, old_value, new_value) != old_value;
119+}
120+
121+extern ANDROID_ATOMIC_INLINE
122+int android_atomic_acquire_cas(int32_t old_value, int32_t new_value,
123+ volatile int32_t *ptr)
124+{
125+ int status = android_atomic_cas(old_value, new_value, ptr);
126+ android_memory_barrier();
127+ return status;
128+}
129+
130+extern ANDROID_ATOMIC_INLINE
131+int android_atomic_release_cas(int32_t old_value, int32_t new_value,
132+ volatile int32_t *ptr)
133+{
134+ android_memory_barrier();
135+ return android_atomic_cas(old_value, new_value, ptr);
136+}
137+
138+extern ANDROID_ATOMIC_INLINE
139+int32_t android_atomic_add(int32_t increment, volatile int32_t *ptr)
140+{
141+ int32_t prev, status;
142+ android_memory_barrier();
143+ do {
144+ prev = *ptr;
145+ status = android_atomic_cas(prev, prev + increment, ptr);
146+ } while (__builtin_expect(status != 0, 0));
147+ return prev;
148+}
149+
150+extern ANDROID_ATOMIC_INLINE
151+int32_t android_atomic_inc(volatile int32_t *addr)
152+{
153+ return android_atomic_add(1, addr);
154+}
155+
156+extern ANDROID_ATOMIC_INLINE
157+int32_t android_atomic_dec(volatile int32_t *addr)
158+{
159+ return android_atomic_add(-1, addr);
160+}
161+
162+extern ANDROID_ATOMIC_INLINE
163+int32_t android_atomic_and(int32_t value, volatile int32_t *ptr)
164+{
165+ int32_t prev, status;
166+ android_memory_barrier();
167+ do {
168+ prev = *ptr;
169+ status = android_atomic_cas(prev, prev & value, ptr);
170+ } while (__builtin_expect(status != 0, 0));
171+ return prev;
172+}
173+
174+extern ANDROID_ATOMIC_INLINE
175+int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
176+{
177+ int32_t prev, status;
178+ android_memory_barrier();
179+ do {
180+ prev = *ptr;
181+ status = android_atomic_cas(prev, prev | value, ptr);
182+ } while (__builtin_expect(status != 0, 0));
183+ return prev;
184+}
185+
186+#endif /* ANDROID_CUTILS_ATOMIC_RISCV64_H */
187--
1882.17.1
189