linux / linux-davinci-2.6.23 · Commits

Commit 06a98dba, authored Nov 10, 2005 by Stephen Rothwell
powerpc: implement atomic64_t on ppc64
Signed-off-by: Stephen Rothwell <sfr@canb.auug.org.au>
parent 676e2497
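Every operation in the diff below follows the same pattern: ldarx loads the counter and sets a reservation, the arithmetic updates it in a register, and stdcx. stores the result only if no other CPU has touched the location since the load, branching back to retry on failure. As a minimal sketch of that retry loop in portable C, using GCC's __atomic builtins rather than the patch's inline assembly (the function name sketch_add_return is hypothetical, for illustration only):

        /* Illustrative only: mirrors the ldarx/stdcx. retry loop of
         * atomic64_add_return() with a compare-and-swap builtin. */
        static long sketch_add_return(long a, long *counter)
        {
                long old, new;

                do {
                        /* like ldarx: observe the current value */
                        old = __atomic_load_n(counter, __ATOMIC_RELAXED);
                        new = old + a;
                        /* like stdcx.: commit only if nobody raced us */
                } while (!__atomic_compare_exchange_n(counter, &old, new, 0,
                                __ATOMIC_SEQ_CST, __ATOMIC_RELAXED));

                return new;
        }

Unlike this CAS-based sketch, the reservation-based loop is immune to ABA problems, and the EIEIO_ON_SMP/ISYNC_ON_SMP macros in the *_return variants supply the memory barriers that the plain add/sub/inc/dec versions deliberately omit.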
Showing 1 changed file with 178 additions and 0 deletions

include/asm-powerpc/atomic.h (+178, -0)
@@ -197,5 +197,183 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
#define smp_mb__before_atomic_inc() smp_mb()
#define smp_mb__after_atomic_inc() smp_mb()
#ifdef __powerpc64__
typedef struct { volatile long counter; } atomic64_t;
#define ATOMIC64_INIT(i) { (i) }
#define atomic64_read(v) ((v)->counter)
#define atomic64_set(v,i) (((v)->counter) = (i))
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%3         # atomic64_add\n\
        add     %0,%2,%0\n\
        stdcx.  %0,0,%3\n\
        bne-    1b"
        : "=&r" (t), "=m" (v->counter)
        : "r" (a), "r" (&v->counter), "m" (v->counter)
        : "cc");
}
static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        EIEIO_ON_SMP
"1:     ldarx   %0,0,%2         # atomic64_add_return\n\
        add     %0,%1,%0\n\
        stdcx.  %0,0,%2\n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");

        return t;
}
#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%3         # atomic64_sub\n\
        subf    %0,%2,%0\n\
        stdcx.  %0,0,%3\n\
        bne-    1b"
        : "=&r" (t), "=m" (v->counter)
        : "r" (a), "r" (&v->counter), "m" (v->counter)
        : "cc");
}
static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        EIEIO_ON_SMP
"1:     ldarx   %0,0,%2         # atomic64_sub_return\n\
        subf    %0,%1,%0\n\
        stdcx.  %0,0,%2\n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");

        return t;
}
static __inline__ void atomic64_inc(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%2         # atomic64_inc\n\
        addic   %0,%0,1\n\
        stdcx.  %0,0,%2\n\
        bne-    1b"
        : "=&r" (t), "=m" (v->counter)
        : "r" (&v->counter), "m" (v->counter)
        : "cc");
}
static __inline__ long atomic64_inc_return(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        EIEIO_ON_SMP
"1:     ldarx   %0,0,%1         # atomic64_inc_return\n\
        addic   %0,%0,1\n\
        stdcx.  %0,0,%1\n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}
/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
static __inline__ void atomic64_dec(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
"1:     ldarx   %0,0,%2         # atomic64_dec\n\
        addic   %0,%0,-1\n\
        stdcx.  %0,0,%2\n\
        bne-    1b"
        : "=&r" (t), "=m" (v->counter)
        : "r" (&v->counter), "m" (v->counter)
        : "cc");
}
static __inline__ long atomic64_dec_return(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        EIEIO_ON_SMP
"1:     ldarx   %0,0,%1         # atomic64_dec_return\n\
        addic   %0,%0,-1\n\
        stdcx.  %0,0,%1\n\
        bne-    1b"
        ISYNC_ON_SMP
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}
#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
        long t;

        __asm__ __volatile__(
        EIEIO_ON_SMP
"1:     ldarx   %0,0,%1         # atomic64_dec_if_positive\n\
        addic.  %0,%0,-1\n\
        blt-    2f\n\
        stdcx.  %0,0,%1\n\
        bne-    1b"
        ISYNC_ON_SMP
        "\n\
2:"     : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "memory");

        return t;
}
#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */
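Taken together, the patch gives ppc64 the same atomic64_* surface that atomic_t already offers for 32-bit counters. A hypothetical usage sketch of the new interface (the refcounting names and the release_object() helper below are illustrative, not part of the patch):

        /* Hypothetical caller of the atomic64_t interface added above. */
        static void release_object(void);       /* hypothetical teardown helper */

        static atomic64_t refcount = ATOMIC64_INIT(1);

        static void get_object(void)
        {
                atomic64_inc(&refcount);
        }

        static void put_object(void)
        {
                /* atomic64_dec_and_test() returns true when the count hits
                 * zero, i.e. when the last reference has been dropped. */
                if (atomic64_dec_and_test(&refcount))
                        release_object();
        }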