Commit c5019a57 authored Jul 24, 2005 by David S. Miller

[SPARC64]: Kill totally unused inline functions from asm/spitfire.h
Signed-off-by: David S. Miller <davem@davemloft.net>

parent 620de546

Showing 1 changed file with 0 additions and 130 deletions
include/asm-sparc64/spitfire.h  +0  -130

@@ -56,52 +56,6 @@ extern void cheetah_enable_pcache(void);
 			     SPITFIRE_HIGHEST_LOCKED_TLBENT : \
 			     CHEETAH_HIGHEST_LOCKED_TLBENT)
 
-static __inline__ unsigned long spitfire_get_isfsr(void)
-{
-	unsigned long ret;
-
-	__asm__ __volatile__("ldxa	[%1] %2, %0"
-			     : "=r" (ret)
-			     : "r" (TLB_SFSR), "i" (ASI_IMMU));
-	return ret;
-}
-
-static __inline__ unsigned long spitfire_get_dsfsr(void)
-{
-	unsigned long ret;
-
-	__asm__ __volatile__("ldxa	[%1] %2, %0"
-			     : "=r" (ret)
-			     : "r" (TLB_SFSR), "i" (ASI_DMMU));
-	return ret;
-}
-
-static __inline__ unsigned long spitfire_get_sfar(void)
-{
-	unsigned long ret;
-
-	__asm__ __volatile__("ldxa	[%1] %2, %0"
-			     : "=r" (ret)
-			     : "r" (DMMU_SFAR), "i" (ASI_DMMU));
-	return ret;
-}
-
-static __inline__ void spitfire_put_isfsr(unsigned long sfsr)
-{
-	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
-			     "membar	#Sync"
-			     : /* no outputs */
-			     : "r" (sfsr), "r" (TLB_SFSR), "i" (ASI_IMMU));
-}
-
-static __inline__ void spitfire_put_dsfsr(unsigned long sfsr)
-{
-	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
-			     "membar	#Sync"
-			     : /* no outputs */
-			     : "r" (sfsr), "r" (TLB_SFSR), "i" (ASI_DMMU));
-}
-
 /* The data cache is write through, so this just invalidates the
  * specified line.
  */
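
The getters deleted in this first hunk each wrap a single ldxa load from the I- or D-MMU register space, and the setters a single stxa store followed by membar #Sync. As a point of reference, the sketch below shows how a D-MMU fault path could have consumed the two D-side getters; the caller name and the message format are hypothetical and purely illustrative. No such in-tree caller existed, which is what makes the helpers dead code.

#include <linux/kernel.h>	/* printk */
#include <asm/spitfire.h>	/* pre-commit header providing the helpers */

/* Hypothetical, illustration only: read the D-MMU Synchronous Fault Status
 * register (TLB_SFSR via ASI_DMMU) and Synchronous Fault Address register
 * (DMMU_SFAR) through the helpers this commit deletes, then log them.
 */
static void example_report_dmmu_fault(void)
{
	unsigned long sfsr = spitfire_get_dsfsr();	/* fault status */
	unsigned long sfar = spitfire_get_sfar();	/* faulting virtual address */

	printk(KERN_ERR "DMMU fault: SFSR[%016lx] SFAR[%016lx]\n", sfsr, sfar);
}
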
@@ -193,90 +147,6 @@ static __inline__ void spitfire_put_itlb_data(int entry, unsigned long data)
"i"
(
ASI_ITLB_DATA_ACCESS
));
"i"
(
ASI_ITLB_DATA_ACCESS
));
}
}
-/* Spitfire hardware assisted TLB flushes. */
-
-/* Context level flushes. */
-static __inline__ void spitfire_flush_dtlb_primary_context(void)
-{
-	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
-			     "membar	#Sync"
-			     : /* No outputs */
-			     : "r" (0x40), "i" (ASI_DMMU_DEMAP));
-}
-
-static __inline__ void spitfire_flush_itlb_primary_context(void)
-{
-	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
-			     "membar	#Sync"
-			     : /* No outputs */
-			     : "r" (0x40), "i" (ASI_IMMU_DEMAP));
-}
-
-static __inline__ void spitfire_flush_dtlb_secondary_context(void)
-{
-	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
-			     "membar	#Sync"
-			     : /* No outputs */
-			     : "r" (0x50), "i" (ASI_DMMU_DEMAP));
-}
-
-static __inline__ void spitfire_flush_itlb_secondary_context(void)
-{
-	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
-			     "membar	#Sync"
-			     : /* No outputs */
-			     : "r" (0x50), "i" (ASI_IMMU_DEMAP));
-}
-
-static __inline__ void spitfire_flush_dtlb_nucleus_context(void)
-{
-	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
-			     "membar	#Sync"
-			     : /* No outputs */
-			     : "r" (0x60), "i" (ASI_DMMU_DEMAP));
-}
-
-static __inline__ void spitfire_flush_itlb_nucleus_context(void)
-{
-	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
-			     "membar	#Sync"
-			     : /* No outputs */
-			     : "r" (0x60), "i" (ASI_IMMU_DEMAP));
-}
-
-/* Page level flushes. */
-static __inline__ void spitfire_flush_dtlb_primary_page(unsigned long page)
-{
-	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
-			     "membar	#Sync"
-			     : /* No outputs */
-			     : "r" (page), "i" (ASI_DMMU_DEMAP));
-}
-
-static __inline__ void spitfire_flush_itlb_primary_page(unsigned long page)
-{
-	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
-			     "membar	#Sync"
-			     : /* No outputs */
-			     : "r" (page), "i" (ASI_IMMU_DEMAP));
-}
-
-static __inline__ void spitfire_flush_dtlb_secondary_page(unsigned long page)
-{
-	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
-			     "membar	#Sync"
-			     : /* No outputs */
-			     : "r" (page | 0x10), "i" (ASI_DMMU_DEMAP));
-}
-
-static __inline__ void spitfire_flush_itlb_secondary_page(unsigned long page)
-{
-	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
-			     "membar	#Sync"
-			     : /* No outputs */
-			     : "r" (page | 0x10), "i" (ASI_IMMU_DEMAP));
-}
-
 static __inline__ void spitfire_flush_dtlb_nucleus_page(unsigned long page)
 {
 	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
...
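
The flush helpers deleted in the second hunk differ only in the address written to the demap ASI: 0x40, 0x50 and 0x60 for primary, secondary and nucleus context demaps, and the page address (OR'd with 0x10 for the secondary context) for page demaps. A minimal sketch of that encoding follows; the macro and helper names are made up for illustration, the constants come straight from the deleted code, and the bit-field reading (bits 5:4 select the context, bit 6 selects a whole-context rather than single-page demap) is an assumption based on the UltraSPARC-I/II demap operation format.

#include <asm/asi.h>	/* ASI_DMMU_DEMAP / ASI_IMMU_DEMAP */

/* Illustrative only: the demap-address encoding behind the removed helpers. */
#define DEMAP_PRIMARY	0x00	/* bits 5:4 = 00 -> primary context   */
#define DEMAP_SECONDARY	0x10	/* bits 5:4 = 01 -> secondary context */
#define DEMAP_NUCLEUS	0x20	/* bits 5:4 = 10 -> nucleus context   */
#define DEMAP_CONTEXT	0x40	/* bit 6 set -> demap the whole context, not one page */

/* Writing %g0 to ASI_DMMU_DEMAP (or ASI_IMMU_DEMAP for the ITLB) at such an
 * address performs the flush; membar #Sync orders it, as in the removed code.
 */
static __inline__ void demap_dtlb(unsigned long addr)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (addr), "i" (ASI_DMMU_DEMAP));
}

/* For example, spitfire_flush_dtlb_secondary_context() amounted to
 *	demap_dtlb(DEMAP_CONTEXT | DEMAP_SECONDARY);	// 0x50
 * and spitfire_flush_dtlb_secondary_page(page) to
 *	demap_dtlb(page | DEMAP_SECONDARY);		// page | 0x10
 */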