path: root/pk/ppe42/ppe42_msr.h
#ifndef __PPE42_MSR_H__
#define __PPE42_MSR_H__
//-----------------------------------------------------------------------------
// *! (C) Copyright International Business Machines Corp. 2014
// *! All Rights Reserved -- Property of IBM
// *! *** IBM Confidential ***
//-----------------------------------------------------------------------------

/// \file ppe42_msr.h
/// \brief Everything related to the PPE42 Machine State Register
///
/// All of the macros defined here that \e modify the MSR create a compiler
/// memory barrier that causes GCC to flush/invalidate all memory data held
/// in registers before the macro executes. This is consistent with other
/// systems, e.g., the PowerPC Linux kernel, and is the safest way to define
/// these macros, as it guarantees, for example, that kernel data structure
/// updates have completed before a critical section is exited.

#define MSR_SEM     0x7f000000      /* SIB Error Mask */
#define MSR_IS0     0x00800000      /* Instance-Specific Field 0 */
#define MSR_SIBRC   0x00700000      /* Last SIB return code */
#define MSR_LP      0x00080000      /* Low Priority */
#define MSR_WE      0x00040000      /* Wait State Enable */
#define MSR_IS1     0x00020000      /* Instance-Specific Field 1 */
#define MSR_UIE     0x00010000      /* Unmaskable Interrupt Enable */
#define MSR_EE      0x00008000      /* External Interrupt Enable */
#define MSR_ME      0x00001000      /* Machine Check Exception Enable */
#define MSR_IPE     0x00000100      /* Imprecise Mode Enable */
#define MSR_SIBRCA  0x000000ff      /* SIB Return Code Accumulator */
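
/// As an illustration (not part of the original header), a status bit can be
/// tested by masking the current MSR value; the mfmsr() macro is defined
/// later in this file:
///
/// \code
/// if (mfmsr() & MSR_EE) {
///     /* External interrupts are currently enabled. */
/// }
/// \endcode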

// MSR bit numbers below use big-endian (PowerPC) bit numbering: bit 0 is
// the most significant bit of the 32-bit MSR.
//#define MSR_CE_BIT 14
#define MSR_EE_BIT 16
//#define MSR_IR_BIT 26
//#define MSR_DR_BIT 27


#define MSR_SEM_START_BIT       1
#define MSR_SEM_LEN             7
#define MSR_SIBRC_START_BIT     9
#define MSR_SIBRC_LEN           3
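
/// The *_START_BIT/*_LEN pairs also use big-endian bit numbering (bit 0 is
/// the MSB of the 32-bit MSR). A minimal extraction sketch, where the helper
/// name msr_sibrc() is illustrative and not part of this header:
///
/// \code
/// /* Extract the last SIB return code from an MSR image. */
/// static inline uint32_t msr_sibrc(uint32_t msr)
/// {
///     return (msr >> (32 - MSR_SIBRC_START_BIT - MSR_SIBRC_LEN)) &
///            ((1u << MSR_SIBRC_LEN) - 1);
/// }
/// \endcode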


#ifndef __ASSEMBLER__

/// Move From MSR

#define mfmsr()                               \
   ({uint32_t __msr;                          \
    asm volatile ("mfmsr %0" : "=r" (__msr)); \
    __msr;})


/// Move To MSR

#define mtmsr(value) \
    asm volatile ("mtmsr %0" : : "r" (value) : "memory")


/// Read-Modify-Write the MSR with OR (Set MSR bits). This read-modify-write
/// sequence is only guaranteed to be atomic inside a critical section.

#define or_msr(x) \
    mtmsr(mfmsr() | (x))


/// Read-Modify-Write the MSR with AND complement (Clear MSR bits). This
/// read-modify-write sequence is only guaranteed to be atomic inside a
/// critical section.

#define andc_msr(x) \
     mtmsr(mfmsr() & ~(x))
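

/// For example, a sketch that sets and later clears the machine check
/// exception enable:
///
/// \code
/// or_msr(MSR_ME);       /* set MSR[ME]   */
/// /* ... */
/// andc_msr(MSR_ME);     /* clear MSR[ME] */
/// \endcode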


/// Write MSR[EE] with an immediate value (0/1)
///
/// Note that the immediate value \a i must be a compile-time constant.

#define wrteei(i) \
    asm volatile ("wrteei %0" : : "i" (i) : "memory")


/// Write MSR[EE] from the EE bit of another MSR

#define wrtee(other_msr) \
    asm volatile ("wrtee %0" : : "r" (other_msr) : "memory")

#endif /* __ASSEMBLER__ */

#endif /* __PPE42_MSR_H__ */