Rev 1428 | Go to most recent revision | Only display areas with differences | Regard whitespace | Details | Blame | Last modification | View Log | RSS feed
Rev 1428 | Rev 7143 | ||
---|---|---|---|
#ifndef _LINUX_UNALIGNED_ACCESS_OK_H
#define _LINUX_UNALIGNED_ACCESS_OK_H

#include <linux/kernel.h>
#include <asm/byteorder.h>

7 | static inline u16 get_unaligned_le16(const void *p) |
7 | static __always_inline u16 get_unaligned_le16(const void *p) |
8 | { |
8 | { |
9 | return le16_to_cpup((__le16 *)p); |
9 | return le16_to_cpup((__le16 *)p); |
10 | } |
10 | } |
11 | 11 | ||
12 | static inline u32 get_unaligned_le32(const void *p) |
12 | static __always_inline u32 get_unaligned_le32(const void *p) |
13 | { |
13 | { |
14 | return le32_to_cpup((__le32 *)p); |
14 | return le32_to_cpup((__le32 *)p); |
15 | } |
15 | } |
16 | 16 | ||
17 | static inline u64 get_unaligned_le64(const void *p) |
17 | static __always_inline u64 get_unaligned_le64(const void *p) |
18 | { |
18 | { |
19 | return le64_to_cpup((__le64 *)p); |
19 | return le64_to_cpup((__le64 *)p); |
20 | } |
20 | } |
21 | 21 | ||
22 | static inline u16 get_unaligned_be16(const void *p) |
22 | static __always_inline u16 get_unaligned_be16(const void *p) |
23 | { |
23 | { |
24 | return be16_to_cpup((__be16 *)p); |
24 | return be16_to_cpup((__be16 *)p); |
25 | } |
25 | } |
26 | 26 | ||
27 | static inline u32 get_unaligned_be32(const void *p) |
27 | static __always_inline u32 get_unaligned_be32(const void *p) |
28 | { |
28 | { |
29 | return be32_to_cpup((__be32 *)p); |
29 | return be32_to_cpup((__be32 *)p); |
30 | } |
30 | } |
31 | 31 | ||
32 | static inline u64 get_unaligned_be64(const void *p) |
32 | static __always_inline u64 get_unaligned_be64(const void *p) |
33 | { |
33 | { |
34 | return be64_to_cpup((__be64 *)p); |
34 | return be64_to_cpup((__be64 *)p); |
35 | } |
35 | } |
36 | 36 | ||
37 | static inline void put_unaligned_le16(u16 val, void *p) |
37 | static __always_inline void put_unaligned_le16(u16 val, void *p) |
38 | { |
38 | { |
39 | *((__le16 *)p) = cpu_to_le16(val); |
39 | *((__le16 *)p) = cpu_to_le16(val); |
40 | } |
40 | } |
41 | 41 | ||
42 | static inline void put_unaligned_le32(u32 val, void *p) |
42 | static __always_inline void put_unaligned_le32(u32 val, void *p) |
43 | { |
43 | { |
44 | *((__le32 *)p) = cpu_to_le32(val); |
44 | *((__le32 *)p) = cpu_to_le32(val); |
45 | } |
45 | } |
46 | 46 | ||
47 | static inline void put_unaligned_le64(u64 val, void *p) |
47 | static __always_inline void put_unaligned_le64(u64 val, void *p) |
48 | { |
48 | { |
49 | *((__le64 *)p) = cpu_to_le64(val); |
49 | *((__le64 *)p) = cpu_to_le64(val); |
50 | } |
50 | } |
51 | 51 | ||
52 | static inline void put_unaligned_be16(u16 val, void *p) |
52 | static __always_inline void put_unaligned_be16(u16 val, void *p) |
53 | { |
53 | { |
54 | *((__be16 *)p) = cpu_to_be16(val); |
54 | *((__be16 *)p) = cpu_to_be16(val); |
55 | } |
55 | } |
56 | 56 | ||
57 | static inline void put_unaligned_be32(u32 val, void *p) |
57 | static __always_inline void put_unaligned_be32(u32 val, void *p) |
58 | { |
58 | { |
59 | *((__be32 *)p) = cpu_to_be32(val); |
59 | *((__be32 *)p) = cpu_to_be32(val); |
60 | } |
60 | } |
61 | 61 | ||
62 | static inline void put_unaligned_be64(u64 val, void *p) |
62 | static __always_inline void put_unaligned_be64(u64 val, void *p) |
63 | { |
63 | { |
64 | *((__be64 *)p) = cpu_to_be64(val); |
64 | *((__be64 *)p) = cpu_to_be64(val); |
65 | } |
65 | } |
66 | 66 | ||
#endif /* _LINUX_UNALIGNED_ACCESS_OK_H */