Kolibri OS Subversion Repository


Comparison of Rev 1428 with Rev 7143
--- access_ok.h	(Rev 1428)
+++ access_ok.h	(Rev 7143)
@@ -2,52 +2,52 @@
 #define _LINUX_UNALIGNED_ACCESS_OK_H
 
 #include <linux/kernel.h>
 #include <asm/byteorder.h>
 
-static inline u16 get_unaligned_le16(const void *p)
+static __always_inline u16 get_unaligned_le16(const void *p)
 {
 	return le16_to_cpup((__le16 *)p);
 }
 
-static inline u32 get_unaligned_le32(const void *p)
+static __always_inline u32 get_unaligned_le32(const void *p)
 {
 	return le32_to_cpup((__le32 *)p);
 }
 
-static inline u64 get_unaligned_le64(const void *p)
+static __always_inline u64 get_unaligned_le64(const void *p)
 {
 	return le64_to_cpup((__le64 *)p);
 }
 
-static inline u16 get_unaligned_be16(const void *p)
+static __always_inline u16 get_unaligned_be16(const void *p)
 {
 	return be16_to_cpup((__be16 *)p);
 }
 
-static inline u32 get_unaligned_be32(const void *p)
+static __always_inline u32 get_unaligned_be32(const void *p)
 {
 	return be32_to_cpup((__be32 *)p);
 }
 
-static inline u64 get_unaligned_be64(const void *p)
+static __always_inline u64 get_unaligned_be64(const void *p)
 {
 	return be64_to_cpup((__be64 *)p);
 }
 
-static inline void put_unaligned_le16(u16 val, void *p)
+static __always_inline void put_unaligned_le16(u16 val, void *p)
 {
 	*((__le16 *)p) = cpu_to_le16(val);
 }
 
-static inline void put_unaligned_le32(u32 val, void *p)
+static __always_inline void put_unaligned_le32(u32 val, void *p)
 {
 	*((__le32 *)p) = cpu_to_le32(val);
 }
 
-static inline void put_unaligned_le64(u64 val, void *p)
+static __always_inline void put_unaligned_le64(u64 val, void *p)
 {
 	*((__le64 *)p) = cpu_to_le64(val);
 }
 
-static inline void put_unaligned_be16(u16 val, void *p)
+static __always_inline void put_unaligned_be16(u16 val, void *p)
 {
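
The only change between the two revisions is the switch from static inline to static __always_inline, which forces GCC to inline these helpers even at low optimization levels, where plain inline is only a hint. The accessors themselves read or write a little- or big-endian value through a pointer that may not be naturally aligned; this access_ok.h variant is the one used on architectures where such accesses are safe, so a direct cast-and-dereference is sufficient. Below is a minimal userspace sketch of the same idea; it is an illustration only, with hypothetical stand-ins for the kernel's u16 and le16_to_cpup() so that it builds outside the kernel tree.

/*
 * Userspace sketch of get_unaligned_le16() usage.
 * Assumption: little-endian host; u16 and get_unaligned_le16 are
 * redefined here only so the example is self-contained.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef uint16_t u16;

static inline u16 get_unaligned_le16(const void *p)
{
	u16 v;
	memcpy(&v, p, sizeof(v));	/* memcpy stands in for the kernel's cast + le16_to_cpup() */
	return v;
}

int main(void)
{
	/* A 16-bit little-endian field starting at the odd (unaligned) offset 1. */
	unsigned char buf[] = { 0xAA, 0x34, 0x12, 0x00 };

	printf("0x%04x\n", get_unaligned_le16(buf + 1));	/* prints 0x1234 */
	return 0;
}

The direct cast used in the header above is only valid on CPUs that tolerate unaligned loads and stores, which is exactly the case this header targets; portable code would go through memcpy as in the sketch.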