/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2020 The FreeBSD Foundation
 *
 * This software was developed by Björn Zeeb under sponsorship from
 * the FreeBSD Foundation.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _LINUXKPI_ASM_UNALIGNED_H
#define _LINUXKPI_ASM_UNALIGNED_H

#include <linux/types.h>
#include <asm/byteorder.h>

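/*
 * Accessors for little-endian and big-endian integers stored at addresses
 * that may not be naturally aligned.  Loads convert the value to host (CPU)
 * byte order; stores convert from it before writing the bytes out.
 */
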
static __inline uint16_t
get_unaligned_le16(const void *p)
{
	__le16 x;

	/* Copy byte-wise; the source address may not be 2-byte aligned. */
	memcpy(&x, p, sizeof(x));
	return (le16_to_cpu(x));
}

static __inline uint32_t
get_unaligned_le32(const void *p)
{
	__le32 x;

	/* Copy byte-wise; the source address may not be 4-byte aligned. */
	memcpy(&x, p, sizeof(x));
	return (le32_to_cpu(x));
}

static __inline void
put_unaligned_le32(uint32_t v, void *p)
{
	__le32 x;

	/* Convert to little-endian byte order, then store byte-wise. */
	x = cpu_to_le32(v);
	memcpy(p, &x, sizeof(x));
}

static __inline void
put_unaligned_le64(uint64_t v, void *p)
{
	__le64 x;

	/* Convert to little-endian byte order, then store byte-wise. */
	x = cpu_to_le64(v);
	memcpy(p, &x, sizeof(x));
}

static __inline uint16_t
get_unaligned_be16(const void *p)
{
	__be16 x;

	/* Copy byte-wise; the source address may not be 2-byte aligned. */
	memcpy(&x, p, sizeof(x));
	return (be16_to_cpu(x));
}

static __inline uint32_t
get_unaligned_be32(const void *p)
{
	__be32 x;

	/* Copy byte-wise; the source address may not be 4-byte aligned. */
	memcpy(&x, p, sizeof(x));
	return (be32_to_cpu(x));
}
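
/*
 * Illustrative use (not part of the KPI): reading a 16-bit little-endian
 * length field at a possibly unaligned offset inside a hypothetical packed
 * byte buffer "buf", e.g. a descriptor received off the wire:
 *
 *	uint16_t len;
 *
 *	len = get_unaligned_le16(buf + 2);
 */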

#endif /* _LINUXKPI_ASM_UNALIGNED_H */