1 #ifndef __CEPH_DECODE_H
2 #define __CEPH_DECODE_H
5 #include <linux/time.h>
6 #include <asm/unaligned.h>
/*
 * in all cases,
 *   void **p     pointer to position pointer
 *   void *end    pointer to end of buffer (last byte + 1)
 */
16 static inline u64 ceph_decode_64(void **p)
18 u64 v = get_unaligned_le64(*p);
22 static inline u32 ceph_decode_32(void **p)
24 u32 v = get_unaligned_le32(*p);
28 static inline u16 ceph_decode_16(void **p)
30 u16 v = get_unaligned_le16(*p);
34 static inline u8 ceph_decode_8(void **p)
40 static inline void ceph_decode_copy(void **p, void *pv, size_t n)
/*
 * Bounds-check the input buffer: jump to the 'bad' label if fewer than
 * n bytes remain between *p and end.  do/while(0) makes the macro safe
 * as a single statement (e.g. in an unbraced if).
 */
#define ceph_decode_need(p, end, n, bad)		\
	do {						\
		if (unlikely(*(p) + (n) > (end)))	\
			goto bad;			\
	} while (0)
/* Bounds-checked u64 decode: jumps to 'bad' if the buffer is short. */
#define ceph_decode_64_safe(p, end, v, bad)			\
	do {							\
		ceph_decode_need(p, end, sizeof(u64), bad);	\
		v = ceph_decode_64(p);				\
	} while (0)
/* Bounds-checked u32 decode: jumps to 'bad' if the buffer is short. */
#define ceph_decode_32_safe(p, end, v, bad)			\
	do {							\
		ceph_decode_need(p, end, sizeof(u32), bad);	\
		v = ceph_decode_32(p);				\
	} while (0)
/* Bounds-checked u16 decode: jumps to 'bad' if the buffer is short. */
#define ceph_decode_16_safe(p, end, v, bad)			\
	do {							\
		ceph_decode_need(p, end, sizeof(u16), bad);	\
		v = ceph_decode_16(p);				\
	} while (0)
/* Bounds-checked u8 decode: jumps to 'bad' if the buffer is short. */
#define ceph_decode_8_safe(p, end, v, bad)			\
	do {							\
		ceph_decode_need(p, end, sizeof(u8), bad);	\
		v = ceph_decode_8(p);				\
	} while (0)
/* Bounds-checked raw copy: jumps to 'bad' if fewer than n bytes remain. */
#define ceph_decode_copy_safe(p, end, pv, n, bad)	\
	do {						\
		ceph_decode_need(p, end, n, bad);	\
		ceph_decode_copy(p, pv, n);		\
	} while (0)
/*
 * struct ceph_timespec <-> struct timespec
 */
85 static inline void ceph_decode_timespec(struct timespec *ts,
86 const struct ceph_timespec *tv)
88 ts->tv_sec = le32_to_cpu(tv->tv_sec);
89 ts->tv_nsec = le32_to_cpu(tv->tv_nsec);
91 static inline void ceph_encode_timespec(struct ceph_timespec *tv,
92 const struct timespec *ts)
94 tv->tv_sec = cpu_to_le32(ts->tv_sec);
95 tv->tv_nsec = cpu_to_le32(ts->tv_nsec);
/*
 * sockaddr_storage <-> ceph_sockaddr
 */
101 static inline void ceph_encode_addr(struct ceph_entity_addr *a)
103 __be16 ss_family = htons(a->in_addr.ss_family);
104 a->in_addr.ss_family = *(__u16 *)&ss_family;
106 static inline void ceph_decode_addr(struct ceph_entity_addr *a)
108 __be16 ss_family = *(__be16 *)&a->in_addr.ss_family;
109 a->in_addr.ss_family = ntohs(ss_family);
110 WARN_ON(a->in_addr.ss_family == 512);
116 static inline void ceph_encode_64(void **p, u64 v)
118 put_unaligned_le64(v, (__le64 *)*p);
121 static inline void ceph_encode_32(void **p, u32 v)
123 put_unaligned_le32(v, (__le32 *)*p);
126 static inline void ceph_encode_16(void **p, u16 v)
128 put_unaligned_le16(v, (__le16 *)*p);
131 static inline void ceph_encode_8(void **p, u8 v)
136 static inline void ceph_encode_copy(void **p, const void *s, int len)
/*
 * filepath, string encoders
 */
145 static inline void ceph_encode_filepath(void **p, void *end,
146 u64 ino, const char *path)
148 u32 len = path ? strlen(path) : 0;
149 BUG_ON(*p + sizeof(ino) + sizeof(len) + len > end);
151 ceph_encode_64(p, ino);
152 ceph_encode_32(p, len);
154 memcpy(*p, path, len);
158 static inline void ceph_encode_string(void **p, void *end,
159 const char *s, u32 len)
161 BUG_ON(*p + sizeof(len) + len > end);
162 ceph_encode_32(p, len);
/*
 * Bounds-check the output buffer: jump to the 'bad' label if fewer
 * than n bytes remain between *p and end.
 */
#define ceph_encode_need(p, end, n, bad)		\
	do {						\
		if (unlikely(*(p) + (n) > (end)))	\
			goto bad;			\
	} while (0)
/* Bounds-checked u64 encode: jumps to 'bad' if the buffer is short. */
#define ceph_encode_64_safe(p, end, v, bad)			\
	do {							\
		ceph_encode_need(p, end, sizeof(u64), bad);	\
		ceph_encode_64(p, v);				\
	} while (0)
/* Bounds-checked u32 encode: jumps to 'bad' if the buffer is short. */
#define ceph_encode_32_safe(p, end, v, bad)			\
	do {							\
		ceph_encode_need(p, end, sizeof(u32), bad);	\
		ceph_encode_32(p, v);				\
	} while (0)
/* Bounds-checked u16 encode: jumps to 'bad' if the buffer is short. */
#define ceph_encode_16_safe(p, end, v, bad)			\
	do {							\
		ceph_encode_need(p, end, sizeof(u16), bad);	\
		ceph_encode_16(p, v);				\
	} while (0)
/* Bounds-checked raw copy out: jumps to 'bad' if fewer than n bytes remain. */
#define ceph_encode_copy_safe(p, end, pv, n, bad)	\
	do {						\
		ceph_encode_need(p, end, n, bad);	\
		ceph_encode_copy(p, pv, n);		\
	} while (0)
/*
 * Bounds-checked string encode: jumps to 'bad' if the buffer is short.
 * NOTE(review): only n bytes are checked here, not the u32 length
 * prefix as well; ceph_encode_string()'s own BUG_ON covers the rest.
 */
#define ceph_encode_string_safe(p, end, s, n, bad)	\
	do {						\
		ceph_encode_need(p, end, n, bad);	\
		ceph_encode_string(p, end, s, n);	\
	} while (0)