25 #ifndef __MOTR_LIB_VIRTUAL_ARRAY_H__ 26 #define __MOTR_LIB_VIRTUAL_ARRAY_H__ 234 #define m0_varr_iter(arr, type, idx, obj, start, end, inc) \ 236 uint64_t idx = (start); \ 237 uint64_t __end = (end); \ 238 uint64_t __inc = (inc); \ 239 struct m0_varr *__arr = (arr); \ 242 struct m0_varr_cursor __cursor; \ 244 M0_PRE(idx < __arr->va_nr && __end <= __arr->va_nr); \ 245 M0_PRE(ergo(__arr->va_obj_shift > 0, \ 246 sizeof *obj > M0_BITS(__arr->va_obj_shift - 1) && \ 247 sizeof *obj <= M0_BITS(__arr->va_obj_shift))); \ 249 __rc = m0_varr_cursor_init(&__cursor, __arr, __arr->va_depth); \ 250 M0_ASSERT(__rc == 0); \ 251 m0_varr_cursor_move(&__cursor, idx); \ 252 for (obj = m0_varr_cursor_get(&__cursor); idx < __end; \ 253 idx += __inc, m0_varr_cursor_move(&__cursor, __inc), \ 254 obj = m0_varr_cursor_get(&__cursor)) { \ 256 #define m0_varr_enditer } } ) 259 #define m0_varr_for(arr, type, idx, obj) \ 261 struct m0_varr *__arr__ = (arr); \ 262 m0_varr_iter(__arr__, type, idx, obj, 0, m0_varr_size(__arr__), 1) 264 #define m0_varr_endfor m0_varr_enditer; }) M0_INTERNAL int m0_varr_cursor_init(struct m0_varr_cursor *cursor, const struct m0_varr *arr, uint32_t depth)
/*
 * NOTE(review): the declarations below were extracted without their
 * enclosing context — struct bodies and trailing semicolons have been
 * stripped.  Restore them against the original header before compiling.
 */

/* Root table of tree nodes backing the array — presumably a member of
 * struct m0_varr; verify against the full struct definition. */
void * va_tree[M0_VA_TNODE_NR]
/* Per-level path from root to the current element — presumably a member
 * of struct m0_varr_cursor (vc_ prefix); TODO confirm. */
struct m0_varr_path_element vc_path[M0_VA_DEPTH_MAX]
/* Advances 'cursor' forward by 'inc' elements (used by m0_varr_iter()). */
M0_INTERNAL int m0_varr_cursor_move(struct m0_varr_cursor *cursor, uint64_t inc)
/* Cache of recently accessed buffers — presumably a struct m0_varr
 * member; semantics not visible here. */
struct varr_cache * va_cache
/* Compile-time check that the tree-node count is consistent with its
 * shift, i.e. M0_VA_TNODE_NR == 1 << M0_VA_TNODE_NR_SHIFT (assuming
 * M0_BITS(n) == 1 << n — confirm against lib/misc.h). */
M0_BASSERT(M0_VA_TNODE_NR==M0_BITS(M0_VA_TNODE_NR_SHIFT))
/* Returns the number of elements in the array (used as the upper bound
 * by m0_varr_for()). */
M0_INTERNAL uint64_t m0_varr_size(const struct m0_varr *arr)
/* Returns a pointer to the element the cursor currently points at. */
M0_INTERNAL void * m0_varr_cursor_get(struct m0_varr_cursor *cursor)
/* Initialises 'arr' for 'nr' elements of 'size' bytes each; 'bufsize'
 * is presumably the backing-buffer granularity — TODO confirm. */
M0_INTERNAL int m0_varr_init(struct m0_varr *arr, uint64_t nr, size_t size, size_t bufsize)
/* Advances the cursor to the next element — presumably equivalent to
 * m0_varr_cursor_move(cursor, 1); confirm in the .c file. */
M0_INTERNAL int m0_varr_cursor_next(struct m0_varr_cursor *cursor)
/* Depth at which initialisation failed — presumably used by cleanup
 * paths in m0_varr_init()/m0_varr_fini(); TODO confirm. */
uint32_t va_failure_depth
/* Finalises the array and releases its resources. */
M0_INTERNAL void m0_varr_fini(struct m0_varr *arr)
/* log2 of the number of buffer pointers per node — presumably; the
 * exact meaning is not visible in this chunk. */
uint8_t va_bufptr_nr_shift
/* Returns a pointer to the element at 'index' (random access, as
 * opposed to the cursor-based iteration above). */
M0_INTERNAL void * m0_varr_ele_get(struct m0_varr *arr, uint64_t index)