/*
 * GPL HEADER START
 *
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 only,
 * as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License version 2 for more details (a copy is included
 * in the LICENSE file that accompanied this code).
 *
 * You should have received a copy of the GNU General Public License
 * version 2 along with this program; If not, see
 * http://www.sun.com/software/products/lustre/docs/GPLv2.pdf
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 * GPL HEADER END
 */
/*
 * Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
 * Use is subject to license terms.
 */
/*
 * This file is part of Lustre, http://www.lustre.org/
 * Lustre is a trademark of Sun Microsystems, Inc.
 *
 * lustre/ldlm/interval_tree.c
 *
 * Interval tree library used by ldlm extent lock code
 *
 * Author: Huang Wei <huangwei@clusterfs.com>
 * Author: Jay Xiong <jinshan.xiong@sun.com>
 */
#ifdef __KERNEL__
# include <lustre_dlm.h>
#else
# include <libcfs/libcfs.h>
#endif
#include <obd_support.h>
#include <interval_tree.h>

static inline int node_is_left_child(struct interval_node *node)
{
        LASSERT(node->in_parent != NULL);
        return node == node->in_parent->in_left;
}

static inline int node_is_right_child(struct interval_node *node)
{
        LASSERT(node->in_parent != NULL);
        return node == node->in_parent->in_right;
}

static inline int node_is_red(struct interval_node *node)
{
        return node->in_color == INTERVAL_RED;
}

static inline int node_is_black(struct interval_node *node)
{
        return node->in_color == INTERVAL_BLACK;
}

/* Compare two extents: order by start, then by end.  Returns -1, 0 or 1. */
static inline int extent_compare(struct interval_node_extent *e1,
                                 struct interval_node_extent *e2)
{
        if (e1->start == e2->start) {
                if (e1->end < e2->end)
                        return -1;
                else if (e1->end > e2->end)
                        return 1;
                return 0;
        }
        return e1->start < e2->start ? -1 : 1;
}

static inline int extent_equal(struct interval_node_extent *e1,
                               struct interval_node_extent *e2)
{
        return (e1->start == e2->start) && (e1->end == e2->end);
}

/* Closed intervals [start, end] overlap iff each one starts no later than
 * the other one ends. */
static inline int extent_overlapped(struct interval_node_extent *e1,
                                    struct interval_node_extent *e2)
{
        return (e1->start <= e2->end) && (e2->start <= e1->end);
}

static inline int node_compare(struct interval_node *n1,
                               struct interval_node *n2)
{
        return extent_compare(&n1->in_extent, &n2->in_extent);
}

int node_equal(struct interval_node *n1, struct interval_node *n2)
{
        return extent_equal(&n1->in_extent, &n2->in_extent);
}
EXPORT_SYMBOL(node_equal);

static inline __u64 max_u64(__u64 x, __u64 y)
{
        return x > y ? x : y;
}

static inline __u64 min_u64(__u64 x, __u64 y)
{
        return x < y ? x : y;
}

#define interval_for_each(node, root)                   \
        for (node = interval_first(root); node != NULL; \
             node = interval_next(node))

#define interval_for_each_reverse(node, root)           \
        for (node = interval_last(root); node != NULL;  \
             node = interval_prev(node))

static struct interval_node *interval_first(struct interval_node *node)
{
        ENTRY;

        if (!node)
                RETURN(NULL);
        while (node->in_left)
                node = node->in_left;
        RETURN(node);
}

static struct interval_node *interval_last(struct interval_node *node)
{
        ENTRY;

        if (!node)
                RETURN(NULL);
        while (node->in_right)
                node = node->in_right;
        RETURN(node);
}

static struct interval_node *interval_next(struct interval_node *node)
{
        ENTRY;

        if (!node)
                RETURN(NULL);
        if (node->in_right)
                RETURN(interval_first(node->in_right));
        while (node->in_parent && node_is_right_child(node))
                node = node->in_parent;
        RETURN(node->in_parent);
}

static struct interval_node *interval_prev(struct interval_node *node)
{
        ENTRY;

        if (!node)
                RETURN(NULL);
        if (node->in_left)
                RETURN(interval_last(node->in_left));
        while (node->in_parent && node_is_left_child(node))
                node = node->in_parent;
        RETURN(node->in_parent);
}

enum interval_iter interval_iterate(struct interval_node *root,
                                    interval_callback_t func,
                                    void *data)
{
        struct interval_node *node;
        enum interval_iter rc = INTERVAL_ITER_CONT;
        ENTRY;

        interval_for_each(node, root) {
                rc = func(node, data);
                if (rc == INTERVAL_ITER_STOP)
                        break;
        }

        RETURN(rc);
}
EXPORT_SYMBOL(interval_iterate);

enum interval_iter interval_iterate_reverse(struct interval_node *root,
                                            interval_callback_t func,
                                            void *data)
{
        struct interval_node *node;
        enum interval_iter rc = INTERVAL_ITER_CONT;
        ENTRY;

        interval_for_each_reverse(node, root) {
                rc = func(node, data);
                if (rc == INTERVAL_ITER_STOP)
                        break;
        }

        RETURN(rc);
}
EXPORT_SYMBOL(interval_iterate_reverse);

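/*
 * Illustrative sketch only, not part of the original ldlm code: a minimal
 * caller-supplied callback for interval_iterate().  The function names and
 * the int counter are hypothetical; the only thing assumed is the
 * interval_callback_t contract used above (return INTERVAL_ITER_CONT to
 * keep walking, INTERVAL_ITER_STOP to end the walk early).
 */
static enum interval_iter interval_count_cb(struct interval_node *node,
                                            void *data)
{
        (*(int *)data)++;               /* count every node we visit */
        return INTERVAL_ITER_CONT;      /* keep walking the whole tree */
}

static int interval_count(struct interval_node *root)
{
        int count = 0;

        /* nodes are visited in ascending (start, end) order */
        interval_iterate(root, interval_count_cb, &count);
        return count;
}
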
/* Try to find a node with the same interval in the tree.  If found, return
 * a pointer to that node, otherwise return NULL. */
struct interval_node *interval_find(struct interval_node *root,
                                    struct interval_node_extent *ex)
{
        struct interval_node *walk = root;
        int rc;
        ENTRY;

        while (walk) {
                rc = extent_compare(ex, &walk->in_extent);
                if (rc == 0)
                        break;
                else if (rc < 0)
                        walk = walk->in_left;
                else
                        walk = walk->in_right;
        }

        RETURN(walk);
}
EXPORT_SYMBOL(interval_find);

static void __rotate_change_maxhigh(struct interval_node *node,
                                    struct interval_node *rotate)
{
        __u64 left_max, right_max;

        /* @rotate takes over @node's subtree, so it inherits the old max */
        rotate->in_max_high = node->in_max_high;
        left_max = node->in_left ? node->in_left->in_max_high : 0;
        right_max = node->in_right ? node->in_right->in_max_high : 0;
        node->in_max_high = max_u64(interval_high(node),
                                    max_u64(left_max, right_max));
}

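/*
 * Illustrative sketch only (hypothetical helper, not in the original file):
 * the invariant that __rotate_change_maxhigh() above and update_maxhigh()
 * further below maintain is that every node caches the largest interval end
 * found anywhere in its subtree.  That cached value is what lets searches
 * prune whole subtrees.
 */
static int interval_maxhigh_is_valid(struct interval_node *node)
{
        __u64 left_max = node->in_left ? node->in_left->in_max_high : 0;
        __u64 right_max = node->in_right ? node->in_right->in_max_high : 0;

        return node->in_max_high ==
               max_u64(interval_high(node), max_u64(left_max, right_max));
}
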
/* The left rotation "pivots" around the link from node to node->right, and
 * - node will be linked to node->right's left child, and
 * - node->right's left child will be linked to node's right child. */
static void __rotate_left(struct interval_node *node,
                          struct interval_node **root)
{
        struct interval_node *right = node->in_right;
        struct interval_node *parent = node->in_parent;

        node->in_right = right->in_left;
        if (node->in_right)
                right->in_left->in_parent = node;

        right->in_left = node;
        right->in_parent = parent;
        if (parent) {
                if (node_is_left_child(node))
                        parent->in_left = right;
                else
                        parent->in_right = right;
        } else {
                *root = right;
        }
        node->in_parent = right;

        /* update max_high for node and right */
        __rotate_change_maxhigh(node, right);
}

/* The right rotation "pivots" around the link from node to node->left, and
 * - node will be linked to node->left's right child, and
 * - node->left's right child will be linked to node's left child. */
static void __rotate_right(struct interval_node *node,
                           struct interval_node **root)
{
        struct interval_node *left = node->in_left;
        struct interval_node *parent = node->in_parent;

        node->in_left = left->in_right;
        if (node->in_left)
                left->in_right->in_parent = node;
        left->in_right = node;

        left->in_parent = parent;
        if (parent) {
                if (node_is_right_child(node))
                        parent->in_right = left;
                else
                        parent->in_left = left;
        } else {
                *root = left;
        }
        node->in_parent = left;

        /* update max_high for node and left */
        __rotate_change_maxhigh(node, left);
}

#define interval_swap(a, b) do {                        \
        struct interval_node *c = a; a = b; b = c;      \
} while (0)

/*
 * Operations INSERT and DELETE, when run on a tree with n keys,
 * take O(log n) time.  Because they modify the tree, the result
 * may violate the red-black properties.  To restore these properties,
 * we must change the colors of some of the nodes in the tree
 * and also change the pointer structure.
 */
static void interval_insert_color(struct interval_node *node,
                                  struct interval_node **root)
{
        struct interval_node *parent, *gparent;
        ENTRY;

        while ((parent = node->in_parent) && node_is_red(parent)) {
                gparent = parent->in_parent;
                /* Parent is RED, so gparent must not be NULL */
                if (node_is_left_child(parent)) {
                        struct interval_node *uncle;
                        uncle = gparent->in_right;
                        if (uncle && node_is_red(uncle)) {
                                uncle->in_color = INTERVAL_BLACK;
                                parent->in_color = INTERVAL_BLACK;
                                gparent->in_color = INTERVAL_RED;
                                node = gparent;
                                continue;
                        }

                        if (parent->in_right == node) {
                                __rotate_left(parent, root);
                                interval_swap(node, parent);
                        }

                        parent->in_color = INTERVAL_BLACK;
                        gparent->in_color = INTERVAL_RED;
                        __rotate_right(gparent, root);
                } else {
                        struct interval_node *uncle;
                        uncle = gparent->in_left;
                        if (uncle && node_is_red(uncle)) {
                                uncle->in_color = INTERVAL_BLACK;
                                parent->in_color = INTERVAL_BLACK;
                                gparent->in_color = INTERVAL_RED;
                                node = gparent;
                                continue;
                        }

                        if (node_is_left_child(node)) {
                                __rotate_right(parent, root);
                                interval_swap(node, parent);
                        }

                        parent->in_color = INTERVAL_BLACK;
                        gparent->in_color = INTERVAL_RED;
                        __rotate_left(gparent, root);
                }
        }

        (*root)->in_color = INTERVAL_BLACK;
        EXIT;
}

struct interval_node *interval_insert(struct interval_node *node,
                                      struct interval_node **root)
{
        struct interval_node **p, *parent = NULL;
        ENTRY;

        LASSERT(!interval_is_intree(node));
        p = root;
        while (*p) {
                parent = *p;
                if (node_equal(parent, node))
                        RETURN(parent);

                /* max_high field must be updated after each iteration */
                if (parent->in_max_high < interval_high(node))
                        parent->in_max_high = interval_high(node);

                if (node_compare(node, parent) < 0)
                        p = &parent->in_left;
                else
                        p = &parent->in_right;
        }

        /* link node into the tree */
        node->in_parent = parent;
        node->in_color = INTERVAL_RED;
        node->in_left = node->in_right = NULL;
        *p = node;

        interval_insert_color(node, root);
        node->in_intree = 1;

        RETURN(NULL);
}
EXPORT_SYMBOL(interval_insert);

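/*
 * Illustrative sketch only, not part of the original file: how a caller
 * might link one node into a tree.  The function name and the direct field
 * initialization are hypothetical; they stand in for whatever init helper
 * the interval_tree.h header provides.
 */
static struct interval_node *interval_insert_example(struct interval_node *node,
                                                     __u64 start, __u64 end,
                                                     struct interval_node **root)
{
        LASSERT(start <= end);

        /* the extent and its cached maximum end must be set before insert */
        node->in_extent.start = start;
        node->in_extent.end = end;
        node->in_max_high = end;

        /* NULL on success, or the node already holding an equal extent */
        return interval_insert(node, root);
}
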
static inline int node_is_black_or_0(struct interval_node *node)
{
        return !node || node_is_black(node);
}

static void interval_erase_color(struct interval_node *node,
                                 struct interval_node *parent,
                                 struct interval_node **root)
{
        struct interval_node *tmp;
        ENTRY;

        while (node_is_black_or_0(node) && node != *root) {
                if (parent->in_left == node) {
                        tmp = parent->in_right;
                        if (node_is_red(tmp)) {
                                tmp->in_color = INTERVAL_BLACK;
                                parent->in_color = INTERVAL_RED;
                                __rotate_left(parent, root);
                                tmp = parent->in_right;
                        }
                        if (node_is_black_or_0(tmp->in_left) &&
                            node_is_black_or_0(tmp->in_right)) {
                                tmp->in_color = INTERVAL_RED;
                                node = parent;
                                parent = node->in_parent;
                        } else {
                                if (node_is_black_or_0(tmp->in_right)) {
                                        struct interval_node *o_left;
                                        if ((o_left = tmp->in_left))
                                                o_left->in_color = INTERVAL_BLACK;
                                        tmp->in_color = INTERVAL_RED;
                                        __rotate_right(tmp, root);
                                        tmp = parent->in_right;
                                }
                                tmp->in_color = parent->in_color;
                                parent->in_color = INTERVAL_BLACK;
                                if (tmp->in_right)
                                        tmp->in_right->in_color = INTERVAL_BLACK;
                                __rotate_left(parent, root);
                                node = *root;
                                break;
                        }
                } else {
                        tmp = parent->in_left;
                        if (node_is_red(tmp)) {
                                tmp->in_color = INTERVAL_BLACK;
                                parent->in_color = INTERVAL_RED;
                                __rotate_right(parent, root);
                                tmp = parent->in_left;
                        }
                        if (node_is_black_or_0(tmp->in_left) &&
                            node_is_black_or_0(tmp->in_right)) {
                                tmp->in_color = INTERVAL_RED;
                                node = parent;
                                parent = node->in_parent;
                        } else {
                                if (node_is_black_or_0(tmp->in_left)) {
                                        struct interval_node *o_right;
                                        if ((o_right = tmp->in_right))
                                                o_right->in_color = INTERVAL_BLACK;
                                        tmp->in_color = INTERVAL_RED;
                                        __rotate_left(tmp, root);
                                        tmp = parent->in_left;
                                }
                                tmp->in_color = parent->in_color;
                                parent->in_color = INTERVAL_BLACK;
                                if (tmp->in_left)
                                        tmp->in_left->in_color = INTERVAL_BLACK;
                                __rotate_right(parent, root);
                                node = *root;
                                break;
                        }
                }
        }

        if (node)
                node->in_color = INTERVAL_BLACK;
        EXIT;
}

/*
 * If the in_max_high value of @node has changed, this function traverses
 * the path from @node up to the root to update max_high for the whole tree.
 */
static void update_maxhigh(struct interval_node *node,
                           __u64 old_maxhigh)
{
        __u64 left_max, right_max;
        ENTRY;

        while (node) {
                left_max = node->in_left ? node->in_left->in_max_high : 0;
                right_max = node->in_right ? node->in_right->in_max_high : 0;
                node->in_max_high = max_u64(interval_high(node),
                                            max_u64(left_max, right_max));

                if (node->in_max_high >= old_maxhigh)
                        break;
                node = node->in_parent;
        }
        EXIT;
}

void interval_erase(struct interval_node *node,
                    struct interval_node **root)
{
        struct interval_node *child, *parent;
        int color;
        ENTRY;

        LASSERT(interval_is_intree(node));
        node->in_intree = 0;
        if (!node->in_left) {
                child = node->in_right;
        } else if (!node->in_right) {
                child = node->in_left;
        } else { /* Both left and right child are not NULL */
                struct interval_node *old = node;

                node = interval_next(node);
                child = node->in_right;
                parent = node->in_parent;
                color = node->in_color;

                if (child)
                        child->in_parent = parent;
                if (parent == old)
                        parent->in_right = child;
                else
                        parent->in_left = child;

                node->in_color = old->in_color;
                node->in_right = old->in_right;
                node->in_left = old->in_left;
                node->in_parent = old->in_parent;

                if (old->in_parent) {
                        if (node_is_left_child(old))
                                old->in_parent->in_left = node;
                        else
                                old->in_parent->in_right = node;
                } else {
                        *root = node;
                }

                old->in_left->in_parent = node;
                if (old->in_right)
                        old->in_right->in_parent = node;
                update_maxhigh(child ? : parent, node->in_max_high);
                update_maxhigh(node, old->in_max_high);
                if (parent == old)
                        parent = node;
                goto color;
        }
        parent = node->in_parent;
        color = node->in_color;

        if (child)
                child->in_parent = parent;
        if (parent) {
                if (node_is_left_child(node))
                        parent->in_left = child;
                else
                        parent->in_right = child;
        } else {
                *root = child;
        }

        update_maxhigh(child ? : parent, node->in_max_high);

color:
        if (color == INTERVAL_BLACK)
                interval_erase_color(child, parent, root);
        EXIT;
}
EXPORT_SYMBOL(interval_erase);

static inline int interval_may_overlap(struct interval_node *node,
                                       struct interval_node_extent *ext)
{
        return (ext->start <= node->in_max_high &&
                ext->end >= interval_low(node));
}

/*
 * This function finds all intervals that overlap the interval @ext and
 * calls @func on each of the resulting intervals, one by one.  In Lustre it
 * is used to find all conflicting locks in the granted queue and to add
 * those locks to the AST work list.
 *
 * The recursive equivalent, for reference:
 * {
 *         if (node == NULL)
 *                 return;
 *         if (ext->end < interval_low(node)) {
 *                 interval_search(node->in_left, ext, func, data);
 *         } else if (interval_may_overlap(node, ext)) {
 *                 if (extent_overlapped(ext, &node->in_extent))
 *                         func(node, data);
 *                 interval_search(node->in_left, ext, func, data);
 *                 interval_search(node->in_right, ext, func, data);
 *         }
 * }
 */
enum interval_iter interval_search(struct interval_node *node,
                                   struct interval_node_extent *ext,
                                   interval_callback_t func,
                                   void *data)
{
        struct interval_node *parent;
        enum interval_iter rc = INTERVAL_ITER_CONT;
        ENTRY;

        LASSERT(ext != NULL);
        LASSERT(func != NULL);

        while (node) {
                if (ext->end < interval_low(node)) {
                        /* everything to the right starts above ext->end */
                        if (node->in_left) {
                                node = node->in_left;
                                continue;
                        }
                } else if (interval_may_overlap(node, ext)) {
                        if (extent_overlapped(ext, &node->in_extent)) {
                                rc = func(node, data);
                                if (rc == INTERVAL_ITER_STOP)
                                        RETURN(rc);
                        }

                        /* go down, checking the left subtree first */
                        if (node->in_left) {
                                node = node->in_left;
                                continue;
                        }
                        if (node->in_right) {
                                node = node->in_right;
                                continue;
                        }
                }

                /* no way down; climb until we can take a right turn */
                parent = node->in_parent;
                while (parent) {
                        if (node_is_left_child(node) && parent->in_right) {
                                /* If we ever went left, the parent either met
                                 * ext->end < interval_low(parent) or
                                 * may_overlap(parent).  In the former case we
                                 * need not go back down, so stop early and
                                 * check may_overlap(parent) after this loop. */
                                node = parent->in_right;
                                break;
                        }
                        node = parent;
                        parent = parent->in_parent;
                }
                if (parent == NULL || !interval_may_overlap(parent, ext))
                        break;
        }

        RETURN(rc);
}
EXPORT_SYMBOL(interval_search);

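/*
 * Illustrative sketch only, not part of the original file: counting how many
 * granted extents overlap a request, in the spirit of the conflicting-lock
 * scan described above.  The callback and counter names are hypothetical.
 * Unlike interval_overlap_cb() below, this callback keeps returning
 * INTERVAL_ITER_CONT, so every overlapping node is visited.
 */
static enum interval_iter interval_conflict_cb(struct interval_node *node,
                                               void *data)
{
        (*(unsigned int *)data)++;      /* one conflict per overlapping node */
        return INTERVAL_ITER_CONT;      /* keep scanning for more overlaps */
}

static unsigned int interval_count_overlaps(struct interval_node *root,
                                            struct interval_node_extent *ext)
{
        unsigned int conflicts = 0;

        (void)interval_search(root, ext, interval_conflict_cb, &conflicts);
        return conflicts;
}
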
static enum interval_iter interval_overlap_cb(struct interval_node *n,
                                              void *args)
{
        *(int *)args = 1;
        return INTERVAL_ITER_STOP;
}

int interval_is_overlapped(struct interval_node *root,
                           struct interval_node_extent *ext)
{
        int has = 0;

        (void)interval_search(root, ext, interval_overlap_cb, &has);
        return has;
}
EXPORT_SYMBOL(interval_is_overlapped);

/* Don't expand the low end of an extent.  Expanding downwards is expensive
 * and meaningless for most extents, because programs seldom do I/O backward.
 *
 * The recursive algorithm for expanding low would be:
 * expand_low {
 *         struct interval_node *tmp;
 *         static __u64 res = 0;
 *
 *         if (root == NULL)
 *                 return res;
 *         if (root->in_max_high < low) {
 *                 res = max_u64(root->in_max_high + 1, res);
 *                 return res;
 *         } else if (low < interval_low(root)) {
 *                 interval_expand_low(root->in_left, low);
 *                 return res;
 *         }
 *
 *         if (interval_high(root) < low)
 *                 res = max_u64(interval_high(root) + 1, res);
 *         interval_expand_low(root->in_left, low);
 *         interval_expand_low(root->in_right, low);
 *
 *         return res;
 * }
 *
 * It is easy to eliminate the recursion; see interval_search for an example.
 */
static inline __u64 interval_expand_low(struct interval_node *root, __u64 low)
{
        /* only the empty tree is handled for now */
        if (root == NULL)
                return 0;
        return low;
}

static inline __u64 interval_expand_high(struct interval_node *node, __u64 high)
{
        __u64 result = ~0;

        while (node != NULL) {
                if (node->in_max_high < high)
                        break;

                if (interval_low(node) > high) {
                        result = interval_low(node) - 1;
                        node = node->in_left;
                } else {
                        node = node->in_right;
                }
        }

        return result;
}

/* Expand the extent @ext in place; @limiter (if set) controls whether each
 * end of the extent may be expanded. */
void interval_expand(struct interval_node *root,
                     struct interval_node_extent *ext,
                     struct interval_node_extent *limiter)
{
        /* The interval_is_overlapped() assertions are expensive because they
         * may visit many nodes before finding an overlapping one. */
        LASSERT(interval_is_overlapped(root, ext) == 0);
        if (!limiter || limiter->start < ext->start)
                ext->start = interval_expand_low(root, ext->start);
        if (!limiter || limiter->end > ext->end)
                ext->end = interval_expand_high(root, ext->end);
        LASSERT(interval_is_overlapped(root, ext) == 0);
}
EXPORT_SYMBOL(interval_expand);
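
/*
 * Illustrative sketch only, not part of the original file: expanding a
 * requested extent against a tree of granted extents.  The function name
 * and the literal numbers are hypothetical; only interval_is_overlapped()
 * and interval_expand() as defined above are used.
 */
static void interval_expand_example(struct interval_node *root)
{
        struct interval_node_extent want = { .start = 100, .end = 199 };
        struct interval_node_extent limit = { .start = 0, .end = 1023 };

        /* interval_expand() requires that @want overlaps nothing in the tree */
        if (interval_is_overlapped(root, &want))
                return;

        /* @limit permits expansion on both ends here; the tree bounds it */
        interval_expand(root, &want, &limit);
}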