/*
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 only,
 * as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License version 2 for more details (a copy is included
 * in the LICENSE file that accompanied this code).
 *
 * You should have received a copy of the GNU General Public License
 * version 2 along with this program; If not, see
 * http://www.gnu.org/licenses/gpl-2.0.html
 */
/*
 * Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
 * Use is subject to license terms.
 *
 * Copyright (c) 2014, Intel Corporation.
 */
/*
 * This file is part of Lustre, http://www.lustre.org/
 * Lustre is a trademark of Sun Microsystems, Inc.
 *
 * lustre/ldlm/interval_tree.c
 *
 * Interval tree library used by ldlm extent lock code
 *
 * Author: Huang Wei <huangwei@clusterfs.com>
 * Author: Jay Xiong <jinshan.xiong@sun.com>
 */
40 # include <lustre_dlm.h>
42 #include <interval_tree.h>
49 static inline int node_is_left_child(struct interval_node *node)
51 LASSERT(node->in_parent != NULL);
52 return node == node->in_parent->in_left;
55 static inline int node_is_right_child(struct interval_node *node)
57 LASSERT(node->in_parent != NULL);
58 return node == node->in_parent->in_right;
61 static inline int node_is_red(struct interval_node *node)
63 return node->in_color == INTERVAL_RED;
66 static inline int node_is_black(struct interval_node *node)
68 return node->in_color == INTERVAL_BLACK;
71 static inline int extent_compare(struct interval_node_extent *e1,
72 struct interval_node_extent *e2)
75 if (e1->start == e2->start) {
76 if (e1->end < e2->end)
78 else if (e1->end > e2->end)
83 if (e1->start < e2->start)
91 static inline int extent_equal(struct interval_node_extent *e1,
92 struct interval_node_extent *e2)
94 return (e1->start == e2->start) && (e1->end == e2->end);
97 static inline int extent_overlapped(struct interval_node_extent *e1,
98 struct interval_node_extent *e2)
100 return (e1->start <= e2->end) && (e2->start <= e1->end);
103 static inline int node_compare(struct interval_node *n1,
104 struct interval_node *n2)
106 return extent_compare(&n1->in_extent, &n2->in_extent);
109 int node_equal(struct interval_node *n1, struct interval_node *n2)
111 return extent_equal(&n1->in_extent, &n2->in_extent);
114 static inline __u64 max_u64(__u64 x, __u64 y)
116 return x > y ? x : y;
119 static inline __u64 min_u64(__u64 x, __u64 y)
121 return x < y ? x : y;
/* In-order (ascending extent order) traversal over every node under @root. */
#define interval_for_each(node, root)			\
	for (node = interval_first(root); node != NULL;	\
	     node = interval_next(node))
/* Reverse-order (descending extent order) traversal over every node
 * under @root. */
#define interval_for_each_reverse(node, root)		\
	for (node = interval_last(root); node != NULL;	\
	     node = interval_prev(node))
132 static struct interval_node *interval_first(struct interval_node *node)
138 while (node->in_left)
139 node = node->in_left;
143 static struct interval_node *interval_last(struct interval_node *node)
149 while (node->in_right)
150 node = node->in_right;
154 static struct interval_node *interval_next(struct interval_node *node)
161 RETURN(interval_first(node->in_right));
162 while (node->in_parent && node_is_right_child(node))
163 node = node->in_parent;
164 RETURN(node->in_parent);
167 static struct interval_node *interval_prev(struct interval_node *node)
175 RETURN(interval_last(node->in_left));
177 while (node->in_parent && node_is_left_child(node))
178 node = node->in_parent;
180 RETURN(node->in_parent);
183 enum interval_iter interval_iterate(struct interval_node *root,
184 interval_callback_t func,
187 struct interval_node *node;
188 enum interval_iter rc = INTERVAL_ITER_CONT;
191 interval_for_each(node, root) {
192 rc = func(node, data);
193 if (rc == INTERVAL_ITER_STOP)
199 EXPORT_SYMBOL(interval_iterate);
201 enum interval_iter interval_iterate_reverse(struct interval_node *root,
202 interval_callback_t func,
205 struct interval_node *node;
206 enum interval_iter rc = INTERVAL_ITER_CONT;
209 interval_for_each_reverse(node, root) {
210 rc = func(node, data);
211 if (rc == INTERVAL_ITER_STOP)
217 EXPORT_SYMBOL(interval_iterate_reverse);
219 /* try to find a node with same interval in the tree,
220 * if found, return the pointer to the node, otherwise return NULL*/
221 struct interval_node *interval_find(struct interval_node *root,
222 struct interval_node_extent *ex)
224 struct interval_node *walk = root;
229 rc = extent_compare(ex, &walk->in_extent);
233 walk = walk->in_left;
235 walk = walk->in_right;
240 EXPORT_SYMBOL(interval_find);
242 static void __rotate_change_maxhigh(struct interval_node *node,
243 struct interval_node *rotate)
245 __u64 left_max, right_max;
247 rotate->in_max_high = node->in_max_high;
248 left_max = node->in_left ? node->in_left->in_max_high : 0;
249 right_max = node->in_right ? node->in_right->in_max_high : 0;
250 node->in_max_high = max_u64(interval_high(node),
251 max_u64(left_max,right_max));
254 /* The left rotation "pivots" around the link from node to node->right, and
255 * - node will be linked to node->right's left child, and
256 * - node->right's left child will be linked to node's right child. */
257 static void __rotate_left(struct interval_node *node,
258 struct interval_node **root)
260 struct interval_node *right = node->in_right;
261 struct interval_node *parent = node->in_parent;
263 node->in_right = right->in_left;
265 right->in_left->in_parent = node;
267 right->in_left = node;
268 right->in_parent = parent;
270 if (node_is_left_child(node))
271 parent->in_left = right;
273 parent->in_right = right;
277 node->in_parent = right;
279 /* update max_high for node and right */
280 __rotate_change_maxhigh(node, right);
283 /* The right rotation "pivots" around the link from node to node->left, and
284 * - node will be linked to node->left's right child, and
285 * - node->left's right child will be linked to node's left child. */
286 static void __rotate_right(struct interval_node *node,
287 struct interval_node **root)
289 struct interval_node *left = node->in_left;
290 struct interval_node *parent = node->in_parent;
292 node->in_left = left->in_right;
294 left->in_right->in_parent = node;
295 left->in_right = node;
297 left->in_parent = parent;
299 if (node_is_right_child(node))
300 parent->in_right = left;
302 parent->in_left = left;
306 node->in_parent = left;
308 /* update max_high for node and left */
309 __rotate_change_maxhigh(node, left);
/* Exchange two interval_node pointers. */
#define interval_swap(a, b) do {			\
	struct interval_node *c = a; a = b; b = c;	\
} while (0)
317 * Operations INSERT and DELETE, when run on a tree with n keys,
318 * take O(logN) time.Because they modify the tree, the result
319 * may violate the red-black properties.To restore these properties,
320 * we must change the colors of some of the nodes in the tree
321 * and also change the pointer structure.
323 static void interval_insert_color(struct interval_node *node,
324 struct interval_node **root)
326 struct interval_node *parent, *gparent;
329 while ((parent = node->in_parent) && node_is_red(parent)) {
330 gparent = parent->in_parent;
331 /* Parent is RED, so gparent must not be NULL */
332 if (node_is_left_child(parent)) {
333 struct interval_node *uncle;
334 uncle = gparent->in_right;
335 if (uncle && node_is_red(uncle)) {
336 uncle->in_color = INTERVAL_BLACK;
337 parent->in_color = INTERVAL_BLACK;
338 gparent->in_color = INTERVAL_RED;
343 if (parent->in_right == node) {
344 __rotate_left(parent, root);
345 interval_swap(node, parent);
348 parent->in_color = INTERVAL_BLACK;
349 gparent->in_color = INTERVAL_RED;
350 __rotate_right(gparent, root);
352 struct interval_node *uncle;
353 uncle = gparent->in_left;
354 if (uncle && node_is_red(uncle)) {
355 uncle->in_color = INTERVAL_BLACK;
356 parent->in_color = INTERVAL_BLACK;
357 gparent->in_color = INTERVAL_RED;
362 if (node_is_left_child(node)) {
363 __rotate_right(parent, root);
364 interval_swap(node, parent);
367 parent->in_color = INTERVAL_BLACK;
368 gparent->in_color = INTERVAL_RED;
369 __rotate_left(gparent, root);
373 (*root)->in_color = INTERVAL_BLACK;
377 struct interval_node *interval_insert(struct interval_node *node,
378 struct interval_node **root)
381 struct interval_node **p, *parent = NULL;
384 LASSERT(!interval_is_intree(node));
388 if (node_equal(parent, node))
391 /* max_high field must be updated after each iteration */
392 if (parent->in_max_high < interval_high(node))
393 parent->in_max_high = interval_high(node);
395 if (node_compare(node, parent) < 0)
396 p = &parent->in_left;
398 p = &parent->in_right;
401 /* link node into the tree */
402 node->in_parent = parent;
403 node->in_color = INTERVAL_RED;
404 node->in_left = node->in_right = NULL;
407 interval_insert_color(node, root);
412 EXPORT_SYMBOL(interval_insert);
/* NULL children count as black in red-black tree invariants. */
static inline int node_is_black_or_0(struct interval_node *node)
{
	return !node || node_is_black(node);
}
419 static void interval_erase_color(struct interval_node *node,
420 struct interval_node *parent,
421 struct interval_node **root)
423 struct interval_node *tmp;
426 while (node_is_black_or_0(node) && node != *root) {
427 if (parent->in_left == node) {
428 tmp = parent->in_right;
429 if (node_is_red(tmp)) {
430 tmp->in_color = INTERVAL_BLACK;
431 parent->in_color = INTERVAL_RED;
432 __rotate_left(parent, root);
433 tmp = parent->in_right;
435 if (node_is_black_or_0(tmp->in_left) &&
436 node_is_black_or_0(tmp->in_right)) {
437 tmp->in_color = INTERVAL_RED;
439 parent = node->in_parent;
441 if (node_is_black_or_0(tmp->in_right)) {
442 struct interval_node *o_left;
443 if ((o_left = tmp->in_left))
444 o_left->in_color = INTERVAL_BLACK;
445 tmp->in_color = INTERVAL_RED;
446 __rotate_right(tmp, root);
447 tmp = parent->in_right;
449 tmp->in_color = parent->in_color;
450 parent->in_color = INTERVAL_BLACK;
452 tmp->in_right->in_color = INTERVAL_BLACK;
453 __rotate_left(parent, root);
458 tmp = parent->in_left;
459 if (node_is_red(tmp)) {
460 tmp->in_color = INTERVAL_BLACK;
461 parent->in_color = INTERVAL_RED;
462 __rotate_right(parent, root);
463 tmp = parent->in_left;
465 if (node_is_black_or_0(tmp->in_left) &&
466 node_is_black_or_0(tmp->in_right)) {
467 tmp->in_color = INTERVAL_RED;
469 parent = node->in_parent;
471 if (node_is_black_or_0(tmp->in_left)) {
472 struct interval_node *o_right;
473 if ((o_right = tmp->in_right))
474 o_right->in_color = INTERVAL_BLACK;
475 tmp->in_color = INTERVAL_RED;
476 __rotate_left(tmp, root);
477 tmp = parent->in_left;
479 tmp->in_color = parent->in_color;
480 parent->in_color = INTERVAL_BLACK;
482 tmp->in_left->in_color = INTERVAL_BLACK;
483 __rotate_right(parent, root);
490 node->in_color = INTERVAL_BLACK;
495 * if the @max_high value of @node is changed, this function traverse a path
496 * from node up to the root to update max_high for the whole tree.
498 static void update_maxhigh(struct interval_node *node,
501 __u64 left_max, right_max;
505 left_max = node->in_left ? node->in_left->in_max_high : 0;
506 right_max = node->in_right ? node->in_right->in_max_high : 0;
507 node->in_max_high = max_u64(interval_high(node),
508 max_u64(left_max, right_max));
510 if (node->in_max_high >= old_maxhigh)
512 node = node->in_parent;
517 void interval_erase(struct interval_node *node,
518 struct interval_node **root)
520 struct interval_node *child, *parent;
524 LASSERT(interval_is_intree(node));
526 if (!node->in_left) {
527 child = node->in_right;
528 } else if (!node->in_right) {
529 child = node->in_left;
530 } else { /* Both left and right child are not NULL */
531 struct interval_node *old = node;
533 node = interval_next(node);
534 child = node->in_right;
535 parent = node->in_parent;
536 color = node->in_color;
539 child->in_parent = parent;
541 parent->in_right = child;
543 parent->in_left = child;
545 node->in_color = old->in_color;
546 node->in_right = old->in_right;
547 node->in_left = old->in_left;
548 node->in_parent = old->in_parent;
550 if (old->in_parent) {
551 if (node_is_left_child(old))
552 old->in_parent->in_left = node;
554 old->in_parent->in_right = node;
559 old->in_left->in_parent = node;
561 old->in_right->in_parent = node;
562 update_maxhigh(child ? : parent, node->in_max_high);
563 update_maxhigh(node, old->in_max_high);
568 parent = node->in_parent;
569 color = node->in_color;
572 child->in_parent = parent;
574 if (node_is_left_child(node))
575 parent->in_left = child;
577 parent->in_right = child;
582 update_maxhigh(child ? : parent, node->in_max_high);
585 if (color == INTERVAL_BLACK)
586 interval_erase_color(child, parent, root);
589 EXPORT_SYMBOL(interval_erase);
591 static inline int interval_may_overlap(struct interval_node *node,
592 struct interval_node_extent *ext)
594 return (ext->start <= node->in_max_high &&
595 ext->end >= interval_low(node));
599 * This function finds all intervals that overlap interval ext,
600 * and calls func to handle resulted intervals one by one.
601 * in lustre, this function will find all conflicting locks in
602 * the granted queue and add these locks to the ast work list.
607 * if (ext->end < interval_low(node)) {
608 * interval_search(node->in_left, ext, func, data);
609 * } else if (interval_may_overlap(node, ext)) {
610 * if (extent_overlapped(ext, &node->in_extent))
612 * interval_search(node->in_left, ext, func, data);
613 * interval_search(node->in_right, ext, func, data);
619 enum interval_iter interval_search(struct interval_node *node,
620 struct interval_node_extent *ext,
621 interval_callback_t func,
624 struct interval_node *parent;
625 enum interval_iter rc = INTERVAL_ITER_CONT;
629 LASSERT(ext != NULL);
630 LASSERT(func != NULL);
633 if (ext->end < interval_low(node)) {
635 node = node->in_left;
638 } else if (interval_may_overlap(node, ext)) {
639 if (extent_overlapped(ext, &node->in_extent)) {
640 rc = func(node, data);
641 if (rc == INTERVAL_ITER_STOP)
646 node = node->in_left;
649 if (node->in_right) {
650 node = node->in_right;
655 parent = node->in_parent;
657 if (node_is_left_child(node) &&
659 /* If we ever got the left, it means that the
660 * parent met ext->end<interval_low(parent), or
661 * may_overlap(parent). If the former is true,
662 * we needn't go back. So stop early and check
663 * may_overlap(parent) after this loop. */
664 node = parent->in_right;
668 parent = parent->in_parent;
670 if (parent == NULL || !interval_may_overlap(parent, ext))
676 EXPORT_SYMBOL(interval_search);
678 static enum interval_iter interval_overlap_cb(struct interval_node *n,
682 return INTERVAL_ITER_STOP;
685 int interval_is_overlapped(struct interval_node *root,
686 struct interval_node_extent *ext)
689 (void)interval_search(root, ext, interval_overlap_cb, &has);
692 EXPORT_SYMBOL(interval_is_overlapped);
694 /* Don't expand to low. Expanding downwards is expensive, and meaningless to
695 * some extents, because programs seldom do IO backward.
697 * The recursive algorithm of expanding low:
699 * struct interval_node *tmp;
700 * static __u64 res = 0;
704 * if (root->in_max_high < low) {
705 * res = max_u64(root->in_max_high + 1, res);
707 * } else if (low < interval_low(root)) {
708 * interval_expand_low(root->in_left, low);
712 * if (interval_high(root) < low)
713 * res = max_u64(interval_high(root) + 1, res);
714 * interval_expand_low(root->in_left, low);
715 * interval_expand_low(root->in_right, low);
720 * It's much easy to eliminate the recursion, see interval_search for
723 static inline __u64 interval_expand_low(struct interval_node *root, __u64 low)
725 /* we only concern the empty tree right now. */
731 static inline __u64 interval_expand_high(struct interval_node *node, __u64 high)
735 while (node != NULL) {
736 if (node->in_max_high < high)
739 if (interval_low(node) > high) {
740 result = interval_low(node) - 1;
741 node = node->in_left;
743 node = node->in_right;
750 /* expanding the extent based on @ext. */
751 void interval_expand(struct interval_node *root,
752 struct interval_node_extent *ext,
753 struct interval_node_extent *limiter)
755 /* The assertion of interval_is_overlapped is expensive because we may
756 * travel many nodes to find the overlapped node. */
757 LASSERT(interval_is_overlapped(root, ext) == 0);
758 if (!limiter || limiter->start < ext->start)
759 ext->start = interval_expand_low(root, ext->start);
760 if (!limiter || limiter->end > ext->end)
761 ext->end = interval_expand_high(root, ext->end);
762 LASSERT(interval_is_overlapped(root, ext) == 0);