4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 only,
8 * as published by the Free Software Foundation.
10 * This program is distributed in the hope that it will be useful, but
11 * WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * General Public License version 2 for more details (a copy is included
14 * in the LICENSE file that accompanied this code).
16 * You should have received a copy of the GNU General Public License
17 * version 2 along with this program; If not, see
18 * http://www.gnu.org/licenses/gpl-2.0.html
23 * Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
24 * Use is subject to license terms.
26 * Copyright (c) 2014, Intel Corporation.
29 * This file is part of Lustre, http://www.lustre.org/
30 * Lustre is a trademark of Sun Microsystems, Inc.
32 * lustre/ldlm/interval_tree.c
34 * Interval tree library used by ldlm extent lock code
36 * Author: Huang Wei <huangwei@clusterfs.com>
37 * Author: Jay Xiong <jinshan.xiong@sun.com>
40 # include <lustre_dlm.h>
42 # include <libcfs/libcfs.h>
44 #include <interval_tree.h>
51 static inline int node_is_left_child(struct interval_node *node)
53 LASSERT(node->in_parent != NULL);
54 return node == node->in_parent->in_left;
57 static inline int node_is_right_child(struct interval_node *node)
59 LASSERT(node->in_parent != NULL);
60 return node == node->in_parent->in_right;
63 static inline int node_is_red(struct interval_node *node)
65 return node->in_color == INTERVAL_RED;
68 static inline int node_is_black(struct interval_node *node)
70 return node->in_color == INTERVAL_BLACK;
73 static inline int extent_compare(struct interval_node_extent *e1,
74 struct interval_node_extent *e2)
77 if (e1->start == e2->start) {
78 if (e1->end < e2->end)
80 else if (e1->end > e2->end)
85 if (e1->start < e2->start)
93 static inline int extent_equal(struct interval_node_extent *e1,
94 struct interval_node_extent *e2)
96 return (e1->start == e2->start) && (e1->end == e2->end);
99 static inline int extent_overlapped(struct interval_node_extent *e1,
100 struct interval_node_extent *e2)
102 return (e1->start <= e2->end) && (e2->start <= e1->end);
105 static inline int node_compare(struct interval_node *n1,
106 struct interval_node *n2)
108 return extent_compare(&n1->in_extent, &n2->in_extent);
111 int node_equal(struct interval_node *n1, struct interval_node *n2)
113 return extent_equal(&n1->in_extent, &n2->in_extent);
116 static inline __u64 max_u64(__u64 x, __u64 y)
118 return x > y ? x : y;
121 static inline __u64 min_u64(__u64 x, __u64 y)
123 return x < y ? x : y;
/* Walk the tree in ascending extent order (in-order traversal). */
#define interval_for_each(node, root)			\
	for (node = interval_first(root); node != NULL;	\
	     node = interval_next(node))
/* Walk the tree in descending extent order (reverse in-order traversal). */
#define interval_for_each_reverse(node, root)		\
	for (node = interval_last(root); node != NULL;	\
	     node = interval_prev(node))
134 static struct interval_node *interval_first(struct interval_node *node)
140 while (node->in_left)
141 node = node->in_left;
145 static struct interval_node *interval_last(struct interval_node *node)
151 while (node->in_right)
152 node = node->in_right;
156 static struct interval_node *interval_next(struct interval_node *node)
163 RETURN(interval_first(node->in_right));
164 while (node->in_parent && node_is_right_child(node))
165 node = node->in_parent;
166 RETURN(node->in_parent);
169 static struct interval_node *interval_prev(struct interval_node *node)
177 RETURN(interval_last(node->in_left));
179 while (node->in_parent && node_is_left_child(node))
180 node = node->in_parent;
182 RETURN(node->in_parent);
185 enum interval_iter interval_iterate(struct interval_node *root,
186 interval_callback_t func,
189 struct interval_node *node;
190 enum interval_iter rc = INTERVAL_ITER_CONT;
193 interval_for_each(node, root) {
194 rc = func(node, data);
195 if (rc == INTERVAL_ITER_STOP)
201 EXPORT_SYMBOL(interval_iterate);
203 enum interval_iter interval_iterate_reverse(struct interval_node *root,
204 interval_callback_t func,
207 struct interval_node *node;
208 enum interval_iter rc = INTERVAL_ITER_CONT;
211 interval_for_each_reverse(node, root) {
212 rc = func(node, data);
213 if (rc == INTERVAL_ITER_STOP)
219 EXPORT_SYMBOL(interval_iterate_reverse);
221 /* try to find a node with same interval in the tree,
222 * if found, return the pointer to the node, otherwise return NULL*/
223 struct interval_node *interval_find(struct interval_node *root,
224 struct interval_node_extent *ex)
226 struct interval_node *walk = root;
231 rc = extent_compare(ex, &walk->in_extent);
235 walk = walk->in_left;
237 walk = walk->in_right;
242 EXPORT_SYMBOL(interval_find);
244 static void __rotate_change_maxhigh(struct interval_node *node,
245 struct interval_node *rotate)
247 __u64 left_max, right_max;
249 rotate->in_max_high = node->in_max_high;
250 left_max = node->in_left ? node->in_left->in_max_high : 0;
251 right_max = node->in_right ? node->in_right->in_max_high : 0;
252 node->in_max_high = max_u64(interval_high(node),
253 max_u64(left_max,right_max));
256 /* The left rotation "pivots" around the link from node to node->right, and
257 * - node will be linked to node->right's left child, and
258 * - node->right's left child will be linked to node's right child. */
259 static void __rotate_left(struct interval_node *node,
260 struct interval_node **root)
262 struct interval_node *right = node->in_right;
263 struct interval_node *parent = node->in_parent;
265 node->in_right = right->in_left;
267 right->in_left->in_parent = node;
269 right->in_left = node;
270 right->in_parent = parent;
272 if (node_is_left_child(node))
273 parent->in_left = right;
275 parent->in_right = right;
279 node->in_parent = right;
281 /* update max_high for node and right */
282 __rotate_change_maxhigh(node, right);
285 /* The right rotation "pivots" around the link from node to node->left, and
286 * - node will be linked to node->left's right child, and
287 * - node->left's right child will be linked to node's left child. */
288 static void __rotate_right(struct interval_node *node,
289 struct interval_node **root)
291 struct interval_node *left = node->in_left;
292 struct interval_node *parent = node->in_parent;
294 node->in_left = left->in_right;
296 left->in_right->in_parent = node;
297 left->in_right = node;
299 left->in_parent = parent;
301 if (node_is_right_child(node))
302 parent->in_right = left;
304 parent->in_left = left;
308 node->in_parent = left;
310 /* update max_high for node and left */
311 __rotate_change_maxhigh(node, left);
/* Exchange two interval_node pointers.  Wrapped in do { } while (0) so the
 * macro behaves as a single statement (the terminator was lost in this
 * copy — without it an `if (x) interval_swap(a, b); else ...` miscompiles). */
#define interval_swap(a, b) do {			\
	struct interval_node *c = a; a = b; b = c;	\
} while (0)
319 * Operations INSERT and DELETE, when run on a tree with n keys,
320 * take O(logN) time.Because they modify the tree, the result
321 * may violate the red-black properties.To restore these properties,
322 * we must change the colors of some of the nodes in the tree
323 * and also change the pointer structure.
325 static void interval_insert_color(struct interval_node *node,
326 struct interval_node **root)
328 struct interval_node *parent, *gparent;
331 while ((parent = node->in_parent) && node_is_red(parent)) {
332 gparent = parent->in_parent;
333 /* Parent is RED, so gparent must not be NULL */
334 if (node_is_left_child(parent)) {
335 struct interval_node *uncle;
336 uncle = gparent->in_right;
337 if (uncle && node_is_red(uncle)) {
338 uncle->in_color = INTERVAL_BLACK;
339 parent->in_color = INTERVAL_BLACK;
340 gparent->in_color = INTERVAL_RED;
345 if (parent->in_right == node) {
346 __rotate_left(parent, root);
347 interval_swap(node, parent);
350 parent->in_color = INTERVAL_BLACK;
351 gparent->in_color = INTERVAL_RED;
352 __rotate_right(gparent, root);
354 struct interval_node *uncle;
355 uncle = gparent->in_left;
356 if (uncle && node_is_red(uncle)) {
357 uncle->in_color = INTERVAL_BLACK;
358 parent->in_color = INTERVAL_BLACK;
359 gparent->in_color = INTERVAL_RED;
364 if (node_is_left_child(node)) {
365 __rotate_right(parent, root);
366 interval_swap(node, parent);
369 parent->in_color = INTERVAL_BLACK;
370 gparent->in_color = INTERVAL_RED;
371 __rotate_left(gparent, root);
375 (*root)->in_color = INTERVAL_BLACK;
379 struct interval_node *interval_insert(struct interval_node *node,
380 struct interval_node **root)
383 struct interval_node **p, *parent = NULL;
386 LASSERT(!interval_is_intree(node));
390 if (node_equal(parent, node))
393 /* max_high field must be updated after each iteration */
394 if (parent->in_max_high < interval_high(node))
395 parent->in_max_high = interval_high(node);
397 if (node_compare(node, parent) < 0)
398 p = &parent->in_left;
400 p = &parent->in_right;
403 /* link node into the tree */
404 node->in_parent = parent;
405 node->in_color = INTERVAL_RED;
406 node->in_left = node->in_right = NULL;
409 interval_insert_color(node, root);
414 EXPORT_SYMBOL(interval_insert);
416 static inline int node_is_black_or_0(struct interval_node *node)
418 return !node || node_is_black(node);
421 static void interval_erase_color(struct interval_node *node,
422 struct interval_node *parent,
423 struct interval_node **root)
425 struct interval_node *tmp;
428 while (node_is_black_or_0(node) && node != *root) {
429 if (parent->in_left == node) {
430 tmp = parent->in_right;
431 if (node_is_red(tmp)) {
432 tmp->in_color = INTERVAL_BLACK;
433 parent->in_color = INTERVAL_RED;
434 __rotate_left(parent, root);
435 tmp = parent->in_right;
437 if (node_is_black_or_0(tmp->in_left) &&
438 node_is_black_or_0(tmp->in_right)) {
439 tmp->in_color = INTERVAL_RED;
441 parent = node->in_parent;
443 if (node_is_black_or_0(tmp->in_right)) {
444 struct interval_node *o_left;
445 if ((o_left = tmp->in_left))
446 o_left->in_color = INTERVAL_BLACK;
447 tmp->in_color = INTERVAL_RED;
448 __rotate_right(tmp, root);
449 tmp = parent->in_right;
451 tmp->in_color = parent->in_color;
452 parent->in_color = INTERVAL_BLACK;
454 tmp->in_right->in_color = INTERVAL_BLACK;
455 __rotate_left(parent, root);
460 tmp = parent->in_left;
461 if (node_is_red(tmp)) {
462 tmp->in_color = INTERVAL_BLACK;
463 parent->in_color = INTERVAL_RED;
464 __rotate_right(parent, root);
465 tmp = parent->in_left;
467 if (node_is_black_or_0(tmp->in_left) &&
468 node_is_black_or_0(tmp->in_right)) {
469 tmp->in_color = INTERVAL_RED;
471 parent = node->in_parent;
473 if (node_is_black_or_0(tmp->in_left)) {
474 struct interval_node *o_right;
475 if ((o_right = tmp->in_right))
476 o_right->in_color = INTERVAL_BLACK;
477 tmp->in_color = INTERVAL_RED;
478 __rotate_left(tmp, root);
479 tmp = parent->in_left;
481 tmp->in_color = parent->in_color;
482 parent->in_color = INTERVAL_BLACK;
484 tmp->in_left->in_color = INTERVAL_BLACK;
485 __rotate_right(parent, root);
492 node->in_color = INTERVAL_BLACK;
497 * if the @max_high value of @node is changed, this function traverse a path
498 * from node up to the root to update max_high for the whole tree.
500 static void update_maxhigh(struct interval_node *node,
503 __u64 left_max, right_max;
507 left_max = node->in_left ? node->in_left->in_max_high : 0;
508 right_max = node->in_right ? node->in_right->in_max_high : 0;
509 node->in_max_high = max_u64(interval_high(node),
510 max_u64(left_max, right_max));
512 if (node->in_max_high >= old_maxhigh)
514 node = node->in_parent;
519 void interval_erase(struct interval_node *node,
520 struct interval_node **root)
522 struct interval_node *child, *parent;
526 LASSERT(interval_is_intree(node));
528 if (!node->in_left) {
529 child = node->in_right;
530 } else if (!node->in_right) {
531 child = node->in_left;
532 } else { /* Both left and right child are not NULL */
533 struct interval_node *old = node;
535 node = interval_next(node);
536 child = node->in_right;
537 parent = node->in_parent;
538 color = node->in_color;
541 child->in_parent = parent;
543 parent->in_right = child;
545 parent->in_left = child;
547 node->in_color = old->in_color;
548 node->in_right = old->in_right;
549 node->in_left = old->in_left;
550 node->in_parent = old->in_parent;
552 if (old->in_parent) {
553 if (node_is_left_child(old))
554 old->in_parent->in_left = node;
556 old->in_parent->in_right = node;
561 old->in_left->in_parent = node;
563 old->in_right->in_parent = node;
564 update_maxhigh(child ? : parent, node->in_max_high);
565 update_maxhigh(node, old->in_max_high);
570 parent = node->in_parent;
571 color = node->in_color;
574 child->in_parent = parent;
576 if (node_is_left_child(node))
577 parent->in_left = child;
579 parent->in_right = child;
584 update_maxhigh(child ? : parent, node->in_max_high);
587 if (color == INTERVAL_BLACK)
588 interval_erase_color(child, parent, root);
591 EXPORT_SYMBOL(interval_erase);
593 static inline int interval_may_overlap(struct interval_node *node,
594 struct interval_node_extent *ext)
596 return (ext->start <= node->in_max_high &&
597 ext->end >= interval_low(node));
601 * This function finds all intervals that overlap interval ext,
602 * and calls func to handle resulted intervals one by one.
603 * in lustre, this function will find all conflicting locks in
604 * the granted queue and add these locks to the ast work list.
609 * if (ext->end < interval_low(node)) {
610 * interval_search(node->in_left, ext, func, data);
611 * } else if (interval_may_overlap(node, ext)) {
612 * if (extent_overlapped(ext, &node->in_extent))
614 * interval_search(node->in_left, ext, func, data);
615 * interval_search(node->in_right, ext, func, data);
621 enum interval_iter interval_search(struct interval_node *node,
622 struct interval_node_extent *ext,
623 interval_callback_t func,
626 struct interval_node *parent;
627 enum interval_iter rc = INTERVAL_ITER_CONT;
631 LASSERT(ext != NULL);
632 LASSERT(func != NULL);
635 if (ext->end < interval_low(node)) {
637 node = node->in_left;
640 } else if (interval_may_overlap(node, ext)) {
641 if (extent_overlapped(ext, &node->in_extent)) {
642 rc = func(node, data);
643 if (rc == INTERVAL_ITER_STOP)
648 node = node->in_left;
651 if (node->in_right) {
652 node = node->in_right;
657 parent = node->in_parent;
659 if (node_is_left_child(node) &&
661 /* If we ever got the left, it means that the
662 * parent met ext->end<interval_low(parent), or
663 * may_overlap(parent). If the former is true,
664 * we needn't go back. So stop early and check
665 * may_overlap(parent) after this loop. */
666 node = parent->in_right;
670 parent = parent->in_parent;
672 if (parent == NULL || !interval_may_overlap(parent, ext))
678 EXPORT_SYMBOL(interval_search);
680 static enum interval_iter interval_overlap_cb(struct interval_node *n,
684 return INTERVAL_ITER_STOP;
687 int interval_is_overlapped(struct interval_node *root,
688 struct interval_node_extent *ext)
691 (void)interval_search(root, ext, interval_overlap_cb, &has);
694 EXPORT_SYMBOL(interval_is_overlapped);
696 /* Don't expand to low. Expanding downwards is expensive, and meaningless to
697 * some extents, because programs seldom do IO backward.
699 * The recursive algorithm of expanding low:
701 * struct interval_node *tmp;
702 * static __u64 res = 0;
706 * if (root->in_max_high < low) {
707 * res = max_u64(root->in_max_high + 1, res);
709 * } else if (low < interval_low(root)) {
710 * interval_expand_low(root->in_left, low);
714 * if (interval_high(root) < low)
715 * res = max_u64(interval_high(root) + 1, res);
716 * interval_expand_low(root->in_left, low);
717 * interval_expand_low(root->in_right, low);
722 * It's much easy to eliminate the recursion, see interval_search for
725 static inline __u64 interval_expand_low(struct interval_node *root, __u64 low)
727 /* we only concern the empty tree right now. */
733 static inline __u64 interval_expand_high(struct interval_node *node, __u64 high)
737 while (node != NULL) {
738 if (node->in_max_high < high)
741 if (interval_low(node) > high) {
742 result = interval_low(node) - 1;
743 node = node->in_left;
745 node = node->in_right;
752 /* expanding the extent based on @ext. */
753 void interval_expand(struct interval_node *root,
754 struct interval_node_extent *ext,
755 struct interval_node_extent *limiter)
757 /* The assertion of interval_is_overlapped is expensive because we may
758 * travel many nodes to find the overlapped node. */
759 LASSERT(interval_is_overlapped(root, ext) == 0);
760 if (!limiter || limiter->start < ext->start)
761 ext->start = interval_expand_low(root, ext->start);
762 if (!limiter || limiter->end > ext->end)
763 ext->end = interval_expand_high(root, ext->end);
764 LASSERT(interval_is_overlapped(root, ext) == 0);