Linux Audio

Check our new training course

Loading...
Note: File does not exist in v5.14.15.
   1// SPDX-License-Identifier: GPL-2.0+
   2/*
   3 * test_maple_tree.c: Test the maple tree API
   4 * Copyright (c) 2018-2022 Oracle Corporation
   5 * Author: Liam R. Howlett <Liam.Howlett@Oracle.com>
   6 *
   7 * Any tests that only require the interface of the tree.
   8 */
   9
  10#include <linux/maple_tree.h>
  11#include <linux/module.h>
  12#include <linux/rwsem.h>
  13
/* Upper bound pre-filled with XA_ZERO_ENTRY for the allocation tests. */
#define MTREE_ALLOC_MAX 0x2000000000000Ul
#define CONFIG_MAPLE_SEARCH
/* Wider nodes imply smaller (32-bit) pointers; used to pick per-arch values. */
#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)

#ifndef CONFIG_DEBUG_MAPLE_TREE
/* Without the debug config the dump/validate helpers compile to nothing. */
#define mt_dump(mt, fmt)		do {} while (0)
#define mt_validate(mt)			do {} while (0)
#define mt_cache_shrink()		do {} while (0)
#define mas_dump(mas)			do {} while (0)
#define mas_wr_dump(mas)		do {} while (0)
atomic_t maple_tree_tests_run;
atomic_t maple_tree_tests_passed;
#undef MT_BUG_ON

/*
 * Count every check and report failures with function/line context rather
 * than halting, so the remaining tests still run and the pass/run totals
 * stay meaningful.
 */
#define MT_BUG_ON(__tree, __x) do {					\
	atomic_inc(&maple_tree_tests_run);				\
	if (__x) {							\
		pr_info("BUG at %s:%d (%u)\n",				\
		__func__, __LINE__, __x);				\
		pr_info("Pass: %u Run:%u\n",				\
			atomic_read(&maple_tree_tests_passed),		\
			atomic_read(&maple_tree_tests_run));		\
	} else {							\
		atomic_inc(&maple_tree_tests_passed);			\
	}								\
} while (0)
#endif
  41
/* Enable at most one BENCH_* to benchmark that operation instead of testing. */
/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
/* #define BENCH_LOAD */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */
/* #define BENCH_MAS_FOR_EACH */
/* #define BENCH_MAS_PREV */

#ifdef __KERNEL__
/* In-kernel builds lack the userspace test-allocator hooks; stub them out. */
#define mt_set_non_kernel(x)		do {} while (0)
#define mt_zero_nr_tallocated(x)	do {} while (0)
#else
/* Userspace runs have no scheduler to yield to. */
#define cond_resched()			do {} while (0)
#endif

/* Shorthand predicates over the maple state's status field. */
#define mas_is_none(x)		((x)->status == ma_none)
#define mas_is_overflow(x)	((x)->status == ma_overflow)
#define mas_is_underflow(x)	((x)->status == ma_underflow)
  62
  63static int __init mtree_insert_index(struct maple_tree *mt,
  64				     unsigned long index, gfp_t gfp)
  65{
  66	return mtree_insert(mt, index, xa_mk_value(index & LONG_MAX), gfp);
  67}
  68
  69static void __init mtree_erase_index(struct maple_tree *mt, unsigned long index)
  70{
  71	MT_BUG_ON(mt, mtree_erase(mt, index) != xa_mk_value(index & LONG_MAX));
  72	MT_BUG_ON(mt, mtree_load(mt, index) != NULL);
  73}
  74
  75static int __init mtree_test_insert(struct maple_tree *mt, unsigned long index,
  76				void *ptr)
  77{
  78	return mtree_insert(mt, index, ptr, GFP_KERNEL);
  79}
  80
  81static int __init mtree_test_store_range(struct maple_tree *mt,
  82			unsigned long start, unsigned long end, void *ptr)
  83{
  84	return mtree_store_range(mt, start, end, ptr, GFP_KERNEL);
  85}
  86
  87static int __init mtree_test_store(struct maple_tree *mt, unsigned long start,
  88				void *ptr)
  89{
  90	return mtree_test_store_range(mt, start, start, ptr);
  91}
  92
  93static int __init mtree_test_insert_range(struct maple_tree *mt,
  94			unsigned long start, unsigned long end, void *ptr)
  95{
  96	return mtree_insert_range(mt, start, end, ptr, GFP_KERNEL);
  97}
  98
  99static void __init *mtree_test_load(struct maple_tree *mt, unsigned long index)
 100{
 101	return mtree_load(mt, index);
 102}
 103
 104static void __init *mtree_test_erase(struct maple_tree *mt, unsigned long index)
 105{
 106	return mtree_erase(mt, index);
 107}
 108
 109#if defined(CONFIG_64BIT)
 110static noinline void __init check_mtree_alloc_range(struct maple_tree *mt,
 111		unsigned long start, unsigned long end, unsigned long size,
 112		unsigned long expected, int eret, void *ptr)
 113{
 114
 115	unsigned long result = expected + 1;
 116	int ret;
 117
 118	ret = mtree_alloc_range(mt, &result, ptr, size, start, end,
 119			GFP_KERNEL);
 120	MT_BUG_ON(mt, ret != eret);
 121	if (ret)
 122		return;
 123
 124	MT_BUG_ON(mt, result != expected);
 125}
 126
 127static noinline void __init check_mtree_alloc_rrange(struct maple_tree *mt,
 128		unsigned long start, unsigned long end, unsigned long size,
 129		unsigned long expected, int eret, void *ptr)
 130{
 131
 132	unsigned long result = expected + 1;
 133	int ret;
 134
 135	ret = mtree_alloc_rrange(mt, &result, ptr, size, start, end,
 136			GFP_KERNEL);
 137	MT_BUG_ON(mt, ret != eret);
 138	if (ret)
 139		return;
 140
 141	MT_BUG_ON(mt, result != expected);
 142}
 143#endif
 144
 145static noinline void __init check_load(struct maple_tree *mt,
 146				       unsigned long index, void *ptr)
 147{
 148	void *ret = mtree_test_load(mt, index);
 149
 150	if (ret != ptr)
 151		pr_err("Load %lu returned %p expect %p\n", index, ret, ptr);
 152	MT_BUG_ON(mt, ret != ptr);
 153}
 154
 155static noinline void __init check_store_range(struct maple_tree *mt,
 156		unsigned long start, unsigned long end, void *ptr, int expected)
 157{
 158	int ret = -EINVAL;
 159	unsigned long i;
 160
 161	ret = mtree_test_store_range(mt, start, end, ptr);
 162	MT_BUG_ON(mt, ret != expected);
 163
 164	if (ret)
 165		return;
 166
 167	for (i = start; i <= end; i++)
 168		check_load(mt, i, ptr);
 169}
 170
 171static noinline void __init check_insert_range(struct maple_tree *mt,
 172		unsigned long start, unsigned long end, void *ptr, int expected)
 173{
 174	int ret = -EINVAL;
 175	unsigned long i;
 176
 177	ret = mtree_test_insert_range(mt, start, end, ptr);
 178	MT_BUG_ON(mt, ret != expected);
 179
 180	if (ret)
 181		return;
 182
 183	for (i = start; i <= end; i++)
 184		check_load(mt, i, ptr);
 185}
 186
 187static noinline void __init check_insert(struct maple_tree *mt,
 188					 unsigned long index, void *ptr)
 189{
 190	int ret = -EINVAL;
 191
 192	ret = mtree_test_insert(mt, index, ptr);
 193	MT_BUG_ON(mt, ret != 0);
 194}
 195
 196static noinline void __init check_dup_insert(struct maple_tree *mt,
 197				      unsigned long index, void *ptr)
 198{
 199	int ret = -EINVAL;
 200
 201	ret = mtree_test_insert(mt, index, ptr);
 202	MT_BUG_ON(mt, ret != -EEXIST);
 203}
 204
 205
 206static noinline void __init check_index_load(struct maple_tree *mt,
 207					     unsigned long index)
 208{
 209	return check_load(mt, index, xa_mk_value(index & LONG_MAX));
 210}
 211
 212static inline __init int not_empty(struct maple_node *node)
 213{
 214	int i;
 215
 216	if (node->parent)
 217		return 1;
 218
 219	for (i = 0; i < ARRAY_SIZE(node->slot); i++)
 220		if (node->slot[i])
 221			return 1;
 222
 223	return 0;
 224}
 225
 226
/*
 * Build a tree by inserting indices max..1 in descending order.  After every
 * insert, verify all entries inserted so far load back, the next (lower)
 * index is still absent, and toggling RCU mode leaves the height intact.
 */
static noinline void __init check_rev_seq(struct maple_tree *mt,
					  unsigned long max, bool verbose)
{
	unsigned long i = max, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	while (i) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		/* Everything inserted so far must still be readable. */
		for (j = i; j <= max; j++)
			check_index_load(mt, j);

		check_load(mt, i - 1, NULL);
		/* Toggling RCU mode must not change the tree height. */
		mt_set_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		mt_clear_in_rcu(mt);
		MT_BUG_ON(mt, !mt_height(mt));
		i--;
	}
	/* One past the top was never inserted. */
	check_load(mt, max + 1, NULL);

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" %s test of 0-%lu %luK in %d active (%d total)\n",
			__func__, max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}
 259
/*
 * Build a tree by inserting indices 0..max in ascending order.  After every
 * insert, verify all entries inserted so far load back and the next (higher)
 * index is still absent.
 */
static noinline void __init check_seq(struct maple_tree *mt, unsigned long max,
		bool verbose)
{
	unsigned long i, j;

	MT_BUG_ON(mt, !mtree_empty(mt));

	mt_zero_nr_tallocated();
	for (i = 0; i <= max; i++) {
		MT_BUG_ON(mt, mtree_insert_index(mt, i, GFP_KERNEL));
		/* Everything inserted so far must still be readable. */
		for (j = 0; j <= i; j++)
			check_index_load(mt, j);

		/* More than one entry means the tree has real height. */
		if (i)
			MT_BUG_ON(mt, !mt_height(mt));
		check_load(mt, i + 1, NULL);
	}

#ifndef __KERNEL__
	if (verbose) {
		rcu_barrier();
		mt_dump(mt, mt_dump_dec);
		pr_info(" seq test of 0-%lu %luK in %d active (%d total)\n",
			max, mt_get_alloc_size()/1024, mt_nr_allocated(),
			mt_nr_tallocated());
	}
#endif
}
 288
 289static noinline void __init check_lb_not_empty(struct maple_tree *mt)
 290{
 291	unsigned long i, j;
 292	unsigned long huge = 4000UL * 1000 * 1000;
 293
 294
 295	i = huge;
 296	while (i > 4096) {
 297		check_insert(mt, i, (void *) i);
 298		for (j = huge; j >= i; j /= 2) {
 299			check_load(mt, j-1, NULL);
 300			check_load(mt, j, (void *) j);
 301			check_load(mt, j+1, NULL);
 302		}
 303		i /= 2;
 304	}
 305	mtree_destroy(mt);
 306}
 307
 308static noinline void __init check_lower_bound_split(struct maple_tree *mt)
 309{
 310	MT_BUG_ON(mt, !mtree_empty(mt));
 311	check_lb_not_empty(mt);
 312}
 313
 314static noinline void __init check_upper_bound_split(struct maple_tree *mt)
 315{
 316	unsigned long i, j;
 317	unsigned long huge;
 318
 319	MT_BUG_ON(mt, !mtree_empty(mt));
 320
 321	if (MAPLE_32BIT)
 322		huge = 2147483647UL;
 323	else
 324		huge = 4000UL * 1000 * 1000;
 325
 326	i = 4096;
 327	while (i < huge) {
 328		check_insert(mt, i, (void *) i);
 329		for (j = i; j >= huge; j *= 2) {
 330			check_load(mt, j-1, NULL);
 331			check_load(mt, j, (void *) j);
 332			check_load(mt, j+1, NULL);
 333		}
 334		i *= 2;
 335	}
 336	mtree_destroy(mt);
 337}
 338
 339static noinline void __init check_mid_split(struct maple_tree *mt)
 340{
 341	unsigned long huge = 8000UL * 1000 * 1000;
 342
 343	check_insert(mt, huge, (void *) huge);
 344	check_insert(mt, 0, xa_mk_value(0));
 345	check_lb_not_empty(mt);
 346}
 347
/*
 * Exercise mas_find_rev(): store 201 ranges ([i*10, i*10 + 5] => value i),
 * then walk backwards checking hits, gaps and the min-limit behaviour.
 */
static noinline void __init check_rev_find(struct maple_tree *mt)
{
	int i, nr_entries = 200;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	rcu_read_lock();
	mas_set(&mas, 1000);
	/* Index 1000 falls inside range [1000, 1005] => value 100. */
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	/* Nothing further remains above the 1000 limit. */
	val = mas_find_rev(&mas, 1000);
	MT_BUG_ON(mt, val != NULL);

	/* [997, 999] sits in the gap between stored ranges. */
	mas_set(&mas, 999);
	val = mas_find_rev(&mas, 997);
	MT_BUG_ON(mt, val != NULL);

	mas_set(&mas, 1000);
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(100));
	/* Continuing backwards reaches the previous range's value. */
	val = mas_find_rev(&mas, 900);
	MT_BUG_ON(mt, val != xa_mk_value(99));

	/* Walk all the way down to index 0, then past it. */
	mas_set(&mas, 20);
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(2));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(1));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != xa_mk_value(0));
	val = mas_find_rev(&mas, 0);
	MT_BUG_ON(mt, val != NULL);
	rcu_read_unlock();
}
 386
/*
 * Exercise the forward search interfaces — mas_find(), mas_for_each(),
 * mt_for_each() and mt_find() — over a tree of index 0 plus ascending
 * powers of four, with XA_ZERO_ENTRY planted at index 64 and a terminal
 * entry at ULONG_MAX for the range walks.
 */
static noinline void __init check_find(struct maple_tree *mt)
{
	unsigned long val = 0;
	unsigned long count;
	unsigned long max;
	unsigned long top;
	unsigned long last = 0, index = 0;
	void *entry, *entry2;

	MA_STATE(mas, mt, 0, 0);

	/* Insert 0. */
	MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));

#if defined(CONFIG_64BIT)
	top = 4398046511104UL;	/* 4^21: val's value on reaching the end entry. */
#else
	top = ULONG_MAX;
#endif

	/* Fewer power-of-four entries fit before overflow on 32-bit. */
	if (MAPLE_32BIT) {
		count = 15;
	} else {
		count = 20;
	}

	/* Insert powers of four; index 64 gets XA_ZERO_ENTRY instead. */
	for (int i = 0; i <= count; i++) {
		if (val != 64)
			MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
		else
			MT_BUG_ON(mt, mtree_insert(mt, val,
				XA_ZERO_ENTRY, GFP_KERNEL));

		val <<= 2;
	}

	/* Walk forward with mas_find() limited to 268435456 (4^14). */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	while ((entry = mas_find(&mas, 268435456)) != NULL) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);

		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Same walk via mas_for_each() over the whole index space. */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}
	mas_unlock(&mas);

	/* Test mas_pause */
	val = 0;
	mas_set(&mas, val);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val != 64)
			MT_BUG_ON(mt, xa_mk_value(val) != entry);
		else
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;

		/* Drop and retake the lock mid-walk after pausing. */
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	val = 0;
	max = 300; /* A value big enough to include XA_ZERO_ENTRY at 64. */
	mt_for_each(mt, entry, index, max) {
		MT_BUG_ON(mt, xa_mk_value(val) != entry);
		val <<= 2;
		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
	}

	/* Full-range walk with a terminating entry at ULONG_MAX. */
	val = 0;
	max = 0;
	index = 0;
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
	mt_for_each(mt, entry, index, ULONG_MAX) {
		if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		if (val == 64) /* Skip zero entry. */
			val <<= 2;
		/* For zero check. */
		if (!val)
			val = 1;
		/* max doubles as an iteration counter here. */
		max++;
		MT_BUG_ON(mt, max > 25);
	}
	mtree_erase_index(mt, ULONG_MAX);

	/* mt_find() from 17 up to 512 lands on the entry at 256. */
	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 512);
	MT_BUG_ON(mt, xa_mk_value(256) != entry);

	/* Capped at 20, the same search finds nothing. */
	mas_reset(&mas);
	index = 17;
	entry = mt_find(mt, &index, 20);
	MT_BUG_ON(mt, entry != NULL);


	/* Range check.. */
	/* Insert ULONG_MAX */
	MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));

	val = 0;
	mas_set(&mas, 0);
	mas_lock(&mas);
	mas_for_each(&mas, entry, ULONG_MAX) {
		if (val == 64)
			MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
		else if (val == top)
			MT_BUG_ON(mt, entry != xa_mk_value(LONG_MAX));
		else
			MT_BUG_ON(mt, xa_mk_value(val) != entry);

		/* Workaround for 32bit */
		if ((val << 2) < val)
			val = ULONG_MAX;
		else
			val <<= 2;

		/* For zero check. */
		if (!val)
			val = 1;
		mas_pause(&mas);
		mas_unlock(&mas);
		mas_lock(&mas);
	}
	mas_unlock(&mas);

	/* 1048576 (4^10) is a stored index; mas_find() must hit it. */
	mas_set(&mas, 1048576);
	mas_lock(&mas);
	entry = mas_find(&mas, 1048576);
	mas_unlock(&mas);
	MT_BUG_ON(mas.tree, entry == NULL);

	/*
	 * Find last value.
	 * 1. get the expected value, leveraging the existence of an end entry
	 * 2. delete end entry
	 * 3. find the last value but searching for ULONG_MAX and then using
	 * prev
	 */
	/* First, get the expected result. */
	mas_lock(&mas);
	mas_reset(&mas);
	mas.index = ULONG_MAX; /* start at max.. */
	entry = mas_find(&mas, ULONG_MAX);
	entry = mas_prev(&mas, 0);
	index = mas.index;
	last = mas.last;

	/* Erase the last entry. */
	mas_reset(&mas);
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	mas_erase(&mas);

	/* Get the previous value from MAS_START */
	mas_reset(&mas);
	entry2 = mas_prev(&mas, 0);

	/* Check results. */
	MT_BUG_ON(mt, entry != entry2);
	MT_BUG_ON(mt, index != mas.index);
	MT_BUG_ON(mt, last != mas.last);


	/* mas_prev() must give the same answer from a ma_none state. */
	mas.status = ma_none;
	mas.index = ULONG_MAX;
	mas.last = ULONG_MAX;
	entry2 = mas_prev(&mas, 0);
	MT_BUG_ON(mt, entry != entry2);

	/* Nothing precedes index 0. */
	mas_set(&mas, 0);
	MT_BUG_ON(mt, mas_prev(&mas, 0) != NULL);

	mas_unlock(&mas);
	mtree_destroy(mt);
}
 603
/*
 * Verify mas_for_each() sees exactly the live entries: nothing on an empty
 * tree, each of 0..255 as they are inserted, and only the remaining tail
 * (skipping retained zero entries) as they are erased again.
 */
static noinline void __init check_find_2(struct maple_tree *mt)
{
	unsigned long i, j;
	void *entry;

	MA_STATE(mas, mt, 0, 0);
	/* An empty tree must yield no iterations at all. */
	rcu_read_lock();
	mas_for_each(&mas, entry, ULONG_MAX)
		MT_BUG_ON(mt, true);
	rcu_read_unlock();

	/* Insert 0..255; after each, the walk must see 0..i in order. */
	for (i = 0; i < 256; i++) {
		mtree_insert_index(mt, i, GFP_KERNEL);
		j = 0;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != i + 1);
	}

	/* Erase 0..255; after each, only i+1..255 remain visible. */
	for (i = 0; i < 256; i++) {
		mtree_erase_index(mt, i);
		j = i + 1;
		mas_set(&mas, 0);
		rcu_read_lock();
		mas_for_each(&mas, entry, ULONG_MAX) {
			/* Retained zero entries are not counted. */
			if (xa_is_zero(entry))
				continue;

			MT_BUG_ON(mt, entry != xa_mk_value(j));
			j++;
		}
		rcu_read_unlock();
		MT_BUG_ON(mt, j != 256);
	}

	/*MT_BUG_ON(mt, !mtree_empty(mt)); */
}
 646
 647
 648#if defined(CONFIG_64BIT)
/*
 * Exercise reverse (top-down) gap search and allocation: rebuild a
 * /proc/self/maps-like layout, verify mas_empty_area_rev() finds each
 * known hole, then drive mtree_alloc_rrange() through a request table.
 */
static noinline void __init check_alloc_rev_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps | awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};

	static const unsigned long holes[] = {
		/*
		 * Note: start of hole is INCLUSIVE
		 *        end of hole is EXCLUSIVE
		 *        (opposite of the above table.)
		 * Start of hole, end of hole,  size of hole (+1)
		 */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values per record:
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x7fff5878d << 12,  /* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x565234AF0 << 12,    /* Max */
		0x3000,         /* Size */
		0x565234AEE << 12,  /* max - 3. */
		0,              /* Return value success. */

		0x0,            /* Min */
		-1,             /* Max */
		0x1000,         /* Size */
		562949953421311 << 12,/* First rev hole of size 0x1000 */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7F36D5109 << 12,    /* Max */
		0x4000,         /* Size */
		0x7F36D5106 << 12,    /* First rev hole of size 0x4000 */
		0,              /* Return value success. */

		/* Ascend test. */
		0x0,
		34148798628 << 12,
		19 << 12,
		34148797418 << 12,
		0x0,

		/* Too big test. */
		0x0,
		18446744073709551615UL,
		562915594369134UL << 12,
		0x0,
		-EBUSY,

		/* Single space test. */
		34148798725 << 12,
		34148798725 << 12,
		1 << 12,
		34148798725 << 12,
		0,
	};

	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0;

	MA_STATE(mas, mt, 0, 0);

	/* Cap the usable index space so gap searches have a hard ceiling. */
	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
#define DEBUG_REV_RANGE 0
	for (i = 0; i < range_count; i += 2) {
		/* Inclusive, Inclusive (with the -1) */

#if DEBUG_REV_RANGE
		pr_debug("\t%s: Insert %lu-%lu\n", __func__, range[i] >> 12,
				(range[i + 1] >> 12) - 1);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}


	/* Each known hole must be found exactly where the table says. */
	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
#if DEBUG_REV_RANGE
		pr_debug("Search from %lu-%lu for gap %lu should be at %lu\n",
				min, holes[i+1]>>12, holes[i+2]>>12,
				holes[i] >> 12);
#endif
		MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
#if DEBUG_REV_RANGE
		pr_debug("Found %lu %lu\n", mas.index, mas.last);
		pr_debug("gap %lu %lu\n", (holes[i] >> 12),
				(holes[i+1] >> 12));
#endif
		MT_BUG_ON(mt, mas.last + 1 != (holes[i+1] >> 12));
		MT_BUG_ON(mt, mas.index != (holes[i+1] >> 12) - (holes[i+2] >> 12));
		min = holes[i+1] >> 12;
		mas_reset(&mas);
	}

	mas_unlock(&mas);
	/* Drive the allocator through the request table above. */
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_REV_RANGE
		pr_debug("\tReverse request %d between %lu-%lu size %lu, should get %lu\n",
				i, req_range[i] >> 12,
				(req_range[i + 1] >> 12),
				req_range[i+2] >> 12,
				req_range[i+3] >> 12);
#endif
		check_mtree_alloc_rrange(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
	}

	mt_set_non_kernel(1);
	mtree_erase(mt, 34148798727); /* create a deleted range. */
	mtree_erase(mt, 34148798725);
	check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
			34148798725, 0, mt);

	mtree_destroy(mt);
}
 822
/*
 * Exercise forward (bottom-up) gap search and allocation: rebuild a
 * /proc/self/maps-like layout, verify mas_empty_area() finds each known
 * hole, then drive mtree_alloc_range() through a request table.
 */
static noinline void __init check_alloc_range(struct maple_tree *mt)
{
	/*
	 * Generated by:
	 * cat /proc/self/maps|awk '{print $1}'|
	 * awk -F "-" '{printf "0x%s, 0x%s, ", $1, $2}'
	 */

	static const unsigned long range[] = {
	/*      Inclusive     , Exclusive. */
		0x565234af2000, 0x565234af4000,
		0x565234af4000, 0x565234af9000,
		0x565234af9000, 0x565234afb000,
		0x565234afc000, 0x565234afd000,
		0x565234afd000, 0x565234afe000,
		0x565235def000, 0x565235e10000,
		0x7f36d4bfd000, 0x7f36d4ee2000,
		0x7f36d4ee2000, 0x7f36d4f04000,
		0x7f36d4f04000, 0x7f36d504c000,
		0x7f36d504c000, 0x7f36d5098000,
		0x7f36d5098000, 0x7f36d5099000,
		0x7f36d5099000, 0x7f36d509d000,
		0x7f36d509d000, 0x7f36d509f000,
		0x7f36d509f000, 0x7f36d50a5000,
		0x7f36d50b9000, 0x7f36d50db000,
		0x7f36d50db000, 0x7f36d50dc000,
		0x7f36d50dc000, 0x7f36d50fa000,
		0x7f36d50fa000, 0x7f36d5102000,
		0x7f36d5102000, 0x7f36d5103000,
		0x7f36d5103000, 0x7f36d5104000,
		0x7f36d5104000, 0x7f36d5105000,
		0x7fff5876b000, 0x7fff5878d000,
		0x7fff5878e000, 0x7fff58791000,
		0x7fff58791000, 0x7fff58793000,
	};
	static const unsigned long holes[] = {
		/* Start of hole, end of hole,  size of hole (+1) */
		0x565234afb000, 0x565234afc000, 0x1000,
		0x565234afe000, 0x565235def000, 0x12F1000,
		0x565235e10000, 0x7f36d4bfd000, 0x28E49EDED000,
	};

	/*
	 * req_range consists of 5 values per record:
	 * 1. min index
	 * 2. max index
	 * 3. size
	 * 4. number that should be returned.
	 * 5. return value
	 */
	static const unsigned long req_range[] = {
		0x565234af9000, /* Min */
		0x7fff58791000, /* Max */
		0x1000,         /* Size */
		0x565234afb000, /* First hole in our data of size 0x1000. */
		0,              /* Return value success. */

		0x0,            /* Min */
		0x7fff58791000, /* Max */
		0x1F00,         /* Size */
		0x0,            /* First hole in our data of size 0x2000. */
		0,              /* Return value success. */

		/* Test ascend. */
		34148797436 << 12, /* Min */
		0x7fff587AF000,    /* Max */
		0x3000,         /* Size */
		34148798629 << 12,             /* Expected location */
		0,              /* Return value success. */

		/* Test failing. */
		34148798623 << 12,  /* Min */
		34148798683 << 12,  /* Max */
		0x15000,            /* Size */
		0,             /* Expected location */
		-EBUSY,              /* Return value failed. */

		/* Test filling entire gap. */
		34148798623 << 12,  /* Min */
		0x7fff587AF000,    /* Max */
		0x10000,           /* Size */
		34148798632 << 12,             /* Expected location */
		0,              /* Return value success. */

		/* Test walking off the end of root. */
		0,                  /* Min */
		-1,                 /* Max */
		-1,                 /* Size */
		0,                  /* Expected location */
		-EBUSY,             /* Return value failure. */

		/* Test looking for too large a hole across entire range. */
		0,                  /* Min */
		-1,                 /* Max */
		4503599618982063UL << 12,  /* Size */
		34359052178 << 12,  /* Expected location */
		-EBUSY,             /* Return failure. */

		/* Test a single entry */
		34148798648 << 12,		/* Min */
		34148798648 << 12,		/* Max */
		4096,			/* Size of 1 */
		34148798648 << 12,	/* Location is the same as min/max */
		0,			/* Success */
	};
	int i, range_count = ARRAY_SIZE(range);
	int req_range_count = ARRAY_SIZE(req_range);
	unsigned long min = 0x565234af2000;
	MA_STATE(mas, mt, 0, 0);

	/* Cap the usable index space so gap searches have a hard ceiling. */
	mtree_store_range(mt, MTREE_ALLOC_MAX, ULONG_MAX, XA_ZERO_ENTRY,
			  GFP_KERNEL);
	for (i = 0; i < range_count; i += 2) {
#define DEBUG_ALLOC_RANGE 0
#if DEBUG_ALLOC_RANGE
		pr_debug("\tInsert %lu-%lu\n", range[i] >> 12,
			 (range[i + 1] >> 12) - 1);
		mt_dump(mt, mt_dump_hex);
#endif
		check_insert_range(mt, range[i] >> 12, (range[i + 1] >> 12) - 1,
				xa_mk_value(range[i] >> 12), 0);
		mt_validate(mt);
	}



	/* Each known hole must be found exactly where the table says. */
	mas_lock(&mas);
	for (i = 0; i < ARRAY_SIZE(holes); i += 3) {

#if DEBUG_ALLOC_RANGE
		pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
			holes[i+1] >> 12, holes[i+2] >> 12,
			min, holes[i+1]);
#endif
		MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
					holes[i+1] >> 12,
					holes[i+2] >> 12));
		MT_BUG_ON(mt, mas.index != holes[i] >> 12);
		min = holes[i+1];
		mas_reset(&mas);
	}
	mas_unlock(&mas);
	/* Drive the allocator through the request table above. */
	for (i = 0; i < req_range_count; i += 5) {
#if DEBUG_ALLOC_RANGE
		pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
			 i/5, req_range[i]   >> 12, req_range[i + 1]   >> 12,
			 req_range[i + 2]   >> 12, req_range[i + 3]   >> 12,
			 req_range[i], req_range[i+1]);
#endif
		check_mtree_alloc_range(mt,
				req_range[i]   >> 12, /* start */
				req_range[i+1] >> 12, /* end */
				req_range[i+2] >> 12, /* size */
				req_range[i+3] >> 12, /* expected address */
				req_range[i+4],       /* expected return */
				xa_mk_value(req_range[i] >> 12)); /* pointer */
		mt_validate(mt);
#if DEBUG_ALLOC_RANGE
		mt_dump(mt, mt_dump_hex);
#endif
	}

	mtree_destroy(mt);
}
 987#endif
 988
 989static noinline void __init check_ranges(struct maple_tree *mt)
 990{
 991	int i, val, val2;
 992	static const unsigned long r[] = {
 993		10, 15,
 994		20, 25,
 995		17, 22, /* Overlaps previous range. */
 996		9, 1000, /* Huge. */
 997		100, 200,
 998		45, 168,
 999		118, 128,
1000			};
1001
1002	MT_BUG_ON(mt, !mtree_empty(mt));
1003	check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
1004	check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
1005	check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
1006	MT_BUG_ON(mt, !mt_height(mt));
1007	/* Store */
1008	check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
1009	check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
1010	check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
1011	MT_BUG_ON(mt, !mt_height(mt));
1012	mtree_destroy(mt);
1013	MT_BUG_ON(mt, mt_height(mt));
1014
1015	check_seq(mt, 50, false);
1016	mt_set_non_kernel(4);
1017	check_store_range(mt, 5, 47,  xa_mk_value(47), 0);
1018	MT_BUG_ON(mt, !mt_height(mt));
1019	mtree_destroy(mt);
1020
1021	/* Create tree of 1-100 */
1022	check_seq(mt, 100, false);
1023	/* Store 45-168 */
1024	mt_set_non_kernel(10);
1025	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1026	MT_BUG_ON(mt, !mt_height(mt));
1027	mtree_destroy(mt);
1028
1029	/* Create tree of 1-200 */
1030	check_seq(mt, 200, false);
1031	/* Store 45-168 */
1032	check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
1033	MT_BUG_ON(mt, !mt_height(mt));
1034	mtree_destroy(mt);
1035
1036	check_seq(mt, 30, false);
1037	check_store_range(mt, 6, 18, xa_mk_value(6), 0);
1038	MT_BUG_ON(mt, !mt_height(mt));
1039	mtree_destroy(mt);
1040
1041	/* Overwrite across multiple levels. */
1042	/* Create tree of 1-400 */
1043	check_seq(mt, 400, false);
1044	mt_set_non_kernel(50);
1045	/* Store 118-128 */
1046	check_store_range(mt, r[12], r[13], xa_mk_value(r[12]), 0);
1047	mt_set_non_kernel(50);
1048	mtree_test_erase(mt, 140);
1049	mtree_test_erase(mt, 141);
1050	mtree_test_erase(mt, 142);
1051	mtree_test_erase(mt, 143);
1052	mtree_test_erase(mt, 130);
1053	mtree_test_erase(mt, 131);
1054	mtree_test_erase(mt, 132);
1055	mtree_test_erase(mt, 133);
1056	mtree_test_erase(mt, 134);
1057	mtree_test_erase(mt, 135);
1058	check_load(mt, r[12], xa_mk_value(r[12]));
1059	check_load(mt, r[13], xa_mk_value(r[12]));
1060	check_load(mt, r[13] - 1, xa_mk_value(r[12]));
1061	check_load(mt, r[13] + 1, xa_mk_value(r[13] + 1));
1062	check_load(mt, 135, NULL);
1063	check_load(mt, 140, NULL);
1064	mt_set_non_kernel(0);
1065	MT_BUG_ON(mt, !mt_height(mt));
1066	mtree_destroy(mt);
1067
1068
1069
1070	/* Overwrite multiple levels at the end of the tree (slot 7) */
1071	mt_set_non_kernel(50);
1072	check_seq(mt, 400, false);
1073	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1074	check_store_range(mt, 347, 352, xa_mk_value(347), 0);
1075
1076	check_load(mt, 346, xa_mk_value(346));
1077	for (i = 347; i <= 352; i++)
1078		check_load(mt, i, xa_mk_value(347));
1079	for (i = 353; i <= 361; i++)
1080		check_load(mt, i, xa_mk_value(353));
1081	check_load(mt, 362, xa_mk_value(362));
1082	mt_set_non_kernel(0);
1083	MT_BUG_ON(mt, !mt_height(mt));
1084	mtree_destroy(mt);
1085
1086	mt_set_non_kernel(50);
1087	check_seq(mt, 400, false);
1088	check_store_range(mt, 352, 364, NULL, 0);
1089	check_store_range(mt, 351, 363, xa_mk_value(352), 0);
1090	check_load(mt, 350, xa_mk_value(350));
1091	check_load(mt, 351, xa_mk_value(352));
1092	for (i = 352; i <= 363; i++)
1093		check_load(mt, i, xa_mk_value(352));
1094	check_load(mt, 364, NULL);
1095	check_load(mt, 365, xa_mk_value(365));
1096	mt_set_non_kernel(0);
1097	MT_BUG_ON(mt, !mt_height(mt));
1098	mtree_destroy(mt);
1099
1100	mt_set_non_kernel(5);
1101	check_seq(mt, 400, false);
1102	check_store_range(mt, 352, 364, NULL, 0);
1103	check_store_range(mt, 351, 364, xa_mk_value(352), 0);
1104	check_load(mt, 350, xa_mk_value(350));
1105	check_load(mt, 351, xa_mk_value(352));
1106	for (i = 352; i <= 364; i++)
1107		check_load(mt, i, xa_mk_value(352));
1108	check_load(mt, 365, xa_mk_value(365));
1109	mt_set_non_kernel(0);
1110	MT_BUG_ON(mt, !mt_height(mt));
1111	mtree_destroy(mt);
1112
1113
1114	mt_set_non_kernel(50);
1115	check_seq(mt, 400, false);
1116	check_store_range(mt, 362, 367, xa_mk_value(362), 0);
1117	check_store_range(mt, 353, 361, xa_mk_value(353), 0);
1118	mt_set_non_kernel(0);
1119	mt_validate(mt);
1120	MT_BUG_ON(mt, !mt_height(mt));
1121	mtree_destroy(mt);
1122	/*
1123	 * Interesting cases:
1124	 * 1. Overwrite the end of a node and end in the first entry of the next
1125	 * node.
1126	 * 2. Split a single range
1127	 * 3. Overwrite the start of a range
1128	 * 4. Overwrite the end of a range
1129	 * 5. Overwrite the entire range
1130	 * 6. Overwrite a range that causes multiple parent nodes to be
1131	 * combined
1132	 * 7. Overwrite a range that causes multiple parent nodes and part of
1133	 * root to be combined
1134	 * 8. Overwrite the whole tree
1135	 * 9. Try to overwrite the zero entry of an alloc tree.
1136	 * 10. Write a range larger than a nodes current pivot
1137	 */
1138
1139	mt_set_non_kernel(50);
1140	for (i = 0; i <= 500; i++) {
1141		val = i*5;
1142		val2 = (i+1)*5;
1143		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1144	}
1145	check_store_range(mt, 2400, 2400, xa_mk_value(2400), 0);
1146	check_store_range(mt, 2411, 2411, xa_mk_value(2411), 0);
1147	check_store_range(mt, 2412, 2412, xa_mk_value(2412), 0);
1148	check_store_range(mt, 2396, 2400, xa_mk_value(4052020), 0);
1149	check_store_range(mt, 2402, 2402, xa_mk_value(2402), 0);
1150	mtree_destroy(mt);
1151	mt_set_non_kernel(0);
1152
1153	mt_set_non_kernel(50);
1154	for (i = 0; i <= 500; i++) {
1155		val = i*5;
1156		val2 = (i+1)*5;
1157		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1158	}
1159	check_store_range(mt, 2422, 2422, xa_mk_value(2422), 0);
1160	check_store_range(mt, 2424, 2424, xa_mk_value(2424), 0);
1161	check_store_range(mt, 2425, 2425, xa_mk_value(2), 0);
1162	check_store_range(mt, 2460, 2470, NULL, 0);
1163	check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
1164	check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
1165	mt_set_non_kernel(0);
1166	MT_BUG_ON(mt, !mt_height(mt));
1167	mtree_destroy(mt);
1168
1169	/* Check in-place modifications */
1170	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1171	/* Append to the start of last range */
1172	mt_set_non_kernel(50);
1173	for (i = 0; i <= 500; i++) {
1174		val = i * 5 + 1;
1175		val2 = val + 4;
1176		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1177	}
1178
1179	/* Append to the last range without touching any boundaries */
1180	for (i = 0; i < 10; i++) {
1181		val = val2 + 5;
1182		val2 = val + 4;
1183		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1184	}
1185
1186	/* Append to the end of last range */
1187	val = val2;
1188	for (i = 0; i < 10; i++) {
1189		val += 5;
1190		MT_BUG_ON(mt, mtree_test_store_range(mt, val, ULONG_MAX,
1191						     xa_mk_value(val)) != 0);
1192	}
1193
1194	/* Overwriting the range and over a part of the next range */
1195	for (i = 10; i < 30; i += 2) {
1196		val = i * 5 + 1;
1197		val2 = val + 5;
1198		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1199	}
1200
1201	/* Overwriting a part of the range and over the next range */
1202	for (i = 50; i < 70; i += 2) {
1203		val2 = i * 5;
1204		val = val2 - 5;
1205		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1206	}
1207
1208	/*
1209	 * Expand the range, only partially overwriting the previous and
1210	 * next ranges
1211	 */
1212	for (i = 100; i < 130; i += 3) {
1213		val = i * 5 - 5;
1214		val2 = i * 5 + 1;
1215		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1216	}
1217
1218	/*
1219	 * Expand the range, only partially overwriting the previous and
1220	 * next ranges, in RCU mode
1221	 */
1222	mt_set_in_rcu(mt);
1223	for (i = 150; i < 180; i += 3) {
1224		val = i * 5 - 5;
1225		val2 = i * 5 + 1;
1226		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1227	}
1228
1229	MT_BUG_ON(mt, !mt_height(mt));
1230	mt_validate(mt);
1231	mt_set_non_kernel(0);
1232	mtree_destroy(mt);
1233
1234	/* Test rebalance gaps */
1235	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1236	mt_set_non_kernel(50);
1237	for (i = 0; i <= 50; i++) {
1238		val = i*10;
1239		val2 = (i+1)*10;
1240		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1241	}
1242	check_store_range(mt, 161, 161, xa_mk_value(161), 0);
1243	check_store_range(mt, 162, 162, xa_mk_value(162), 0);
1244	check_store_range(mt, 163, 163, xa_mk_value(163), 0);
1245	check_store_range(mt, 240, 249, NULL, 0);
1246	mtree_erase(mt, 200);
1247	mtree_erase(mt, 210);
1248	mtree_erase(mt, 220);
1249	mtree_erase(mt, 230);
1250	mt_set_non_kernel(0);
1251	MT_BUG_ON(mt, !mt_height(mt));
1252	mtree_destroy(mt);
1253
1254	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1255	for (i = 0; i <= 500; i++) {
1256		val = i*10;
1257		val2 = (i+1)*10;
1258		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1259	}
1260	check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
1261	mt_validate(mt);
1262	MT_BUG_ON(mt, !mt_height(mt));
1263	mtree_destroy(mt);
1264
1265	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1266	for (i = 0; i <= 500; i++) {
1267		val = i*10;
1268		val2 = (i+1)*10;
1269		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1270	}
1271	check_store_range(mt, 4811, 4811, xa_mk_value(4811), 0);
1272	check_store_range(mt, 4812, 4812, xa_mk_value(4812), 0);
1273	check_store_range(mt, 4861, 4861, xa_mk_value(4861), 0);
1274	check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
1275	check_store_range(mt, 4842, 4849, NULL, 0);
1276	mt_validate(mt);
1277	MT_BUG_ON(mt, !mt_height(mt));
1278	mtree_destroy(mt);
1279
1280	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1281	for (i = 0; i <= 1300; i++) {
1282		val = i*10;
1283		val2 = (i+1)*10;
1284		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1285		MT_BUG_ON(mt, mt_height(mt) >= 4);
1286	}
1287	/*  Cause a 3 child split all the way up the tree. */
1288	for (i = 5; i < 215; i += 10)
1289		check_store_range(mt, 11450 + i, 11450 + i + 1, NULL, 0);
1290	for (i = 5; i < 65; i += 10)
1291		check_store_range(mt, 11770 + i, 11770 + i + 1, NULL, 0);
1292
1293	MT_BUG_ON(mt, mt_height(mt) >= 4);
1294	for (i = 5; i < 45; i += 10)
1295		check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
1296	if (!MAPLE_32BIT)
1297		MT_BUG_ON(mt, mt_height(mt) < 4);
1298	mtree_destroy(mt);
1299
1300
1301	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1302	for (i = 0; i <= 1200; i++) {
1303		val = i*10;
1304		val2 = (i+1)*10;
1305		check_store_range(mt, val, val2, xa_mk_value(val), 0);
1306		MT_BUG_ON(mt, mt_height(mt) >= 4);
1307	}
1308	/* Fill parents and leaves before split. */
1309	for (i = 5; i < 455; i += 10)
1310		check_store_range(mt, 7800 + i, 7800 + i + 1, NULL, 0);
1311
1312	for (i = 1; i < 16; i++)
1313		check_store_range(mt, 8185 + i, 8185 + i + 1,
1314				  xa_mk_value(8185+i), 0);
1315	MT_BUG_ON(mt, mt_height(mt) >= 4);
1316	/* triple split across multiple levels. */
1317	check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
1318	if (!MAPLE_32BIT)
1319		MT_BUG_ON(mt, mt_height(mt) != 4);
1320}
1321
1322static noinline void __init check_next_entry(struct maple_tree *mt)
1323{
1324	void *entry = NULL;
1325	unsigned long limit = 30, i = 0;
1326	MA_STATE(mas, mt, i, i);
1327
1328	MT_BUG_ON(mt, !mtree_empty(mt));
1329
1330	check_seq(mt, limit, false);
1331	rcu_read_lock();
1332
1333	/* Check the first one and get ma_state in the correct state. */
1334	MT_BUG_ON(mt, mas_walk(&mas) != xa_mk_value(i++));
1335	for ( ; i <= limit + 1; i++) {
1336		entry = mas_next(&mas, limit);
1337		if (i > limit)
1338			MT_BUG_ON(mt, entry != NULL);
1339		else
1340			MT_BUG_ON(mt, xa_mk_value(i) != entry);
1341	}
1342	rcu_read_unlock();
1343	mtree_destroy(mt);
1344}
1345
1346static noinline void __init check_prev_entry(struct maple_tree *mt)
1347{
1348	unsigned long index = 16;
1349	void *value;
1350	int i;
1351
1352	MA_STATE(mas, mt, index, index);
1353
1354	MT_BUG_ON(mt, !mtree_empty(mt));
1355	check_seq(mt, 30, false);
1356
1357	rcu_read_lock();
1358	value = mas_find(&mas, ULONG_MAX);
1359	MT_BUG_ON(mt, value != xa_mk_value(index));
1360	value = mas_prev(&mas, 0);
1361	MT_BUG_ON(mt, value != xa_mk_value(index - 1));
1362	rcu_read_unlock();
1363	mtree_destroy(mt);
1364
1365	/* Check limits on prev */
1366	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
1367	mas_lock(&mas);
1368	for (i = 0; i <= index; i++) {
1369		mas_set_range(&mas, i*10, i*10+5);
1370		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
1371	}
1372
1373	mas_set(&mas, 20);
1374	value = mas_walk(&mas);
1375	MT_BUG_ON(mt, value != xa_mk_value(2));
1376
1377	value = mas_prev(&mas, 19);
1378	MT_BUG_ON(mt, value != NULL);
1379
1380	mas_set(&mas, 80);
1381	value = mas_walk(&mas);
1382	MT_BUG_ON(mt, value != xa_mk_value(8));
1383
1384	value = mas_prev(&mas, 76);
1385	MT_BUG_ON(mt, value != NULL);
1386
1387	mas_unlock(&mas);
1388}
1389
/*
 * check_store_null() - Storing NULL must prune rather than bloat: a
 * NULL store covering all existing data collapses the tree back to
 * empty, while a NULL store over only part of an entry's span leaves
 * the entry in place.
 */
static noinline void __init check_store_null(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, ULONG_MAX);

	/*
	 * Store NULL at range [0, ULONG_MAX] to an empty tree should result
	 * in an empty tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at any range to an empty tree should result in an empty
	 * tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set_range(&mas, 3, 10);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [0, ULONG_MAX] to a single entry tree should
	 * result in an empty tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	/* &mas is just a convenient non-NULL pointer to store. */
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 0, ULONG_MAX);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [0, n] to a single entry tree should
	 * result in an empty tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 0, 5);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [m, n] where m > 0 to a single entry tree
	 * should still be a single entry tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set(&mas, 0);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
	mas_set_range(&mas, 2, 5);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, mtree_empty(mt));
//	MT_BUG_ON(mt, xa_is_node(mas_root(&mas)));
	mas_unlock(&mas);
	mtree_destroy(mt);

	/*
	 * Store NULL at range [0, ULONG_MAX] to a tree with node should
	 * result in an empty tree
	 */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	mas_lock(&mas);
	mas_set_range(&mas, 1, 3);
	mas_store_gfp(&mas, &mas, GFP_KERNEL);
//	MT_BUG_ON(mt, !xa_is_node(mas_root(&mas)));
	mas_set_range(&mas, 0, ULONG_MAX);
	mas_store_gfp(&mas, NULL, GFP_KERNEL);
	MT_BUG_ON(mt, !mtree_empty(mt));
	mas_unlock(&mas);
	mtree_destroy(mt);
}
1475
/*
 * check_root_expand() - Exercise lookups and stores while the tree is
 * empty or holds only root-level data: verify the index/last span
 * reported for both occupied and vacant areas, and that entries with
 * low pointer bits set round-trip unchanged.
 */
static noinline void __init check_root_expand(struct maple_tree *mt)
{
	MA_STATE(mas, mt, 0, 0);
	void *ptr;


	mas_lock(&mas);
	mas_set(&mas, 3);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, ptr != NULL);
	/* An empty tree reports one vacant range covering everything. */
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	/* Any non-NULL pointer works as the stored entry. */
	ptr = &check_prev_entry;
	mas_set(&mas, 1);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 0);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);

	mas_set(&mas, 1);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);

	mas_set(&mas, 2);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	mas_unlock(&mas);
	mtree_destroy(mt);


	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	ptr = &check_prev_entry;
	mas_store_gfp(&mas, ptr, GFP_KERNEL);

	mas_set(&mas, 5);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != NULL);
	/* With a single entry at 0, the vacant span is [1, ULONG_MAX]. */
	MT_BUG_ON(mt, mas.index != 1);
	MT_BUG_ON(mt, mas.last != ULONG_MAX);

	mas_set_range(&mas, 0, 100);
	ptr = mas_walk(&mas);
	MT_BUG_ON(mt, ptr != &check_prev_entry);
	MT_BUG_ON(mt, mas.last != 0);
	mas_unlock(&mas);
	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);

	mas_set(&mas, 0);
	/* A pointer with bit 0 set must be stored and returned verbatim. */
	ptr = (void *)((unsigned long) check_prev_entry | 1UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != 1) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 1UL));

	mas_unlock(&mas);

	mtree_destroy(mt);

	mt_init_flags(mt, 0);
	mas_lock(&mas);
	mas_set(&mas, 0);
	/* Same round-trip check with bit 1 set instead. */
	ptr = (void *)((unsigned long) check_prev_entry | 2UL);
	mas_store_gfp(&mas, ptr, GFP_KERNEL);
	ptr = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, ptr != NULL);
	MT_BUG_ON(mt, (mas.index != ULONG_MAX) && (mas.last != ULONG_MAX));

	mas_set(&mas, 1);
	ptr = mas_prev(&mas, 0);
	MT_BUG_ON(mt, (mas.index != 0) && (mas.last != 0));
	MT_BUG_ON(mt, ptr != (void *)((unsigned long) check_prev_entry | 2UL));


	mas_unlock(&mas);
}
1565
/*
 * check_gap_combining() - Erase and overwrite entries so that adjacent
 * gaps must be combined, then verify mas_empty_area_rev() finds the
 * expected gap.  The seq100 tables encode, per scenario, the indices
 * to erase followed by the (min, max, size) search arguments; seq2000
 * covers multi-level trees and seq400 covers two-level rebalancing.
 */
static noinline void __init check_gap_combining(struct maple_tree *mt)
{
	struct maple_enode *mn1, *mn2;
	void *entry;
	unsigned long singletons = 100;
	static const unsigned long *seq100;
	static const unsigned long seq100_64[] = {
		/* 0-5 */
		74, 75, 76,
		50, 100, 2,

		/* 6-12 */
		44, 45, 46, 43,
		20, 50, 3,

		/* 13-20*/
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq100_32[] = {
		/* 0-5 */
		61, 62, 63,
		50, 100, 2,

		/* 6-12 */
		31, 32, 33, 30,
		20, 50, 3,

		/* 13-20*/
		80, 81, 82,
		76, 2, 79, 85, 4,
	};

	static const unsigned long seq2000[] = {
		1152, 1151,
		1100, 1200, 2,
	};
	static const unsigned long seq400[] = {
		286, 318,
		256, 260, 266, 270, 275, 280, 290, 398,
		286, 310,
	};

	unsigned long index;

	MA_STATE(mas, mt, 0, 0);

	/* 32-bit nodes hold fewer slots, so node boundaries differ. */
	if (MAPLE_32BIT)
		seq100 = seq100_32;
	else
		seq100 = seq100_64;

	index = seq100[0];
	mas_set(&mas, index);
	MT_BUG_ON(mt, !mtree_empty(mt));
	check_seq(mt, singletons, false); /* create 100 singletons. */

	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq100[2]);
	check_load(mt, seq100[2], NULL);
	mtree_test_erase(mt, seq100[1]);
	check_load(mt, seq100[1], NULL);

	rcu_read_lock();
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	mas_next(&mas, ULONG_MAX);
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the test. */

	/*
	 * At this point, there is a gap of 2 at index + 1 between seq100[3] and
	 * seq100[4]. Search for the gap.
	 */
	mt_set_non_kernel(1);
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
					     seq100[5]));
	MT_BUG_ON(mt, mas.index != index + 1);
	rcu_read_unlock();

	mtree_test_erase(mt, seq100[6]);
	check_load(mt, seq100[6], NULL);
	mtree_test_erase(mt, seq100[7]);
	check_load(mt, seq100[7], NULL);
	mtree_test_erase(mt, seq100[8]);
	index = seq100[9];

	rcu_read_lock();
	mas.index = index;
	mas.last = index;
	mas_reset(&mas);
	entry = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index));
	mn1 = mas.node;
	entry = mas_next(&mas, ULONG_MAX);
	MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
	mas_next(&mas, ULONG_MAX); /* go to the next entry. */
	mn2 = mas.node;
	MT_BUG_ON(mt, mn1 == mn2); /* test the next entry is in the next node. */

	/*
	 * At this point, there is a gap of 3 at seq100[6].  Find it by
	 * searching 20 - 50 for size 3.
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
					     seq100[12]));
	MT_BUG_ON(mt, mas.index != seq100[6]);
	rcu_read_unlock();

	/* Erase two adjacent singletons to merge their gaps. */
	mt_set_non_kernel(1);
	mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], xa_mk_value(seq100[14]));
	mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
	check_load(mt, seq100[13], NULL);
	check_load(mt, seq100[14], NULL);

	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
					     seq100[17]));
	MT_BUG_ON(mt, mas.index != seq100[13]);
	mt_validate(mt);
	rcu_read_unlock();

	/*
	 * *DEPRECATED: no retries anymore* Test retry entry in the start of a
	 * gap.
	 */
	mt_set_non_kernel(2);
	mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
	mtree_test_erase(mt, seq100[15]);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
					     seq100[20]));
	rcu_read_unlock();
	MT_BUG_ON(mt, mas.index != seq100[18]);
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 2000 tests are for multi-level tree gaps */
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 2000, false);
	mt_set_non_kernel(1);
	mtree_test_erase(mt, seq2000[0]);
	mtree_test_erase(mt, seq2000[1]);

	mt_set_non_kernel(2);
	mas_reset(&mas);
	rcu_read_lock();
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
					     seq2000[4]));
	MT_BUG_ON(mt, mas.index != seq2000[1]);
	rcu_read_unlock();
	mt_validate(mt);
	mtree_destroy(mt);

	/* seq 400 tests rebalancing over two levels. */
	mt_set_non_kernel(99);
	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
	mt_set_non_kernel(0);
	mtree_destroy(mt);

	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
	check_seq(mt, 400, false);
	mt_set_non_kernel(50);
	/* Repeatedly extend stores up to seq400[9] to force rebalancing. */
	mtree_test_store_range(mt, seq400[2], seq400[9],
			       xa_mk_value(seq400[2]));
	mtree_test_store_range(mt, seq400[3], seq400[9],
			       xa_mk_value(seq400[3]));
	mtree_test_store_range(mt, seq400[4], seq400[9],
			       xa_mk_value(seq400[4]));
	mtree_test_store_range(mt, seq400[5], seq400[9],
			       xa_mk_value(seq400[5]));
	mtree_test_store_range(mt, seq400[0], seq400[9],
			       xa_mk_value(seq400[0]));
	mtree_test_store_range(mt, seq400[6], seq400[9],
			       xa_mk_value(seq400[6]));
	mtree_test_store_range(mt, seq400[7], seq400[9],
			       xa_mk_value(seq400[7]));
	mtree_test_store_range(mt, seq400[8], seq400[9],
			       xa_mk_value(seq400[8]));
	mtree_test_store_range(mt, seq400[10], seq400[11],
			       xa_mk_value(seq400[10]));
	mt_validate(mt);
	mt_set_non_kernel(0);
	mtree_destroy(mt);
}
1763static noinline void __init check_node_overwrite(struct maple_tree *mt)
1764{
1765	int i, max = 4000;
1766
1767	for (i = 0; i < max; i++)
1768		mtree_test_store_range(mt, i*100, i*100 + 50, xa_mk_value(i*100));
1769
1770	mtree_test_store_range(mt, 319951, 367950, NULL);
1771	/*mt_dump(mt, mt_dump_dec); */
1772	mt_validate(mt);
1773}
1774
1775#if defined(BENCH_SLOT_STORE)
1776static noinline void __init bench_slot_store(struct maple_tree *mt)
1777{
1778	int i, brk = 105, max = 1040, brk_start = 100, count = 20000000;
1779
1780	for (i = 0; i < max; i += 10)
1781		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1782
1783	for (i = 0; i < count; i++) {
1784		mtree_store_range(mt, brk, brk, NULL, GFP_KERNEL);
1785		mtree_store_range(mt, brk_start, brk, xa_mk_value(brk),
1786				  GFP_KERNEL);
1787	}
1788}
1789#endif
1790
1791#if defined(BENCH_NODE_STORE)
1792static noinline void __init bench_node_store(struct maple_tree *mt)
1793{
1794	int i, overwrite = 76, max = 240, count = 20000000;
1795
1796	for (i = 0; i < max; i += 10)
1797		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1798
1799	for (i = 0; i < count; i++) {
1800		mtree_store_range(mt, overwrite,  overwrite + 15,
1801				  xa_mk_value(overwrite), GFP_KERNEL);
1802
1803		overwrite += 5;
1804		if (overwrite >= 135)
1805			overwrite = 76;
1806	}
1807}
1808#endif
1809
1810#if defined(BENCH_AWALK)
1811static noinline void __init bench_awalk(struct maple_tree *mt)
1812{
1813	int i, max = 2500, count = 50000000;
1814	MA_STATE(mas, mt, 1470, 1470);
1815
1816	for (i = 0; i < max; i += 10)
1817		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1818
1819	mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);
1820
1821	for (i = 0; i < count; i++) {
1822		mas_empty_area_rev(&mas, 0, 2000, 10);
1823		mas_reset(&mas);
1824	}
1825}
1826#endif
1827#if defined(BENCH_WALK)
1828static noinline void __init bench_walk(struct maple_tree *mt)
1829{
1830	int i, max = 2500, count = 550000000;
1831	MA_STATE(mas, mt, 1470, 1470);
1832
1833	for (i = 0; i < max; i += 10)
1834		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1835
1836	for (i = 0; i < count; i++) {
1837		mas_walk(&mas);
1838		mas_reset(&mas);
1839	}
1840
1841}
1842#endif
1843
1844#if defined(BENCH_LOAD)
1845static noinline void __init bench_load(struct maple_tree *mt)
1846{
1847	int i, max = 2500, count = 550000000;
1848
1849	for (i = 0; i < max; i += 10)
1850		mtree_store_range(mt, i, i + 5, xa_mk_value(i), GFP_KERNEL);
1851
1852	for (i = 0; i < count; i++)
1853		mtree_load(mt, 1470);
1854}
1855#endif
1856
1857#if defined(BENCH_MT_FOR_EACH)
1858static noinline void __init bench_mt_for_each(struct maple_tree *mt)
1859{
1860	int i, count = 1000000;
1861	unsigned long max = 2500, index = 0;
1862	void *entry;
1863
1864	for (i = 0; i < max; i += 5)
1865		mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);
1866
1867	for (i = 0; i < count; i++) {
1868		unsigned long j = 0;
1869
1870		mt_for_each(mt, entry, index, max) {
1871			MT_BUG_ON(mt, entry != xa_mk_value(j));
1872			j += 5;
1873		}
1874
1875		index = 0;
1876	}
1877
1878}
1879#endif
1880
1881#if defined(BENCH_MAS_FOR_EACH)
1882static noinline void __init bench_mas_for_each(struct maple_tree *mt)
1883{
1884	int i, count = 1000000;
1885	unsigned long max = 2500;
1886	void *entry;
1887	MA_STATE(mas, mt, 0, 0);
1888
1889	for (i = 0; i < max; i += 5) {
1890		int gap = 4;
1891
1892		if (i % 30 == 0)
1893			gap = 3;
1894		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1895	}
1896
1897	rcu_read_lock();
1898	for (i = 0; i < count; i++) {
1899		unsigned long j = 0;
1900
1901		mas_for_each(&mas, entry, max) {
1902			MT_BUG_ON(mt, entry != xa_mk_value(j));
1903			j += 5;
1904		}
1905		mas_set(&mas, 0);
1906	}
1907	rcu_read_unlock();
1908
1909}
1910#endif
1911#if defined(BENCH_MAS_PREV)
1912static noinline void __init bench_mas_prev(struct maple_tree *mt)
1913{
1914	int i, count = 1000000;
1915	unsigned long max = 2500;
1916	void *entry;
1917	MA_STATE(mas, mt, 0, 0);
1918
1919	for (i = 0; i < max; i += 5) {
1920		int gap = 4;
1921
1922		if (i % 30 == 0)
1923			gap = 3;
1924		mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
1925	}
1926
1927	rcu_read_lock();
1928	for (i = 0; i < count; i++) {
1929		unsigned long j = 2495;
1930
1931		mas_set(&mas, ULONG_MAX);
1932		while ((entry = mas_prev(&mas, 0)) != NULL) {
1933			MT_BUG_ON(mt, entry != xa_mk_value(j));
1934			j -= 5;
1935		}
1936	}
1937	rcu_read_unlock();
1938
1939}
1940#endif
/*
 * check_forking - simulate the kernel forking sequence with the tree.
 *
 * Build a source tree under an external rwsem, duplicate it with
 * __mt_dup(), walk the duplicate re-storing each entry in place, then
 * validate and destroy both trees.  The source tree's lock is taken
 * first and the new tree's lock is nested inside it.
 */
static noinline void __init check_forking(void)
{
	struct maple_tree mt, newmt;
	int i, nr_entries = 134, ret;
	void *val;
	MA_STATE(mas, &mt, 0, 0);
	MA_STATE(newmas, &newmt, 0, 0);
	struct rw_semaphore mt_lock, newmt_lock;

	init_rwsem(&mt_lock);
	init_rwsem(&newmt_lock);

	/* Both trees use external locks. */
	mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
	mt_set_external_lock(&mt, &mt_lock);

	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
	mt_set_external_lock(&newmt, &newmt_lock);

	down_write(&mt_lock);
	for (i = 0; i <= nr_entries; i++) {
		mas_set_range(&mas, i*10, i*10 + 5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
	ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
	if (ret) {
		pr_err("OOM!");
		BUG_ON(1);
	}

	/* Touch every entry of the duplicate by re-storing it. */
	mas_set(&newmas, 0);
	mas_for_each(&newmas, val, ULONG_MAX)
		mas_store(&newmas, val);

	mas_destroy(&newmas);
	mas_destroy(&mas);
	mt_validate(&newmt);
	__mt_destroy(&newmt);
	__mt_destroy(&mt);
	up_write(&newmt_lock);
	up_write(&mt_lock);
}
1985
/*
 * check_iteration() - Overwrite parts of the current entry from inside
 * a mas_for_each() loop and verify the iterator resumes at the correct
 * next entry afterwards.
 */
static noinline void __init check_iteration(struct maple_tree *mt)
{
	int i, nr_entries = 125;
	void *val;
	MA_STATE(mas, mt, 0, 0);

	/* Entries fill [i * 10, i * 10 + 9] with no gaps. */
	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i * 10, i * 10 + 9,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);

	i = 0;
	mas_lock(&mas);
	mas_for_each(&mas, val, 925) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 92 */
		if (i == 92) {
			mas.index = 925;
			mas.last = 929;
			mas_store(&mas, val);
		}
		i++;
	}
	/* Ensure mas_find() gets the next value */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 785) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite start of entry 78 */
		if (i == 78) {
			mas.index = 780;
			mas.last = 785;
			mas_store(&mas, val);
		} else {
			i++;
		}
	}
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(i));

	mas_set(&mas, 0);
	i = 0;
	mas_for_each(&mas, val, 765) {
		MT_BUG_ON(mt, mas.index != i * 10);
		MT_BUG_ON(mt, mas.last != i * 10 + 9);
		/* Overwrite end of entry 76 and advance to the end */
		if (i == 76) {
			mas.index = 760;
			mas.last = 765;
			mas_store(&mas, val);
		}
		i++;
	}
	/* Make sure the next find returns the one after 765, 766-769 */
	val = mas_find(&mas, ULONG_MAX);
	MT_BUG_ON(mt, val != xa_mk_value(76));
	mas_unlock(&mas);
	mas_destroy(&mas);
	mt_set_non_kernel(0);
}
2052
/*
 * check_mas_store_gfp() - Copy one tree into another entry by entry
 * using mas_store_gfp(), then validate the copy.
 */
static noinline void __init check_mas_store_gfp(struct maple_tree *mt)
{

	struct maple_tree newmt;
	int i, nr_entries = 135;
	void *val;
	MA_STATE(mas, mt, 0, 0);
	MA_STATE(newmas, mt, 0, 0);

	for (i = 0; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 5,
				  xa_mk_value(i), GFP_KERNEL);

	mt_set_non_kernel(99999);
	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
	/* Re-point newmas at the destination tree. */
	newmas.tree = &newmt;
	rcu_read_lock();
	mas_lock(&newmas);
	mas_reset(&newmas);
	mas_set(&mas, 0);
	/* Mirror each source range into the new tree. */
	mas_for_each(&mas, val, ULONG_MAX) {
		newmas.index = mas.index;
		newmas.last = mas.last;
		mas_store_gfp(&newmas, val, GFP_KERNEL);
	}
	mas_unlock(&newmas);
	rcu_read_unlock();
	mt_validate(&newmt);
	mt_set_non_kernel(0);
	mtree_destroy(&newmt);
}
2084
2085#if defined(BENCH_FORK)
/*
 * bench_forking() - Benchmark the fork sequence: duplicate the source
 * tree with __mt_dup() and re-store every entry, nr_fork times.
 */
static noinline void __init bench_forking(void)
{
	struct maple_tree mt, newmt;
	int i, nr_entries = 134, nr_fork = 80000, ret;
	void *val;
	MA_STATE(mas, &mt, 0, 0);
	MA_STATE(newmas, &newmt, 0, 0);
	struct rw_semaphore mt_lock, newmt_lock;

	init_rwsem(&mt_lock);
	init_rwsem(&newmt_lock);

	mt_init_flags(&mt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
	mt_set_external_lock(&mt, &mt_lock);

	down_write(&mt_lock);
	for (i = 0; i <= nr_entries; i++) {
		mas_set_range(&mas, i*10, i*10 + 5);
		mas_store_gfp(&mas, xa_mk_value(i), GFP_KERNEL);
	}

	for (i = 0; i < nr_fork; i++) {
		mt_init_flags(&newmt,
			      MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
		mt_set_external_lock(&newmt, &newmt_lock);

		/* The new tree's lock nests inside the source tree's. */
		down_write_nested(&newmt_lock, SINGLE_DEPTH_NESTING);
		ret = __mt_dup(&mt, &newmt, GFP_KERNEL);
		if (ret) {
			pr_err("OOM!");
			BUG_ON(1);
		}

		/* Touch every entry of the duplicate by re-storing it. */
		mas_set(&newmas, 0);
		mas_for_each(&newmas, val, ULONG_MAX)
			mas_store(&newmas, val);

		mas_destroy(&newmas);
		mt_validate(&newmt);
		__mt_destroy(&newmt);
		up_write(&newmt_lock);
	}
	mas_destroy(&mas);
	__mt_destroy(&mt);
	up_write(&mt_lock);
}
2132#endif
2133
2134static noinline void __init next_prev_test(struct maple_tree *mt)
2135{
2136	int i, nr_entries;
2137	void *val;
2138	MA_STATE(mas, mt, 0, 0);
2139	struct maple_enode *mn;
2140	static const unsigned long *level2;
2141	static const unsigned long level2_64[] = { 707, 1000, 710, 715, 720,
2142						   725};
2143	static const unsigned long level2_32[] = { 1747, 2000, 1750, 1755,
2144						   1760, 1765};
2145	unsigned long last_index;
2146
2147	if (MAPLE_32BIT) {
2148		nr_entries = 500;
2149		level2 = level2_32;
2150		last_index = 0x138e;
2151	} else {
2152		nr_entries = 200;
2153		level2 = level2_64;
2154		last_index = 0x7d6;
2155	}
2156
2157	for (i = 0; i <= nr_entries; i++)
2158		mtree_store_range(mt, i*10, i*10 + 5,
2159				  xa_mk_value(i), GFP_KERNEL);
2160
2161	mas_lock(&mas);
2162	for (i = 0; i <= nr_entries / 2; i++) {
2163		mas_next(&mas, 1000);
2164		if (mas_is_none(&mas))
2165			break;
2166
2167	}
2168	mas_reset(&mas);
2169	mas_set(&mas, 0);
2170	i = 0;
2171	mas_for_each(&mas, val, 1000) {
2172		i++;
2173	}
2174
2175	mas_reset(&mas);
2176	mas_set(&mas, 0);
2177	i = 0;
2178	mas_for_each(&mas, val, 1000) {
2179		mas_pause(&mas);
2180		i++;
2181	}
2182
2183	/*
2184	 * 680 - 685 = 0x61a00001930c
2185	 * 686 - 689 = NULL;
2186	 * 690 - 695 = 0x61a00001930c
2187	 * Check simple next/prev
2188	 */
2189	mas_set(&mas, 686);
2190	val = mas_walk(&mas);
2191	MT_BUG_ON(mt, val != NULL);
2192
2193	val = mas_next(&mas, 1000);
2194	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2195	MT_BUG_ON(mt, mas.index != 690);
2196	MT_BUG_ON(mt, mas.last != 695);
2197
2198	val = mas_prev(&mas, 0);
2199	MT_BUG_ON(mt, val != xa_mk_value(680 / 10));
2200	MT_BUG_ON(mt, mas.index != 680);
2201	MT_BUG_ON(mt, mas.last != 685);
2202
2203	val = mas_next(&mas, 1000);
2204	MT_BUG_ON(mt, val != xa_mk_value(690 / 10));
2205	MT_BUG_ON(mt, mas.index != 690);
2206	MT_BUG_ON(mt, mas.last != 695);
2207
2208	val = mas_next(&mas, 1000);
2209	MT_BUG_ON(mt, val != xa_mk_value(700 / 10));
2210	MT_BUG_ON(mt, mas.index != 700);
2211	MT_BUG_ON(mt, mas.last != 705);
2212
2213	/* Check across node boundaries of the tree */
2214	mas_set(&mas, 70);
2215	val = mas_walk(&mas);
2216	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2217	MT_BUG_ON(mt, mas.index != 70);
2218	MT_BUG_ON(mt, mas.last != 75);
2219
2220	val = mas_next(&mas, 1000);
2221	MT_BUG_ON(mt, val != xa_mk_value(80 / 10));
2222	MT_BUG_ON(mt, mas.index != 80);
2223	MT_BUG_ON(mt, mas.last != 85);
2224
2225	val = mas_prev(&mas, 70);
2226	MT_BUG_ON(mt, val != xa_mk_value(70 / 10));
2227	MT_BUG_ON(mt, mas.index != 70);
2228	MT_BUG_ON(mt, mas.last != 75);
2229
2230	/* Check across two levels of the tree */
2231	mas_reset(&mas);
2232	mas_set(&mas, level2[0]);
2233	val = mas_walk(&mas);
2234	MT_BUG_ON(mt, val != NULL);
2235	val = mas_next(&mas, level2[1]);
2236	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2237	MT_BUG_ON(mt, mas.index != level2[2]);
2238	MT_BUG_ON(mt, mas.last != level2[3]);
2239	mn = mas.node;
2240
2241	val = mas_next(&mas, level2[1]);
2242	MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
2243	MT_BUG_ON(mt, mas.index != level2[4]);
2244	MT_BUG_ON(mt, mas.last != level2[5]);
2245	MT_BUG_ON(mt, mn == mas.node);
2246
2247	val = mas_prev(&mas, 0);
2248	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
2249	MT_BUG_ON(mt, mas.index != level2[2]);
2250	MT_BUG_ON(mt, mas.last != level2[3]);
2251
2252	/* Check running off the end and back on */
2253	mas_set(&mas, nr_entries * 10);
2254	val = mas_walk(&mas);
2255	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2256	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2257	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2258
2259	val = mas_next(&mas, ULONG_MAX);
2260	MT_BUG_ON(mt, val != NULL);
2261	MT_BUG_ON(mt, mas.index != last_index);
2262	MT_BUG_ON(mt, mas.last != ULONG_MAX);
2263
2264	val = mas_prev(&mas, 0);
2265	MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
2266	MT_BUG_ON(mt, mas.index != (nr_entries * 10));
2267	MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
2268
2269	/* Check running off the start and back on */
2270	mas_reset(&mas);
2271	mas_set(&mas, 10);
2272	val = mas_walk(&mas);
2273	MT_BUG_ON(mt, val != xa_mk_value(1));
2274	MT_BUG_ON(mt, mas.index != 10);
2275	MT_BUG_ON(mt, mas.last != 15);
2276
2277	val = mas_prev(&mas, 0);
2278	MT_BUG_ON(mt, val != xa_mk_value(0));
2279	MT_BUG_ON(mt, mas.index != 0);
2280	MT_BUG_ON(mt, mas.last != 5);
2281
2282	val = mas_prev(&mas, 0);
2283	MT_BUG_ON(mt, val != NULL);
2284	MT_BUG_ON(mt, mas.index != 0);
2285	MT_BUG_ON(mt, mas.last != 5);
2286	MT_BUG_ON(mt, !mas_is_underflow(&mas));
2287
2288	mas.index = 0;
2289	mas.last = 5;
2290	mas_store(&mas, NULL);
2291	mas_reset(&mas);
2292	mas_set(&mas, 10);
2293	mas_walk(&mas);
2294
2295	val = mas_prev(&mas, 0);
2296	MT_BUG_ON(mt, val != NULL);
2297	MT_BUG_ON(mt, mas.index != 0);
2298	MT_BUG_ON(mt, mas.last != 9);
2299	mas_unlock(&mas);
2300
2301	mtree_destroy(mt);
2302
2303	mt_init(mt);
2304	mtree_store_range(mt, 0, 0, xa_mk_value(0), GFP_KERNEL);
2305	mtree_store_range(mt, 5, 5, xa_mk_value(5), GFP_KERNEL);
2306	rcu_read_lock();
2307	mas_set(&mas, 5);
2308	val = mas_prev(&mas, 4);
2309	MT_BUG_ON(mt, val != NULL);
2310	rcu_read_unlock();
2311}
2312
2313
2314
2315/* Test spanning writes that require balancing right sibling or right cousin */
2316static noinline void __init check_spanning_relatives(struct maple_tree *mt)
2317{
2318
2319	unsigned long i, nr_entries = 1000;
2320
2321	for (i = 0; i <= nr_entries; i++)
2322		mtree_store_range(mt, i*10, i*10 + 5,
2323				  xa_mk_value(i), GFP_KERNEL);
2324
2325
2326	mtree_store_range(mt, 9365, 9955, NULL, GFP_KERNEL);
2327}
2328
/*
 * check_fuzzer() - Replay operation sequences found by fuzzing that once
 * caused crashes or corruption in the maple tree.
 * @mt: A tree initialised by the caller for scenario 1; re-initialised
 * here with mt_init_flags(mt, 0) between the remaining scenarios.
 *
 * Each numbered scenario below is a verbatim reproducer: the exact order
 * and values of the stores, inserts, loads, and erases are what triggered
 * the original bug, so they must not be reordered or "simplified".  The
 * comment at the top of each scenario records the bug and the fix.
 * Scenario 9 leaves the tree populated; no mtree_destroy() is called
 * here for it.
 */
static noinline void __init check_fuzzer(struct maple_tree *mt)
{
	/*
	 * 1. Causes a spanning rebalance of a single root node.
	 * Fixed by setting the correct limit in mast_cp_to_nodes() when the
	 * entire right side is consumed.
	 */
	mtree_test_insert(mt, 88, (void *)0xb1);
	mtree_test_insert(mt, 84, (void *)0xa9);
	mtree_test_insert(mt, 2,  (void *)0x5);
	mtree_test_insert(mt, 4,  (void *)0x9);
	mtree_test_insert(mt, 14, (void *)0x1d);
	mtree_test_insert(mt, 7,  (void *)0xf);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_insert(mt, 18, (void *)0x25);
	mtree_test_store_range(mt, 8, 18, (void *)0x11);
	mtree_destroy(mt);


	/*
	 * 2. Cause a spanning rebalance of two nodes in root.
	 * Fixed by setting mast->r->max correctly.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 87, (void *)0xaf);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 4);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_store(mt, 8, (void *)0x11);
	mtree_test_store(mt, 44, (void *)0x59);
	mtree_test_store(mt, 68, (void *)0x89);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 43, (void *)0x57);
	mtree_test_insert(mt, 24, (void *)0x31);
	mtree_test_insert(mt, 844, (void *)0x699);
	mtree_test_store(mt, 84, (void *)0xa9);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 4);
	mtree_test_load(mt, 5);
	mtree_test_erase(mt, 0);
	mtree_destroy(mt);

	/*
	 * 3. Cause a node overflow on copy
	 * Fixed by using the correct check for node size in mas_wr_modify()
	 * Also discovered issue with metadata setting.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 5);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 4);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 5);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 5);
	mtree_test_erase(mt, 4);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_store(mt, 444, (void *)0x379);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 0);
	mtree_test_store(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 0);
	mtree_destroy(mt);

	/*
	 * 4. spanning store failure due to writing incorrect pivot value at
	 * last slot.
	 * Fixed by setting mast->r->max correctly in mast_cp_to_nodes()
	 *
	 */
	mt_init_flags(mt, 0);
	mtree_test_insert(mt, 261, (void *)0x20b);
	mtree_test_store(mt, 516, (void *)0x409);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_insert(mt, 1256, (void *)0x9d1);
	mtree_test_store(mt, 4, (void *)0x9);
	mtree_test_erase(mt, 1);
	mtree_test_store(mt, 56, (void *)0x71);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 24, (void *)0x31);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2263, (void *)0x11af);
	mtree_test_insert(mt, 446, (void *)0x37d);
	mtree_test_store_range(mt, 6, 45, (void *)0xd);
	mtree_test_store_range(mt, 3, 446, (void *)0x7);
	mtree_destroy(mt);

	/*
	 * 5. mas_wr_extend_null() may overflow slots.
	 * Fix by checking against wr_mas->node_end.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 48, (void *)0x61);
	mtree_test_store(mt, 3, (void *)0x7);
	mtree_test_load(mt, 0);
	mtree_test_store(mt, 88, (void *)0xb1);
	mtree_test_store(mt, 81, (void *)0xa3);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 2480, (void *)0x1361);
	mtree_test_insert(mt, ULONG_MAX,
			  (void *)0xffffffffffffffff);
	mtree_test_erase(mt, ULONG_MAX);
	mtree_destroy(mt);

	/*
	 * 6.  When reusing a node with an implied pivot and the node is
	 * shrinking, old data would be left in the implied slot
	 * Fixed by checking the last pivot for the mas->max and clear
	 * accordingly.  This only affected the left-most node as that node is
	 * the only one allowed to end in NULL.
	 */
	mt_init_flags(mt, 0);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_load(mt, 2);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_load(mt, 2);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_store_range(mt, 4, 62, (void *)0x9);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 0, (void *)0x3);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 15, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_insert(mt, 122, (void *)0xf5);
	mtree_test_store(mt, 3, (void *)0x7);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_store_range(mt, 0, 1, (void *)0x1);
	mtree_test_insert(mt, 85, (void *)0xab);
	mtree_test_insert(mt, 72, (void *)0x91);
	mtree_test_insert(mt, 81, (void *)0xa3);
	mtree_test_insert(mt, 726, (void *)0x5ad);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 51, (void *)0x67);
	mtree_test_insert(mt, 611, (void *)0x4c7);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 26, 1, (void *)0x35);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 22, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 53);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 1, (void *)0x3);
	mtree_test_insert(mt, 222, (void *)0x1bd);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 0);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 3, (void *)0x7);
	mtree_test_store(mt, 621, (void *)0x4db);
	mtree_test_insert(mt, 0, (void *)0x1);
	mtree_test_erase(mt, 5);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store(mt, 62, (void *)0x7d);
	mtree_test_erase(mt, 62);
	mtree_test_store_range(mt, 1, 0, (void *)0x3);
	mtree_test_insert(mt, 22, (void *)0x2d);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_store_range(mt, 4, 62, (void *)0x9);
	mtree_test_erase(mt, 62);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 22, (void *)0x3);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_load(mt, 53);
	mtree_test_load(mt, 1);
	mtree_test_store_range(mt, 1, 1, (void *)0x3);
	mtree_test_insert(mt, 222, (void *)0x1bd);
	mtree_test_insert(mt, 485, (void *)0x3cb);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 0);
	mtree_test_load(mt, 0);
	mtree_destroy(mt);

	/*
	 * 7. Previous fix was incomplete, fix mas_resuse_node() clearing of old
	 * data by overwriting it first - that way metadata is of no concern.
	 */
	mt_init_flags(mt, 0);
	mtree_test_load(mt, 1);
	mtree_test_insert(mt, 102, (void *)0xcd);
	mtree_test_erase(mt, 2);
	mtree_test_erase(mt, 0);
	mtree_test_load(mt, 0);
	mtree_test_insert(mt, 4, (void *)0x9);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 110, (void *)0xdd);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 5, 0, (void *)0xb);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store(mt, 112, (void *)0xe1);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 110, 2, (void *)0xdd);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 22);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 210, (void *)0x1a5);
	mtree_test_store_range(mt, 0, 2, (void *)0x1);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_erase(mt, 22);
	mtree_test_erase(mt, 1);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 112);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_erase(mt, 2);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert_range(mt, 1, 2, (void *)0x3);
	mtree_test_erase(mt, 0);
	mtree_test_erase(mt, 2);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_load(mt, 112);
	mtree_test_store_range(mt, 110, 12, (void *)0xdd);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 110);
	mtree_test_insert_range(mt, 4, 71, (void *)0x9);
	mtree_test_load(mt, 2);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_insert_range(mt, 11, 22, (void *)0x17);
	mtree_test_erase(mt, 12);
	mtree_test_store(mt, 2, (void *)0x5);
	mtree_test_load(mt, 22);
	mtree_destroy(mt);


	/*
	 * 8.  When rebalancing or spanning_rebalance(), the max of the new node
	 * may be set incorrectly to the final pivot and not the right max.
	 * Fix by setting the left max to orig right max if the entire node is
	 * consumed.
	 */
	mt_init_flags(mt, 0);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 6716, (void *)0x3479);
	mtree_test_store(mt, 61, (void *)0x7b);
	mtree_test_insert(mt, 13, (void *)0x1b);
	mtree_test_store(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 0);
	mtree_test_erase(mt, 67167);
	mtree_test_insert_range(mt, 6, 7167, (void *)0xd);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_erase(mt, 67);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 667167);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_erase(mt, 1);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_erase(mt, 67);
	mtree_test_insert(mt, 15, (void *)0x1f);
	mtree_test_insert(mt, 67167, (void *)0x20cbf);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_load(mt, 7);
	mtree_test_insert(mt, 16, (void *)0x21);
	mtree_test_insert(mt, 36, (void *)0x49);
	mtree_test_store(mt, 67, (void *)0x87);
	mtree_test_store(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 367, (void *)0x2df);
	mtree_test_insert(mt, 115, (void *)0xe7);
	mtree_test_store(mt, 0, (void *)0x1);
	mtree_test_store_range(mt, 1, 3, (void *)0x3);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 67167);
	mtree_test_insert_range(mt, 6, 47, (void *)0xd);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert_range(mt, 1, 67, (void *)0x3);
	mtree_test_load(mt, 67);
	mtree_test_insert(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 67167);
	mtree_destroy(mt);

	/*
	 * 9. spanning store to the end of data caused an invalid metadata
	 * length which resulted in a crash eventually.
	 * Fix by checking if there is a value in pivot before incrementing the
	 * metadata end in mab_mas_cp().  To ensure this doesn't happen again,
	 * abstract the two locations this happens into a function called
	 * mas_leaf_set_meta().
	 */
	mt_init_flags(mt, 0);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 12, (void *)0x19);
	mtree_test_insert(mt, 6, (void *)0xd);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, 91, (void *)0xb7);
	mtree_test_insert(mt, 18, (void *)0x25);
	mtree_test_insert(mt, 81, (void *)0xa3);
	mtree_test_store_range(mt, 0, 128, (void *)0x1);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_erase(mt, 8);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
	mtree_test_erase(mt, ULONG_MAX - 10);
	mtree_test_store_range(mt, 0, 281, (void *)0x1);
	mtree_test_erase(mt, 2);
	mtree_test_insert(mt, 1211, (void *)0x977);
	mtree_test_insert(mt, 111, (void *)0xdf);
	mtree_test_insert(mt, 13, (void *)0x1b);
	mtree_test_insert(mt, 211, (void *)0x1a7);
	mtree_test_insert(mt, 11, (void *)0x17);
	mtree_test_insert(mt, 5, (void *)0xb);
	mtree_test_insert(mt, 1218, (void *)0x985);
	mtree_test_insert(mt, 61, (void *)0x7b);
	mtree_test_store(mt, 1, (void *)0x3);
	mtree_test_insert(mt, 121, (void *)0xf3);
	mtree_test_insert(mt, 8, (void *)0x11);
	mtree_test_insert(mt, 21, (void *)0x2b);
	mtree_test_insert(mt, 2, (void *)0x5);
	mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
	mtree_test_erase(mt, ULONG_MAX - 10);
}
2724
2725/* duplicate the tree with a specific gap */
2726static noinline void __init check_dup_gaps(struct maple_tree *mt,
2727				    unsigned long nr_entries, bool zero_start,
2728				    unsigned long gap)
2729{
2730	unsigned long i = 0;
2731	struct maple_tree newmt;
2732	int ret;
2733	void *tmp;
2734	MA_STATE(mas, mt, 0, 0);
2735	MA_STATE(newmas, &newmt, 0, 0);
2736	struct rw_semaphore newmt_lock;
2737
2738	init_rwsem(&newmt_lock);
2739	mt_set_external_lock(&newmt, &newmt_lock);
2740
2741	if (!zero_start)
2742		i = 1;
2743
2744	mt_zero_nr_tallocated();
2745	for (; i <= nr_entries; i++)
2746		mtree_store_range(mt, i*10, (i+1)*10 - gap,
2747				  xa_mk_value(i), GFP_KERNEL);
2748
2749	mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE | MT_FLAGS_LOCK_EXTERN);
2750	mt_set_non_kernel(99999);
2751	down_write(&newmt_lock);
2752	ret = mas_expected_entries(&newmas, nr_entries);
2753	mt_set_non_kernel(0);
2754	MT_BUG_ON(mt, ret != 0);
2755
2756	rcu_read_lock();
2757	mas_for_each(&mas, tmp, ULONG_MAX) {
2758		newmas.index = mas.index;
2759		newmas.last = mas.last;
2760		mas_store(&newmas, tmp);
2761	}
2762	rcu_read_unlock();
2763	mas_destroy(&newmas);
2764
2765	__mt_destroy(&newmt);
2766	up_write(&newmt_lock);
2767}
2768
2769/* Duplicate many sizes of trees.  Mainly to test expected entry values */
2770static noinline void __init check_dup(struct maple_tree *mt)
2771{
2772	int i;
2773	int big_start = 100010;
2774
2775	/* Check with a value at zero */
2776	for (i = 10; i < 1000; i++) {
2777		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2778		check_dup_gaps(mt, i, true, 5);
2779		mtree_destroy(mt);
2780		rcu_barrier();
2781	}
2782
2783	cond_resched();
2784	mt_cache_shrink();
2785	/* Check with a value at zero, no gap */
2786	for (i = 1000; i < 2000; i++) {
2787		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2788		check_dup_gaps(mt, i, true, 0);
2789		mtree_destroy(mt);
2790		rcu_barrier();
2791	}
2792
2793	cond_resched();
2794	mt_cache_shrink();
2795	/* Check with a value at zero and unreasonably large */
2796	for (i = big_start; i < big_start + 10; i++) {
2797		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2798		check_dup_gaps(mt, i, true, 5);
2799		mtree_destroy(mt);
2800		rcu_barrier();
2801	}
2802
2803	cond_resched();
2804	mt_cache_shrink();
2805	/* Small to medium size not starting at zero*/
2806	for (i = 200; i < 1000; i++) {
2807		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2808		check_dup_gaps(mt, i, false, 5);
2809		mtree_destroy(mt);
2810		rcu_barrier();
2811	}
2812
2813	cond_resched();
2814	mt_cache_shrink();
2815	/* Unreasonably large not starting at zero*/
2816	for (i = big_start; i < big_start + 10; i++) {
2817		mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
2818		check_dup_gaps(mt, i, false, 5);
2819		mtree_destroy(mt);
2820		rcu_barrier();
2821		cond_resched();
2822		mt_cache_shrink();
2823	}
2824
2825	/* Check non-allocation tree not starting at zero */
2826	for (i = 1500; i < 3000; i++) {
2827		mt_init_flags(mt, 0);
2828		check_dup_gaps(mt, i, false, 5);
2829		mtree_destroy(mt);
2830		rcu_barrier();
2831		cond_resched();
2832		if (i % 2 == 0)
2833			mt_cache_shrink();
2834	}
2835
2836	mt_cache_shrink();
2837	/* Check non-allocation tree starting at zero */
2838	for (i = 200; i < 1000; i++) {
2839		mt_init_flags(mt, 0);
2840		check_dup_gaps(mt, i, true, 5);
2841		mtree_destroy(mt);
2842		rcu_barrier();
2843		cond_resched();
2844	}
2845
2846	mt_cache_shrink();
2847	/* Unreasonably large */
2848	for (i = big_start + 5; i < big_start + 10; i++) {
2849		mt_init_flags(mt, 0);
2850		check_dup_gaps(mt, i, true, 5);
2851		mtree_destroy(mt);
2852		rcu_barrier();
2853		mt_cache_shrink();
2854		cond_resched();
2855	}
2856}
2857
2858static noinline void __init check_bnode_min_spanning(struct maple_tree *mt)
2859{
2860	int i = 50;
2861	MA_STATE(mas, mt, 0, 0);
2862
2863	mt_set_non_kernel(9999);
2864	mas_lock(&mas);
2865	do {
2866		mas_set_range(&mas, i*10, i*10+9);
2867		mas_store(&mas, check_bnode_min_spanning);
2868	} while (i--);
2869
2870	mas_set_range(&mas, 240, 509);
2871	mas_store(&mas, NULL);
2872	mas_unlock(&mas);
2873	mas_destroy(&mas);
2874	mt_set_non_kernel(0);
2875}
2876
/*
 * check_empty_area_window() - Verify mas_empty_area() and
 * mas_empty_area_rev() honour both the min/max window and the size.
 * @mt: An empty maple tree; the caller destroys it afterwards.
 *
 * Layout after setup: indices 10-209 are occupied except for the hole
 * punched at 160-169; 0-9 and everything above 209 are free.  Every
 * assertion below depends on exactly this layout, so the setup and the
 * checks must not be altered independently.
 */
static noinline void __init check_empty_area_window(struct maple_tree *mt)
{
	unsigned long i, nr_entries = 20;
	MA_STATE(mas, mt, 0, 0);

	/* Occupy [10, 209] in ten-wide ranges. */
	for (i = 1; i <= nr_entries; i++)
		mtree_store_range(mt, i*10, i*10 + 9,
				  xa_mk_value(i), GFP_KERNEL);

	/* Create another hole besides the one at 0 */
	mtree_store_range(mt, 160, 169, NULL, GFP_KERNEL);

	/* Check lower bounds that don't fit */
	rcu_read_lock();
	/* Only 0-9 is free below 90; size 10 from min 5 cannot fit. */
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 10) != -EBUSY);

	/* Size 5 fits in 0-9 but not with min raised to 6 (only 6-9 left). */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 6, 90, 5) != -EBUSY);

	/* Check lower bound that does fit */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 90, 5) != 0);
	MT_BUG_ON(mt, mas.index != 5);
	MT_BUG_ON(mt, mas.last != 9);
	rcu_read_unlock();

	/* Check one gap that doesn't fit and one that does */
	rcu_read_lock();
	mas_reset(&mas);
	/* Reverse search: the 210-217 tail is too small for 9, 161-169 fits. */
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 5, 217, 9) != 0);
	MT_BUG_ON(mt, mas.index != 161);
	MT_BUG_ON(mt, mas.last != 169);

	/* Check one gap that does fit above the min */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 3) != 0);
	MT_BUG_ON(mt, mas.index != 216);
	MT_BUG_ON(mt, mas.last != 218);

	/* Check size that doesn't fit any gap */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 218, 16) != -EBUSY);

	/*
	 * Check size that doesn't fit the lower end of the window but
	 * does fit the gap
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 167, 200, 4) != -EBUSY);

	/*
	 * Check size that doesn't fit the upper end of the window but
	 * does fit the gap
	 */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area_rev(&mas, 100, 162, 4) != -EBUSY);

	/* Check mas_empty_area forward */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 9) != 0);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 8);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 4) != 0);
	MT_BUG_ON(mt, mas.index != 0);
	MT_BUG_ON(mt, mas.last != 3);

	/* Size 11 exceeds the 0-9 gap and nothing else is free below 100. */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 100, 11) != -EBUSY);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 5, 100, 6) != -EBUSY);

	/* A size larger than the whole window is invalid, not merely busy. */
	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 0, 8, 10) != -EINVAL);

	mas_reset(&mas);
	mas_empty_area(&mas, 100, 165, 3);

	mas_reset(&mas);
	MT_BUG_ON(mt, mas_empty_area(&mas, 100, 163, 6) != -EBUSY);
	rcu_read_unlock();
}
2961
2962static noinline void __init check_empty_area_fill(struct maple_tree *mt)
2963{
2964	const unsigned long max = 0x25D78000;
2965	unsigned long size;
2966	int loop, shift;
2967	MA_STATE(mas, mt, 0, 0);
2968
2969	mt_set_non_kernel(99999);
2970	for (shift = 12; shift <= 16; shift++) {
2971		loop = 5000;
2972		size = 1 << shift;
2973		while (loop--) {
2974			mas_set(&mas, 0);
2975			mas_lock(&mas);
2976			MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != 0);
2977			MT_BUG_ON(mt, mas.last != mas.index + size - 1);
2978			mas_store_gfp(&mas, (void *)size, GFP_KERNEL);
2979			mas_unlock(&mas);
2980			mas_reset(&mas);
2981		}
2982	}
2983
2984	/* No space left. */
2985	size = 0x1000;
2986	rcu_read_lock();
2987	MT_BUG_ON(mt, mas_empty_area(&mas, 0, max, size) != -EBUSY);
2988	rcu_read_unlock();
2989
2990	/* Fill a depth 3 node to the maximum */
2991	for (unsigned long i = 629440511; i <= 629440800; i += 6)
2992		mtree_store_range(mt, i, i + 5, (void *)i, GFP_KERNEL);
2993	/* Make space in the second-last depth 4 node */
2994	mtree_erase(mt, 631668735);
2995	/* Make space in the last depth 4 node */
2996	mtree_erase(mt, 629506047);
2997	mas_reset(&mas);
2998	/* Search from just after the gap in the second-last depth 4 */
2999	rcu_read_lock();
3000	MT_BUG_ON(mt, mas_empty_area(&mas, 629506048, 690000000, 0x5000) != 0);
3001	rcu_read_unlock();
3002	mt_set_non_kernel(0);
3003}
3004
3005/*
3006 * Check MAS_START, MAS_PAUSE, active (implied), and MAS_NONE transitions.
3007 *
3008 * The table below shows the single entry tree (0-0 pointer) and normal tree
3009 * with nodes.
3010 *
3011 * Function	ENTRY	Start		Result		index & last
3012 *     ┬          ┬       ┬               ┬                ┬
3013 *     │          │       │               │                └─ the final range
3014 *     │          │       │               └─ The node value after execution
3015 *     │          │       └─ The node value before execution
3016 *     │          └─ If the entry exists or does not exists (DNE)
3017 *     └─ The function name
3018 *
3019 * Function	ENTRY	Start		Result		index & last
3020 * mas_next()
3021 *  - after last
3022 *			Single entry tree at 0-0
3023 *			------------------------
3024 *		DNE	MAS_START	MAS_NONE	1 - oo
3025 *		DNE	MAS_PAUSE	MAS_NONE	1 - oo
3026 *		DNE	MAS_ROOT	MAS_NONE	1 - oo
3027 *			when index = 0
3028 *		DNE	MAS_NONE	MAS_ROOT	0
3029 *			when index > 0
3030 *		DNE	MAS_NONE	MAS_NONE	1 - oo
3031 *
3032 *			Normal tree
3033 *			-----------
3034 *		exists	MAS_START	active		range
3035 *		DNE	MAS_START	active		set to last range
3036 *		exists	MAS_PAUSE	active		range
3037 *		DNE	MAS_PAUSE	active		set to last range
3038 *		exists	MAS_NONE	active		range
3039 *		exists	active		active		range
3040 *		DNE	active		active		set to last range
3041 *		ERANGE	active		MAS_OVERFLOW	last range
3042 *
3043 * Function	ENTRY	Start		Result		index & last
3044 * mas_prev()
3045 * - before index
3046 *			Single entry tree at 0-0
3047 *			------------------------
3048 *				if index > 0
3049 *		exists	MAS_START	MAS_ROOT	0
3050 *		exists	MAS_PAUSE	MAS_ROOT	0
3051 *		exists	MAS_NONE	MAS_ROOT	0
3052 *
3053 *				if index == 0
3054 *		DNE	MAS_START	MAS_NONE	0
3055 *		DNE	MAS_PAUSE	MAS_NONE	0
3056 *		DNE	MAS_NONE	MAS_NONE	0
3057 *		DNE	MAS_ROOT	MAS_NONE	0
3058 *
3059 *			Normal tree
3060 *			-----------
3061 *		exists	MAS_START	active		range
3062 *		DNE	MAS_START	active		set to min
3063 *		exists	MAS_PAUSE	active		range
3064 *		DNE	MAS_PAUSE	active		set to min
3065 *		exists	MAS_NONE	active		range
3066 *		DNE	MAS_NONE	MAS_NONE	set to min
3067 *		any	MAS_ROOT	MAS_NONE	0
3068 *		exists	active		active		range
3069 *		DNE	active		active		last range
3070 *		ERANGE	active		MAS_UNDERFLOW	last range
3071 *
3072 * Function	ENTRY	Start		Result		index & last
3073 * mas_find()
3074 *  - at index or next
3075 *			Single entry tree at 0-0
3076 *			------------------------
3077 *				if index >  0
3078 *		DNE	MAS_START	MAS_NONE	0
3079 *		DNE	MAS_PAUSE	MAS_NONE	0
3080 *		DNE	MAS_ROOT	MAS_NONE	0
3081 *		DNE	MAS_NONE	MAS_NONE	1
3082 *				if index ==  0
3083 *		exists	MAS_START	MAS_ROOT	0
3084 *		exists	MAS_PAUSE	MAS_ROOT	0
3085 *		exists	MAS_NONE	MAS_ROOT	0
3086 *
3087 *			Normal tree
3088 *			-----------
3089 *		exists	MAS_START	active		range
3090 *		DNE	MAS_START	active		set to max
3091 *		exists	MAS_PAUSE	active		range
3092 *		DNE	MAS_PAUSE	active		set to max
3093 *		exists	MAS_NONE	active		range (start at last)
3094 *		exists	active		active		range
3095 *		DNE	active		active		last range (max < last)
3096 *
3097 * Function	ENTRY	Start		Result		index & last
3098 * mas_find_rev()
3099 *  - at index or before
3100 *			Single entry tree at 0-0
3101 *			------------------------
3102 *				if index >  0
3103 *		exists	MAS_START	MAS_ROOT	0
3104 *		exists	MAS_PAUSE	MAS_ROOT	0
3105 *		exists	MAS_NONE	MAS_ROOT	0
3106 *				if index ==  0
3107 *		DNE	MAS_START	MAS_NONE	0
3108 *		DNE	MAS_PAUSE	MAS_NONE	0
3109 *		DNE	MAS_NONE	MAS_NONE	0
3110 *		DNE	MAS_ROOT	MAS_NONE	0
3111 *
3112 *			Normal tree
3113 *			-----------
3114 *		exists	MAS_START	active		range
3115 *		DNE	MAS_START	active		set to min
3116 *		exists	MAS_PAUSE	active		range
3117 *		DNE	MAS_PAUSE	active		set to min
3118 *		exists	MAS_NONE	active		range (start at index)
3119 *		exists	active		active		range
3120 *		DNE	active		active		last range (min > index)
3121 *
3122 * Function	ENTRY	Start		Result		index & last
3123 * mas_walk()
3124 * - Look up index
3125 *			Single entry tree at 0-0
3126 *			------------------------
3127 *				if index >  0
3128 *		DNE	MAS_START	MAS_ROOT	1 - oo
3129 *		DNE	MAS_PAUSE	MAS_ROOT	1 - oo
3130 *		DNE	MAS_NONE	MAS_ROOT	1 - oo
3131 *		DNE	MAS_ROOT	MAS_ROOT	1 - oo
3132 *				if index ==  0
3133 *		exists	MAS_START	MAS_ROOT	0
3134 *		exists	MAS_PAUSE	MAS_ROOT	0
3135 *		exists	MAS_NONE	MAS_ROOT	0
3136 *		exists	MAS_ROOT	MAS_ROOT	0
3137 *
3138 *			Normal tree
3139 *			-----------
3140 *		exists	MAS_START	active		range
3141 *		DNE	MAS_START	active		range of NULL
3142 *		exists	MAS_PAUSE	active		range
3143 *		DNE	MAS_PAUSE	active		range of NULL
3144 *		exists	MAS_NONE	active		range
3145 *		DNE	MAS_NONE	active		range of NULL
3146 *		exists	active		active		range
3147 *		DNE	active		active		range of NULL
3148 */
3149
3150static noinline void __init check_state_handling(struct maple_tree *mt)
3151{
3152	MA_STATE(mas, mt, 0, 0);
3153	void *entry, *ptr = (void *) 0x1234500;
3154	void *ptr2 = &ptr;
3155	void *ptr3 = &ptr2;
3156
3157	/* Check MAS_ROOT First */
3158	mtree_store_range(mt, 0, 0, ptr, GFP_KERNEL);
3159
3160	mas_lock(&mas);
3161	/* prev: Start -> underflow*/
3162	entry = mas_prev(&mas, 0);
3163	MT_BUG_ON(mt, entry != NULL);
3164	MT_BUG_ON(mt, mas.status != ma_underflow);
3165
3166	/* prev: Start -> root */
3167	mas_set(&mas, 10);
3168	entry = mas_prev(&mas, 0);
3169	MT_BUG_ON(mt, entry != ptr);
3170	MT_BUG_ON(mt, mas.index != 0);
3171	MT_BUG_ON(mt, mas.last != 0);
3172	MT_BUG_ON(mt, mas.status != ma_root);
3173
3174	/* prev: pause -> root */
3175	mas_set(&mas, 10);
3176	mas_pause(&mas);
3177	entry = mas_prev(&mas, 0);
3178	MT_BUG_ON(mt, entry != ptr);
3179	MT_BUG_ON(mt, mas.index != 0);
3180	MT_BUG_ON(mt, mas.last != 0);
3181	MT_BUG_ON(mt, mas.status != ma_root);
3182
3183	/* next: start -> none */
3184	mas_set(&mas, 0);
3185	entry = mas_next(&mas, ULONG_MAX);
3186	MT_BUG_ON(mt, mas.index != 1);
3187	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3188	MT_BUG_ON(mt, entry != NULL);
3189	MT_BUG_ON(mt, mas.status != ma_none);
3190
3191	/* next: start -> none*/
3192	mas_set(&mas, 10);
3193	entry = mas_next(&mas, ULONG_MAX);
3194	MT_BUG_ON(mt, mas.index != 1);
3195	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3196	MT_BUG_ON(mt, entry != NULL);
3197	MT_BUG_ON(mt, mas.status != ma_none);
3198
3199	/* find: start -> root */
3200	mas_set(&mas, 0);
3201	entry = mas_find(&mas, ULONG_MAX);
3202	MT_BUG_ON(mt, entry != ptr);
3203	MT_BUG_ON(mt, mas.index != 0);
3204	MT_BUG_ON(mt, mas.last != 0);
3205	MT_BUG_ON(mt, mas.status != ma_root);
3206
3207	/* find: root -> none */
3208	entry = mas_find(&mas, ULONG_MAX);
3209	MT_BUG_ON(mt, entry != NULL);
3210	MT_BUG_ON(mt, mas.index != 1);
3211	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3212	MT_BUG_ON(mt, mas.status != ma_none);
3213
3214	/* find: none -> none */
3215	entry = mas_find(&mas, ULONG_MAX);
3216	MT_BUG_ON(mt, entry != NULL);
3217	MT_BUG_ON(mt, mas.index != 1);
3218	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3219	MT_BUG_ON(mt, mas.status != ma_none);
3220
3221	/* find: start -> none */
3222	mas_set(&mas, 10);
3223	entry = mas_find(&mas, ULONG_MAX);
3224	MT_BUG_ON(mt, entry != NULL);
3225	MT_BUG_ON(mt, mas.index != 1);
3226	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3227	MT_BUG_ON(mt, mas.status != ma_none);
3228
3229	/* find_rev: none -> root */
3230	entry = mas_find_rev(&mas, 0);
3231	MT_BUG_ON(mt, entry != ptr);
3232	MT_BUG_ON(mt, mas.index != 0);
3233	MT_BUG_ON(mt, mas.last != 0);
3234	MT_BUG_ON(mt, mas.status != ma_root);
3235
3236	/* find_rev: start -> root */
3237	mas_set(&mas, 0);
3238	entry = mas_find_rev(&mas, 0);
3239	MT_BUG_ON(mt, entry != ptr);
3240	MT_BUG_ON(mt, mas.index != 0);
3241	MT_BUG_ON(mt, mas.last != 0);
3242	MT_BUG_ON(mt, mas.status != ma_root);
3243
3244	/* find_rev: root -> none */
3245	entry = mas_find_rev(&mas, 0);
3246	MT_BUG_ON(mt, entry != NULL);
3247	MT_BUG_ON(mt, mas.index != 0);
3248	MT_BUG_ON(mt, mas.last != 0);
3249	MT_BUG_ON(mt, mas.status != ma_none);
3250
3251	/* find_rev: none -> none */
3252	entry = mas_find_rev(&mas, 0);
3253	MT_BUG_ON(mt, entry != NULL);
3254	MT_BUG_ON(mt, mas.index != 0);
3255	MT_BUG_ON(mt, mas.last != 0);
3256	MT_BUG_ON(mt, mas.status != ma_none);
3257
3258	/* find_rev: start -> root */
3259	mas_set(&mas, 10);
3260	entry = mas_find_rev(&mas, 0);
3261	MT_BUG_ON(mt, entry != ptr);
3262	MT_BUG_ON(mt, mas.index != 0);
3263	MT_BUG_ON(mt, mas.last != 0);
3264	MT_BUG_ON(mt, mas.status != ma_root);
3265
3266	/* walk: start -> none */
3267	mas_set(&mas, 10);
3268	entry = mas_walk(&mas);
3269	MT_BUG_ON(mt, entry != NULL);
3270	MT_BUG_ON(mt, mas.index != 1);
3271	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3272	MT_BUG_ON(mt, mas.status != ma_none);
3273
3274	/* walk: pause -> none*/
3275	mas_set(&mas, 10);
3276	mas_pause(&mas);
3277	entry = mas_walk(&mas);
3278	MT_BUG_ON(mt, entry != NULL);
3279	MT_BUG_ON(mt, mas.index != 1);
3280	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3281	MT_BUG_ON(mt, mas.status != ma_none);
3282
3283	/* walk: none -> none */
3284	mas.index = mas.last = 10;
3285	entry = mas_walk(&mas);
3286	MT_BUG_ON(mt, entry != NULL);
3287	MT_BUG_ON(mt, mas.index != 1);
3288	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3289	MT_BUG_ON(mt, mas.status != ma_none);
3290
3291	/* walk: none -> none */
3292	entry = mas_walk(&mas);
3293	MT_BUG_ON(mt, entry != NULL);
3294	MT_BUG_ON(mt, mas.index != 1);
3295	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3296	MT_BUG_ON(mt, mas.status != ma_none);
3297
3298	/* walk: start -> root */
3299	mas_set(&mas, 0);
3300	entry = mas_walk(&mas);
3301	MT_BUG_ON(mt, entry != ptr);
3302	MT_BUG_ON(mt, mas.index != 0);
3303	MT_BUG_ON(mt, mas.last != 0);
3304	MT_BUG_ON(mt, mas.status != ma_root);
3305
3306	/* walk: pause -> root */
3307	mas_set(&mas, 0);
3308	mas_pause(&mas);
3309	entry = mas_walk(&mas);
3310	MT_BUG_ON(mt, entry != ptr);
3311	MT_BUG_ON(mt, mas.index != 0);
3312	MT_BUG_ON(mt, mas.last != 0);
3313	MT_BUG_ON(mt, mas.status != ma_root);
3314
3315	/* walk: none -> root */
3316	mas.status = ma_none;
3317	entry = mas_walk(&mas);
3318	MT_BUG_ON(mt, entry != ptr);
3319	MT_BUG_ON(mt, mas.index != 0);
3320	MT_BUG_ON(mt, mas.last != 0);
3321	MT_BUG_ON(mt, mas.status != ma_root);
3322
3323	/* walk: root -> root */
3324	entry = mas_walk(&mas);
3325	MT_BUG_ON(mt, entry != ptr);
3326	MT_BUG_ON(mt, mas.index != 0);
3327	MT_BUG_ON(mt, mas.last != 0);
3328	MT_BUG_ON(mt, mas.status != ma_root);
3329
3330	/* walk: root -> none */
3331	mas_set(&mas, 10);
3332	entry = mas_walk(&mas);
3333	MT_BUG_ON(mt, entry != NULL);
3334	MT_BUG_ON(mt, mas.index != 1);
3335	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3336	MT_BUG_ON(mt, mas.status != ma_none);
3337
3338	/* walk: none -> root */
3339	mas.index = mas.last = 0;
3340	entry = mas_walk(&mas);
3341	MT_BUG_ON(mt, entry != ptr);
3342	MT_BUG_ON(mt, mas.index != 0);
3343	MT_BUG_ON(mt, mas.last != 0);
3344	MT_BUG_ON(mt, mas.status != ma_root);
3345
3346	mas_unlock(&mas);
3347
3348	/* Check when there is an actual node */
3349	mtree_store_range(mt, 0, 0, NULL, GFP_KERNEL);
3350	mtree_store_range(mt, 0x1000, 0x1500, ptr, GFP_KERNEL);
3351	mtree_store_range(mt, 0x2000, 0x2500, ptr2, GFP_KERNEL);
3352	mtree_store_range(mt, 0x3000, 0x3500, ptr3, GFP_KERNEL);
3353
3354	mas_lock(&mas);
3355
3356	/* next: start ->active */
3357	mas_set(&mas, 0);
3358	entry = mas_next(&mas, ULONG_MAX);
3359	MT_BUG_ON(mt, entry != ptr);
3360	MT_BUG_ON(mt, mas.index != 0x1000);
3361	MT_BUG_ON(mt, mas.last != 0x1500);
3362	MT_BUG_ON(mt, !mas_is_active(&mas));
3363
3364	/* next: pause ->active */
3365	mas_set(&mas, 0);
3366	mas_pause(&mas);
3367	entry = mas_next(&mas, ULONG_MAX);
3368	MT_BUG_ON(mt, entry != ptr);
3369	MT_BUG_ON(mt, mas.index != 0x1000);
3370	MT_BUG_ON(mt, mas.last != 0x1500);
3371	MT_BUG_ON(mt, !mas_is_active(&mas));
3372
3373	/* next: none ->active */
3374	mas.index = mas.last = 0;
3375	mas.offset = 0;
3376	mas.status = ma_none;
3377	entry = mas_next(&mas, ULONG_MAX);
3378	MT_BUG_ON(mt, entry != ptr);
3379	MT_BUG_ON(mt, mas.index != 0x1000);
3380	MT_BUG_ON(mt, mas.last != 0x1500);
3381	MT_BUG_ON(mt, !mas_is_active(&mas));
3382
3383	/* next:active ->active (spanning limit) */
3384	entry = mas_next(&mas, 0x2100);
3385	MT_BUG_ON(mt, entry != ptr2);
3386	MT_BUG_ON(mt, mas.index != 0x2000);
3387	MT_BUG_ON(mt, mas.last != 0x2500);
3388	MT_BUG_ON(mt, !mas_is_active(&mas));
3389
3390	/* next:active -> overflow (limit reached) beyond data */
3391	entry = mas_next(&mas, 0x2999);
3392	MT_BUG_ON(mt, entry != NULL);
3393	MT_BUG_ON(mt, mas.index != 0x2501);
3394	MT_BUG_ON(mt, mas.last != 0x2fff);
3395	MT_BUG_ON(mt, !mas_is_overflow(&mas));
3396
3397	/* next:overflow -> active (limit changed) */
3398	entry = mas_next(&mas, ULONG_MAX);
3399	MT_BUG_ON(mt, entry != ptr3);
3400	MT_BUG_ON(mt, mas.index != 0x3000);
3401	MT_BUG_ON(mt, mas.last != 0x3500);
3402	MT_BUG_ON(mt, !mas_is_active(&mas));
3403
3404	/* next:active ->  overflow (limit reached) */
3405	entry = mas_next(&mas, ULONG_MAX);
3406	MT_BUG_ON(mt, entry != NULL);
3407	MT_BUG_ON(mt, mas.index != 0x3501);
3408	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3409	MT_BUG_ON(mt, !mas_is_overflow(&mas));
3410
3411	/* next:overflow -> overflow  */
3412	entry = mas_next(&mas, ULONG_MAX);
3413	MT_BUG_ON(mt, entry != NULL);
3414	MT_BUG_ON(mt, mas.index != 0x3501);
3415	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3416	MT_BUG_ON(mt, !mas_is_overflow(&mas));
3417
3418	/* prev:overflow -> active  */
3419	entry = mas_prev(&mas, 0);
3420	MT_BUG_ON(mt, entry != ptr3);
3421	MT_BUG_ON(mt, mas.index != 0x3000);
3422	MT_BUG_ON(mt, mas.last != 0x3500);
3423	MT_BUG_ON(mt, !mas_is_active(&mas));
3424
3425	/* next: none -> active, skip value at location */
3426	mas_set(&mas, 0);
3427	entry = mas_next(&mas, ULONG_MAX);
3428	mas.status = ma_none;
3429	mas.offset = 0;
3430	entry = mas_next(&mas, ULONG_MAX);
3431	MT_BUG_ON(mt, entry != ptr2);
3432	MT_BUG_ON(mt, mas.index != 0x2000);
3433	MT_BUG_ON(mt, mas.last != 0x2500);
3434	MT_BUG_ON(mt, !mas_is_active(&mas));
3435
3436	/* prev:active ->active */
3437	entry = mas_prev(&mas, 0);
3438	MT_BUG_ON(mt, entry != ptr);
3439	MT_BUG_ON(mt, mas.index != 0x1000);
3440	MT_BUG_ON(mt, mas.last != 0x1500);
3441	MT_BUG_ON(mt, !mas_is_active(&mas));
3442
3443	/* prev:active -> underflow (span limit) */
3444	mas_next(&mas, ULONG_MAX);
3445	entry = mas_prev(&mas, 0x1200);
3446	MT_BUG_ON(mt, entry != ptr);
3447	MT_BUG_ON(mt, mas.index != 0x1000);
3448	MT_BUG_ON(mt, mas.last != 0x1500);
3449	MT_BUG_ON(mt, !mas_is_active(&mas)); /* spanning limit */
3450	entry = mas_prev(&mas, 0x1200); /* underflow */
3451	MT_BUG_ON(mt, entry != NULL);
3452	MT_BUG_ON(mt, mas.index != 0x1000);
3453	MT_BUG_ON(mt, mas.last != 0x1500);
3454	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3455
3456	/* prev:underflow -> underflow (lower limit) spanning end range */
3457	entry = mas_prev(&mas, 0x0100);
3458	MT_BUG_ON(mt, entry != NULL);
3459	MT_BUG_ON(mt, mas.index != 0);
3460	MT_BUG_ON(mt, mas.last != 0x0FFF);
3461	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3462
3463	/* prev:underflow -> underflow */
3464	entry = mas_prev(&mas, 0);
3465	MT_BUG_ON(mt, entry != NULL);
3466	MT_BUG_ON(mt, mas.index != 0);
3467	MT_BUG_ON(mt, mas.last != 0x0FFF);
3468	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3469
3470	/* prev:underflow -> underflow */
3471	entry = mas_prev(&mas, 0);
3472	MT_BUG_ON(mt, entry != NULL);
3473	MT_BUG_ON(mt, mas.index != 0);
3474	MT_BUG_ON(mt, mas.last != 0x0FFF);
3475	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3476
3477	/* next:underflow -> active */
3478	entry = mas_next(&mas, ULONG_MAX);
3479	MT_BUG_ON(mt, entry != ptr);
3480	MT_BUG_ON(mt, mas.index != 0x1000);
3481	MT_BUG_ON(mt, mas.last != 0x1500);
3482	MT_BUG_ON(mt, !mas_is_active(&mas));
3483
3484	/* prev:first value -> underflow */
3485	entry = mas_prev(&mas, 0x1000);
3486	MT_BUG_ON(mt, entry != NULL);
3487	MT_BUG_ON(mt, mas.index != 0x1000);
3488	MT_BUG_ON(mt, mas.last != 0x1500);
3489	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3490
3491	/* find:underflow -> first value */
3492	entry = mas_find(&mas, ULONG_MAX);
3493	MT_BUG_ON(mt, entry != ptr);
3494	MT_BUG_ON(mt, mas.index != 0x1000);
3495	MT_BUG_ON(mt, mas.last != 0x1500);
3496	MT_BUG_ON(mt, !mas_is_active(&mas));
3497
3498	/* prev: pause ->active */
3499	mas_set(&mas, 0x3600);
3500	entry = mas_prev(&mas, 0);
3501	MT_BUG_ON(mt, entry != ptr3);
3502	mas_pause(&mas);
3503	entry = mas_prev(&mas, 0);
3504	MT_BUG_ON(mt, entry != ptr2);
3505	MT_BUG_ON(mt, mas.index != 0x2000);
3506	MT_BUG_ON(mt, mas.last != 0x2500);
3507	MT_BUG_ON(mt, !mas_is_active(&mas));
3508
3509	/* prev:active -> underflow spanning min */
3510	entry = mas_prev(&mas, 0x1600);
3511	MT_BUG_ON(mt, entry != NULL);
3512	MT_BUG_ON(mt, mas.index != 0x1501);
3513	MT_BUG_ON(mt, mas.last != 0x1FFF);
3514	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3515
3516	/* prev: active ->active, continue */
3517	entry = mas_prev(&mas, 0);
3518	MT_BUG_ON(mt, entry != ptr);
3519	MT_BUG_ON(mt, mas.index != 0x1000);
3520	MT_BUG_ON(mt, mas.last != 0x1500);
3521	MT_BUG_ON(mt, !mas_is_active(&mas));
3522
3523	/* find: start ->active */
3524	mas_set(&mas, 0);
3525	entry = mas_find(&mas, ULONG_MAX);
3526	MT_BUG_ON(mt, entry != ptr);
3527	MT_BUG_ON(mt, mas.index != 0x1000);
3528	MT_BUG_ON(mt, mas.last != 0x1500);
3529	MT_BUG_ON(mt, !mas_is_active(&mas));
3530
3531	/* find: pause ->active */
3532	mas_set(&mas, 0);
3533	mas_pause(&mas);
3534	entry = mas_find(&mas, ULONG_MAX);
3535	MT_BUG_ON(mt, entry != ptr);
3536	MT_BUG_ON(mt, mas.index != 0x1000);
3537	MT_BUG_ON(mt, mas.last != 0x1500);
3538	MT_BUG_ON(mt, !mas_is_active(&mas));
3539
3540	/* find: start ->active on value */;
3541	mas_set(&mas, 1200);
3542	entry = mas_find(&mas, ULONG_MAX);
3543	MT_BUG_ON(mt, entry != ptr);
3544	MT_BUG_ON(mt, mas.index != 0x1000);
3545	MT_BUG_ON(mt, mas.last != 0x1500);
3546	MT_BUG_ON(mt, !mas_is_active(&mas));
3547
3548	/* find:active ->active */
3549	entry = mas_find(&mas, ULONG_MAX);
3550	MT_BUG_ON(mt, entry != ptr2);
3551	MT_BUG_ON(mt, mas.index != 0x2000);
3552	MT_BUG_ON(mt, mas.last != 0x2500);
3553	MT_BUG_ON(mt, !mas_is_active(&mas));
3554
3555
3556	/* find:active -> active (NULL)*/
3557	entry = mas_find(&mas, 0x2700);
3558	MT_BUG_ON(mt, entry != NULL);
3559	MT_BUG_ON(mt, mas.index != 0x2501);
3560	MT_BUG_ON(mt, mas.last != 0x2FFF);
3561	MAS_BUG_ON(&mas, !mas_is_active(&mas));
3562
3563	/* find: overflow ->active */
3564	entry = mas_find(&mas, 0x5000);
3565	MT_BUG_ON(mt, entry != ptr3);
3566	MT_BUG_ON(mt, mas.index != 0x3000);
3567	MT_BUG_ON(mt, mas.last != 0x3500);
3568	MT_BUG_ON(mt, !mas_is_active(&mas));
3569
3570	/* find:active -> active (NULL) end*/
3571	entry = mas_find(&mas, ULONG_MAX);
3572	MT_BUG_ON(mt, entry != NULL);
3573	MT_BUG_ON(mt, mas.index != 0x3501);
3574	MT_BUG_ON(mt, mas.last != ULONG_MAX);
3575	MAS_BUG_ON(&mas, !mas_is_active(&mas));
3576
3577	/* find_rev: active (END) ->active */
3578	entry = mas_find_rev(&mas, 0);
3579	MT_BUG_ON(mt, entry != ptr3);
3580	MT_BUG_ON(mt, mas.index != 0x3000);
3581	MT_BUG_ON(mt, mas.last != 0x3500);
3582	MT_BUG_ON(mt, !mas_is_active(&mas));
3583
3584	/* find_rev:active ->active */
3585	entry = mas_find_rev(&mas, 0);
3586	MT_BUG_ON(mt, entry != ptr2);
3587	MT_BUG_ON(mt, mas.index != 0x2000);
3588	MT_BUG_ON(mt, mas.last != 0x2500);
3589	MT_BUG_ON(mt, !mas_is_active(&mas));
3590
3591	/* find_rev: pause ->active */
3592	mas_pause(&mas);
3593	entry = mas_find_rev(&mas, 0);
3594	MT_BUG_ON(mt, entry != ptr);
3595	MT_BUG_ON(mt, mas.index != 0x1000);
3596	MT_BUG_ON(mt, mas.last != 0x1500);
3597	MT_BUG_ON(mt, !mas_is_active(&mas));
3598
3599	/* find_rev:active -> underflow */
3600	entry = mas_find_rev(&mas, 0);
3601	MT_BUG_ON(mt, entry != NULL);
3602	MT_BUG_ON(mt, mas.index != 0);
3603	MT_BUG_ON(mt, mas.last != 0x0FFF);
3604	MT_BUG_ON(mt, !mas_is_underflow(&mas));
3605
3606	/* find_rev: start ->active */
3607	mas_set(&mas, 0x1200);
3608	entry = mas_find_rev(&mas, 0);
3609	MT_BUG_ON(mt, entry != ptr);
3610	MT_BUG_ON(mt, mas.index != 0x1000);
3611	MT_BUG_ON(mt, mas.last != 0x1500);
3612	MT_BUG_ON(mt, !mas_is_active(&mas));
3613
3614	/* mas_walk start ->active */
3615	mas_set(&mas, 0x1200);
3616	entry = mas_walk(&mas);
3617	MT_BUG_ON(mt, entry != ptr);
3618	MT_BUG_ON(mt, mas.index != 0x1000);
3619	MT_BUG_ON(mt, mas.last != 0x1500);
3620	MT_BUG_ON(mt, !mas_is_active(&mas));
3621
3622	/* mas_walk start ->active */
3623	mas_set(&mas, 0x1600);
3624	entry = mas_walk(&mas);
3625	MT_BUG_ON(mt, entry != NULL);
3626	MT_BUG_ON(mt, mas.index != 0x1501);
3627	MT_BUG_ON(mt, mas.last != 0x1fff);
3628	MT_BUG_ON(mt, !mas_is_active(&mas));
3629
3630	/* mas_walk pause ->active */
3631	mas_set(&mas, 0x1200);
3632	mas_pause(&mas);
3633	entry = mas_walk(&mas);
3634	MT_BUG_ON(mt, entry != ptr);
3635	MT_BUG_ON(mt, mas.index != 0x1000);
3636	MT_BUG_ON(mt, mas.last != 0x1500);
3637	MT_BUG_ON(mt, !mas_is_active(&mas));
3638
3639	/* mas_walk pause -> active */
3640	mas_set(&mas, 0x1600);
3641	mas_pause(&mas);
3642	entry = mas_walk(&mas);
3643	MT_BUG_ON(mt, entry != NULL);
3644	MT_BUG_ON(mt, mas.index != 0x1501);
3645	MT_BUG_ON(mt, mas.last != 0x1fff);
3646	MT_BUG_ON(mt, !mas_is_active(&mas));
3647
3648	/* mas_walk none -> active */
3649	mas_set(&mas, 0x1200);
3650	mas.status = ma_none;
3651	entry = mas_walk(&mas);
3652	MT_BUG_ON(mt, entry != ptr);
3653	MT_BUG_ON(mt, mas.index != 0x1000);
3654	MT_BUG_ON(mt, mas.last != 0x1500);
3655	MT_BUG_ON(mt, !mas_is_active(&mas));
3656
3657	/* mas_walk none -> active */
3658	mas_set(&mas, 0x1600);
3659	mas.status = ma_none;
3660	entry = mas_walk(&mas);
3661	MT_BUG_ON(mt, entry != NULL);
3662	MT_BUG_ON(mt, mas.index != 0x1501);
3663	MT_BUG_ON(mt, mas.last != 0x1fff);
3664	MT_BUG_ON(mt, !mas_is_active(&mas));
3665
3666	/* mas_walk active -> active */
3667	mas.index = 0x1200;
3668	mas.last = 0x1200;
3669	mas.offset = 0;
3670	entry = mas_walk(&mas);
3671	MT_BUG_ON(mt, entry != ptr);
3672	MT_BUG_ON(mt, mas.index != 0x1000);
3673	MT_BUG_ON(mt, mas.last != 0x1500);
3674	MT_BUG_ON(mt, !mas_is_active(&mas));
3675
3676	/* mas_walk active -> active */
3677	mas.index = 0x1600;
3678	mas.last = 0x1600;
3679	entry = mas_walk(&mas);
3680	MT_BUG_ON(mt, entry != NULL);
3681	MT_BUG_ON(mt, mas.index != 0x1501);
3682	MT_BUG_ON(mt, mas.last != 0x1fff);
3683	MT_BUG_ON(mt, !mas_is_active(&mas));
3684
3685	mas_unlock(&mas);
3686}
3687
3688static noinline void __init alloc_cyclic_testing(struct maple_tree *mt)
3689{
3690	unsigned long location;
3691	unsigned long next;
3692	int ret = 0;
3693	MA_STATE(mas, mt, 0, 0);
3694
3695	next = 0;
3696	mtree_lock(mt);
3697	for (int i = 0; i < 100; i++) {
3698		mas_alloc_cyclic(&mas, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
3699		MAS_BUG_ON(&mas, i != location - 2);
3700		MAS_BUG_ON(&mas, mas.index != location);
3701		MAS_BUG_ON(&mas, mas.last != location);
3702		MAS_BUG_ON(&mas, i != next - 3);
3703	}
3704
3705	mtree_unlock(mt);
3706	mtree_destroy(mt);
3707	next = 0;
3708	mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
3709	for (int i = 0; i < 100; i++) {
3710		mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
3711		MT_BUG_ON(mt, i != location - 2);
3712		MT_BUG_ON(mt, i != next - 3);
3713		MT_BUG_ON(mt, mtree_load(mt, location) != mt);
3714	}
3715
3716	mtree_destroy(mt);
3717	/* Overflow test */
3718	next = ULONG_MAX - 1;
3719	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
3720	MT_BUG_ON(mt, ret != 0);
3721	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
3722	MT_BUG_ON(mt, ret != 0);
3723	ret = mtree_alloc_cyclic(mt, &location, mt, 2, ULONG_MAX, &next, GFP_KERNEL);
3724	MT_BUG_ON(mt, ret != 1);
3725}
3726
/* The single global tree, re-initialized and destroyed by each test below. */
static DEFINE_MTREE(tree);

/*
 * maple_tree_seed() - Module init: run the maple tree test suite.
 *
 * If one of the BENCH_* macros near the top of this file is defined, only
 * that benchmark runs and the functional tests are skipped.
 *
 * Return: 0 if every MT_BUG_ON() check passed, -EINVAL otherwise.
 */
static int __init maple_tree_seed(void)
{
	unsigned long set[] = { 5015, 5014, 5017, 25, 1000,
				1001, 1002, 1003, 1005, 0,
				5003, 5002};
	void *ptr = &set;

	pr_info("\nTEST STARTING\n\n");

#if defined(BENCH_SLOT_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_slot_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_NODE_STORE)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_node_store(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_AWALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_awalk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_WALK)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_walk(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_LOAD)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_load(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_FORK)
#define BENCH
	bench_forking();
	goto skip;
#endif
#if defined(BENCH_MT_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mt_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MAS_FOR_EACH)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mas_for_each(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif
#if defined(BENCH_MAS_PREV)
#define BENCH
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	bench_mas_prev(&tree);
	mtree_destroy(&tree);
	goto skip;
#endif

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_store_null(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_root_expand(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_iteration(&tree);
	mtree_destroy(&tree);

	check_forking();

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_mas_store_gfp(&tree);
	mtree_destroy(&tree);

	/* Test ranges (store and insert) */
	mt_init_flags(&tree, 0);
	check_ranges(&tree);
	mtree_destroy(&tree);

#if defined(CONFIG_64BIT)
	/* These tests have ranges outside of 4GB */
	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_range(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_alloc_rev_range(&tree);
	mtree_destroy(&tree);
#endif

	mt_init_flags(&tree, 0);

	check_load(&tree, set[0], NULL);       /* See if 5015 -> NULL */

	check_insert(&tree, set[9], &tree);     /* Insert 0 */
	check_load(&tree, set[9], &tree);       /* See if 0 -> &tree */
	check_load(&tree, set[0], NULL);       /* See if 5015 -> NULL */

	check_insert(&tree, set[10], ptr);      /* Insert 5003 */
	check_load(&tree, set[9], &tree);       /* See if 0 -> &tree */
	check_load(&tree, set[11], NULL);       /* See if 5002 -> NULL */
	check_load(&tree, set[10], ptr);       /* See if 5003 -> ptr */

	/* Clear out the tree */
	mtree_destroy(&tree);

	/* Try to insert, insert a dup, and load back what was inserted. */
	mt_init_flags(&tree, 0);
	check_insert(&tree, set[0], &tree);     /* Insert 5015 */
	check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */

	/*
	 * Second set of tests try to load a value that doesn't exist, inserts
	 * a second value, then loads the value again
	 */
	check_load(&tree, set[1], NULL);        /* See if 5014 -> NULL */
	check_insert(&tree, set[1], ptr);       /* insert 5014 -> ptr */
	check_load(&tree, set[1], ptr);         /* See if 5014 -> ptr */
	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
	/*
	 * Tree currently contains:
	 * 5014 -> ptr and 5015 -> &tree (everything else -> (nil))
	 */
	check_insert(&tree, set[6], ptr);       /* insert 1002 -> ptr */
	check_insert(&tree, set[7], &tree);       /* insert 1003 -> &tree */

	check_load(&tree, set[0], &tree);       /* See if 5015 -> &tree */
	check_load(&tree, set[1], ptr);         /* See if 5014 -> ptr */
	check_load(&tree, set[6], ptr);         /* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);       /* 1003 = &tree ? */

	/* Clear out tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/* Test inserting into a NULL hole. */
	check_insert(&tree, set[5], ptr);       /* insert 1001 -> ptr */
	check_insert(&tree, set[7], &tree);       /* insert 1003 -> &tree */
	check_insert(&tree, set[6], ptr);       /* insert 1002 -> ptr */
	check_load(&tree, set[5], ptr);         /* See if 1001 -> ptr */
	check_load(&tree, set[6], ptr);         /* See if 1002 -> ptr */
	check_load(&tree, set[7], &tree);       /* See if 1003 -> &tree */

	/* Clear out the tree */
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	/*
	 *       set[] = {5015, 5014, 5017, 25, 1000,
	 *                1001, 1002, 1003, 1005, 0,
	 *                5003, 5002};
	 */

	check_insert(&tree, set[0], ptr); /* 5015 */
	check_insert(&tree, set[1], &tree); /* 5014 */
	check_insert(&tree, set[2], ptr); /* 5017 */
	check_insert(&tree, set[3], &tree); /* 25 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_insert(&tree, set[4], ptr); /* 1000 < Should split. */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree); /*25 */
	check_load(&tree, set[4], ptr);
	check_insert(&tree, set[5], &tree); /* 1001 */
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_insert(&tree, set[6], ptr);
	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_insert(&tree, set[7], &tree);
	check_load(&tree, set[0], ptr);
	check_insert(&tree, set[8], ptr);

	check_insert(&tree, set[9], &tree);

	check_load(&tree, set[0], ptr);
	check_load(&tree, set[1], &tree);
	check_load(&tree, set[2], ptr);
	check_load(&tree, set[3], &tree);
	check_load(&tree, set[4], ptr);
	check_load(&tree, set[5], &tree);
	check_load(&tree, set[6], ptr);
	check_load(&tree, set[9], &tree);
	mtree_destroy(&tree);

	/* Sequential insert/verify runs, forward and reverse. */
	mt_init_flags(&tree, 0);
	check_seq(&tree, 16, false);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_seq(&tree, 1000, true);
	mtree_destroy(&tree);

	check_lower_bound_split(&tree);
	check_upper_bound_split(&tree);
	check_mid_split(&tree);

	mt_init_flags(&tree, 0);
	check_next_entry(&tree);
	check_find(&tree);
	check_find_2(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_prev_entry(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_gap_combining(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_node_overwrite(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	next_prev_test(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_spanning_relatives(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_rev_find(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, 0);
	check_fuzzer(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_dup(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_bnode_min_spanning(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_window(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_empty_area_fill(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	check_state_handling(&tree);
	mtree_destroy(&tree);

	mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
	alloc_cyclic_testing(&tree);
	mtree_destroy(&tree);


#if defined(BENCH)
skip:
#endif
	/* Flush pending RCU callbacks before the final report. */
	rcu_barrier();
	pr_info("maple_tree: %u of %u tests passed\n",
			atomic_read(&maple_tree_tests_passed),
			atomic_read(&maple_tree_tests_run));
	if (atomic_read(&maple_tree_tests_run) ==
	    atomic_read(&maple_tree_tests_passed))
		return 0;

	return -EINVAL;
}
4030
/*
 * maple_tree_harvest() - Module exit.
 *
 * Intentionally empty: every test in maple_tree_seed() destroys its own
 * tree, so there is nothing left to tear down on unload.
 */
static void __exit maple_tree_harvest(void)
{

}

module_init(maple_tree_seed);
module_exit(maple_tree_harvest);
MODULE_AUTHOR("Liam R. Howlett <Liam.Howlett@Oracle.com>");
MODULE_DESCRIPTION("maple tree API test module");
MODULE_LICENSE("GPL");