@@ -200,15 +200,16 @@ ZFS_BTREE_FIND_IN_BUF_FUNC(zfs_range_tree_seg64_find_in_buf, zfs_range_seg64_t,
 ZFS_BTREE_FIND_IN_BUF_FUNC(zfs_range_tree_seg_gap_find_in_buf,
     zfs_range_seg_gap_t, zfs_range_tree_seg_gap_compare)
 
-zfs_range_tree_t *
-zfs_range_tree_create_gap(const zfs_range_tree_ops_t *ops,
+static zfs_range_tree_t *
+zfs_range_tree_create_impl(const zfs_range_tree_ops_t *ops,
     zfs_range_seg_type_t type, void *arg, uint64_t start, uint64_t shift,
-    uint64_t gap)
+    uint64_t gap, zfs_range_tree_usecase_t usecase)
 {
 	zfs_range_tree_t *rt = kmem_zalloc(sizeof (zfs_range_tree_t), KM_SLEEP);
 
 	ASSERT3U(shift, <, 64);
 	ASSERT3U(type, <=, ZFS_RANGE_SEG_NUM_TYPES);
+	ASSERT3U(usecase, <, ZFS_RANGE_TREE_UC_NUM_CASES);
 	size_t size;
 	int (*compare) (const void *, const void *);
 	bt_find_in_buf_f bt_find;
@@ -235,6 +236,7 @@ zfs_range_tree_create_gap(const zfs_range_tree_ops_t *ops,
 
 	rt->rt_ops = ops;
 	rt->rt_gap = gap;
+	rt->rt_usecase = usecase;
 	rt->rt_arg = arg;
 	rt->rt_type = type;
 	rt->rt_start = start;
@@ -246,11 +248,30 @@ zfs_range_tree_create_gap(const zfs_range_tree_ops_t *ops,
 	return (rt);
 }
 
+zfs_range_tree_t *
+zfs_range_tree_create_gap(const zfs_range_tree_ops_t *ops,
+    zfs_range_seg_type_t type, void *arg, uint64_t start, uint64_t shift,
+    uint64_t gap)
+{
+	return (zfs_range_tree_create_impl(ops, type, arg, start, shift, gap,
+	    ZFS_RANGE_TREE_UC_UNKNOWN));
+}
+
 zfs_range_tree_t *
 zfs_range_tree_create(const zfs_range_tree_ops_t *ops,
     zfs_range_seg_type_t type, void *arg, uint64_t start, uint64_t shift)
 {
-	return (zfs_range_tree_create_gap(ops, type, arg, start, shift, 0));
+	return (zfs_range_tree_create_impl(ops, type, arg, start, shift, 0,
+	    ZFS_RANGE_TREE_UC_UNKNOWN));
+}
+
+zfs_range_tree_t *
+zfs_range_tree_create_usecase(const zfs_range_tree_ops_t *ops,
+    zfs_range_seg_type_t type, void *arg, uint64_t start, uint64_t shift,
+    zfs_range_tree_usecase_t usecase)
+{
+	return (zfs_range_tree_create_impl(ops, type, arg, start, shift, 0,
+	    usecase));
 }
 
 void
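Note: the new constructors rely on a zfs_range_tree_usecase_t type declared outside this hunk. Based only on the constants the diff itself uses (UC_UNKNOWN, UC_ALLOCATED_SPACE, UC_FREE_SPACE, UC_NUM_CASES, plus the rt_usecase field), the header change presumably looks something like the sketch below; the member order (apart from NUM_CASES being last, which the bounds ASSERT requires) and the comments are assumptions:

/*
 * Assumed declaration (not part of this hunk), reconstructed from the
 * constants referenced by the patch.
 */
typedef enum zfs_range_tree_usecase {
	ZFS_RANGE_TREE_UC_UNKNOWN = 0,		/* legacy callers, strict checks */
	ZFS_RANGE_TREE_UC_ALLOCATED_SPACE,	/* tree tracks allocated space */
	ZFS_RANGE_TREE_UC_FREE_SPACE,		/* tree tracks free space */
	ZFS_RANGE_TREE_UC_NUM_CASES		/* bounds check in create_impl */
} zfs_range_tree_usecase_t;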
@@ -318,14 +339,25 @@ zfs_range_tree_add_impl(void *arg, uint64_t start, uint64_t size, uint64_t fill)
 	 * the normal code paths.
 	 */
 	if (rs != NULL) {
+		uint64_t rstart = zfs_rs_get_start(rs, rt);
+		uint64_t rend = zfs_rs_get_end(rs, rt);
 		if (gap == 0) {
-			zfs_panic_recover("zfs: adding existent segment to "
-			    "range tree (offset=%llx size=%llx)",
-			    (longlong_t)start, (longlong_t)size);
+			zfs_panic_recover_ms("zfs: adding segment "
+			    "(offset=%llx size=%llx) overlapping with "
+			    "existing one (offset=%llx size=%llx)",
+			    (longlong_t)start, (longlong_t)size,
+			    (longlong_t)rstart, (longlong_t)(rend - rstart));
+			if (rt->rt_usecase != ZFS_RANGE_TREE_UC_ALLOCATED_SPACE)
+				return;
+			/* add non-overlapping chunks */
+			if (rstart > start)
+				zfs_range_tree_add_impl(rt, start,
+				    rstart - start, rstart - start);
+			if (rend < end)
+				zfs_range_tree_add_impl(rt, rend, end - rend,
+				    end - rend);
 			return;
 		}
-		uint64_t rstart = zfs_rs_get_start(rs, rt);
-		uint64_t rend = zfs_rs_get_end(rs, rt);
 		if (rstart <= start && rend >= end) {
 			zfs_range_tree_adjust_fill(rt, rs, fill);
 			return;
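To make the overlap-splitting arithmetic in the add path concrete: only the parts of the incoming [start, end) that fall outside the existing segment [rstart, rend) are re-added. A minimal standalone sketch with hypothetical offsets follows; the two comparisons come from the patch, everything else is scaffolding:

#include <stdio.h>
#include <stdint.h>

int
main(void)
{
	/* incoming segment [start, end) overlaps existing [rstart, rend) */
	uint64_t start = 0x1000, end = 0x3000;
	uint64_t rstart = 0x1800, rend = 0x2000;

	if (rstart > start)	/* chunk left of the existing segment */
		printf("add [%llx, %llx)\n",
		    (unsigned long long)start, (unsigned long long)rstart);
	if (rend < end)		/* chunk right of the existing segment */
		printf("add [%llx, %llx)\n",
		    (unsigned long long)rend, (unsigned long long)end);
	return (0);
}

With these values the tree gains [0x1000, 0x1800) and [0x2000, 0x3000); the already-present middle range is left alone.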
@@ -450,6 +482,7 @@ zfs_range_tree_remove_impl(zfs_range_tree_t *rt, uint64_t start, uint64_t size,
 	zfs_range_seg_t *rs;
 	zfs_range_seg_max_t rsearch, rs_tmp;
 	uint64_t end = start + size;
+	uint64_t rstart, rend;
 	boolean_t left_over, right_over;
 
 	VERIFY3U(size, !=, 0);
@@ -463,12 +496,15 @@ zfs_range_tree_remove_impl(zfs_range_tree_t *rt, uint64_t start, uint64_t size,
 
 	/* Make sure we completely overlap with someone */
 	if (rs == NULL) {
-		zfs_panic_recover("zfs: removing nonexistent segment from "
+		zfs_panic_recover_ms("zfs: removing nonexistent segment from "
 		    "range tree (offset=%llx size=%llx)",
 		    (longlong_t)start, (longlong_t)size);
 		return;
 	}
 
+	rstart = zfs_rs_get_start(rs, rt);
+	rend = zfs_rs_get_end(rs, rt);
+
 	/*
 	 * Range trees with gap support must only remove complete segments
 	 * from the tree. This allows us to maintain accurate fill accounting
@@ -478,31 +514,47 @@ zfs_range_tree_remove_impl(zfs_range_tree_t *rt, uint64_t start, uint64_t size,
 	if (rt->rt_gap != 0) {
 		if (do_fill) {
 			if (zfs_rs_get_fill(rs, rt) == size) {
-				start = zfs_rs_get_start(rs, rt);
-				end = zfs_rs_get_end(rs, rt);
+				start = rstart;
+				end = rend;
 				size = end - start;
 			} else {
 				zfs_range_tree_adjust_fill(rt, rs, -size);
 				return;
 			}
-		} else if (zfs_rs_get_start(rs, rt) != start ||
-		    zfs_rs_get_end(rs, rt) != end) {
+		} else if (rstart != start || rend != end) {
 			zfs_panic_recover("zfs: freeing partial segment of "
 			    "gap tree (offset=%llx size=%llx) of "
 			    "(offset=%llx size=%llx)",
 			    (longlong_t)start, (longlong_t)size,
-			    (longlong_t)zfs_rs_get_start(rs, rt),
-			    (longlong_t)zfs_rs_get_end(rs, rt) -
-			    zfs_rs_get_start(rs, rt));
+			    (longlong_t)rstart,
+			    (longlong_t)(rend - rstart));
 			return;
 		}
 	}
 
-	VERIFY3U(zfs_rs_get_start(rs, rt), <=, start);
-	VERIFY3U(zfs_rs_get_end(rs, rt), >=, end);
+	if (!(rstart <= start && rend >= end)) {
+		zfs_panic_recover_ms("zfs: removing segment "
+		    "(offset=%llx size=%llx) not completely overlapped by "
+		    "existing one (offset=%llx size=%llx)",
+		    (longlong_t)start, (longlong_t)size,
+		    (longlong_t)rstart, (longlong_t)(rend - rstart));
+		if (rt->rt_usecase != ZFS_RANGE_TREE_UC_FREE_SPACE)
+			return;
+		/* perform removal of the chunks */
+		if (rstart > start)
+			zfs_range_tree_remove_impl(rt, start, rstart - start,
+			    do_fill);
+		uint64_t mstart = MAX(rstart, start);
+		uint64_t mend = MIN(rend, end);
+		zfs_range_tree_remove_impl(rt, mstart, mend - mstart, do_fill);
+		if (rend < end)
+			zfs_range_tree_remove_impl(rt, rend, end - rend,
+			    do_fill);
+		return;
+	}
 
-	left_over = (zfs_rs_get_start(rs, rt) != start);
-	right_over = (zfs_rs_get_end(rs, rt) != end);
+	left_over = (rstart != start);
+	right_over = (rend != end);
 
 	zfs_range_tree_stat_decr(rt, rs);
 
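A hypothetical caller opting into the tolerant behavior would tag the tree at creation time. This usage sketch assumes the existing zfs_range_tree_add()/zfs_range_tree_vacate()/zfs_range_tree_destroy() API and a seg64 tree with no ops callbacks; none of it appears in the diff:

/* Overlapping adds now degrade to chunk inserts instead of bailing out. */
zfs_range_tree_t *rt = zfs_range_tree_create_usecase(NULL, ZFS_RANGE_SEG64,
    NULL, 0, 0, ZFS_RANGE_TREE_UC_ALLOCATED_SPACE);

zfs_range_tree_add(rt, 0x1000, 0x2000);	/* tree holds [0x1000, 0x3000) */
zfs_range_tree_add(rt, 0x1800, 0x2000);	/* overlaps; after the recovered
					   panic only [0x3000, 0x3800) is
					   actually inserted */

zfs_range_tree_vacate(rt, NULL, NULL);	/* empty the tree before destroy */
zfs_range_tree_destroy(rt);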