.sp
.ne 2
.na
-\fBmetaslab_debug\fR (int)
+\fBmetaslab_debug_load\fR (int)
.ad
.RS 12n
-Keep space maps in core to verify frees
+Load all metaslabs during pool import.
+.sp
+Use \fB1\fR for yes and \fB0\fR for no (default).
+.RE
+
+.sp
+.ne 2
+.na
+\fBmetaslab_debug_unload\fR (int)
+.ad
+.RS 12n
+Prevent metaslabs from being unloaded.
.sp
Use \fB1\fR for yes and \fB0\fR for no (default).
.RE
int zfs_mg_noalloc_threshold = 0;
/*
- * Metaslab debugging: when set, keeps all space maps in core to verify frees.
+ * When set, all metaslabs will be loaded when the pool is first opened.
*/
-int metaslab_debug = 0;
+int metaslab_debug_load = 0;
+
+/*
+ * When set, metaslabs will be prevented from being unloaded.
+ */
+int metaslab_debug_unload = 0;
/*
* Minimum size which forces the dynamic allocator to change
metaslab_group_add(mg, msp);
- if (metaslab_debug && smo->smo_object != 0) {
+ if (metaslab_debug_load && smo->smo_object != 0) {
mutex_enter(&msp->ms_lock);
VERIFY(space_map_load(msp->ms_map, mg->mg_class->mc_ops,
SM_FREE, smo, spa_meta_objset(vd->vdev_spa)) == 0);
if (msp->ms_allocmap[(txg + t) & TXG_MASK]->sm_space)
evictable = 0;
- if (evictable && !metaslab_debug)
+ if (evictable && !metaslab_debug_unload)
space_map_unload(sm);
}
}
#if defined(_KERNEL) && defined(HAVE_SPL)
-module_param(metaslab_debug, int, 0644);
-MODULE_PARM_DESC(metaslab_debug, "keep space maps in core to verify frees");
+module_param(metaslab_debug_load, int, 0644);
+MODULE_PARM_DESC(metaslab_debug_load, "load all metaslabs during pool import");
+
+module_param(metaslab_debug_unload, int, 0644);
+MODULE_PARM_DESC(metaslab_debug_unload, "prevent metaslabs from being unloaded");
#endif /* _KERNEL && HAVE_SPL */