git.proxmox.com Git - ceph.git/commit
update sources to v12.2.1
author: Fabian Grünbichler <f.gruenbichler@proxmox.com>
Fri, 29 Sep 2017 07:57:31 +0000 (09:57 +0200)
committer: Fabian Grünbichler <f.gruenbichler@proxmox.com>
Fri, 29 Sep 2017 07:57:31 +0000 (09:57 +0200)
commit: 181888fb293938ba79f4c96c14bf1459f38d18af
tree: 92e2f4c3835df8dea1639eaf7bc759d621baba67
parent: 36f6c5ea099d43087ff0276121fd34e71668ae0e
update sources to v12.2.1
317 files changed:
ceph/CMakeLists.txt
ceph/PendingReleaseNotes
ceph/alpine/APKBUILD
ceph/ceph.spec
ceph/ceph.spec.in
ceph/debian/changelog
ceph/doc/cephfs/health-messages.rst
ceph/doc/cephfs/mds-config-ref.rst
ceph/doc/images/esx_iscsi_chap.png [new file with mode: 0755]
ceph/doc/images/esx_iscsi_conf.png [new file with mode: 0755]
ceph/doc/images/esx_iscsi_disc.png [new file with mode: 0755]
ceph/doc/images/esx_iscsi_general.png [new file with mode: 0755]
ceph/doc/images/esx_iscsi_rescan.png [new file with mode: 0755]
ceph/doc/images/esx_iscsi_select_device.png [new file with mode: 0755]
ceph/doc/images/esx_iscsi_select_mru.png [new file with mode: 0755]
ceph/doc/images/win2016_iscsi_advanced_window.png [new file with mode: 0755]
ceph/doc/images/win2016_iscsi_connect_to_target.png [new file with mode: 0755]
ceph/doc/images/win2016_iscsi_devices_mpio.png [new file with mode: 0755]
ceph/doc/images/win2016_iscsi_discovery_tab.png [new file with mode: 0755]
ceph/doc/images/win2016_iscsi_target_tab.png [new file with mode: 0755]
ceph/doc/images/win2016_iscsi_target_tab2.png [new file with mode: 0755]
ceph/doc/images/win2016_mpclaim_output.png [new file with mode: 0644]
ceph/doc/images/win2016_mpio_set_failover_only.png [new file with mode: 0755]
ceph/doc/man/8/ceph.rst
ceph/doc/rbd/index.rst
ceph/doc/rbd/iscsi-initiator-esx.rst [new file with mode: 0644]
ceph/doc/rbd/iscsi-initiator-rhel.rst [new file with mode: 0644]
ceph/doc/rbd/iscsi-initiator-win.rst [new file with mode: 0644]
ceph/doc/rbd/iscsi-initiators.rst [new file with mode: 0644]
ceph/doc/rbd/iscsi-monitoring.rst [new file with mode: 0644]
ceph/doc/rbd/iscsi-overview.rst [new file with mode: 0644]
ceph/doc/rbd/iscsi-requirements.rst [new file with mode: 0644]
ceph/doc/rbd/iscsi-target-ansible.rst [new file with mode: 0644]
ceph/doc/rbd/iscsi-target-cli.rst [new file with mode: 0644]
ceph/doc/rbd/iscsi-targets.rst [new file with mode: 0644]
ceph/qa/objectstore/bluestore-bitmap.yaml [new file with mode: 0644]
ceph/qa/objectstore/bluestore-comp.yaml
ceph/qa/objectstore/bluestore.yaml
ceph/qa/run_xfstests.sh [changed mode: 0644->0755]
ceph/qa/run_xfstests_krbd.sh [deleted file]
ceph/qa/standalone/crush/crush-classes.sh
ceph/qa/standalone/mon/osd-pool-df.sh [new file with mode: 0755]
ceph/qa/standalone/mon/test_pool_quota.sh
ceph/qa/suites/fs/basic_functional/tasks/alternate-pool.yaml [new file with mode: 0644]
ceph/qa/suites/fs/basic_functional/tasks/client-limits.yaml
ceph/qa/suites/fs/basic_functional/tasks/data-scan.yaml
ceph/qa/suites/fs/multiclient/tasks/cephfs_misc_tests.yaml
ceph/qa/suites/kcephfs/recovery/tasks/auto-repair.yaml
ceph/qa/suites/kcephfs/recovery/tasks/client-limits.yaml
ceph/qa/suites/kcephfs/recovery/tasks/client-recovery.yaml
ceph/qa/suites/kcephfs/recovery/tasks/data-scan.yaml
ceph/qa/suites/kcephfs/recovery/tasks/failover.yaml
ceph/qa/suites/kcephfs/recovery/whitelist_health.yaml [new symlink]
ceph/qa/suites/kcephfs/thrash/thrashosds-health.yaml [new symlink]
ceph/qa/suites/kcephfs/thrash/whitelist_health.yaml [new symlink]
ceph/qa/suites/krbd/singleton/tasks/rbd_xfstests.yaml
ceph/qa/suites/rados/thrash/d-require-luminous/at-end.yaml
ceph/qa/suites/rbd/cli/pool/ec-data-pool.yaml
ceph/qa/suites/rbd/librbd/pool/ec-data-pool.yaml
ceph/qa/suites/rbd/maintenance/workloads/dynamic_features_no_cache.yaml
ceph/qa/suites/rbd/mirror/cluster/2-node.yaml
ceph/qa/suites/rbd/qemu/pool/ec-data-pool.yaml
ceph/qa/suites/rgw/multisite/overrides.yaml
ceph/qa/suites/upgrade/luminous-x/parallel/% [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/0-cluster/+ [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/0-cluster/openstack.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/0-cluster/start.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/1-ceph-install/luminous.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/2-workload/+ [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/2-workload/blogbench.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/2-workload/ec-rados-default.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/2-workload/rados_api.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/2-workload/rados_loadgenbig.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/2-workload/test_rbd_api.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/2-workload/test_rbd_python.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/3-upgrade-sequence/upgrade-all.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/3-upgrade-sequence/upgrade-mon-osd-mds.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/5-final-workload/+ [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/5-final-workload/blogbench.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/5-final-workload/rados-snaps-few-objects.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/5-final-workload/rados_loadgenmix.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/5-final-workload/rados_mon_thrash.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/5-final-workload/rbd_cls.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/5-final-workload/rbd_import_export.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/5-final-workload/rgw_swift.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/parallel/distros [new symlink]
ceph/qa/suites/upgrade/luminous-x/parallel/objectstore [new symlink]
ceph/qa/suites/upgrade/luminous-x/stress-split-erasure-code/% [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split-erasure-code/0-cluster [new symlink]
ceph/qa/suites/upgrade/luminous-x/stress-split-erasure-code/2-partial-upgrade [new symlink]
ceph/qa/suites/upgrade/luminous-x/stress-split-erasure-code/3-thrash/default.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split-erasure-code/4-ec-workload.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split-erasure-code/5-finish-upgrade.yaml [new symlink]
ceph/qa/suites/upgrade/luminous-x/stress-split-erasure-code/7-final-workload.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split-erasure-code/distros [new symlink]
ceph/qa/suites/upgrade/luminous-x/stress-split-erasure-code/objectstore [new symlink]
ceph/qa/suites/upgrade/luminous-x/stress-split-erasure-code/thrashosds-health.yaml [new symlink]
ceph/qa/suites/upgrade/luminous-x/stress-split/% [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/0-cluster/+ [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/0-cluster/openstack.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/0-cluster/start.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/1-ceph-install/luminous.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/2-partial-upgrade/firsthalf.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/3-thrash/default.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/4-workload/+ [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/4-workload/radosbench.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/4-workload/rbd-cls.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/4-workload/rbd-import-export.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/4-workload/rbd_api.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/4-workload/readwrite.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/4-workload/snaps-few-objects.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/5-finish-upgrade.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/7-final-workload/+ [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/7-final-workload/rbd-python.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/7-final-workload/rgw-swift.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/7-final-workload/snaps-many-objects.yaml [new file with mode: 0644]
ceph/qa/suites/upgrade/luminous-x/stress-split/distros [new symlink]
ceph/qa/suites/upgrade/luminous-x/stress-split/objectstore/bluestore.yaml [new symlink]
ceph/qa/suites/upgrade/luminous-x/stress-split/objectstore/filestore-xfs.yaml [new symlink]
ceph/qa/suites/upgrade/luminous-x/stress-split/thrashosds-health.yaml [new symlink]
ceph/qa/tasks/ceph.py
ceph/qa/tasks/ceph_manager.py
ceph/qa/tasks/ceph_test_case.py
ceph/qa/tasks/cephfs/cephfs_test_case.py
ceph/qa/tasks/cephfs/filesystem.py
ceph/qa/tasks/cephfs/test_client_limits.py
ceph/qa/tasks/cephfs/test_client_recovery.py
ceph/qa/tasks/cephfs/test_data_scan.py
ceph/qa/tasks/cephfs/test_misc.py
ceph/qa/tasks/cephfs/test_recovery_pool.py [new file with mode: 0644]
ceph/qa/tasks/mgr/test_failover.py
ceph/qa/tasks/radosgw_admin.py
ceph/qa/tasks/rbd.py
ceph/qa/tasks/rgw.py
ceph/qa/tasks/util/rados.py
ceph/qa/tasks/vstart_runner.py
ceph/qa/workunits/cephtool/test.sh
ceph/qa/workunits/mon/crush_ops.sh
ceph/qa/workunits/rbd/import_export.sh
ceph/selinux/ceph.te
ceph/src/.git_version
ceph/src/CMakeLists.txt
ceph/src/ceph-volume/ceph_volume/devices/lvm/activate.py
ceph/src/ceph-volume/ceph_volume/devices/lvm/api.py
ceph/src/ceph-volume/ceph_volume/devices/lvm/prepare.py
ceph/src/ceph-volume/ceph_volume/devices/lvm/trigger.py
ceph/src/ceph-volume/ceph_volume/exceptions.py
ceph/src/ceph-volume/ceph_volume/tests/conftest.py
ceph/src/ceph-volume/ceph_volume/tests/devices/lvm/test_activate.py [new file with mode: 0644]
ceph/src/ceph-volume/ceph_volume/tests/devices/lvm/test_api.py
ceph/src/ceph-volume/ceph_volume/tests/devices/lvm/test_trigger.py
ceph/src/ceph-volume/ceph_volume/tests/functional/Vagrantfile
ceph/src/ceph-volume/ceph_volume/tests/functional/centos7/create/group_vars/all
ceph/src/ceph-volume/ceph_volume/tests/functional/tox.ini
ceph/src/ceph-volume/ceph_volume/tests/functional/xenial/create/group_vars/all
ceph/src/ceph-volume/ceph_volume/util/disk.py [new file with mode: 0644]
ceph/src/ceph.in
ceph/src/ceph_release
ceph/src/client/Client.cc
ceph/src/client/Client.h
ceph/src/client/fuse_ll.cc
ceph/src/cls/rbd/cls_rbd.cc
ceph/src/common/Formatter.cc
ceph/src/common/Formatter.h
ceph/src/common/LogClient.h
ceph/src/common/admin_socket.cc
ceph/src/common/cohort_lru.h
ceph/src/common/legacy_config_opts.h
ceph/src/common/options.cc
ceph/src/common/util.cc
ceph/src/crush/CrushWrapper.h
ceph/src/include/alloc_ptr.h [new file with mode: 0644]
ceph/src/include/btree_interval_set.h
ceph/src/include/compact_map.h
ceph/src/include/counter.h
ceph/src/include/encoding.h
ceph/src/include/interval_set.h
ceph/src/include/lru.h
ceph/src/include/mempool.h
ceph/src/include/rados.h
ceph/src/include/util.h
ceph/src/include/xlist.h
ceph/src/librbd/ImageCtx.cc
ceph/src/librbd/ImageCtx.h
ceph/src/librbd/ImageState.cc
ceph/src/librbd/ImageWatcher.cc
ceph/src/librbd/Journal.cc
ceph/src/librbd/image/CreateRequest.cc
ceph/src/librbd/internal.cc
ceph/src/librbd/io/ImageRequestWQ.cc
ceph/src/librbd/operation/ObjectMapIterate.cc
ceph/src/mds/Beacon.cc
ceph/src/mds/CDentry.cc
ceph/src/mds/CDentry.h
ceph/src/mds/CDir.cc
ceph/src/mds/CDir.h
ceph/src/mds/CInode.cc
ceph/src/mds/CInode.h
ceph/src/mds/FSMap.cc
ceph/src/mds/Locker.cc
ceph/src/mds/MDCache.cc
ceph/src/mds/MDCache.h
ceph/src/mds/MDLog.cc
ceph/src/mds/MDSCacheObject.cc
ceph/src/mds/MDSCacheObject.h
ceph/src/mds/MDSDaemon.cc
ceph/src/mds/MDSRank.cc
ceph/src/mds/Migrator.cc
ceph/src/mds/Server.cc
ceph/src/mds/Server.h
ceph/src/mds/SessionMap.cc
ceph/src/mds/SessionMap.h
ceph/src/mds/SimpleLock.h
ceph/src/messages/MOSDPGRecoveryDelete.h
ceph/src/messages/MOSDPGRecoveryDeleteReply.h
ceph/src/mgr/PyFormatter.h
ceph/src/mgr/PyState.cc
ceph/src/mon/CreatingPGs.h
ceph/src/mon/MDSMonitor.cc
ceph/src/mon/MgrMonitor.cc
ceph/src/mon/MgrMonitor.h
ceph/src/mon/MgrStatMonitor.h
ceph/src/mon/MonCommands.h
ceph/src/mon/MonOpRequest.h
ceph/src/mon/Monitor.cc
ceph/src/mon/Monitor.h
ceph/src/mon/OSDMonitor.cc
ceph/src/mon/PGMap.cc
ceph/src/mon/PGMap.h
ceph/src/os/bluestore/BlueFS.cc
ceph/src/os/bluestore/BlueStore.cc
ceph/src/os/bluestore/BlueStore.h
ceph/src/os/bluestore/StupidAllocator.cc
ceph/src/os/bluestore/StupidAllocator.h
ceph/src/os/bluestore/aio.cc
ceph/src/os/bluestore/bluestore_types.cc
ceph/src/osd/OSD.cc
ceph/src/osd/OSD.h
ceph/src/osd/OSDMap.cc
ceph/src/osd/PG.cc
ceph/src/osd/PGLog.cc
ceph/src/osd/PGLog.h
ceph/src/osd/PrimaryLogPG.cc
ceph/src/osd/osd_types.h
ceph/src/pybind/mgr/dashboard/base.html
ceph/src/pybind/mgr/dashboard/health.html
ceph/src/pybind/mgr/dashboard/module.py
ceph/src/rgw/rgw_admin.cc
ceph/src/rgw/rgw_asio_frontend.cc
ceph/src/rgw/rgw_bucket.cc
ceph/src/rgw/rgw_civetweb_frontend.cc
ceph/src/rgw/rgw_client_io.h
ceph/src/rgw/rgw_client_io_filters.h
ceph/src/rgw/rgw_common.h
ceph/src/rgw/rgw_crypt.cc
ceph/src/rgw/rgw_data_sync.cc
ceph/src/rgw/rgw_fcgi_process.cc
ceph/src/rgw/rgw_formats.h
ceph/src/rgw/rgw_lc.cc
ceph/src/rgw/rgw_lc.h
ceph/src/rgw/rgw_lc_s3.cc
ceph/src/rgw/rgw_lc_s3.h
ceph/src/rgw/rgw_loadgen_process.cc
ceph/src/rgw/rgw_log.cc
ceph/src/rgw/rgw_metadata.cc
ceph/src/rgw/rgw_metadata.h
ceph/src/rgw/rgw_op.cc
ceph/src/rgw/rgw_op.h
ceph/src/rgw/rgw_quota.cc
ceph/src/rgw/rgw_rados.cc
ceph/src/rgw/rgw_rados.h
ceph/src/rgw/rgw_rest_client.cc
ceph/src/rgw/rgw_rest_conn.cc
ceph/src/rgw/rgw_rest_conn.h
ceph/src/rgw/rgw_rest_metadata.cc
ceph/src/rgw/rgw_rest_s3.cc
ceph/src/rgw/rgw_sync.cc
ceph/src/rgw/rgw_user.cc
ceph/src/rocksdb/CMakeLists.txt
ceph/src/rocksdb/util/crc32c.cc
ceph/src/test/cli-integration/rbd/formatted-output.t
ceph/src/test/cli/rbd/help.t
ceph/src/test/cls_rbd/test_cls_rbd.cc
ceph/src/test/common/test_lru.cc
ceph/src/test/libcephfs/test.cc
ceph/src/test/librbd/journal/test_Entries.cc
ceph/src/test/librbd/journal/test_Replay.cc
ceph/src/test/librbd/managed_lock/test_mock_AcquireRequest.cc
ceph/src/test/librbd/mock/MockImageCtx.h
ceph/src/test/librbd/test_MirroringWatcher.cc
ceph/src/test/librbd/test_internal.cc
ceph/src/test/mon/PGMap.cc
ceph/src/test/osd/TestPGLog.cc
ceph/src/test/rbd_mirror/test_mock_LeaderWatcher.cc
ceph/src/test/rgw/rgw_multi/tests.py
ceph/src/tools/ceph_objectstore_tool.cc
ceph/src/tools/rbd/ArgumentTypes.cc
ceph/src/tools/rbd/Utils.cc
ceph/src/tools/rbd/action/DiskUsage.cc
ceph/src/tools/rbd/action/Export.cc
ceph/src/tools/rbd/action/ImageMeta.cc
ceph/src/tools/rbd/action/Import.cc
ceph/src/tools/rbd/action/Kernel.cc
ceph/src/tools/rbd/action/List.cc
ceph/src/tools/rbd/action/MirrorPool.cc
ceph/src/tools/rbd_mirror/ImageDeleter.cc
ceph/src/tools/rbd_mirror/ImageReplayer.cc
ceph/src/tools/rbd_mirror/ImageSyncThrottler.cc
ceph/src/tools/rbd_mirror/InstanceReplayer.cc
ceph/src/tools/rbd_mirror/InstanceWatcher.cc
ceph/src/tools/rbd_mirror/Instances.cc
ceph/src/tools/rbd_mirror/LeaderWatcher.cc
ceph/src/tools/rbd_mirror/Mirror.cc
ceph/src/tools/rbd_mirror/PoolReplayer.cc
ceph/src/tools/rbd_mirror/Threads.cc
ceph/src/tools/rbd_mirror/image_sync/ImageCopyRequest.cc
ceph/src/tools/rbd_nbd/rbd-nbd.cc