]>
git.proxmox.com Git - ceph.git/blob - ceph/qa/workunits/rgw/test_rgw_datacache.py
import json
import logging as log
import os
import subprocess

from configobj import ConfigObj
10 Runs a test against a rgw with the data cache enabled. A client must be
11 set in the config for this task. This client must be the same client
12 that is in the config for the `rgw` task.
14 In the `overrides` section `datacache` and `datacache_path` must be configured for
15 the `rgw` task and the ceph conf overrides must contain the below config
16 variables in the client section.
18 `s3cmd` must be added as an extra_package to the install task.
20 In the `workunit` task, `- rgw/run-datacache.sh` must be set for the client that
21 is in the config for the `rgw` task. The `RGW_DATACACHE_PATH` variable must be
22 set in the workunit's `env` and it must match the `datacache_path` given to the
23 `rgw` task in `overrides`.
32 datacache_path: /tmp/rgw_datacache
40 rgw d3n l1 datacache persistent path: /tmp/rgw_datacache/
41 rgw d3n l1 datacache size: 10737417240
42 rgw d3n l1 local datacache enabled: true
43 rgw enable ops log: true
49 - rgw/run-datacache.sh
51 RGW_DATACACHE_PATH: /tmp/rgw_datacache
log.basicConfig(level=log.DEBUG)

# Constants used throughout the datacache test.
USER = 'rgw_datacache_user'
DISPLAY_NAME = 'DatacacheUser'
ACCESS_KEY = 'NX5QOQKC6BH2IDN8HC7A'
SECRET_KEY = 'LnEsqNNqZIpkzauboDcLXLcYaWwLQ3Kop0zAnKIn'
BUCKET_NAME = 'datacachebucket'
# Name of the 7M object written with dd and uploaded via s3cmd.
# NOTE(review): this constant is referenced by main() but its definition was
# missing from this copy of the file; restored — confirm against upstream.
FILE_NAME = '7M.dat'
# Local name used when the object is fetched back from the bucket.
GET_FILE_NAME = '7M-get.dat'
def exec_cmd(cmd):
    """Run *cmd* through a shell and return its stdout as bytes.

    :param cmd: shell command line (may contain pipes/redirection)
    :returns: captured stdout bytes on success
    :raises Exception: if the command exits non-zero (stderr and the
        returncode are included in the message), or if launching it fails.
    """
    log.debug("exec_cmd(%s)", cmd)
    try:
        # shell=True is required: callers pass full pipelines (e.g. netstat | egrep).
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE, shell=True)
        out, err = proc.communicate()
        if proc.returncode == 0:
            log.info('command succeeded')
            if out is not None:
                log.info(out)
            return out
        raise Exception("error: %s \nreturncode: %s" % (err, proc.returncode))
    except Exception as e:
        # Log for the teuthology log, then re-raise so the workunit fails
        # loudly here instead of crashing later on a bogus return value.
        log.error('command failed')
        log.error(e)
        raise
def get_radosgw_endpoint():
    """Discover the local radosgw listening endpoint.

    Greps the listening TCP sockets for a radosgw (possibly running under
    valgrind) process and extracts its port.

    :returns: tuple (endpoint, proto) where endpoint is '127.0.0.1:<port>'
        and proto is 'http' or 'https'.
    """
    out = exec_cmd('sudo netstat -nltp | egrep "rados|valgr"')  # short for radosgw/valgrind
    fields = out.decode('utf8').split(" ")
    # first whitespace-separated field containing ':' is the bind address
    port = [f for f in fields if ':' in f][0].split(':')[1]
    log.info('radosgw port: %s' % port)

    hostname = '127.0.0.1'
    # NOTE(review): proto inferred from the port — TODO confirm 443 is the
    # only https port configuration used by this task.
    proto = "https" if port == '443' else "http"

    endpoint = hostname + ':' + port
    log.info("radosgw endpoint is: %s", endpoint)
    return endpoint, proto
def create_s3cmd_config(path, proto):
    """
    Creates a minimal config file for s3cmd

    :param path: filesystem path to write the config to
    :param proto: 'http' or 'https' (as returned by get_radosgw_endpoint)
    """
    log.info("Creating s3cmd config...")

    use_https_config = "False"
    log.info("proto for s3cmd config is %s", proto)
    # compare against the exact value get_radosgw_endpoint() returns
    if proto == "https":
        use_https_config = "True"

    s3cmd_config = ConfigObj(
        indent_type='',
        infile={
            'default':
                {
                # host_bucket must be a non-resolvable dummy so s3cmd uses
                # path-style addressing against the endpoint we pass on the CLI
                'host_bucket': 'no.way.in.hell',
                'use_https': use_https_config,
                },
            }
        )

    # context manager guarantees the file is closed even if write() fails
    with open(path, 'wb') as f:
        s3cmd_config.write(f)
    log.info("s3cmd config written")
def get_cmd_output(cmd_out):
    """Decode command output bytes to a str with surrounding newlines removed.

    :param cmd_out: bytes as returned by exec_cmd
    :returns: decoded, newline-stripped string
    """
    return cmd_out.decode('utf8').strip('\n')
def main():
    """
    execute the datacache test
    """
    # the workunit's env must export RGW_DATACACHE_PATH, matching the
    # datacache_path given to the rgw task in overrides
    cache_dir = os.environ['RGW_DATACACHE_PATH']
    log.debug("datacache dir from config is: %s", cache_dir)

    out = exec_cmd('pwd')
    pwd = get_cmd_output(out)
    log.debug("pwd is: %s", pwd)

    endpoint, proto = get_radosgw_endpoint()

    # create 7M file to put
    outfile = pwd + '/' + FILE_NAME
    exec_cmd('dd if=/dev/urandom of=%s bs=1M count=7' % (outfile))

    # create the test user
    exec_cmd('radosgw-admin user create --uid %s --display-name %s --access-key %s --secret %s'
             % (USER, DISPLAY_NAME, ACCESS_KEY, SECRET_KEY))

    # create s3cmd config
    s3cmd_config_path = pwd + '/s3cfg'
    create_s3cmd_config(s3cmd_config_path, proto)

    # create a bucket
    exec_cmd('s3cmd --access_key=%s --secret_key=%s --config=%s --no-check-hostname --host=%s mb s3://%s'
             % (ACCESS_KEY, SECRET_KEY, s3cmd_config_path, endpoint, BUCKET_NAME))

    # put an object in the bucket
    exec_cmd('s3cmd --access_key=%s --secret_key=%s --config=%s --no-check-hostname --host=%s put %s s3://%s'
             % (ACCESS_KEY, SECRET_KEY, s3cmd_config_path, endpoint, outfile, BUCKET_NAME))

    # get object from bucket
    get_file_path = pwd + '/' + GET_FILE_NAME
    exec_cmd('s3cmd --access_key=%s --secret_key=%s --config=%s --no-check-hostname --host=%s get s3://%s/%s %s --force'
             % (ACCESS_KEY, SECRET_KEY, s3cmd_config_path, endpoint, BUCKET_NAME, FILE_NAME, get_file_path))

    # stat the object to learn the manifest prefix it is cached under
    out = exec_cmd('radosgw-admin object stat --bucket=%s --object=%s' % (BUCKET_NAME, FILE_NAME))
    json_op = json.loads(out)
    cached_object_name = json_op['manifest']['prefix']
    log.debug("Cached object name is: %s", cached_object_name)

    # check that the cache is enabled (is the cache directory non-empty?)
    out = exec_cmd('find %s -type f | wc -l' % (cache_dir))
    chk_cache_dir = int(get_cmd_output(out))
    log.debug("Check cache dir content: %s", chk_cache_dir)
    if chk_cache_dir == 0:
        log.info("NOTICE: datacache test object not found, inspect if datacache was bypassed or disabled during this check.")
        return

    # list the files in the cache dir for troubleshooting
    out = exec_cmd('ls -l %s' % (cache_dir))
    # get name of cached object and check if it exists in the cache
    out = exec_cmd('find %s -name "*%s1"' % (cache_dir, cached_object_name))
    cached_object_path = get_cmd_output(out)
    log.debug("Path of file in datacache is: %s", cached_object_path)
    out = exec_cmd('basename %s' % (cached_object_path))
    basename_cmd_out = get_cmd_output(out)
    log.debug("Name of file in datacache is: %s", basename_cmd_out)

    # check to see if the cached object is in Ceph
    out = exec_cmd('rados ls -p default.rgw.buckets.data')
    rados_ls_out = get_cmd_output(out)
    log.debug("rados ls output is: %s", rados_ls_out)

    assert(basename_cmd_out in rados_ls_out)
    log.debug("RGW Datacache test SUCCESS")

    # remove datacache dir
    #cmd = exec_cmd('rm -rf %s' % (cache_dir))
    #log.debug("RGW Datacache dir deleted")
    #^ commenting for future reference - the work unit will continue running tests and if the cache_dir is removed
    # all the writes to cache will fail with errno 2 ENOENT No such file or directory.


main()
log.info("Completed Datacache tests")