]> git.proxmox.com Git - ceph.git/blob - ceph/qa/workunits/rgw/test_rgw_datacache.py
update ceph source to reef 18.1.2
[ceph.git] / ceph / qa / workunits / rgw / test_rgw_datacache.py
1 #!/usr/bin/python3
2
3 import logging as log
4 from configobj import ConfigObj
5 import subprocess
6 import json
7 import os
8
9 """
10 Runs a test against a rgw with the data cache enabled. A client must be
11 set in the config for this task. This client must be the same client
12 that is in the config for the `rgw` task.
13
In the `overrides` section `datacache` and `datacache_path` must be configured for
15 the `rgw` task and the ceph conf overrides must contain the below config
16 variables in the client section.
17
18 `s3cmd` must be added as an extra_package to the install task.
19
20 In the `workunit` task, `- rgw/run-datacache.sh` must be set for the client that
21 is in the config for the `rgw` task. The `RGW_DATACACHE_PATH` variable must be
22 set in the workunit's `env` and it must match the `datacache_path` given to the
23 `rgw` task in `overrides`.
24 Ex:
25 - install:
26 extra_packages:
27 deb: ['s3cmd']
28 rpm: ['s3cmd']
29 - overrides:
30 rgw:
31 datacache: true
32 datacache_path: /tmp/rgw_datacache
33 install:
34 extra_packages:
35 deb: ['s3cmd']
36 rpm: ['s3cmd']
37 ceph:
38 conf:
39 client:
40 rgw d3n l1 datacache persistent path: /tmp/rgw_datacache/
41 rgw d3n l1 datacache size: 10737417240
42 rgw d3n l1 local datacache enabled: true
43 rgw enable ops log: true
44 - rgw:
45 client.0:
46 - workunit:
47 clients:
48 client.0:
49 - rgw/run-datacache.sh
50 env:
51 RGW_DATACACHE_PATH: /tmp/rgw_datacache
52 cleanup: true
53 """
54
# emit all levels so the teuthology workunit log captures every command run
log.basicConfig(level=log.DEBUG)

""" Constants """
# fixed, non-sensitive credentials for a throwaway test user created below
USER = 'rgw_datacache_user'
DISPLAY_NAME = 'DatacacheUser'
ACCESS_KEY = 'NX5QOQKC6BH2IDN8HC7A'
SECRET_KEY = 'LnEsqNNqZIpkzauboDcLXLcYaWwLQ3Kop0zAnKIn'
BUCKET_NAME = 'datacachebucket'
FILE_NAME = '7M.dat'          # 7M random file uploaded through s3cmd
GET_FILE_NAME = '7M-get.dat'  # local name the object is downloaded back to
65
def exec_cmd(cmd):
    """Run *cmd* through the shell and return its stdout as bytes.

    On a non-zero exit status (or any other failure) the error is logged
    and ``False`` is returned instead of raising to the caller.
    """
    log.debug("exec_cmd(%s)", cmd)
    try:
        completed = subprocess.run(cmd, shell=True,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        if completed.returncode != 0:
            raise Exception("error: %s \nreturncode: %s"
                            % (completed.stderr, completed.returncode))
        log.info('command succeeded')
        if completed.stdout is not None:
            log.info(completed.stdout)
        return completed.stdout
    except Exception as e:
        log.error('command failed')
        log.error(e)
        return False
81
def get_radosgw_endpoint():
    """Return ``(endpoint, proto)`` for the locally running radosgw."""
    # "rados" matches radosgw directly; "valgr" matches it under valgrind
    listing = exec_cmd('sudo netstat -nltp | egrep "rados|valgr"')
    fields = listing.decode('utf8').split(" ")
    # first whitespace-separated field containing ':' is the listen address
    port = [f for f in fields if ':' in f][0].split(':')[1]
    log.info('radosgw port: %s' % port)

    proto = "https" if port == '443' else "http"
    hostname = '127.0.0.1'

    # NOTE: the endpoint deliberately omits the port; s3cmd --host gets
    # only the hostname while proto is returned separately
    endpoint = hostname

    log.info("radosgw endpoint is: %s", endpoint)
    return endpoint, proto
97
def create_s3cmd_config(path, proto):
    """
    Create a minimal config file for s3cmd at *path*.

    ``use_https`` is set to "True" only when *proto* is "https";
    everything else is left at s3cmd's defaults.  ``host_bucket`` is a
    dummy value because virtual-host-style bucket addressing is unused.
    """
    log.info("Creating s3cmd config...")

    use_https_config = "False"
    log.info("proto for s3cmd config is %s", proto)
    if proto == "https":
        use_https_config = "True"

    s3cmd_config = ConfigObj(
        indent_type='',
        infile={
            'default':
                {
                'host_bucket': 'no.way.in.hell',
                'use_https': use_https_config,
                },
            }
        )

    # context manager ensures the handle is closed even if write() raises
    with open(path, 'wb') as f:
        s3cmd_config.write(f)
    log.info("s3cmd config written")
124
def get_cmd_output(cmd_out):
    """Decode *cmd_out* (bytes) as UTF-8 and strip surrounding newlines."""
    return cmd_out.decode('utf8').strip('\n')
129
def main():
    """
    Execute the datacache test.

    Uploads a 7M object through s3cmd, downloads it back, then verifies
    that a corresponding D3N cache file appeared under RGW_DATACACHE_PATH
    and that its basename matches an object in the rados data pool.
    """
    # setup for test
    cache_dir = os.environ['RGW_DATACACHE_PATH']
    log.debug("datacache dir from config is: %s", cache_dir)

    out = exec_cmd('pwd')
    pwd = get_cmd_output(out)
    log.debug("pwd is: %s", pwd)

    endpoint, proto = get_radosgw_endpoint()

    # create 7M file to put
    outfile = pwd + '/' + FILE_NAME
    exec_cmd('dd if=/dev/urandom of=%s bs=1M count=7' % (outfile))

    # create user
    exec_cmd('radosgw-admin user create --uid %s --display-name %s --access-key %s --secret %s'
             % (USER, DISPLAY_NAME, ACCESS_KEY, SECRET_KEY))

    # create s3cmd config
    s3cmd_config_path = pwd + '/s3cfg'
    create_s3cmd_config(s3cmd_config_path, proto)

    # create a bucket
    exec_cmd('s3cmd --access_key=%s --secret_key=%s --config=%s --no-check-hostname --host=%s mb s3://%s'
             % (ACCESS_KEY, SECRET_KEY, s3cmd_config_path, endpoint, BUCKET_NAME))

    # put an object in the bucket
    exec_cmd('s3cmd --access_key=%s --secret_key=%s --config=%s --no-check-hostname --host=%s put %s s3://%s'
             % (ACCESS_KEY, SECRET_KEY, s3cmd_config_path, endpoint, outfile, BUCKET_NAME))

    # get object from bucket; reading it back is what populates the datacache
    get_file_path = pwd + '/' + GET_FILE_NAME
    exec_cmd('s3cmd --access_key=%s --secret_key=%s --config=%s --no-check-hostname --host=%s get s3://%s/%s %s --force'
             % (ACCESS_KEY, SECRET_KEY, s3cmd_config_path, endpoint, BUCKET_NAME, FILE_NAME, get_file_path))

    # get info of object; the manifest prefix is the name stem of the
    # file RGW writes into the cache directory
    out = exec_cmd('radosgw-admin object stat --bucket=%s --object=%s' % (BUCKET_NAME, FILE_NAME))

    json_op = json.loads(out)
    cached_object_name = json_op['manifest']['prefix']
    log.debug("Cached object name is: %s", cached_object_name)

    # check that the cache is enabled (is the cache directory empty?)
    out = exec_cmd('find %s -type f | wc -l' % (cache_dir))
    chk_cache_dir = int(get_cmd_output(out))
    log.debug("Check cache dir content: %s", chk_cache_dir)
    if chk_cache_dir == 0:
        log.info("NOTICE: datacache test object not found, inspect if datacache was bypassed or disabled during this check.")
        return

    # list the files in the cache dir for troubleshooting
    out = exec_cmd('ls -l %s' % (cache_dir))
    # get name of cached object and check if it exists in the cache
    # NOTE(review): the trailing "1" in the glob appears to select the first
    # stripe of the cached object -- confirm against the D3N cache naming scheme
    out = exec_cmd('find %s -name "*%s1"' % (cache_dir, cached_object_name))
    cached_object_path = get_cmd_output(out)
    log.debug("Path of file in datacache is: %s", cached_object_path)
    out = exec_cmd('basename %s' % (cached_object_path))
    basename_cmd_out = get_cmd_output(out)
    log.debug("Name of file in datacache is: %s", basename_cmd_out)

    # check to see if the cached object is in Ceph: the cache file's
    # basename must match one of the rados objects in the data pool
    out = exec_cmd('rados ls -p default.rgw.buckets.data')
    rados_ls_out = get_cmd_output(out)
    log.debug("rados ls output is: %s", rados_ls_out)

    assert(basename_cmd_out in rados_ls_out)
    log.debug("RGW Datacache test SUCCESS")

    # the datacache dir is deliberately NOT removed:
    #cmd = exec_cmd('rm -rf %s' % (cache_dir))
    #log.debug("RGW Datacache dir deleted")
    #^ kept commented out for future reference - the workunit will continue
    # running tests, and if the cache_dir is removed all subsequent writes to
    # the cache fail with errno 2 ENOENT No such file or directory.
207
# run the test only when executed as a script, so importing this module
# (e.g. from other tooling or tests) has no side effects
if __name__ == "__main__":
    main()
    log.info("Completed Datacache tests")