bucket-name: 's3atest' (default)
access-key: 'anykey' (uses a default value)
secret-key: 'secretkey' ( uses a default value)
+ dnsmasq-name: 's3.ceph.com'
"""
if config is None:
config = {}
hadoop_ver = config.get('hadoop-version', '2.7.3')
bucket_name = config.get('bucket-name', 's3atest')
access_key = config.get('access-key', 'EGAQRD2ULOIFKFSKCT4F')
+ dnsmasq_name = config.get('dnsmasq-name', 's3.ceph.com')
secret_key = config.get(
'secret-key',
'zi816w1vZKfaSM85Cl0BxXTwSLyN7zB4RbTswrGb')
run.Raw(hadoop_rel)
]
)
- dnsmasq_name = 's3.ceph.com'
configure_s3a(rgw_node, dnsmasq_name, access_key, secret_key, bucket_name, testdir)
- setup_dnsmasq(rgw_node, dnsmasq_name)
fix_rgw_config(rgw_node, dnsmasq_name)
setup_user_bucket(rgw_node, dnsmasq_name, access_key, secret_key, bucket_name, testdir)
if hadoop_ver.startswith('2.8'):
# test all ITtests but skip AWS test using public bucket landsat-pds
# which is not available from within this test
- test_options = '-Dit.test=ITestS3A* -Dit.test=\!ITestS3AAWSCredentialsProvider* -Dparallel-tests -Dscale -Dfs.s3a.scale.test.huge.filesize=128M verify'
+ test_options = '-Dit.test=ITestS3A* -Dparallel-tests -Dscale \
+ -Dfs.s3a.scale.test.timeout=1200 \
+ -Dfs.s3a.scale.test.huge.filesize=256M verify'
else:
test_options = 'test -Dtest=S3a*,TestS3A*'
try:
log.info("Done s3a testing, Cleaning up")
for fil in ['apache*', 'hadoop*', 'venv*', 'create*']:
rgw_node.run(args=['rm', run.Raw('-rf'), run.Raw('{tdir}/{file}'.format(tdir=testdir, file=fil))])
- # restart and let NM restore original config
- rgw_node.run(args=['sudo', 'systemctl', 'stop', 'dnsmasq'])
- rgw_node.run(args=['sudo', 'systemctl', 'restart', 'network.service'], check_status=False)
- rgw_node.run(args=['sudo', 'systemctl', 'status', 'network.service'], check_status=False)
def install_prereq(client):
)
-def setup_dnsmasq(client, name):
- """
- Setup simple dnsmasq name eg: s3.ceph.com
- Local RGW host can then be used with whatever name has been setup with.
- """
- resolv_conf = "nameserver 127.0.0.1\n"
- dnsmasq_template = """address=/{name}/{ip_address}
-server=8.8.8.8
-server=8.8.4.4
-""".format(name=name, ip_address=client.ip_address)
- dnsmasq_config_path = '/etc/dnsmasq.d/ceph'
- # point resolv.conf to local dnsmasq
- misc.sudo_write_file(
- remote=client,
- path='/etc/resolv.conf',
- data=resolv_conf,
- )
- misc.sudo_write_file(
- remote=client,
- path=dnsmasq_config_path,
- data=dnsmasq_template,
- )
- client.run(args=['cat', dnsmasq_config_path])
- # restart dnsmasq
- client.run(args=['sudo', 'systemctl', 'restart', 'dnsmasq'])
- client.run(args=['sudo', 'systemctl', 'status', 'dnsmasq'])
- time.sleep(5)
- # verify dns name is set
- client.run(args=['ping', '-c', '4', name])
-
-
def fix_rgw_config(client, name):
"""
Fix RGW config in ceph.conf, we need rgw dns name entry
client.run(args=['cat', ceph_conf_path])
client.run(args=['sudo', 'systemctl', 'restart', 'ceph-radosgw.target'])
client.run(args=['sudo', 'systemctl', 'status', 'ceph-radosgw.target'])
+    # sleep to let the daemon come fully up before creating the admin user
+ time.sleep(10)
def setup_user_bucket(client, dns_name, access_key, secret_key, bucket_name, testdir):
"""
aws_testdir = '{testdir}/hadoop/hadoop-tools/hadoop-aws/'.format(testdir=testdir)
run_test = '{testdir}/apache-maven-{maven_version}/bin/mvn'.format(testdir=testdir, maven_version=maven_version)
+    # Remove the AWS CredentialsProvider test, as it hits a public bucket on AWS;
+    # a better solution is to create the public bucket on the local server and test against that
+ rm_test = 'rm src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java'
client.run(
args=[
'cd',
run.Raw(aws_testdir),
run.Raw('&&'),
+ run.Raw(rm_test),
+ run.Raw('&&'),
run.Raw(run_test),
run.Raw(test_options)
]
<value>{name}</value>
</property>
+<property>
+<name>fs.contract.test.fs.s3a</name>
+<value>s3a://{bucket_name}/</value>
+</property>
+
<property>
<name>fs.s3a.connection.ssl.enabled</name>
<value>false</value>