summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--gluster/swift/__init__.py2
-rw-r--r--gluster/swift/account/utils.py17
-rw-r--r--gluster/swift/common/DiskDir.py20
-rw-r--r--gluster/swift/obj/diskfile.py2
-rw-r--r--glusterfs-openstack-swift.spec13
-rw-r--r--requirements.txt2
-rw-r--r--test-requirements.txt14
-rw-r--r--test/__init__.py35
-rw-r--r--test/functional/__init__.py122
-rw-r--r--test/functional/swift_test_client.py77
-rwxr-xr-xtest/functional/test_account.py78
-rwxr-xr-xtest/functional/test_container.py5
-rwxr-xr-xtest/functional/test_object.py13
-rw-r--r--test/functional/tests.py2840
-rw-r--r--test/object_expirer_functional/test_object_expirer_gluster_swift.py4
-rw-r--r--test/unit/__init__.py244
-rw-r--r--test/unit/common/test_diskdir.py2
-rw-r--r--test/unit/obj/test_expirer.py4
-rw-r--r--test/unit/proxy/controllers/test_base.py35
-rw-r--r--tox.ini2
20 files changed, 749 insertions, 2782 deletions
diff --git a/gluster/swift/__init__.py b/gluster/swift/__init__.py
index ac0c566..4de6cfa 100644
--- a/gluster/swift/__init__.py
+++ b/gluster/swift/__init__.py
@@ -45,6 +45,6 @@ class PkgInfo(object):
#
# Change the Package version here
#
-_pkginfo = PkgInfo('2.10.1', '0', 'gluster_swift', False)
+_pkginfo = PkgInfo('2.15.1', '0', 'gluster_swift', False)
__version__ = _pkginfo.pretty_version
__canonical_version__ = _pkginfo.canonical_version
diff --git a/gluster/swift/account/utils.py b/gluster/swift/account/utils.py
index 4424835..24fb7df 100644
--- a/gluster/swift/account/utils.py
+++ b/gluster/swift/account/utils.py
@@ -15,7 +15,7 @@
from swift.account.utils import FakeAccountBroker, get_response_headers
from swift.common.swob import HTTPOk, HTTPNoContent
-from swift.common.utils import json
+from swift.common.utils import json, Timestamp
from xml.sax import saxutils
@@ -37,24 +37,29 @@ def account_listing_response(account, req, response_content_type, broker=None,
response_content_type, reverse)
if response_content_type == 'application/json':
data = []
- for (name, object_count, bytes_used, is_subdir) in account_list:
+ for (name, object_count, bytes_used, put_tstamp,
+ is_subdir) in account_list:
if is_subdir:
data.append({'subdir': name})
else:
data.append({'name': name, 'count': object_count,
- 'bytes': bytes_used})
+ 'bytes': bytes_used,
+ 'last_modified': Timestamp(put_tstamp).isoformat})
account_list = json.dumps(data)
elif response_content_type.endswith('/xml'):
output_list = ['<?xml version="1.0" encoding="UTF-8"?>',
'<account name=%s>' % saxutils.quoteattr(account)]
- for (name, object_count, bytes_used, is_subdir) in account_list:
+ for (name, object_count, bytes_used, put_tstamp,
+ is_subdir) in account_list:
if is_subdir:
output_list.append(
'<subdir name=%s />' % saxutils.quoteattr(name))
else:
item = '<container><name>%s</name><count>%s</count>' \
- '<bytes>%s</bytes></container>' % \
- (saxutils.escape(name), object_count, bytes_used)
+ '<bytes>%s</bytes><last_modified>%s</last_modified> \
+ </container>' % \
+ (saxutils.escape(name), object_count, bytes_used,
+ Timestamp(put_tstamp).isoformat)
output_list.append(item)
output_list.append('</account>')
account_list = '\n'.join(output_list)
diff --git a/gluster/swift/common/DiskDir.py b/gluster/swift/common/DiskDir.py
index 0bc95df..bda88aa 100644
--- a/gluster/swift/common/DiskDir.py
+++ b/gluster/swift/common/DiskDir.py
@@ -37,7 +37,7 @@ from gluster.swift.common.utils import ThreadPool
DATADIR = 'containers'
-
+# dict to hold the mapping of delimiter-selected object names to real object names
# Create a dummy db_file in Glusterfs.RUN_DIR
_db_file = ""
@@ -405,7 +405,6 @@ class DiskDir(DiskCommon):
"""
assert limit >= 0
assert not delimiter or (len(delimiter) == 1 and ord(delimiter) <= 254)
-
if path is not None:
if path:
prefix = path = path.rstrip('/') + '/'
@@ -507,7 +506,9 @@ class DiskDir(DiskCommon):
and not dir_is_object(metadata):
continue
list_item = []
+
list_item.append(obj)
+
if metadata:
list_item.append(metadata[X_TIMESTAMP])
list_item.append(int(metadata[X_CONTENT_LENGTH]))
@@ -551,7 +552,6 @@ class DiskDir(DiskCommon):
if self._dir_exists and Glusterfs._container_update_object_count and \
self.account != 'gsexpiring':
self._update_object_count()
-
data = {'account': self.account, 'container': self.container,
'object_count': self.metadata.get(
X_OBJECTS_COUNT, ('0', 0))[0],
@@ -589,6 +589,7 @@ class DiskDir(DiskCommon):
do_chown(self.datadir, self.uid, self.gid)
metadata = get_container_metadata(self.datadir)
metadata[X_TIMESTAMP] = (timestamp, 0)
+ metadata[X_PUT_TIMESTAMP] = (timestamp, 0)
write_metadata(self.datadir, metadata)
self.metadata = metadata
self._dir_exists = True
@@ -606,7 +607,8 @@ class DiskDir(DiskCommon):
if not do_exists(self.datadir):
self.initialize(timestamp)
else:
- if timestamp > self.metadata[X_PUT_TIMESTAMP]:
+ existing_timestamp = self.metadata[X_PUT_TIMESTAMP][0]
+ if timestamp > existing_timestamp:
self.metadata[X_PUT_TIMESTAMP] = (timestamp, 0)
write_metadata(self.datadir, self.metadata)
@@ -845,10 +847,11 @@ class DiskAccount(DiskCommon):
# the following ordered fields:
# (name, object_count, bytes_used, is_subdir)
for container in containers:
- # When response_content_type == 'text/plain', Swift will only
- # consume the name of the container (first element of tuple).
- # Refer: swift.account.utils.account_listing_response()
- account_list.append((container, 0, 0, 0))
+ # When response_content_type == 'text/plain', Swift will
+ # only consume the name of the container (first element of
+            # tuple). Refer:
+ # swift.account.utils.account_listing_response()
+ account_list.append((container, 0, 0, 0, 0))
if len(account_list) >= limit:
break
if reverse:
@@ -873,6 +876,7 @@ class DiskAccount(DiskCommon):
if metadata:
list_item.append(metadata[X_OBJECTS_COUNT][0])
list_item.append(metadata[X_BYTES_USED][0])
+ list_item.append(metadata[X_PUT_TIMESTAMP][0])
list_item.append(0)
account_list.append(list_item)
count += 1
diff --git a/gluster/swift/obj/diskfile.py b/gluster/swift/obj/diskfile.py
index be0669f..7ad5f26 100644
--- a/gluster/swift/obj/diskfile.py
+++ b/gluster/swift/obj/diskfile.py
@@ -1081,7 +1081,7 @@ class DiskFile(object):
X_OBJECT_TYPE, X_TYPE]
for key in sys_keys:
- if key in orig_metadata:
+ if key in orig_metadata and key not in metadata:
metadata[key] = orig_metadata[key]
if X_OBJECT_TYPE not in orig_metadata:
diff --git a/glusterfs-openstack-swift.spec b/glusterfs-openstack-swift.spec
index 7a87141..0affb81 100644
--- a/glusterfs-openstack-swift.spec
+++ b/glusterfs-openstack-swift.spec
@@ -24,11 +24,11 @@ Requires : memcached
Requires : openssl
Requires : python
Requires : python-prettytable
-Requires : openstack-swift = 2.10.1
-Requires : openstack-swift-account = 2.10.1
-Requires : openstack-swift-container = 2.10.1
-Requires : openstack-swift-object = 2.10.1
-Requires : openstack-swift-proxy = 2.10.1
+Requires : openstack-swift = 2.15.1
+Requires : openstack-swift-account = 2.15.1
+Requires : openstack-swift-container = 2.15.1
+Requires : openstack-swift-object = 2.15.1
+Requires : openstack-swift-proxy = 2.15.1
# gluster-swift has no hard-dependency on particular version of glusterfs
# so don't bump this up unless you want to force users to upgrade their
# glusterfs deployment
@@ -102,6 +102,9 @@ done
%config(noreplace) %{_confdir}/object-expirer.conf-gluster
%changelog
+* Wed Nov 22 2017 Venkata R Edara <redara@redhat.com> - 2.15.1
+- Rebase to Swift 2.15.1 (pike)
+
* Wed May 10 2017 Venkata R Edara <redara@redhat.com> - 2.10.1
- Rebase to Swift 2.10.1 (newton)
diff --git a/requirements.txt b/requirements.txt
index 4537934..e37754c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -9,7 +9,7 @@ netifaces>=0.5,!=0.10.0,!=0.10.1
pastedeploy>=1.3.3
six>=1.9.0
xattr>=0.4
-PyECLib>=1.2.0 # BSD
+PyECLib>=1.3.1 # BSD
# gluster-swift specific requirements
prettytable # needed by gswauth
diff --git a/test-requirements.txt b/test-requirements.txt
index 92aa503..d5a3bc3 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -3,13 +3,13 @@
# process, which may cause wedges in the gate later.
# Hacking already pins down pep8, pyflakes and flake8
-hacking>=0.10.0,<0.11
-coverage
+hacking>=0.11.0,<0.12
+coverage>=3.6
nose
nosexcover
-nosehtmloutput
-os-api-ref>=0.1.0 # Apache-2.0
-os-testr>=0.4.1
-mock>=1.0
+nosehtmloutput>=0.0.3
+os-api-ref>=1.0.0 # Apache-2.0
+os-testr>=0.8.0
+mock>=2.0 # BSD
python-swiftclient
-python-keystoneclient>=1.3.0
+python-keystoneclient!=2.1.0,>=2.0.0
diff --git a/test/__init__.py b/test/__init__.py
index 3bd25b1..1a56597 100644
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -15,7 +15,7 @@
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
-
+from __future__ import print_function
import sys
import os
try:
@@ -33,6 +33,8 @@ except ImportError:
return result
return result[:_MAX_LENGTH] + ' [truncated]...'
+from eventlet.green import socket
+
# make unittests pass on all locale
import swift
setattr(swift, 'gettext_', lambda x: x)
@@ -61,17 +63,26 @@ def get_config(section_name=None, defaults=None):
'/etc/swift/test.conf')
try:
config = readconf(config_file, section_name)
- except SystemExit:
+ except IOError:
if not os.path.exists(config_file):
- print >>sys.stderr, \
- 'Unable to read test config %s - file not found' \
- % config_file
+ print('Unable to read test config %s - file not found'
+ % config_file, file=sys.stderr)
elif not os.access(config_file, os.R_OK):
- print >>sys.stderr, \
- 'Unable to read test config %s - permission denied' \
- % config_file
- else:
- print >>sys.stderr, \
- 'Unable to read test config %s - section %s not found' \
- % (config_file, section_name)
+ print('Unable to read test config %s - permission denied'
+ % config_file, file=sys.stderr)
+ except ValueError as e:
+ print(e)
return config
+
+
+def listen_zero():
+ """
+ The eventlet.listen() always sets SO_REUSEPORT, so when called with
+ ("localhost",0), instead of returning unique ports it can return the
+ same port twice. That causes our tests to fail, so open-code it here
+ without SO_REUSEPORT.
+ """
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.bind(("127.0.0.1", 0))
+ sock.listen(50)
+ return sock
diff --git a/test/functional/__init__.py b/test/functional/__init__.py
index 4d0b71f..3e3c448 100644
--- a/test/functional/__init__.py
+++ b/test/functional/__init__.py
@@ -39,8 +39,9 @@ from six.moves.http_client import HTTPException
from swift.common.middleware.memcache import MemcacheMiddleware
from swift.common.storage_policy import parse_storage_policies, PolicyError
+from swift.common.utils import set_swift_dir
-from test import get_config
+from test import get_config, listen_zero
from test.functional.swift_test_client import Account, Connection, Container, \
ResponseError
# This has the side effect of mocking out the xattr module so that unit tests
@@ -106,9 +107,6 @@ skip, skip2, skip3, skip_service_tokens, skip_if_no_reseller_admin = \
orig_collate = ''
insecure = False
-orig_hash_path_suff_pref = ('', '')
-orig_swift_conf_name = None
-
in_process = False
_testdir = _test_servers = _test_coros = _test_socks = None
policy_specified = None
@@ -259,7 +257,7 @@ def _in_process_setup_ring(swift_conf, conf_src_dir, testdir):
device = 'sd%c1' % chr(len(obj_sockets) + ord('a'))
utils.mkdirs(os.path.join(_testdir, 'sda1'))
utils.mkdirs(os.path.join(_testdir, 'sda1', 'tmp'))
- obj_socket = eventlet.listen(('localhost', 0))
+ obj_socket = listen_zero()
obj_sockets.append(obj_socket)
dev['port'] = obj_socket.getsockname()[1]
dev['ip'] = '127.0.0.1'
@@ -268,16 +266,20 @@ def _in_process_setup_ring(swift_conf, conf_src_dir, testdir):
dev['replication_ip'] = dev['ip']
ring_data.save(ring_file_test)
else:
- # make default test ring, 2 replicas, 4 partitions, 2 devices
- _info('No source object ring file, creating 2rep/4part/2dev ring')
- obj_sockets = [eventlet.listen(('localhost', 0)) for _ in (0, 1)]
- ring_data = ring.RingData(
- [[0, 1, 0, 1], [1, 0, 1, 0]],
- [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
- 'port': obj_sockets[0].getsockname()[1]},
- {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
- 'port': obj_sockets[1].getsockname()[1]}],
- 30)
+ # make default test ring, 3 replicas, 4 partitions, 3 devices
+ # which will work for a replication policy or a 2+1 EC policy
+ _info('No source object ring file, creating 3rep/4part/3dev ring')
+ obj_sockets = [listen_zero() for _ in (0, 1, 2)]
+ replica2part2dev_id = [[0, 1, 2, 0],
+ [1, 2, 0, 1],
+ [2, 0, 1, 2]]
+ devs = [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
+ 'port': obj_sockets[0].getsockname()[1]},
+ {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
+ 'port': obj_sockets[1].getsockname()[1]},
+ {'id': 2, 'zone': 2, 'device': 'sdc1', 'ip': '127.0.0.1',
+ 'port': obj_sockets[2].getsockname()[1]}]
+ ring_data = ring.RingData(replica2part2dev_id, devs, 30)
with closing(GzipFile(ring_file_test, 'wb')) as f:
pickle.dump(ring_data, f)
@@ -287,12 +289,13 @@ def _in_process_setup_ring(swift_conf, conf_src_dir, testdir):
return obj_sockets
-def _load_encryption(proxy_conf_file, **kwargs):
+def _load_encryption(proxy_conf_file, swift_conf_file, **kwargs):
"""
Load encryption configuration and override proxy-server.conf contents.
:param proxy_conf_file: Source proxy conf filename
- :returns: Path to the test proxy conf file to use
+ :param swift_conf_file: Source swift conf filename
+ :returns: Tuple of paths to the proxy conf file and swift conf file to use
:raises InProcessException: raised if proxy conf contents are invalid
"""
_debug('Setting configuration for encryption')
@@ -324,7 +327,43 @@ def _load_encryption(proxy_conf_file, **kwargs):
with open(test_conf_file, 'w') as fp:
conf.write(fp)
- return test_conf_file
+ return test_conf_file, swift_conf_file
+
+
+def _load_ec_as_default_policy(proxy_conf_file, swift_conf_file, **kwargs):
+ """
+ Override swift.conf [storage-policy:0] section to use a 2+1 EC policy.
+
+ :param proxy_conf_file: Source proxy conf filename
+ :param swift_conf_file: Source swift conf filename
+ :returns: Tuple of paths to the proxy conf file and swift conf file to use
+ """
+ _debug('Setting configuration for default EC policy')
+
+ conf = ConfigParser()
+ conf.read(swift_conf_file)
+ # remove existing policy sections that came with swift.conf-sample
+ for section in list(conf.sections()):
+ if section.startswith('storage-policy'):
+ conf.remove_section(section)
+ # add new policy 0 section for an EC policy
+ conf.add_section('storage-policy:0')
+ ec_policy_spec = {
+ 'name': 'ec-test',
+ 'policy_type': 'erasure_coding',
+ 'ec_type': 'liberasurecode_rs_vand',
+ 'ec_num_data_fragments': 2,
+ 'ec_num_parity_fragments': 1,
+ 'ec_object_segment_size': 1048576,
+ 'default': True
+ }
+
+ for k, v in ec_policy_spec.items():
+ conf.set('storage-policy:0', k, str(v))
+
+ with open(swift_conf_file, 'w') as fp:
+ conf.write(fp)
+ return proxy_conf_file, swift_conf_file
# Mapping from possible values of the variable
@@ -333,7 +372,8 @@ def _load_encryption(proxy_conf_file, **kwargs):
# The expected signature for these methods is:
# conf_filename_to_use loader(input_conf_filename, **kwargs)
conf_loaders = {
- 'encryption': _load_encryption
+ 'encryption': _load_encryption,
+ 'ec': _load_ec_as_default_policy
}
@@ -367,6 +407,11 @@ def in_process_setup(the_object_server=object_server):
utils.mkdirs(os.path.join(_testdir, 'sda1', 'tmp'))
utils.mkdirs(os.path.join(_testdir, 'sdb1'))
utils.mkdirs(os.path.join(_testdir, 'sdb1', 'tmp'))
+ utils.mkdirs(os.path.join(_testdir, 'sdc1'))
+ utils.mkdirs(os.path.join(_testdir, 'sdc1', 'tmp'))
+
+ swift_conf = _in_process_setup_swift_conf(swift_conf_src, _testdir)
+ _info('prepared swift.conf: %s' % swift_conf)
# Call the associated method for the value of
# 'SWIFT_TEST_IN_PROCESS_CONF_LOADER', if one exists
@@ -382,21 +427,20 @@ def in_process_setup(the_object_server=object_server):
missing_key)
try:
- # Pass-in proxy_conf
- proxy_conf = conf_loader(proxy_conf)
+ # Pass-in proxy_conf, swift_conf files
+ proxy_conf, swift_conf = conf_loader(proxy_conf, swift_conf)
_debug('Now using proxy conf %s' % proxy_conf)
+ _debug('Now using swift conf %s' % swift_conf)
except Exception as err: # noqa
raise InProcessException(err)
- swift_conf = _in_process_setup_swift_conf(swift_conf_src, _testdir)
obj_sockets = _in_process_setup_ring(swift_conf, conf_src_dir, _testdir)
- global orig_swift_conf_name
- orig_swift_conf_name = utils.SWIFT_CONF_FILE
- utils.SWIFT_CONF_FILE = swift_conf
- constraints.reload_constraints()
- storage_policy.SWIFT_CONF_FILE = swift_conf
- storage_policy.reload_storage_policies()
+ # load new swift.conf file
+ if set_swift_dir(os.path.dirname(swift_conf)):
+ constraints.reload_constraints()
+ storage_policy.reload_storage_policies()
+
global config
if constraints.SWIFT_CONSTRAINTS_LOADED:
# Use the swift constraints that are loaded for the test framework
@@ -407,16 +451,13 @@ def in_process_setup(the_object_server=object_server):
else:
# In-process swift constraints were not loaded, somethings wrong
raise SkipTest
- global orig_hash_path_suff_pref
- orig_hash_path_suff_pref = utils.HASH_PATH_PREFIX, utils.HASH_PATH_SUFFIX
- utils.validate_hash_conf()
global _test_socks
_test_socks = []
# We create the proxy server listening socket to get its port number so
# that we can add it as the "auth_port" value for the functional test
# clients.
- prolis = eventlet.listen(('localhost', 0))
+ prolis = listen_zero()
_test_socks.append(prolis)
# The following set of configuration values is used both for the
@@ -431,6 +472,7 @@ def in_process_setup(the_object_server=object_server):
'allow_account_management': 'true',
'account_autocreate': 'true',
'allow_versions': 'True',
+ 'allow_versioned_writes': 'True',
# Below are values used by the functional test framework, as well as
# by the various in-process swift servers
'auth_host': '127.0.0.1',
@@ -472,10 +514,10 @@ def in_process_setup(the_object_server=object_server):
config['object_post_as_copy'] = str(object_post_as_copy)
_debug('Setting object_post_as_copy to %r' % object_post_as_copy)
- acc1lis = eventlet.listen(('localhost', 0))
- acc2lis = eventlet.listen(('localhost', 0))
- con1lis = eventlet.listen(('localhost', 0))
- con2lis = eventlet.listen(('localhost', 0))
+ acc1lis = listen_zero()
+ acc2lis = listen_zero()
+ con1lis = listen_zero()
+ con2lis = listen_zero()
_test_socks += [acc1lis, acc2lis, con1lis, con2lis] + obj_sockets
account_ring_path = os.path.join(_testdir, 'account.ring.gz')
@@ -569,7 +611,8 @@ def in_process_setup(the_object_server=object_server):
node['ip'], node['port'], node['device'], partition, 'PUT',
'/' + act, {'X-Timestamp': ts, 'x-trans-id': act})
resp = conn.getresponse()
- assert(resp.status == 201)
+ assert resp.status == 201, 'Unable to create account: %s\n%s' % (
+ resp.status, resp.body)
create_account('AUTH_test')
create_account('AUTH_test2')
@@ -870,10 +913,7 @@ def teardown_package():
rmtree(os.path.dirname(_testdir))
except Exception:
pass
- utils.HASH_PATH_PREFIX, utils.HASH_PATH_SUFFIX = \
- orig_hash_path_suff_pref
- utils.SWIFT_CONF_FILE = orig_swift_conf_name
- constraints.reload_constraints()
+
reset_globals()
diff --git a/test/functional/swift_test_client.py b/test/functional/swift_test_client.py
index d98af92..da093dd 100644
--- a/test/functional/swift_test_client.py
+++ b/test/functional/swift_test_client.py
@@ -366,26 +366,39 @@ class Base(object):
if optional_fields is None:
optional_fields = ()
+ def is_int_header(header):
+ if header.startswith('x-account-storage-policy-') and \
+ header.endswith(('-bytes-used', '-object-count')):
+ return True
+ return header in (
+ 'content-length',
+ 'x-account-container-count',
+ 'x-account-object-count',
+ 'x-account-bytes-used',
+ 'x-container-object-count',
+ 'x-container-bytes-used',
+ )
+
headers = dict(self.conn.response.getheaders())
ret = {}
- for field in required_fields:
- if field[1] not in headers:
+ for return_key, header in required_fields:
+ if header not in headers:
raise ValueError("%s was not found in response header" %
- (field[1]))
+ (header,))
- try:
- ret[field[0]] = int(headers[field[1]])
- except ValueError:
- ret[field[0]] = headers[field[1]]
+ if is_int_header(header):
+ ret[return_key] = int(headers[header])
+ else:
+ ret[return_key] = headers[header]
- for field in optional_fields:
- if field[1] not in headers:
+ for return_key, header in optional_fields:
+ if header not in headers:
continue
- try:
- ret[field[0]] = int(headers[field[1]])
- except ValueError:
- ret[field[0]] = headers[field[1]]
+ if is_int_header(header):
+ ret[return_key] = int(headers[header])
+ else:
+ ret[return_key] = headers[header]
return ret
@@ -425,6 +438,7 @@ class Account(Base):
raise RequestError('Invalid format: %s' % format_type)
if format_type is None and 'format' in parms:
del parms['format']
+
status = self.conn.make_request('GET', self.path, hdrs=hdrs,
parms=parms, cfg=cfg)
if status == 200:
@@ -435,10 +449,11 @@ class Account(Base):
return conts
elif format_type == 'xml':
conts = []
- tree = minidom.parseString(self.conn.response.read())
+ respString = self.conn.response.read()
+ tree = minidom.parseString(respString)
for x in tree.getElementsByTagName('container'):
cont = {}
- for key in ['name', 'count', 'bytes']:
+ for key in ['name', 'count', 'bytes', 'last_modified']:
cont[key] = x.getElementsByTagName(key)[0].\
childNodes[0].nodeValue
conts.append(cont)
@@ -481,8 +496,11 @@ class Account(Base):
fields = [['object_count', 'x-account-object-count'],
['container_count', 'x-account-container-count'],
['bytes_used', 'x-account-bytes-used']]
+ optional_fields = [
+ ['temp-url-key', 'x-account-meta-temp-url-key'],
+ ['temp-url-key-2', 'x-account-meta-temp-url-key-2']]
- return self.header_fields(fields)
+ return self.header_fields(fields, optional_fields=optional_fields)
@property
def path(self):
@@ -556,6 +574,7 @@ class Container(Base):
raise RequestError('Invalid format: %s' % format_type)
if format_type is None and 'format' in parms:
del parms['format']
+
status = self.conn.make_request('GET', self.path, hdrs=hdrs,
parms=parms, cfg=cfg)
if status == 200:
@@ -626,7 +645,12 @@ class Container(Base):
['object_count', 'x-container-object-count'],
['last_modified', 'last-modified']]
optional_fields = [
+            # N.B. Swift never returns both x-versions-location and
+            # x-history-location in the same response, so it is safe to
+            # map both headers onto the single "versions" key; its
+            # presence means versioning is enabled.
['versions', 'x-versions-location'],
+ ['versions', 'x-history-location'],
['tempurl_key', 'x-container-meta-temp-url-key'],
['tempurl_key2', 'x-container-meta-temp-url-key-2']]
@@ -677,6 +701,7 @@ class File(Base):
if cfg.get('x_delete_at'):
headers['X-Delete-At'] = cfg.get('x_delete_at')
+
if cfg.get('x_delete_after'):
headers['X-Delete-After'] = cfg.get('x_delete_after')
@@ -718,8 +743,11 @@ class File(Base):
if 'Destination' in headers:
headers['Destination'] = urllib.parse.quote(headers['Destination'])
- return self.conn.make_request('COPY', self.path, hdrs=headers,
- parms=parms) == 201
+ if self.conn.make_request('COPY', self.path, hdrs=headers,
+ cfg=cfg, parms=parms) != 201:
+ raise ResponseError(self.conn.response, 'COPY',
+ self.conn.make_path(self.path))
+ return True
def copy_account(self, dest_account, dest_cont, dest_file,
hdrs=None, parms=None, cfg=None):
@@ -744,8 +772,11 @@ class File(Base):
if 'Destination' in headers:
headers['Destination'] = urllib.parse.quote(headers['Destination'])
- return self.conn.make_request('COPY', self.path, hdrs=headers,
- parms=parms) == 201
+ if self.conn.make_request('COPY', self.path, hdrs=headers,
+ cfg=cfg, parms=parms) != 201:
+ raise ResponseError(self.conn.response, 'COPY',
+ self.conn.make_path(self.path))
+ return True
def delete(self, hdrs=None, parms=None, cfg=None):
if hdrs is None:
@@ -781,6 +812,7 @@ class File(Base):
['x_delete_at', 'x-delete-at'],
['x_delete_after', 'x-delete-after']]
+
header_fields = self.header_fields(fields,
optional_fields=optional_fields)
header_fields['etag'] = header_fields['etag'].strip('"')
@@ -899,12 +931,10 @@ class File(Base):
fobj.close()
def sync_metadata(self, metadata=None, cfg=None, parms=None):
- if metadata is None:
- metadata = {}
if cfg is None:
cfg = {}
- self.metadata.update(metadata)
+ self.metadata = self.metadata if metadata is None else metadata
if self.metadata:
headers = self.make_headers(cfg=cfg)
@@ -914,6 +944,7 @@ class File(Base):
cfg.get('set_content_length')
else:
headers['Content-Length'] = 0
+
self.conn.make_request('POST', self.path, hdrs=headers,
parms=parms, cfg=cfg)
diff --git a/test/functional/test_account.py b/test/functional/test_account.py
index 57bbe6b..cc781cc 100755
--- a/test/functional/test_account.py
+++ b/test/functional/test_account.py
@@ -38,42 +38,65 @@ def tearDownModule():
class TestAccount(unittest2.TestCase):
+ existing_metadata = None
- def setUp(self):
- self.max_meta_count = load_constraint('max_meta_count')
- self.max_meta_name_length = load_constraint('max_meta_name_length')
- self.max_meta_overall_size = load_constraint('max_meta_overall_size')
- self.max_meta_value_length = load_constraint('max_meta_value_length')
-
- def head(url, token, parsed, conn):
- conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
- return check_response(conn)
- resp = retry(head)
- self.existing_metadata = set([
- k for k, v in resp.getheaders() if
- k.lower().startswith('x-account-meta')])
-
- def tearDown(self):
+ @classmethod
+ def get_meta(cls):
def head(url, token, parsed, conn):
conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(head)
resp.read()
- new_metadata = set(
- [k for k, v in resp.getheaders() if
- k.lower().startswith('x-account-meta')])
+ return dict((k, v) for k, v in resp.getheaders() if
+ k.lower().startswith('x-account-meta'))
- def clear_meta(url, token, parsed, conn, remove_metadata_keys):
+ @classmethod
+ def clear_meta(cls, remove_metadata_keys):
+ def post(url, token, parsed, conn, hdr_keys):
headers = {'X-Auth-Token': token}
- headers.update((k, '') for k in remove_metadata_keys)
+ headers.update((k, '') for k in hdr_keys)
conn.request('POST', parsed.path, '', headers)
return check_response(conn)
- extra_metadata = list(self.existing_metadata ^ new_metadata)
- for i in range(0, len(extra_metadata), 90):
- batch = extra_metadata[i:i + 90]
- resp = retry(clear_meta, batch)
+
+ for i in range(0, len(remove_metadata_keys), 90):
+ batch = remove_metadata_keys[i:i + 90]
+ resp = retry(post, batch)
resp.read()
- self.assertEqual(resp.status // 100, 2)
+
+ @classmethod
+ def set_meta(cls, metadata):
+ def post(url, token, parsed, conn, meta_hdrs):
+ headers = {'X-Auth-Token': token}
+ headers.update(meta_hdrs)
+ conn.request('POST', parsed.path, '', headers)
+ return check_response(conn)
+
+ if not metadata:
+ return
+ resp = retry(post, metadata)
+ resp.read()
+
+ @classmethod
+ def setUpClass(cls):
+ # remove and stash any existing account user metadata before tests
+ cls.existing_metadata = cls.get_meta()
+ cls.clear_meta(cls.existing_metadata.keys())
+
+ @classmethod
+ def tearDownClass(cls):
+ # replace any stashed account user metadata
+ cls.set_meta(cls.existing_metadata)
+
+ def setUp(self):
+ self.max_meta_count = load_constraint('max_meta_count')
+ self.max_meta_name_length = load_constraint('max_meta_name_length')
+ self.max_meta_overall_size = load_constraint('max_meta_overall_size')
+ self.max_meta_value_length = load_constraint('max_meta_value_length')
+
+ def tearDown(self):
+ # clean up any account user metadata created by test
+ new_metadata = self.get_meta().keys()
+ self.clear_meta(new_metadata)
def test_metadata(self):
if tf.skip:
@@ -794,11 +817,6 @@ class TestAccount(unittest2.TestCase):
conn.request('POST', parsed.path, '', headers)
return check_response(conn)
- # TODO: Find the test that adds these and remove them.
- headers = {'x-remove-account-meta-temp-url-key': 'remove',
- 'x-remove-account-meta-temp-url-key-2': 'remove'}
- resp = retry(post, headers)
-
headers = {}
for x in range(self.max_meta_count):
headers['X-Account-Meta-%d' % x] = 'v'
diff --git a/test/functional/test_container.py b/test/functional/test_container.py
index 5abaaa5..fefa35a 100755
--- a/test/functional/test_container.py
+++ b/test/functional/test_container.py
@@ -627,7 +627,7 @@ class TestContainer(unittest2.TestCase):
resp.read()
self.assertEqual(resp.status, 403)
- # Now make the container also writeable by the second account
+ # Now make the container also writable by the second account
def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token,
@@ -696,7 +696,7 @@ class TestContainer(unittest2.TestCase):
resp.read()
self.assertEqual(resp.status, 403)
- # Now make the container also writeable by the third account
+ # Now make the container also writable by the third account
def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token,
@@ -732,7 +732,6 @@ class TestContainer(unittest2.TestCase):
def put(url, token, parsed, conn, name):
conn.request('PUT', parsed.path + '/%s' % name, '',
{'X-Auth-Token': token})
- print("PUT X-Auth-Token:%s"%(token))
return check_response(conn)
# cannot list containers
diff --git a/test/functional/test_object.py b/test/functional/test_object.py
index f23ccbc..f6c8266 100755
--- a/test/functional/test_object.py
+++ b/test/functional/test_object.py
@@ -273,6 +273,12 @@ class TestObject(unittest2.TestCase):
})
def test_if_none_match(self):
+ def delete(url, token, parsed, conn):
+ conn.request('DELETE', '%s/%s/%s' % (
+ parsed.path, self.container, 'if_none_match_test'), '',
+ {'X-Auth-Token': token})
+ return check_response(conn)
+
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (
parsed.path, self.container, 'if_none_match_test'), '',
@@ -287,6 +293,13 @@ class TestObject(unittest2.TestCase):
resp.read()
self.assertEqual(resp.status, 412)
+ resp = retry(delete)
+ resp.read()
+ self.assertEqual(resp.status, 204)
+ resp = retry(put)
+ resp.read()
+ self.assertEqual(resp.status, 201)
+
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (
parsed.path, self.container, 'if_none_match_test'), '',
diff --git a/test/functional/tests.py b/test/functional/tests.py
index 2412147..be35c67 100644
--- a/test/functional/tests.py
+++ b/test/functional/tests.py
@@ -17,14 +17,10 @@
from datetime import datetime
import email.parser
import hashlib
-import hmac
-import itertools
-import json
import locale
import random
import six
from six.moves import urllib
-import os
import time
import unittest2
import uuid
@@ -32,12 +28,16 @@ from copy import deepcopy
import eventlet
from unittest2 import SkipTest
from swift.common.http import is_success, is_client_error
+from email.utils import parsedate
+import os
+import mock
from test.functional import normalized_urls, load_constraint, cluster_info
-from test.functional import check_response, retry, requires_acls
+from test.functional import check_response, retry
import test.functional as tf
from test.functional.swift_test_client import Account, Connection, File, \
ResponseError
+
from gluster.swift.common.constraints import \
set_object_name_component_length, get_object_name_component_length
@@ -100,12 +100,36 @@ class Utils(object):
create_name = create_ascii_name
+class BaseEnv(object):
+ account = conn = None
+
+ @classmethod
+ def setUp(cls):
+ cls.conn = Connection(tf.config)
+ cls.conn.authenticate()
+ cls.account = Account(cls.conn, tf.config.get('account',
+ tf.config['username']))
+ cls.account.delete_containers()
+
+ @classmethod
+ def tearDown(cls):
+ pass
+
+
class Base(unittest2.TestCase):
- def setUp(self):
- cls = type(self)
- if not cls.set_up:
- cls.env.setUp()
- cls.set_up = True
+ # subclasses may override env class
+ env = BaseEnv
+
+ @classmethod
+ def setUpClass(cls):
+ cls.env.setUp()
+
+ @classmethod
+ def tearDownClass(cls):
+ try:
+ cls.env.tearDown()
+ except AttributeError:
+ pass
def assert_body(self, body):
response_body = self.env.conn.response.read()
@@ -138,15 +162,10 @@ class Base2(object):
Utils.create_name = Utils.create_ascii_name
-class TestAccountEnv(object):
+class TestAccountEnv(BaseEnv):
@classmethod
def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
- cls.account.delete_containers()
-
+ super(TestAccountEnv, cls).setUp()
cls.containers = []
for i in range(10):
cont = cls.account.container(Utils.create_name())
@@ -158,16 +177,14 @@ class TestAccountEnv(object):
class TestAccountDev(Base):
env = TestAccountEnv
- set_up = False
class TestAccountDevUTF8(Base2, TestAccountDev):
- set_up = False
+ pass
class TestAccount(Base):
env = TestAccountEnv
- set_up = False
def testNoAuthToken(self):
self.assertRaises(ResponseError, self.env.account.info,
@@ -279,6 +296,72 @@ class TestAccount(Base):
self.assertEqual(a, b)
+ def testListDelimiter(self):
+ delimiter = '-'
+ containers = ['test', delimiter.join(['test', 'bar']),
+ delimiter.join(['test', 'foo'])]
+ for c in containers:
+ cont = self.env.account.container(c)
+ self.assertTrue(cont.create())
+
+ results = self.env.account.containers(parms={'delimiter': delimiter})
+ expected = ['test', 'test-']
+ results = [r for r in results if r in expected]
+ self.assertEqual(expected, results)
+
+ results = self.env.account.containers(parms={'delimiter': delimiter,
+ 'reverse': 'yes'})
+ expected.reverse()
+ results = [r for r in results if r in expected]
+ self.assertEqual(expected, results)
+
+ def testListDelimiterAndPrefix(self):
+ delimiter = 'a'
+ containers = ['bar', 'bazar']
+ for c in containers:
+ cont = self.env.account.container(c)
+ self.assertTrue(cont.create())
+
+ results = self.env.account.containers(parms={'delimiter': delimiter,
+ 'prefix': 'ba'})
+ expected = ['bar', 'baza']
+ results = [r for r in results if r in expected]
+ self.assertEqual(expected, results)
+
+ results = self.env.account.containers(parms={'delimiter': delimiter,
+ 'prefix': 'ba',
+ 'reverse': 'yes'})
+ expected.reverse()
+ results = [r for r in results if r in expected]
+ self.assertEqual(expected, results)
+
+ def testContainerListingLastModified(self):
+ expected = {}
+ for container in self.env.containers:
+ res = container.info()
+ expected[container.name] = time.mktime(
+ parsedate(res['last_modified']))
+ for format_type in ['json', 'xml']:
+ actual = {}
+ containers = self.env.account.containers(
+ parms={'format': format_type})
+ if isinstance(containers[0], dict):
+ for container in containers:
+ self.assertIn('name', container) # sanity
+ self.assertIn('last_modified', container) # sanity
+ # ceil by hand (wants easier way!)
+ datetime_str, micro_sec_str = \
+ container['last_modified'].split('.')
+
+ timestamp = time.mktime(
+ time.strptime(datetime_str,
+ "%Y-%m-%dT%H:%M:%S"))
+ if int(micro_sec_str):
+ timestamp += 1
+ actual[container['name']] = timestamp
+
+ self.assertEqual(expected, actual)
+
def testInvalidAuthToken(self):
hdrs = {'X-Auth-Token': 'bogus_auth_token'}
self.assertRaises(ResponseError, self.env.account.info, hdrs=hdrs)
@@ -346,23 +429,10 @@ class TestAccount(Base):
class TestAccountUTF8(Base2, TestAccount):
- set_up = False
-
-
-class TestAccountNoContainersEnv(object):
- @classmethod
- def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
- cls.account.delete_containers()
+ pass
class TestAccountNoContainers(Base):
- env = TestAccountNoContainersEnv
- set_up = False
-
def testGetRequest(self):
for format_type in [None, 'json', 'xml']:
self.assertFalse(self.env.account.containers(
@@ -375,18 +445,13 @@ class TestAccountNoContainers(Base):
class TestAccountNoContainersUTF8(Base2, TestAccountNoContainers):
- set_up = False
+ pass
-class TestAccountSortingEnv(object):
+class TestAccountSortingEnv(BaseEnv):
@classmethod
def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
- cls.account.delete_containers()
-
+ super(TestAccountSortingEnv, cls).setUp()
postfix = Utils.create_name()
cls.cont_items = ('a1', 'a2', 'A3', 'b1', 'B2', 'a10', 'b10', 'zz')
cls.cont_items = ['%s%s' % (x, postfix) for x in cls.cont_items]
@@ -399,7 +464,6 @@ class TestAccountSortingEnv(object):
class TestAccountSorting(Base):
env = TestAccountSortingEnv
- set_up = False
def testAccountContainerListSorting(self):
# name (byte order) sorting.
@@ -464,15 +528,10 @@ class TestAccountSorting(Base):
self.assertEqual([], cont_listing)
-class TestContainerEnv(object):
+class TestContainerEnv(BaseEnv):
@classmethod
def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
- cls.account.delete_containers()
-
+ super(TestContainerEnv, cls).setUp()
cls.container = cls.account.container(Utils.create_name())
if not cls.container.create():
raise ResponseError(cls.conn.response)
@@ -488,16 +547,14 @@ class TestContainerEnv(object):
class TestContainerDev(Base):
env = TestContainerEnv
- set_up = False
class TestContainerDevUTF8(Base2, TestContainerDev):
- set_up = False
+ pass
class TestContainer(Base):
env = TestContainerEnv
- set_up = False
def testContainerNameLimit(self):
limit = load_constraint('max_container_name_length')
@@ -845,7 +902,6 @@ class TestContainer(Base):
file_item.write_random()
def testContainerLastModified(self):
- raise SkipTest("NA")
container = self.env.account.container(Utils.create_name())
self.assertTrue(container.create())
info = container.info()
@@ -886,18 +942,13 @@ class TestContainer(Base):
class TestContainerUTF8(Base2, TestContainer):
- set_up = False
+ pass
-class TestContainerSortingEnv(object):
+class TestContainerSortingEnv(BaseEnv):
@classmethod
def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
- cls.account.delete_containers()
-
+ super(TestContainerSortingEnv, cls).setUp()
cls.container = cls.account.container(Utils.create_name())
if not cls.container.create():
raise ResponseError(cls.conn.response)
@@ -913,7 +964,6 @@ class TestContainerSortingEnv(object):
class TestContainerSorting(Base):
env = TestContainerSortingEnv
- set_up = False
def testContainerFileListSortingReversed(self):
file_list = list(sorted(self.env.file_items))
@@ -1001,16 +1051,10 @@ class TestContainerSorting(Base):
self.assertEqual(file_list, cont_files)
-class TestContainerPathsEnv(object):
+class TestContainerPathsEnv(BaseEnv):
@classmethod
def setUp(cls):
- raise SkipTest('Objects ending in / are not supported')
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
- cls.account.delete_containers()
-
+ super(TestContainerPathsEnv, cls).setUp()
cls.file_size = 8
cls.container = cls.account.container(Utils.create_name())
@@ -1018,44 +1062,18 @@ class TestContainerPathsEnv(object):
raise ResponseError(cls.conn.response)
cls.files = [
- '/file1',
- '/file A',
- '/dir1/',
- '/dir2/',
- '/dir1/file2',
- '/dir1/subdir1/',
- '/dir1/subdir2/',
- '/dir1/subdir1/file2',
- '/dir1/subdir1/file3',
- '/dir1/subdir1/file4',
- '/dir1/subdir1/subsubdir1/',
- '/dir1/subdir1/subsubdir1/file5',
- '/dir1/subdir1/subsubdir1/file6',
- '/dir1/subdir1/subsubdir1/file7',
- '/dir1/subdir1/subsubdir1/file8',
- '/dir1/subdir1/subsubdir2/',
- '/dir1/subdir1/subsubdir2/file9',
- '/dir1/subdir1/subsubdir2/file0',
'file1',
- 'dir1/',
- 'dir2/',
'dir1/file2',
- 'dir1/subdir1/',
- 'dir1/subdir2/',
'dir1/subdir1/file2',
'dir1/subdir1/file3',
'dir1/subdir1/file4',
- 'dir1/subdir1/subsubdir1/',
'dir1/subdir1/subsubdir1/file5',
'dir1/subdir1/subsubdir1/file6',
'dir1/subdir1/subsubdir1/file7',
'dir1/subdir1/subsubdir1/file8',
- 'dir1/subdir1/subsubdir2/',
'dir1/subdir1/subsubdir2/file9',
'dir1/subdir1/subsubdir2/file0',
- 'dir1/subdir with spaces/',
'dir1/subdir with spaces/file B',
- 'dir1/subdir+with{whatever/',
'dir1/subdir+with{whatever/file D',
]
@@ -1080,9 +1098,9 @@ class TestContainerPathsEnv(object):
class TestContainerPaths(Base):
env = TestContainerPathsEnv
- set_up = False
def testTraverseContainer(self):
+ raise SkipTest("No support for Objects having //")
found_files = []
found_dirs = []
@@ -1125,6 +1143,7 @@ class TestContainerPaths(Base):
self.assertNotIn(file_item, found_dirs)
def testContainerListing(self):
+ raise SkipTest("No support for Objects having //")
for format_type in (None, 'json', 'xml'):
files = self.env.container.files(parms={'format': format_type})
@@ -1143,6 +1162,7 @@ class TestContainerPaths(Base):
'application/directory')
def testStructure(self):
+ raise SkipTest("No support for Objects having //")
def assert_listing(path, file_list):
files = self.env.container.files(parms={'path': path})
self.assertEqual(sorted(file_list, cmp=locale.strcoll), files)
@@ -1181,13 +1201,10 @@ class TestContainerPaths(Base):
['dir1/subdir with spaces/file B'])
-class TestFileEnv(object):
+class TestFileEnv(BaseEnv):
@classmethod
def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
+ super(TestFileEnv, cls).setUp()
# creating another account and connection
# for account to account copy tests
config2 = deepcopy(tf.config)
@@ -1197,9 +1214,6 @@ class TestFileEnv(object):
cls.conn2 = Connection(config2)
cls.conn2.authenticate()
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
- cls.account.delete_containers()
cls.account2 = cls.conn2.get_account()
cls.account2.delete_containers()
@@ -1221,16 +1235,68 @@ class TestFileEnv(object):
class TestFileDev(Base):
env = TestFileEnv
- set_up = False
class TestFileDevUTF8(Base2, TestFileDev):
- set_up = False
+ pass
class TestFile(Base):
env = TestFileEnv
- set_up = False
+
+ def testGetResponseHeaders(self):
+ obj_data = 'test_body'
+
+ def do_test(put_hdrs, get_hdrs, expected_hdrs, unexpected_hdrs):
+ filename = Utils.create_name()
+ file_item = self.env.container.file(filename)
+ resp = file_item.write(
+ data=obj_data, hdrs=put_hdrs, return_resp=True)
+
+ # put then get an object
+ resp.read()
+ read_data = file_item.read(hdrs=get_hdrs)
+ self.assertEqual(obj_data, read_data) # sanity check
+ resp_headers = file_item.conn.response.getheaders()
+
+ # check the *list* of all header (name, value) pairs rather than
+ # constructing a dict in case of repeated names in the list
+ errors = []
+ for k, v in resp_headers:
+ if k.lower() in unexpected_hdrs:
+ errors.append('Found unexpected header %s: %s' % (k, v))
+ for k, v in expected_hdrs.items():
+ matches = [hdr for hdr in resp_headers if hdr[0] == k]
+ if not matches:
+ errors.append('Missing expected header %s' % k)
+ for (got_k, got_v) in matches:
+ if got_v != v:
+ errors.append('Expected %s but got %s for %s' %
+ (v, got_v, k))
+ if errors:
+ self.fail(
+ 'Errors in response headers:\n %s' % '\n '.join(errors))
+
+ put_headers = {'X-Object-Meta-Fruit': 'Banana',
+ 'X-Delete-After': '10000',
+ 'Content-Type': 'application/test'}
+ expected_headers = {'content-length': str(len(obj_data)),
+ 'x-object-meta-fruit': 'Banana',
+ 'accept-ranges': 'bytes',
+ 'content-type': 'application/test',
+ 'etag': hashlib.md5(obj_data).hexdigest(),
+ 'last-modified': mock.ANY,
+ 'date': mock.ANY,
+ 'x-delete-at': mock.ANY,
+ 'x-trans-id': mock.ANY,
+ 'x-openstack-request-id': mock.ANY}
+ unexpected_headers = ['connection', 'x-delete-after']
+ do_test(put_headers, {}, expected_headers, unexpected_headers)
+
+ get_headers = {'Connection': 'keep-alive'}
+ expected_headers['connection'] = 'keep-alive'
+ unexpected_headers = ['x-delete-after']
+ do_test(put_headers, get_headers, expected_headers, unexpected_headers)
def testCopy(self):
# makes sure to test encoded characters
@@ -1480,31 +1546,33 @@ class TestFile(Base):
# invalid source container
source_cont = self.env.account.container(Utils.create_name())
file_item = source_cont.file(source_filename)
- self.assertFalse(file_item.copy(
- '%s%s' % (prefix, self.env.container),
- Utils.create_name()))
+ self.assertRaises(ResponseError, file_item.copy,
+ '%s%s' % (prefix, self.env.container),
+ Utils.create_name())
self.assert_status(404)
- self.assertFalse(file_item.copy('%s%s' % (prefix, dest_cont),
- Utils.create_name()))
+ self.assertRaises(ResponseError, file_item.copy,
+ '%s%s' % (prefix, dest_cont),
+ Utils.create_name())
self.assert_status(404)
# invalid source object
file_item = self.env.container.file(Utils.create_name())
- self.assertFalse(file_item.copy(
- '%s%s' % (prefix, self.env.container),
- Utils.create_name()))
+ self.assertRaises(ResponseError, file_item.copy,
+ '%s%s' % (prefix, self.env.container),
+ Utils.create_name())
self.assert_status(404)
- self.assertFalse(file_item.copy('%s%s' % (prefix, dest_cont),
- Utils.create_name()))
+ self.assertRaises(ResponseError, file_item.copy,
+ '%s%s' % (prefix, dest_cont),
+ Utils.create_name())
self.assert_status(404)
# invalid destination container
file_item = self.env.container.file(source_filename)
- self.assertFalse(file_item.copy(
- '%s%s' % (prefix, Utils.create_name()),
- Utils.create_name()))
+ self.assertRaises(ResponseError, file_item.copy,
+ '%s%s' % (prefix, Utils.create_name()),
+ Utils.create_name())
def testCopyAccount404s(self):
acct = self.env.conn.account_name
@@ -1528,44 +1596,44 @@ class TestFile(Base):
# invalid source container
source_cont = self.env.account.container(Utils.create_name())
file_item = source_cont.file(source_filename)
- self.assertFalse(file_item.copy_account(
- acct,
- '%s%s' % (prefix, self.env.container),
- Utils.create_name()))
+ self.assertRaises(ResponseError, file_item.copy_account,
+ acct,
+ '%s%s' % (prefix, self.env.container),
+ Utils.create_name())
# there is no such source container but user has
# permissions to do a GET (done internally via COPY) for
# objects in his own account.
self.assert_status(404)
- self.assertFalse(file_item.copy_account(
- acct,
- '%s%s' % (prefix, cont),
- Utils.create_name()))
+ self.assertRaises(ResponseError, file_item.copy_account,
+ acct,
+ '%s%s' % (prefix, cont),
+ Utils.create_name())
self.assert_status(404)
# invalid source object
file_item = self.env.container.file(Utils.create_name())
- self.assertFalse(file_item.copy_account(
- acct,
- '%s%s' % (prefix, self.env.container),
- Utils.create_name()))
+ self.assertRaises(ResponseError, file_item.copy_account,
+ acct,
+ '%s%s' % (prefix, self.env.container),
+ Utils.create_name())
# there is no such source container but user has
# permissions to do a GET (done internally via COPY) for
# objects in his own account.
self.assert_status(404)
- self.assertFalse(file_item.copy_account(
- acct,
- '%s%s' % (prefix, cont),
- Utils.create_name()))
+ self.assertRaises(ResponseError, file_item.copy_account,
+ acct,
+ '%s%s' % (prefix, cont),
+ Utils.create_name())
self.assert_status(404)
# invalid destination container
file_item = self.env.container.file(source_filename)
- self.assertFalse(file_item.copy_account(
- acct,
- '%s%s' % (prefix, Utils.create_name()),
- Utils.create_name()))
+ self.assertRaises(ResponseError, file_item.copy_account,
+ acct,
+ '%s%s' % (prefix, Utils.create_name()),
+ Utils.create_name())
if acct == acct2:
# there is no such destination container
# and foreign user can have no permission to write there
@@ -1579,9 +1647,9 @@ class TestFile(Base):
file_item.write_random()
file_item = self.env.container.file(source_filename)
- self.assertFalse(file_item.copy(Utils.create_name(),
- Utils.create_name(),
- cfg={'no_destination': True}))
+ self.assertRaises(ResponseError, file_item.copy, Utils.create_name(),
+ Utils.create_name(),
+ cfg={'no_destination': True})
self.assert_status(412)
def testCopyDestinationSlashProblems(self):
@@ -1590,9 +1658,15 @@ class TestFile(Base):
file_item.write_random()
# no slash
- self.assertFalse(file_item.copy(Utils.create_name(),
- Utils.create_name(),
- cfg={'destination': Utils.create_name()}))
+ self.assertRaises(ResponseError, file_item.copy, Utils.create_name(),
+ Utils.create_name(),
+ cfg={'destination': Utils.create_name()})
+ self.assert_status(412)
+
+ # too many slashes
+ self.assertRaises(ResponseError, file_item.copy, Utils.create_name(),
+ Utils.create_name(),
+ cfg={'destination': '//%s' % Utils.create_name()})
self.assert_status(412)
def testCopyFromHeader(self):
@@ -1799,8 +1873,6 @@ class TestFile(Base):
def testMetadataNumberLimit(self):
raise SkipTest("Bad test")
- # TODO(ppai): Fix it in upstream swift first
- # Refer to comments below
number_limit = load_constraint('max_meta_count')
size_limit = load_constraint('max_meta_overall_size')
@@ -1812,13 +1884,10 @@ class TestFile(Base):
metadata = {}
while len(metadata.keys()) < i:
key = Utils.create_ascii_name()
- # The following line returns a valid utf8 byte sequence
val = Utils.create_name()
if len(key) > j:
key = key[:j]
- # This slicing done below can make the 'utf8' byte
- # sequence invalid and hence it cannot be decoded
val = val[:j]
metadata[key] = val
@@ -1886,8 +1955,11 @@ class TestFile(Base):
# Otherwise, the byte-range-set is unsatisfiable.
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(416)
+ self.assert_header('content-range', 'bytes */%d' % file_length)
else:
self.assertEqual(file_item.read(hdrs=hdrs), data[-i:])
+ self.assert_header('content-range', 'bytes %d-%d/%d' % (
+ file_length - i, file_length - 1, file_length))
self.assert_header('etag', file_item.md5)
self.assert_header('accept-ranges', 'bytes')
@@ -1901,6 +1973,7 @@ class TestFile(Base):
hdrs = {'Range': range_string}
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(416)
+ self.assert_header('content-range', 'bytes */%d' % file_length)
self.assert_header('etag', file_item.md5)
self.assert_header('accept-ranges', 'bytes')
@@ -2065,6 +2138,7 @@ class TestFile(Base):
self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
self.assert_status(416)
+ self.assert_header('content-range', 'bytes */%d' % file_length)
def testRangedGetsWithLWSinHeader(self):
file_length = 10000
@@ -2447,7 +2521,6 @@ class TestFile(Base):
self.assertEqual(etag, info['etag'])
def test_POST(self):
- raise SkipTest("Gluster preserves orig sys metadata - invalid test")
# verify consistency between object and container listing metadata
file_name = Utils.create_name()
file_item = self.env.container.file(file_name)
@@ -2499,386 +2572,13 @@ class TestFile(Base):
class TestFileUTF8(Base2, TestFile):
- set_up = False
-
-
-class TestDloEnv(object):
- @classmethod
- def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
-
- config2 = tf.config.copy()
- config2['username'] = tf.config['username3']
- config2['password'] = tf.config['password3']
- cls.conn2 = Connection(config2)
- cls.conn2.authenticate()
-
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
- cls.account.delete_containers()
-
- cls.container = cls.account.container(Utils.create_name())
- cls.container2 = cls.account.container(Utils.create_name())
-
- for cont in (cls.container, cls.container2):
- if not cont.create():
- raise ResponseError(cls.conn.response)
-
- # avoid getting a prefix that stops halfway through an encoded
- # character
- prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
- cls.segment_prefix = prefix
-
- for letter in ('a', 'b', 'c', 'd', 'e'):
- file_item = cls.container.file("%s/seg_lower%s" % (prefix, letter))
- file_item.write(letter * 10)
-
- file_item = cls.container.file("%s/seg_upper%s" % (prefix, letter))
- file_item.write(letter.upper() * 10)
-
- for letter in ('f', 'g', 'h', 'i', 'j'):
- file_item = cls.container2.file("%s/seg_lower%s" %
- (prefix, letter))
- file_item.write(letter * 10)
-
- man1 = cls.container.file("man1")
- man1.write('man1-contents',
- hdrs={"X-Object-Manifest": "%s/%s/seg_lower" %
- (cls.container.name, prefix)})
-
- man2 = cls.container.file("man2")
- man2.write('man2-contents',
- hdrs={"X-Object-Manifest": "%s/%s/seg_upper" %
- (cls.container.name, prefix)})
-
- manall = cls.container.file("manall")
- manall.write('manall-contents',
- hdrs={"X-Object-Manifest": "%s/%s/seg" %
- (cls.container.name, prefix)})
-
- mancont2 = cls.container.file("mancont2")
- mancont2.write(
- 'mancont2-contents',
- hdrs={"X-Object-Manifest": "%s/%s/seg_lower" %
- (cls.container2.name, prefix)})
-
-
-class TestDlo(Base):
- env = TestDloEnv
- set_up = False
-
- def test_get_manifest(self):
- file_item = self.env.container.file('man1')
- file_contents = file_item.read()
- self.assertEqual(
- file_contents,
- "aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee")
-
- file_item = self.env.container.file('man2')
- file_contents = file_item.read()
- self.assertEqual(
- file_contents,
- "AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE")
-
- file_item = self.env.container.file('manall')
- file_contents = file_item.read()
- self.assertEqual(
- file_contents,
- ("aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee" +
- "AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE"))
-
- def test_get_manifest_document_itself(self):
- file_item = self.env.container.file('man1')
- file_contents = file_item.read(parms={'multipart-manifest': 'get'})
- self.assertEqual(file_contents, "man1-contents")
- self.assertEqual(file_item.info()['x_object_manifest'],
- "%s/%s/seg_lower" %
- (self.env.container.name, self.env.segment_prefix))
-
- def test_get_range(self):
- file_item = self.env.container.file('man1')
- file_contents = file_item.read(size=25, offset=8)
- self.assertEqual(file_contents, "aabbbbbbbbbbccccccccccddd")
-
- file_contents = file_item.read(size=1, offset=47)
- self.assertEqual(file_contents, "e")
-
- def test_get_range_out_of_range(self):
- file_item = self.env.container.file('man1')
-
- self.assertRaises(ResponseError, file_item.read, size=7, offset=50)
- self.assert_status(416)
-
- def test_copy(self):
- # Adding a new segment, copying the manifest, and then deleting the
- # segment proves that the new object is really the concatenated
- # segments and not just a manifest.
- f_segment = self.env.container.file("%s/seg_lowerf" %
- (self.env.segment_prefix))
- f_segment.write('ffffffffff')
- try:
- man1_item = self.env.container.file('man1')
- man1_item.copy(self.env.container.name, "copied-man1")
- finally:
- # try not to leave this around for other tests to stumble over
- f_segment.delete()
-
- file_item = self.env.container.file('copied-man1')
- file_contents = file_item.read()
- self.assertEqual(
- file_contents,
- "aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff")
- # The copied object must not have X-Object-Manifest
- self.assertNotIn("x_object_manifest", file_item.info())
-
- def test_copy_account(self):
- # dlo use same account and same container only
- acct = self.env.conn.account_name
- # Adding a new segment, copying the manifest, and then deleting the
- # segment proves that the new object is really the concatenated
- # segments and not just a manifest.
- f_segment = self.env.container.file("%s/seg_lowerf" %
- (self.env.segment_prefix))
- f_segment.write('ffffffffff')
- try:
- man1_item = self.env.container.file('man1')
- man1_item.copy_account(acct,
- self.env.container.name,
- "copied-man1")
- finally:
- # try not to leave this around for other tests to stumble over
- f_segment.delete()
-
- file_item = self.env.container.file('copied-man1')
- file_contents = file_item.read()
- self.assertEqual(
- file_contents,
- "aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff")
- # The copied object must not have X-Object-Manifest
- self.assertNotIn("x_object_manifest", file_item.info())
-
- def test_copy_manifest(self):
- # Copying the manifest with multipart-manifest=get query string
- # should result in another manifest
- try:
- man1_item = self.env.container.file('man1')
- man1_item.copy(self.env.container.name, "copied-man1",
- parms={'multipart-manifest': 'get'})
-
- copied = self.env.container.file("copied-man1")
- copied_contents = copied.read(parms={'multipart-manifest': 'get'})
- self.assertEqual(copied_contents, "man1-contents")
-
- copied_contents = copied.read()
- self.assertEqual(
- copied_contents,
- "aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee")
- self.assertEqual(man1_item.info()['x_object_manifest'],
- copied.info()['x_object_manifest'])
- finally:
- # try not to leave this around for other tests to stumble over
- self.env.container.file("copied-man1").delete()
-
- def test_dlo_if_match_get(self):
- manifest = self.env.container.file("man1")
- etag = manifest.info()['etag']
-
- self.assertRaises(ResponseError, manifest.read,
- hdrs={'If-Match': 'not-%s' % etag})
- self.assert_status(412)
+ pass
- manifest.read(hdrs={'If-Match': etag})
- self.assert_status(200)
-
- def test_dlo_if_none_match_get(self):
- manifest = self.env.container.file("man1")
- etag = manifest.info()['etag']
-
- self.assertRaises(ResponseError, manifest.read,
- hdrs={'If-None-Match': etag})
- self.assert_status(304)
-
- manifest.read(hdrs={'If-None-Match': "not-%s" % etag})
- self.assert_status(200)
-
- def test_dlo_if_match_head(self):
- manifest = self.env.container.file("man1")
- etag = manifest.info()['etag']
-
- self.assertRaises(ResponseError, manifest.info,
- hdrs={'If-Match': 'not-%s' % etag})
- self.assert_status(412)
-
- manifest.info(hdrs={'If-Match': etag})
- self.assert_status(200)
-
- def test_dlo_if_none_match_head(self):
- manifest = self.env.container.file("man1")
- etag = manifest.info()['etag']
-
- self.assertRaises(ResponseError, manifest.info,
- hdrs={'If-None-Match': etag})
- self.assert_status(304)
-
- manifest.info(hdrs={'If-None-Match': "not-%s" % etag})
- self.assert_status(200)
-
- def test_dlo_referer_on_segment_container(self):
- # First the account2 (test3) should fail
- headers = {'X-Auth-Token': self.env.conn2.storage_token,
- 'Referer': 'http://blah.example.com'}
- dlo_file = self.env.container.file("mancont2")
- self.assertRaises(ResponseError, dlo_file.read,
- hdrs=headers)
- self.assert_status(403)
-
- # Now set the referer on the dlo container only
- referer_metadata = {'X-Container-Read': '.r:*.example.com,.rlistings'}
- self.env.container.update_metadata(referer_metadata)
-
- self.assertRaises(ResponseError, dlo_file.read,
- hdrs=headers)
- self.assert_status(403)
-
- # Finally set the referer on the segment container
- self.env.container2.update_metadata(referer_metadata)
-
- contents = dlo_file.read(hdrs=headers)
- self.assertEqual(
- contents,
- "ffffffffffgggggggggghhhhhhhhhhiiiiiiiiiijjjjjjjjjj")
-
- def test_dlo_post_with_manifest_header(self):
- # verify that performing a POST to a DLO manifest
- # preserves the fact that it is a manifest file.
- # verify that the x-object-manifest header may be updated.
-
- # create a new manifest for this test to avoid test coupling.
- x_o_m = self.env.container.file('man1').info()['x_object_manifest']
- file_item = self.env.container.file(Utils.create_name())
- file_item.write('manifest-contents', hdrs={"X-Object-Manifest": x_o_m})
-
- # sanity checks
- manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
- self.assertEqual('manifest-contents', manifest_contents)
- expected_contents = ''.join([(c * 10) for c in 'abcde'])
- contents = file_item.read(parms={})
- self.assertEqual(expected_contents, contents)
-
- # POST a modified x-object-manifest value
- new_x_o_m = x_o_m.rstrip('lower') + 'upper'
- file_item.post({'x-object-meta-foo': 'bar',
- 'x-object-manifest': new_x_o_m})
-
- # verify that x-object-manifest was updated
- file_item.info()
- resp_headers = file_item.conn.response.getheaders()
- self.assertIn(('x-object-manifest', new_x_o_m), resp_headers)
- self.assertIn(('x-object-meta-foo', 'bar'), resp_headers)
-
- # verify that manifest content was not changed
- manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
- self.assertEqual('manifest-contents', manifest_contents)
-
- # verify that updated manifest points to new content
- expected_contents = ''.join([(c * 10) for c in 'ABCDE'])
- contents = file_item.read(parms={})
- self.assertEqual(expected_contents, contents)
-
- # Now revert the manifest to point to original segments, including a
- # multipart-manifest=get param just to check that has no effect
- file_item.post({'x-object-manifest': x_o_m},
- parms={'multipart-manifest': 'get'})
-
- # verify that x-object-manifest was reverted
- info = file_item.info()
- self.assertIn('x_object_manifest', info)
- self.assertEqual(x_o_m, info['x_object_manifest'])
-
- # verify that manifest content was not changed
- manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
- self.assertEqual('manifest-contents', manifest_contents)
-
- # verify that updated manifest points new content
- expected_contents = ''.join([(c * 10) for c in 'abcde'])
- contents = file_item.read(parms={})
- self.assertEqual(expected_contents, contents)
-
- def test_dlo_post_without_manifest_header(self):
- # verify that a POST to a DLO manifest object with no
- # x-object-manifest header will cause the existing x-object-manifest
- # header to be lost
-
- # create a new manifest for this test to avoid test coupling.
- x_o_m = self.env.container.file('man1').info()['x_object_manifest']
- file_item = self.env.container.file(Utils.create_name())
- file_item.write('manifest-contents', hdrs={"X-Object-Manifest": x_o_m})
-
- # sanity checks
- manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
- self.assertEqual('manifest-contents', manifest_contents)
- expected_contents = ''.join([(c * 10) for c in 'abcde'])
- contents = file_item.read(parms={})
- self.assertEqual(expected_contents, contents)
-
- # POST with no x-object-manifest header
- file_item.post({})
-
- # verify that existing x-object-manifest was removed
- info = file_item.info()
- self.assertNotIn('x_object_manifest', info)
- # verify that object content was not changed
- manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
- self.assertEqual('manifest-contents', manifest_contents)
-
- # verify that object is no longer a manifest
- contents = file_item.read(parms={})
- self.assertEqual('manifest-contents', contents)
-
- def test_dlo_post_with_manifest_regular_object(self):
- # verify that performing a POST to a regular object
- # with a manifest header will create a DLO.
-
- # Put a regular object
- file_item = self.env.container.file(Utils.create_name())
- file_item.write('file contents', hdrs={})
-
- # sanity checks
- file_contents = file_item.read(parms={})
- self.assertEqual('file contents', file_contents)
-
- # get the path associated with man1
- x_o_m = self.env.container.file('man1').info()['x_object_manifest']
-
- # POST a x-object-manifest value to the regular object
- file_item.post({'x-object-manifest': x_o_m})
-
- # verify that the file is now a manifest
- manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
- self.assertEqual('file contents', manifest_contents)
- expected_contents = ''.join([(c * 10) for c in 'abcde'])
- contents = file_item.read(parms={})
- self.assertEqual(expected_contents, contents)
- file_item.info()
- resp_headers = file_item.conn.response.getheaders()
- self.assertIn(('x-object-manifest', x_o_m), resp_headers)
-
-
-class TestDloUTF8(Base2, TestDlo):
- set_up = False
-
-
-class TestFileComparisonEnv(object):
+class TestFileComparisonEnv(BaseEnv):
@classmethod
def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
- cls.account.delete_containers()
-
+ super(TestFileComparisonEnv, cls).setUp()
cls.container = cls.account.container(Utils.create_name())
if not cls.container.create():
@@ -2904,7 +2604,6 @@ class TestFileComparisonEnv(object):
class TestFileComparison(Base):
env = TestFileComparisonEnv
- set_up = False
def testIfMatch(self):
for file_item in self.env.files:
@@ -3024,2084 +2723,7 @@ class TestFileComparison(Base):
class TestFileComparisonUTF8(Base2, TestFileComparison):
- set_up = False
-
-
-class TestSloEnv(object):
- slo_enabled = None # tri-state: None initially, then True/False
-
- @classmethod
- def create_segments(cls, container):
- seg_info = {}
- for letter, size in (('a', 1024 * 1024),
- ('b', 1024 * 1024),
- ('c', 1024 * 1024),
- ('d', 1024 * 1024),
- ('e', 1)):
- seg_name = "seg_%s" % letter
- file_item = container.file(seg_name)
- file_item.write(letter * size)
- seg_info[seg_name] = {
- 'size_bytes': size,
- 'etag': file_item.md5,
- 'path': '/%s/%s' % (container.name, seg_name)}
- return seg_info
-
- @classmethod
- def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
- config2 = deepcopy(tf.config)
- config2['account'] = tf.config['account2']
- config2['username'] = tf.config['username2']
- config2['password'] = tf.config['password2']
- cls.conn2 = Connection(config2)
- cls.conn2.authenticate()
- cls.account2 = cls.conn2.get_account()
- cls.account2.delete_containers()
- config3 = tf.config.copy()
- config3['username'] = tf.config['username3']
- config3['password'] = tf.config['password3']
- cls.conn3 = Connection(config3)
- cls.conn3.authenticate()
-
- if cls.slo_enabled is None:
- cls.slo_enabled = 'slo' in cluster_info
- if not cls.slo_enabled:
- return
-
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
- cls.account.delete_containers()
-
- cls.container = cls.account.container(Utils.create_name())
- cls.container2 = cls.account.container(Utils.create_name())
-
- for cont in (cls.container, cls.container2):
- if not cont.create():
- raise ResponseError(cls.conn.response)
-
- cls.seg_info = seg_info = cls.create_segments(cls.container)
-
- file_item = cls.container.file("manifest-abcde")
- file_item.write(
- json.dumps([seg_info['seg_a'], seg_info['seg_b'],
- seg_info['seg_c'], seg_info['seg_d'],
- seg_info['seg_e']]),
- parms={'multipart-manifest': 'put'})
-
- # Put the same manifest in the container2
- file_item = cls.container2.file("manifest-abcde")
- file_item.write(
- json.dumps([seg_info['seg_a'], seg_info['seg_b'],
- seg_info['seg_c'], seg_info['seg_d'],
- seg_info['seg_e']]),
- parms={'multipart-manifest': 'put'})
-
- file_item = cls.container.file('manifest-cd')
- cd_json = json.dumps([seg_info['seg_c'], seg_info['seg_d']])
- file_item.write(cd_json, parms={'multipart-manifest': 'put'})
- cd_etag = hashlib.md5(seg_info['seg_c']['etag'] +
- seg_info['seg_d']['etag']).hexdigest()
-
- file_item = cls.container.file("manifest-bcd-submanifest")
- file_item.write(
- json.dumps([seg_info['seg_b'],
- {'etag': cd_etag,
- 'size_bytes': (seg_info['seg_c']['size_bytes'] +
- seg_info['seg_d']['size_bytes']),
- 'path': '/%s/%s' % (cls.container.name,
- 'manifest-cd')}]),
- parms={'multipart-manifest': 'put'})
- bcd_submanifest_etag = hashlib.md5(
- seg_info['seg_b']['etag'] + cd_etag).hexdigest()
-
- file_item = cls.container.file("manifest-abcde-submanifest")
- file_item.write(
- json.dumps([
- seg_info['seg_a'],
- {'etag': bcd_submanifest_etag,
- 'size_bytes': (seg_info['seg_b']['size_bytes'] +
- seg_info['seg_c']['size_bytes'] +
- seg_info['seg_d']['size_bytes']),
- 'path': '/%s/%s' % (cls.container.name,
- 'manifest-bcd-submanifest')},
- seg_info['seg_e']]),
- parms={'multipart-manifest': 'put'})
- abcde_submanifest_etag = hashlib.md5(
- seg_info['seg_a']['etag'] + bcd_submanifest_etag +
- seg_info['seg_e']['etag']).hexdigest()
- abcde_submanifest_size = (seg_info['seg_a']['size_bytes'] +
- seg_info['seg_b']['size_bytes'] +
- seg_info['seg_c']['size_bytes'] +
- seg_info['seg_d']['size_bytes'] +
- seg_info['seg_e']['size_bytes'])
-
- file_item = cls.container.file("ranged-manifest")
- file_item.write(
- json.dumps([
- {'etag': abcde_submanifest_etag,
- 'size_bytes': abcde_submanifest_size,
- 'path': '/%s/%s' % (cls.container.name,
- 'manifest-abcde-submanifest'),
- 'range': '-1048578'}, # 'c' + ('d' * 2**20) + 'e'
- {'etag': abcde_submanifest_etag,
- 'size_bytes': abcde_submanifest_size,
- 'path': '/%s/%s' % (cls.container.name,
- 'manifest-abcde-submanifest'),
- 'range': '524288-1572863'}, # 'a' * 2**19 + 'b' * 2**19
- {'etag': abcde_submanifest_etag,
- 'size_bytes': abcde_submanifest_size,
- 'path': '/%s/%s' % (cls.container.name,
- 'manifest-abcde-submanifest'),
- 'range': '3145727-3145728'}]), # 'cd'
- parms={'multipart-manifest': 'put'})
- ranged_manifest_etag = hashlib.md5(
- abcde_submanifest_etag + ':3145727-4194304;' +
- abcde_submanifest_etag + ':524288-1572863;' +
- abcde_submanifest_etag + ':3145727-3145728;').hexdigest()
- ranged_manifest_size = 2 * 1024 * 1024 + 4
-
- file_item = cls.container.file("ranged-submanifest")
- file_item.write(
- json.dumps([
- seg_info['seg_c'],
- {'etag': ranged_manifest_etag,
- 'size_bytes': ranged_manifest_size,
- 'path': '/%s/%s' % (cls.container.name,
- 'ranged-manifest')},
- {'etag': ranged_manifest_etag,
- 'size_bytes': ranged_manifest_size,
- 'path': '/%s/%s' % (cls.container.name,
- 'ranged-manifest'),
- 'range': '524289-1572865'},
- {'etag': ranged_manifest_etag,
- 'size_bytes': ranged_manifest_size,
- 'path': '/%s/%s' % (cls.container.name,
- 'ranged-manifest'),
- 'range': '-3'}]),
- parms={'multipart-manifest': 'put'})
-
- file_item = cls.container.file("manifest-db")
- file_item.write(
- json.dumps([
- {'path': seg_info['seg_d']['path'], 'etag': None,
- 'size_bytes': None},
- {'path': seg_info['seg_b']['path'], 'etag': None,
- 'size_bytes': None},
- ]), parms={'multipart-manifest': 'put'})
-
- file_item = cls.container.file("ranged-manifest-repeated-segment")
- file_item.write(
- json.dumps([
- {'path': seg_info['seg_a']['path'], 'etag': None,
- 'size_bytes': None, 'range': '-1048578'},
- {'path': seg_info['seg_a']['path'], 'etag': None,
- 'size_bytes': None},
- {'path': seg_info['seg_b']['path'], 'etag': None,
- 'size_bytes': None, 'range': '-1048578'},
- ]), parms={'multipart-manifest': 'put'})
-
-
-class TestSlo(Base):
- env = TestSloEnv
- set_up = False
-
- def setUp(self):
- super(TestSlo, self).setUp()
- if self.env.slo_enabled is False:
- raise SkipTest("SLO not enabled")
- elif self.env.slo_enabled is not True:
- # just some sanity checking
- raise Exception(
- "Expected slo_enabled to be True/False, got %r" %
- (self.env.slo_enabled,))
-
- def test_slo_get_simple_manifest(self):
- file_item = self.env.container.file('manifest-abcde')
- file_contents = file_item.read()
- self.assertEqual(4 * 1024 * 1024 + 1, len(file_contents))
- self.assertEqual('a', file_contents[0])
- self.assertEqual('a', file_contents[1024 * 1024 - 1])
- self.assertEqual('b', file_contents[1024 * 1024])
- self.assertEqual('d', file_contents[-2])
- self.assertEqual('e', file_contents[-1])
-
- def test_slo_container_listing(self):
- raise SkipTest("Gluster preserves orig sys metadata - invalid test")
- # the listing object size should equal the sum of the size of the
- # segments, not the size of the manifest body
- file_item = self.env.container.file(Utils.create_name())
- file_item.write(
- json.dumps([self.env.seg_info['seg_a']]),
- parms={'multipart-manifest': 'put'})
- # The container listing has the etag of the actual manifest object
- # contents which we get using multipart-manifest=get. Arguably this
- # should be the etag that we get when NOT using multipart-manifest=get,
- # to be consistent with size and content-type. But here we at least
- # verify that it remains consistent when the object is updated with a
- # POST.
- file_item.initialize(parms={'multipart-manifest': 'get'})
- expected_etag = file_item.etag
-
- listing = self.env.container.files(parms={'format': 'json'})
- for f_dict in listing:
- if f_dict['name'] == file_item.name:
- self.assertEqual(1024 * 1024, f_dict['bytes'])
- self.assertEqual('application/octet-stream',
- f_dict['content_type'])
- self.assertEqual(expected_etag, f_dict['hash'])
- break
- else:
- self.fail('Failed to find manifest file in container listing')
-
- # now POST updated content-type file
- file_item.content_type = 'image/jpeg'
- file_item.sync_metadata({'X-Object-Meta-Test': 'blah'})
- file_item.initialize()
- self.assertEqual('image/jpeg', file_item.content_type) # sanity
-
- # verify that the container listing is consistent with the file
- listing = self.env.container.files(parms={'format': 'json'})
- for f_dict in listing:
- if f_dict['name'] == file_item.name:
- self.assertEqual(1024 * 1024, f_dict['bytes'])
- self.assertEqual(file_item.content_type,
- f_dict['content_type'])
- self.assertEqual(expected_etag, f_dict['hash'])
- break
- else:
- self.fail('Failed to find manifest file in container listing')
-
- # now POST with no change to content-type
- file_item.sync_metadata({'X-Object-Meta-Test': 'blah'},
- cfg={'no_content_type': True})
- file_item.initialize()
- self.assertEqual('image/jpeg', file_item.content_type) # sanity
-
- # verify that the container listing is consistent with the file
- listing = self.env.container.files(parms={'format': 'json'})
- for f_dict in listing:
- if f_dict['name'] == file_item.name:
- self.assertEqual(1024 * 1024, f_dict['bytes'])
- self.assertEqual(file_item.content_type,
- f_dict['content_type'])
- self.assertEqual(expected_etag, f_dict['hash'])
- break
- else:
- self.fail('Failed to find manifest file in container listing')
-
- def test_slo_get_nested_manifest(self):
- file_item = self.env.container.file('manifest-abcde-submanifest')
- file_contents = file_item.read()
- self.assertEqual(4 * 1024 * 1024 + 1, len(file_contents))
- self.assertEqual('a', file_contents[0])
- self.assertEqual('a', file_contents[1024 * 1024 - 1])
- self.assertEqual('b', file_contents[1024 * 1024])
- self.assertEqual('d', file_contents[-2])
- self.assertEqual('e', file_contents[-1])
-
- def test_slo_get_ranged_manifest(self):
- file_item = self.env.container.file('ranged-manifest')
- grouped_file_contents = [
- (char, sum(1 for _char in grp))
- for char, grp in itertools.groupby(file_item.read())]
- self.assertEqual([
- ('c', 1),
- ('d', 1024 * 1024),
- ('e', 1),
- ('a', 512 * 1024),
- ('b', 512 * 1024),
- ('c', 1),
- ('d', 1)], grouped_file_contents)
-
- def test_slo_get_ranged_manifest_repeated_segment(self):
- file_item = self.env.container.file('ranged-manifest-repeated-segment')
- grouped_file_contents = [
- (char, sum(1 for _char in grp))
- for char, grp in itertools.groupby(file_item.read())]
- self.assertEqual(
- [('a', 2097152), ('b', 1048576)],
- grouped_file_contents)
-
- def test_slo_get_ranged_submanifest(self):
- file_item = self.env.container.file('ranged-submanifest')
- grouped_file_contents = [
- (char, sum(1 for _char in grp))
- for char, grp in itertools.groupby(file_item.read())]
- self.assertEqual([
- ('c', 1024 * 1024 + 1),
- ('d', 1024 * 1024),
- ('e', 1),
- ('a', 512 * 1024),
- ('b', 512 * 1024),
- ('c', 1),
- ('d', 512 * 1024 + 1),
- ('e', 1),
- ('a', 512 * 1024),
- ('b', 1),
- ('c', 1),
- ('d', 1)], grouped_file_contents)
-
- def test_slo_ranged_get(self):
- file_item = self.env.container.file('manifest-abcde')
- file_contents = file_item.read(size=1024 * 1024 + 2,
- offset=1024 * 1024 - 1)
- self.assertEqual('a', file_contents[0])
- self.assertEqual('b', file_contents[1])
- self.assertEqual('b', file_contents[-2])
- self.assertEqual('c', file_contents[-1])
-
- def test_slo_ranged_submanifest(self):
- file_item = self.env.container.file('manifest-abcde-submanifest')
- file_contents = file_item.read(size=1024 * 1024 + 2,
- offset=1024 * 1024 * 2 - 1)
- self.assertEqual('b', file_contents[0])
- self.assertEqual('c', file_contents[1])
- self.assertEqual('c', file_contents[-2])
- self.assertEqual('d', file_contents[-1])
-
- def test_slo_etag_is_hash_of_etags(self):
- expected_hash = hashlib.md5()
- expected_hash.update(hashlib.md5('a' * 1024 * 1024).hexdigest())
- expected_hash.update(hashlib.md5('b' * 1024 * 1024).hexdigest())
- expected_hash.update(hashlib.md5('c' * 1024 * 1024).hexdigest())
- expected_hash.update(hashlib.md5('d' * 1024 * 1024).hexdigest())
- expected_hash.update(hashlib.md5('e').hexdigest())
- expected_etag = expected_hash.hexdigest()
-
- file_item = self.env.container.file('manifest-abcde')
- self.assertEqual(expected_etag, file_item.info()['etag'])
-
- def test_slo_etag_is_hash_of_etags_submanifests(self):
-
- def hd(x):
- return hashlib.md5(x).hexdigest()
-
- expected_etag = hd(hd('a' * 1024 * 1024) +
- hd(hd('b' * 1024 * 1024) +
- hd(hd('c' * 1024 * 1024) +
- hd('d' * 1024 * 1024))) +
- hd('e'))
-
- file_item = self.env.container.file('manifest-abcde-submanifest')
- self.assertEqual(expected_etag, file_item.info()['etag'])
-
- def test_slo_etag_mismatch(self):
- file_item = self.env.container.file("manifest-a-bad-etag")
- try:
- file_item.write(
- json.dumps([{
- 'size_bytes': 1024 * 1024,
- 'etag': 'not it',
- 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]),
- parms={'multipart-manifest': 'put'})
- except ResponseError as err:
- self.assertEqual(400, err.status)
- else:
- self.fail("Expected ResponseError but didn't get it")
-
- def test_slo_size_mismatch(self):
- file_item = self.env.container.file("manifest-a-bad-size")
- try:
- file_item.write(
- json.dumps([{
- 'size_bytes': 1024 * 1024 - 1,
- 'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(),
- 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]),
- parms={'multipart-manifest': 'put'})
- except ResponseError as err:
- self.assertEqual(400, err.status)
- else:
- self.fail("Expected ResponseError but didn't get it")
-
- def test_slo_unspecified_etag(self):
- file_item = self.env.container.file("manifest-a-unspecified-etag")
- file_item.write(
- json.dumps([{
- 'size_bytes': 1024 * 1024,
- 'etag': None,
- 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]),
- parms={'multipart-manifest': 'put'})
- self.assert_status(201)
-
- def test_slo_unspecified_size(self):
- file_item = self.env.container.file("manifest-a-unspecified-size")
- file_item.write(
- json.dumps([{
- 'size_bytes': None,
- 'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(),
- 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]),
- parms={'multipart-manifest': 'put'})
- self.assert_status(201)
-
- def test_slo_missing_etag(self):
- file_item = self.env.container.file("manifest-a-missing-etag")
- try:
- file_item.write(
- json.dumps([{
- 'size_bytes': 1024 * 1024,
- 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]),
- parms={'multipart-manifest': 'put'})
- except ResponseError as err:
- self.assertEqual(400, err.status)
- else:
- self.fail("Expected ResponseError but didn't get it")
-
- def test_slo_missing_size(self):
- file_item = self.env.container.file("manifest-a-missing-size")
- try:
- file_item.write(
- json.dumps([{
- 'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(),
- 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]),
- parms={'multipart-manifest': 'put'})
- except ResponseError as err:
- self.assertEqual(400, err.status)
- else:
- self.fail("Expected ResponseError but didn't get it")
-
- def test_slo_overwrite_segment_with_manifest(self):
- file_item = self.env.container.file("seg_b")
- with self.assertRaises(ResponseError) as catcher:
- file_item.write(
- json.dumps([
- {'size_bytes': 1024 * 1024,
- 'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(),
- 'path': '/%s/%s' % (self.env.container.name, 'seg_a')},
- {'size_bytes': 1024 * 1024,
- 'etag': hashlib.md5('b' * 1024 * 1024).hexdigest(),
- 'path': '/%s/%s' % (self.env.container.name, 'seg_b')},
- {'size_bytes': 1024 * 1024,
- 'etag': hashlib.md5('c' * 1024 * 1024).hexdigest(),
- 'path': '/%s/%s' % (self.env.container.name, 'seg_c')}]),
- parms={'multipart-manifest': 'put'})
- self.assertEqual(400, catcher.exception.status)
-
- def test_slo_copy(self):
- file_item = self.env.container.file("manifest-abcde")
- file_item.copy(self.env.container.name, "copied-abcde")
-
- copied = self.env.container.file("copied-abcde")
- copied_contents = copied.read(parms={'multipart-manifest': 'get'})
- self.assertEqual(4 * 1024 * 1024 + 1, len(copied_contents))
-
- def test_slo_copy_account(self):
- acct = self.env.conn.account_name
- # same account copy
- file_item = self.env.container.file("manifest-abcde")
- file_item.copy_account(acct, self.env.container.name, "copied-abcde")
-
- copied = self.env.container.file("copied-abcde")
- copied_contents = copied.read(parms={'multipart-manifest': 'get'})
- self.assertEqual(4 * 1024 * 1024 + 1, len(copied_contents))
-
- # copy to different account
- acct = self.env.conn2.account_name
- dest_cont = self.env.account2.container(Utils.create_name())
- self.assertTrue(dest_cont.create(hdrs={
- 'X-Container-Write': self.env.conn.user_acl
- }))
- file_item = self.env.container.file("manifest-abcde")
- file_item.copy_account(acct, dest_cont, "copied-abcde")
-
- copied = dest_cont.file("copied-abcde")
- copied_contents = copied.read(parms={'multipart-manifest': 'get'})
- self.assertEqual(4 * 1024 * 1024 + 1, len(copied_contents))
-
- def test_slo_copy_the_manifest(self):
- source = self.env.container.file("manifest-abcde")
- source_contents = source.read(parms={'multipart-manifest': 'get'})
- source_json = json.loads(source_contents)
- source.initialize()
- self.assertEqual('application/octet-stream', source.content_type)
- source.initialize(parms={'multipart-manifest': 'get'})
- source_hash = hashlib.md5()
- source_hash.update(source_contents)
- self.assertEqual(source_hash.hexdigest(), source.etag)
-
- self.assertTrue(source.copy(self.env.container.name,
- "copied-abcde-manifest-only",
- parms={'multipart-manifest': 'get'}))
-
- copied = self.env.container.file("copied-abcde-manifest-only")
- copied_contents = copied.read(parms={'multipart-manifest': 'get'})
- try:
- copied_json = json.loads(copied_contents)
- except ValueError:
- self.fail("COPY didn't copy the manifest (invalid json on GET)")
- self.assertEqual(source_json, copied_json)
- copied.initialize()
- self.assertEqual('application/octet-stream', copied.content_type)
- copied.initialize(parms={'multipart-manifest': 'get'})
- copied_hash = hashlib.md5()
- copied_hash.update(copied_contents)
- self.assertEqual(copied_hash.hexdigest(), copied.etag)
-
- # verify the listing metadata
- listing = self.env.container.files(parms={'format': 'json'})
- names = {}
- for f_dict in listing:
- if f_dict['name'] in ('manifest-abcde',
- 'copied-abcde-manifest-only'):
- names[f_dict['name']] = f_dict
-
- self.assertIn('manifest-abcde', names)
- actual = names['manifest-abcde']
- self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
- self.assertEqual('application/octet-stream', actual['content_type'])
- self.assertEqual(source.etag, actual['hash'])
-
- self.assertIn('copied-abcde-manifest-only', names)
- actual = names['copied-abcde-manifest-only']
- self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
- self.assertEqual('application/octet-stream', actual['content_type'])
- self.assertEqual(copied.etag, actual['hash'])
-
- def test_slo_copy_the_manifest_updating_metadata(self):
- source = self.env.container.file("manifest-abcde")
- source.content_type = 'application/octet-stream'
- source.sync_metadata({'test': 'original'})
- source_contents = source.read(parms={'multipart-manifest': 'get'})
- source_json = json.loads(source_contents)
- source.initialize()
- self.assertEqual('application/octet-stream', source.content_type)
- source.initialize(parms={'multipart-manifest': 'get'})
- source_hash = hashlib.md5()
- source_hash.update(source_contents)
- self.assertEqual(source_hash.hexdigest(), source.etag)
- self.assertEqual(source.metadata['test'], 'original')
-
- self.assertTrue(
- source.copy(self.env.container.name, "copied-abcde-manifest-only",
- parms={'multipart-manifest': 'get'},
- hdrs={'Content-Type': 'image/jpeg',
- 'X-Object-Meta-Test': 'updated'}))
-
- copied = self.env.container.file("copied-abcde-manifest-only")
- copied_contents = copied.read(parms={'multipart-manifest': 'get'})
- try:
- copied_json = json.loads(copied_contents)
- except ValueError:
- self.fail("COPY didn't copy the manifest (invalid json on GET)")
- self.assertEqual(source_json, copied_json)
- copied.initialize()
- self.assertEqual('image/jpeg', copied.content_type)
- copied.initialize(parms={'multipart-manifest': 'get'})
- copied_hash = hashlib.md5()
- copied_hash.update(copied_contents)
- self.assertEqual(copied_hash.hexdigest(), copied.etag)
- self.assertEqual(copied.metadata['test'], 'updated')
-
- # verify the listing metadata
- listing = self.env.container.files(parms={'format': 'json'})
- names = {}
- for f_dict in listing:
- if f_dict['name'] in ('manifest-abcde',
- 'copied-abcde-manifest-only'):
- names[f_dict['name']] = f_dict
-
- self.assertIn('manifest-abcde', names)
- actual = names['manifest-abcde']
- self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
- self.assertEqual('application/octet-stream', actual['content_type'])
- # the container listing should have the etag of the manifest contents
- self.assertEqual(source.etag, actual['hash'])
-
- self.assertIn('copied-abcde-manifest-only', names)
- actual = names['copied-abcde-manifest-only']
- self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
- self.assertEqual('image/jpeg', actual['content_type'])
- self.assertEqual(copied.etag, actual['hash'])
-
- def test_slo_copy_the_manifest_account(self):
- acct = self.env.conn.account_name
- # same account
- file_item = self.env.container.file("manifest-abcde")
- file_item.copy_account(acct,
- self.env.container.name,
- "copied-abcde-manifest-only",
- parms={'multipart-manifest': 'get'})
-
- copied = self.env.container.file("copied-abcde-manifest-only")
- copied_contents = copied.read(parms={'multipart-manifest': 'get'})
- try:
- json.loads(copied_contents)
- except ValueError:
- self.fail("COPY didn't copy the manifest (invalid json on GET)")
-
- # different account
- acct = self.env.conn2.account_name
- dest_cont = self.env.account2.container(Utils.create_name())
- self.assertTrue(dest_cont.create(hdrs={
- 'X-Container-Write': self.env.conn.user_acl
- }))
-
- # manifest copy will fail because there is no read access to segments
- # in destination account
- file_item.copy_account(
- acct, dest_cont, "copied-abcde-manifest-only",
- parms={'multipart-manifest': 'get'})
- self.assertEqual(400, file_item.conn.response.status)
- resp_body = file_item.conn.response.read()
- self.assertEqual(5, resp_body.count('403 Forbidden'),
- 'Unexpected response body %r' % resp_body)
-
- # create segments container in account2 with read access for account1
- segs_container = self.env.account2.container(self.env.container.name)
- self.assertTrue(segs_container.create(hdrs={
- 'X-Container-Read': self.env.conn.user_acl
- }))
-
- # manifest copy will still fail because there are no segments in
- # destination account
- file_item.copy_account(
- acct, dest_cont, "copied-abcde-manifest-only",
- parms={'multipart-manifest': 'get'})
- self.assertEqual(400, file_item.conn.response.status)
- resp_body = file_item.conn.response.read()
- self.assertEqual(5, resp_body.count('404 Not Found'),
- 'Unexpected response body %r' % resp_body)
-
- # create segments in account2 container with same name as in account1,
- # manifest copy now succeeds
- self.env.create_segments(segs_container)
-
- self.assertTrue(file_item.copy_account(
- acct, dest_cont, "copied-abcde-manifest-only",
- parms={'multipart-manifest': 'get'}))
-
- copied = dest_cont.file("copied-abcde-manifest-only")
- copied_contents = copied.read(parms={'multipart-manifest': 'get'})
- try:
- json.loads(copied_contents)
- except ValueError:
- self.fail("COPY didn't copy the manifest (invalid json on GET)")
-
- def _make_manifest(self):
- file_item = self.env.container.file("manifest-post")
- seg_info = self.env.seg_info
- file_item.write(
- json.dumps([seg_info['seg_a'], seg_info['seg_b'],
- seg_info['seg_c'], seg_info['seg_d'],
- seg_info['seg_e']]),
- parms={'multipart-manifest': 'put'})
- return file_item
-
- def test_slo_post_the_manifest_metadata_update(self):
- file_item = self._make_manifest()
- # sanity check, check the object is an SLO manifest
- file_item.info()
- file_item.header_fields([('slo', 'x-static-large-object')])
-
- # POST a user metadata (i.e. x-object-meta-post)
- file_item.sync_metadata({'post': 'update'})
-
- updated = self.env.container.file("manifest-post")
- updated.info()
- updated.header_fields([('user-meta', 'x-object-meta-post')]) # sanity
- updated.header_fields([('slo', 'x-static-large-object')])
- updated_contents = updated.read(parms={'multipart-manifest': 'get'})
- try:
- json.loads(updated_contents)
- except ValueError:
- self.fail("Unexpected content on GET, expected a json body")
-
- def test_slo_post_the_manifest_metadata_update_with_qs(self):
- # multipart-manifest query should be ignored on post
- for verb in ('put', 'get', 'delete'):
- file_item = self._make_manifest()
- # sanity check, check the object is an SLO manifest
- file_item.info()
- file_item.header_fields([('slo', 'x-static-large-object')])
- # POST a user metadata (i.e. x-object-meta-post)
- file_item.sync_metadata(metadata={'post': 'update'},
- parms={'multipart-manifest': verb})
- updated = self.env.container.file("manifest-post")
- updated.info()
- updated.header_fields(
- [('user-meta', 'x-object-meta-post')]) # sanity
- updated.header_fields([('slo', 'x-static-large-object')])
- updated_contents = updated.read(
- parms={'multipart-manifest': 'get'})
- try:
- json.loads(updated_contents)
- except ValueError:
- self.fail(
- "Unexpected content on GET, expected a json body")
-
- def test_slo_get_the_manifest(self):
- manifest = self.env.container.file("manifest-abcde")
- got_body = manifest.read(parms={'multipart-manifest': 'get'})
-
- self.assertEqual('application/json; charset=utf-8',
- manifest.content_type)
- try:
- json.loads(got_body)
- except ValueError:
- self.fail("GET with multipart-manifest=get got invalid json")
-
- def test_slo_get_the_manifest_with_details_from_server(self):
- manifest = self.env.container.file("manifest-db")
- got_body = manifest.read(parms={'multipart-manifest': 'get'})
-
- self.assertEqual('application/json; charset=utf-8',
- manifest.content_type)
- try:
- value = json.loads(got_body)
- except ValueError:
- self.fail("GET with multipart-manifest=get got invalid json")
-
- self.assertEqual(len(value), 2)
- self.assertEqual(value[0]['bytes'], 1024 * 1024)
- self.assertEqual(value[0]['hash'],
- hashlib.md5('d' * 1024 * 1024).hexdigest())
- self.assertEqual(value[0]['name'],
- '/%s/seg_d' % self.env.container.name.decode("utf-8"))
-
- self.assertEqual(value[1]['bytes'], 1024 * 1024)
- self.assertEqual(value[1]['hash'],
- hashlib.md5('b' * 1024 * 1024).hexdigest())
- self.assertEqual(value[1]['name'],
- '/%s/seg_b' % self.env.container.name.decode("utf-8"))
-
- def test_slo_get_raw_the_manifest_with_details_from_server(self):
- manifest = self.env.container.file("manifest-db")
- got_body = manifest.read(parms={'multipart-manifest': 'get',
- 'format': 'raw'})
-
- # raw format should have the actual manifest object content-type
- self.assertEqual('application/octet-stream', manifest.content_type)
- try:
- value = json.loads(got_body)
- except ValueError:
- msg = "GET with multipart-manifest=get&format=raw got invalid json"
- self.fail(msg)
-
- self.assertEqual(
- set(value[0].keys()), set(('size_bytes', 'etag', 'path')))
- self.assertEqual(len(value), 2)
- self.assertEqual(value[0]['size_bytes'], 1024 * 1024)
- self.assertEqual(value[0]['etag'],
- hashlib.md5('d' * 1024 * 1024).hexdigest())
- self.assertEqual(value[0]['path'],
- '/%s/seg_d' % self.env.container.name.decode("utf-8"))
- self.assertEqual(value[1]['size_bytes'], 1024 * 1024)
- self.assertEqual(value[1]['etag'],
- hashlib.md5('b' * 1024 * 1024).hexdigest())
- self.assertEqual(value[1]['path'],
- '/%s/seg_b' % self.env.container.name.decode("utf-8"))
-
- file_item = self.env.container.file("manifest-from-get-raw")
- file_item.write(got_body, parms={'multipart-manifest': 'put'})
-
- file_contents = file_item.read()
- self.assertEqual(2 * 1024 * 1024, len(file_contents))
-
- def test_slo_head_the_manifest(self):
- manifest = self.env.container.file("manifest-abcde")
- got_info = manifest.info(parms={'multipart-manifest': 'get'})
-
- self.assertEqual('application/json; charset=utf-8',
- got_info['content_type'])
-
- def test_slo_if_match_get(self):
- manifest = self.env.container.file("manifest-abcde")
- etag = manifest.info()['etag']
-
- self.assertRaises(ResponseError, manifest.read,
- hdrs={'If-Match': 'not-%s' % etag})
- self.assert_status(412)
-
- manifest.read(hdrs={'If-Match': etag})
- self.assert_status(200)
-
- def test_slo_if_none_match_put(self):
- file_item = self.env.container.file("manifest-if-none-match")
- manifest = json.dumps([{
- 'size_bytes': 1024 * 1024,
- 'etag': None,
- 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}])
-
- self.assertRaises(ResponseError, file_item.write, manifest,
- parms={'multipart-manifest': 'put'},
- hdrs={'If-None-Match': '"not-star"'})
- self.assert_status(400)
-
- file_item.write(manifest, parms={'multipart-manifest': 'put'},
- hdrs={'If-None-Match': '*'})
- self.assert_status(201)
-
- self.assertRaises(ResponseError, file_item.write, manifest,
- parms={'multipart-manifest': 'put'},
- hdrs={'If-None-Match': '*'})
- self.assert_status(412)
-
- def test_slo_if_none_match_get(self):
- manifest = self.env.container.file("manifest-abcde")
- etag = manifest.info()['etag']
-
- self.assertRaises(ResponseError, manifest.read,
- hdrs={'If-None-Match': etag})
- self.assert_status(304)
-
- manifest.read(hdrs={'If-None-Match': "not-%s" % etag})
- self.assert_status(200)
-
- def test_slo_if_match_head(self):
- manifest = self.env.container.file("manifest-abcde")
- etag = manifest.info()['etag']
-
- self.assertRaises(ResponseError, manifest.info,
- hdrs={'If-Match': 'not-%s' % etag})
- self.assert_status(412)
-
- manifest.info(hdrs={'If-Match': etag})
- self.assert_status(200)
-
- def test_slo_if_none_match_head(self):
- manifest = self.env.container.file("manifest-abcde")
- etag = manifest.info()['etag']
-
- self.assertRaises(ResponseError, manifest.info,
- hdrs={'If-None-Match': etag})
- self.assert_status(304)
-
- manifest.info(hdrs={'If-None-Match': "not-%s" % etag})
- self.assert_status(200)
-
- def test_slo_referer_on_segment_container(self):
- # First the account2 (test3) should fail
- headers = {'X-Auth-Token': self.env.conn3.storage_token,
- 'Referer': 'http://blah.example.com'}
- slo_file = self.env.container2.file('manifest-abcde')
- self.assertRaises(ResponseError, slo_file.read,
- hdrs=headers)
- self.assert_status(403)
-
- # Now set the referer on the slo container only
- referer_metadata = {'X-Container-Read': '.r:*.example.com,.rlistings'}
- self.env.container2.update_metadata(referer_metadata)
-
- self.assertRaises(ResponseError, slo_file.read,
- hdrs=headers)
- self.assert_status(409)
-
- # Finally set the referer on the segment container
- self.env.container.update_metadata(referer_metadata)
- contents = slo_file.read(hdrs=headers)
- self.assertEqual(4 * 1024 * 1024 + 1, len(contents))
- self.assertEqual('a', contents[0])
- self.assertEqual('a', contents[1024 * 1024 - 1])
- self.assertEqual('b', contents[1024 * 1024])
- self.assertEqual('d', contents[-2])
- self.assertEqual('e', contents[-1])
-
-
-class TestSloUTF8(Base2, TestSlo):
- set_up = False
-
-
-class TestObjectVersioningEnv(object):
- versioning_enabled = None # tri-state: None initially, then True/False
-
- @classmethod
- def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.storage_url, cls.storage_token = cls.conn.authenticate()
-
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
-
- # Second connection for ACL tests
- config2 = deepcopy(tf.config)
- config2['account'] = tf.config['account2']
- config2['username'] = tf.config['username2']
- config2['password'] = tf.config['password2']
- cls.conn2 = Connection(config2)
- cls.conn2.authenticate()
-
- # avoid getting a prefix that stops halfway through an encoded
- # character
- prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
-
- cls.versions_container = cls.account.container(prefix + "-versions")
- if not cls.versions_container.create():
- raise ResponseError(cls.conn.response)
-
- cls.container = cls.account.container(prefix + "-objs")
- if not cls.container.create(
- hdrs={'X-Versions-Location': cls.versions_container.name}):
- if cls.conn.response.status == 412:
- cls.versioning_enabled = False
- return
- raise ResponseError(cls.conn.response)
-
- container_info = cls.container.info()
- # if versioning is off, then X-Versions-Location won't persist
- cls.versioning_enabled = 'versions' in container_info
-
- # setup another account to test ACLs
- config2 = deepcopy(tf.config)
- config2['account'] = tf.config['account2']
- config2['username'] = tf.config['username2']
- config2['password'] = tf.config['password2']
- cls.conn2 = Connection(config2)
- cls.storage_url2, cls.storage_token2 = cls.conn2.authenticate()
- cls.account2 = cls.conn2.get_account()
- cls.account2.delete_containers()
-
- # setup another account with no access to anything to test ACLs
- config3 = deepcopy(tf.config)
- config3['account'] = tf.config['account']
- config3['username'] = tf.config['username3']
- config3['password'] = tf.config['password3']
- cls.conn3 = Connection(config3)
- cls.storage_url3, cls.storage_token3 = cls.conn3.authenticate()
- cls.account3 = cls.conn3.get_account()
-
- @classmethod
- def tearDown(cls):
- cls.account.delete_containers()
- cls.account2.delete_containers()
-
-
-class TestCrossPolicyObjectVersioningEnv(object):
- # tri-state: None initially, then True/False
- versioning_enabled = None
- multiple_policies_enabled = None
- policies = None
-
- @classmethod
- def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
-
- if cls.multiple_policies_enabled is None:
- try:
- cls.policies = tf.FunctionalStoragePolicyCollection.from_info()
- except AssertionError:
- pass
-
- if cls.policies and len(cls.policies) > 1:
- cls.multiple_policies_enabled = True
- else:
- cls.multiple_policies_enabled = False
- cls.versioning_enabled = True
- # We don't actually know the state of versioning, but without
- # multiple policies the tests should be skipped anyway. Claiming
- # versioning support lets us report the right reason for skipping.
- return
-
- policy = cls.policies.select()
- version_policy = cls.policies.exclude(name=policy['name']).select()
-
- cls.account = Account(cls.conn, tf.config.get('account',
- tf.config['username']))
-
- # Second connection for ACL tests
- config2 = deepcopy(tf.config)
- config2['account'] = tf.config['account2']
- config2['username'] = tf.config['username2']
- config2['password'] = tf.config['password2']
- cls.conn2 = Connection(config2)
- cls.conn2.authenticate()
-
- # avoid getting a prefix that stops halfway through an encoded
- # character
- prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
-
- cls.versions_container = cls.account.container(prefix + "-versions")
- if not cls.versions_container.create(
- {'X-Storage-Policy': policy['name']}):
- raise ResponseError(cls.conn.response)
-
- cls.container = cls.account.container(prefix + "-objs")
- if not cls.container.create(
- hdrs={'X-Versions-Location': cls.versions_container.name,
- 'X-Storage-Policy': version_policy['name']}):
- if cls.conn.response.status == 412:
- cls.versioning_enabled = False
- return
- raise ResponseError(cls.conn.response)
-
- container_info = cls.container.info()
- # if versioning is off, then X-Versions-Location won't persist
- cls.versioning_enabled = 'versions' in container_info
-
- # setup another account to test ACLs
- config2 = deepcopy(tf.config)
- config2['account'] = tf.config['account2']
- config2['username'] = tf.config['username2']
- config2['password'] = tf.config['password2']
- cls.conn2 = Connection(config2)
- cls.storage_url2, cls.storage_token2 = cls.conn2.authenticate()
- cls.account2 = cls.conn2.get_account()
- cls.account2.delete_containers()
-
- # setup another account with no access to anything to test ACLs
- config3 = deepcopy(tf.config)
- config3['account'] = tf.config['account']
- config3['username'] = tf.config['username3']
- config3['password'] = tf.config['password3']
- cls.conn3 = Connection(config3)
- cls.storage_url3, cls.storage_token3 = cls.conn3.authenticate()
- cls.account3 = cls.conn3.get_account()
-
- @classmethod
- def tearDown(cls):
- cls.account.delete_containers()
- cls.account2.delete_containers()
-
-
-class TestObjectVersioning(Base):
- env = TestObjectVersioningEnv
- set_up = False
-
- def setUp(self):
- super(TestObjectVersioning, self).setUp()
- if self.env.versioning_enabled is False:
- raise SkipTest("Object versioning not enabled")
- elif self.env.versioning_enabled is not True:
- # just some sanity checking
- raise Exception(
- "Expected versioning_enabled to be True/False, got %r" %
- (self.env.versioning_enabled,))
-
- def _tear_down_files(self):
- try:
- # only delete files and not containers
- # as they were configured in self.env
- self.env.versions_container.delete_files()
- self.env.container.delete_files()
- except ResponseError:
- pass
-
- def tearDown(self):
- super(TestObjectVersioning, self).tearDown()
- self._tear_down_files()
-
- def test_clear_version_option(self):
- # sanity
- self.assertEqual(self.env.container.info()['versions'],
- self.env.versions_container.name)
- self.env.container.update_metadata(
- hdrs={'X-Versions-Location': ''})
- self.assertIsNone(self.env.container.info().get('versions'))
-
- # set location back to the way it was
- self.env.container.update_metadata(
- hdrs={'X-Versions-Location': self.env.versions_container.name})
- self.assertEqual(self.env.container.info()['versions'],
- self.env.versions_container.name)
-
- def test_overwriting(self):
- container = self.env.container
- versions_container = self.env.versions_container
- cont_info = container.info()
- self.assertEqual(cont_info['versions'], versions_container.name)
-
- obj_name = Utils.create_name()
-
- versioned_obj = container.file(obj_name)
- put_headers = {'Content-Type': 'text/jibberish01',
- 'Content-Encoding': 'gzip',
- 'Content-Disposition': 'attachment; filename=myfile'}
- versioned_obj.write("aaaaa", hdrs=put_headers)
- obj_info = versioned_obj.info()
- self.assertEqual('text/jibberish01', obj_info['content_type'])
-
- # the allowed headers are configurable in object server, so we cannot
- # assert that content-encoding or content-disposition get *copied* to
- # the object version unless they were set on the original PUT, so
- # populate expected_headers by making a HEAD on the original object
- resp_headers = dict(versioned_obj.conn.response.getheaders())
- expected_headers = {}
- for k, v in put_headers.items():
- if k.lower() in resp_headers:
- expected_headers[k] = v
-
- self.assertEqual(0, versions_container.info()['object_count'])
- versioned_obj.write("bbbbb", hdrs={'Content-Type': 'text/jibberish02',
- 'X-Object-Meta-Foo': 'Bar'})
- versioned_obj.initialize()
- self.assertEqual(versioned_obj.content_type, 'text/jibberish02')
- self.assertEqual(versioned_obj.metadata['foo'], 'Bar')
-
- # the old version got saved off
- self.assertEqual(1, versions_container.info()['object_count'])
- versioned_obj_name = versions_container.files()[0]
- prev_version = versions_container.file(versioned_obj_name)
- prev_version.initialize()
- self.assertEqual("aaaaa", prev_version.read())
- self.assertEqual(prev_version.content_type, 'text/jibberish01')
-
- resp_headers = dict(prev_version.conn.response.getheaders())
- for k, v in expected_headers.items():
- self.assertIn(k.lower(), resp_headers)
- self.assertEqual(v, resp_headers[k.lower()])
-
- # make sure the new obj metadata did not leak to the prev. version
- self.assertNotIn('foo', prev_version.metadata)
-
- # check that POST does not create a new version
- versioned_obj.sync_metadata(metadata={'fu': 'baz'})
- self.assertEqual(1, versions_container.info()['object_count'])
-
- # if we overwrite it again, there are two versions
- versioned_obj.write("ccccc")
- self.assertEqual(2, versions_container.info()['object_count'])
- versioned_obj_name = versions_container.files()[1]
- prev_version = versions_container.file(versioned_obj_name)
- prev_version.initialize()
- self.assertEqual("bbbbb", prev_version.read())
- self.assertEqual(prev_version.content_type, 'text/jibberish02')
- self.assertIn('foo', prev_version.metadata)
- self.assertIn('fu', prev_version.metadata)
-
- # as we delete things, the old contents return
- self.assertEqual("ccccc", versioned_obj.read())
-
- # test copy from a different container
- src_container = self.env.account.container(Utils.create_name())
- self.assertTrue(src_container.create())
- src_name = Utils.create_name()
- src_obj = src_container.file(src_name)
- src_obj.write("ddddd", hdrs={'Content-Type': 'text/jibberish04'})
- src_obj.copy(container.name, obj_name)
-
- self.assertEqual("ddddd", versioned_obj.read())
- versioned_obj.initialize()
- self.assertEqual(versioned_obj.content_type, 'text/jibberish04')
-
- # make sure versions container has the previous version
- self.assertEqual(3, versions_container.info()['object_count'])
- versioned_obj_name = versions_container.files()[2]
- prev_version = versions_container.file(versioned_obj_name)
- prev_version.initialize()
- self.assertEqual("ccccc", prev_version.read())
-
- # test delete
- versioned_obj.delete()
- self.assertEqual("ccccc", versioned_obj.read())
- versioned_obj.delete()
- self.assertEqual("bbbbb", versioned_obj.read())
- versioned_obj.delete()
- self.assertEqual("aaaaa", versioned_obj.read())
- self.assertEqual(0, versions_container.info()['object_count'])
-
- # verify that all the original object headers have been copied back
- obj_info = versioned_obj.info()
- self.assertEqual('text/jibberish01', obj_info['content_type'])
- resp_headers = dict(versioned_obj.conn.response.getheaders())
- for k, v in expected_headers.items():
- self.assertIn(k.lower(), resp_headers)
- self.assertEqual(v, resp_headers[k.lower()])
-
- versioned_obj.delete()
- self.assertRaises(ResponseError, versioned_obj.read)
-
- def test_versioning_dlo(self):
- raise SkipTest('SOF incompatible test')
- container = self.env.container
- versions_container = self.env.versions_container
- obj_name = Utils.create_name()
-
- for i in ('1', '2', '3'):
- time.sleep(.01) # guarantee that the timestamp changes
- obj_name_seg = obj_name + '/' + i
- versioned_obj = container.file(obj_name_seg)
- versioned_obj.write(i)
- versioned_obj.write(i + i)
-
- self.assertEqual(3, versions_container.info()['object_count'])
-
- man_file = container.file(obj_name)
- man_file.write('', hdrs={"X-Object-Manifest": "%s/%s/" %
- (self.env.container.name, obj_name)})
-
- # guarantee that the timestamp changes
- time.sleep(.01)
-
- # write manifest file again
- man_file.write('', hdrs={"X-Object-Manifest": "%s/%s/" %
- (self.env.container.name, obj_name)})
-
- self.assertEqual(3, versions_container.info()['object_count'])
- self.assertEqual("112233", man_file.read())
-
- def test_versioning_container_acl(self):
- # create versions container and DO NOT give write access to account2
- versions_container = self.env.account.container(Utils.create_name())
- self.assertTrue(versions_container.create(hdrs={
- 'X-Container-Write': ''
- }))
-
- # check account2 cannot write to versions container
- fail_obj_name = Utils.create_name()
- fail_obj = versions_container.file(fail_obj_name)
- self.assertRaises(ResponseError, fail_obj.write, "should fail",
- cfg={'use_token': self.env.storage_token2})
-
- # create container and give write access to account2
- # don't set X-Versions-Location just yet
- container = self.env.account.container(Utils.create_name())
- self.assertTrue(container.create(hdrs={
- 'X-Container-Write': self.env.conn2.user_acl}))
-
- # check account2 cannot set X-Versions-Location on container
- self.assertRaises(ResponseError, container.update_metadata, hdrs={
- 'X-Versions-Location': versions_container},
- cfg={'use_token': self.env.storage_token2})
-
- # good! now let admin set the X-Versions-Location
- # p.s.: sticking a 'x-remove' header here to test precedence
- # of both headers. Setting the location should succeed.
- self.assertTrue(container.update_metadata(hdrs={
- 'X-Remove-Versions-Location': versions_container,
- 'X-Versions-Location': versions_container}))
-
- # write object twice to container and check version
- obj_name = Utils.create_name()
- versioned_obj = container.file(obj_name)
- self.assertTrue(versioned_obj.write("never argue with the data",
- cfg={'use_token': self.env.storage_token2}))
- self.assertEqual(versioned_obj.read(), "never argue with the data")
-
- self.assertTrue(
- versioned_obj.write("we don't have no beer, just tequila",
- cfg={'use_token': self.env.storage_token2}))
- self.assertEqual(versioned_obj.read(),
- "we don't have no beer, just tequila")
- self.assertEqual(1, versions_container.info()['object_count'])
-
- # read the original uploaded object
- for filename in versions_container.files():
- backup_file = versions_container.file(filename)
- break
- self.assertEqual(backup_file.read(), "never argue with the data")
-
- # user3 (some random user with no access to anything)
- # tries to read from versioned container
- self.assertRaises(ResponseError, backup_file.read,
- cfg={'use_token': self.env.storage_token3})
-
- # user3 cannot write or delete from source container either
- number_of_versions = versions_container.info()['object_count']
- self.assertRaises(ResponseError, versioned_obj.write,
- "some random user trying to write data",
- cfg={'use_token': self.env.storage_token3})
- self.assertEqual(number_of_versions,
- versions_container.info()['object_count'])
- self.assertRaises(ResponseError, versioned_obj.delete,
- cfg={'use_token': self.env.storage_token3})
- self.assertEqual(number_of_versions,
- versions_container.info()['object_count'])
-
- # user2 can't read or delete from versions-location
- self.assertRaises(ResponseError, backup_file.read,
- cfg={'use_token': self.env.storage_token2})
- self.assertRaises(ResponseError, backup_file.delete,
- cfg={'use_token': self.env.storage_token2})
-
- # but is able to delete from the source container
- # this could be a helpful scenario for dev ops that want to setup
- # just one container to hold object versions of multiple containers
- # and each one of those containers are owned by different users
- self.assertTrue(versioned_obj.delete(
- cfg={'use_token': self.env.storage_token2}))
-
- # tear-down since we create these containers here
- # and not in self.env
- versions_container.delete_recursive()
- container.delete_recursive()
-
- def test_versioning_check_acl(self):
- container = self.env.container
- versions_container = self.env.versions_container
- versions_container.create(hdrs={'X-Container-Read': '.r:*,.rlistings'})
-
- obj_name = Utils.create_name()
- versioned_obj = container.file(obj_name)
- versioned_obj.write("aaaaa")
- self.assertEqual("aaaaa", versioned_obj.read())
-
- versioned_obj.write("bbbbb")
- self.assertEqual("bbbbb", versioned_obj.read())
-
- # Use token from second account and try to delete the object
- org_token = self.env.account.conn.storage_token
- self.env.account.conn.storage_token = self.env.conn2.storage_token
- try:
- self.assertRaises(ResponseError, versioned_obj.delete)
- finally:
- self.env.account.conn.storage_token = org_token
-
- # Verify with token from first account
- self.assertEqual("bbbbb", versioned_obj.read())
-
- versioned_obj.delete()
- self.assertEqual("aaaaa", versioned_obj.read())
-
-
-class TestObjectVersioningUTF8(Base2, TestObjectVersioning):
- set_up = False
-
- def tearDown(self):
- self._tear_down_files()
- super(TestObjectVersioningUTF8, self).tearDown()
-
-
-class TestCrossPolicyObjectVersioning(TestObjectVersioning):
- env = TestCrossPolicyObjectVersioningEnv
- set_up = False
-
- def setUp(self):
- super(TestCrossPolicyObjectVersioning, self).setUp()
- if self.env.multiple_policies_enabled is False:
- raise SkipTest('Cross policy test requires multiple policies')
- elif self.env.multiple_policies_enabled is not True:
- # just some sanity checking
- raise Exception("Expected multiple_policies_enabled "
- "to be True/False, got %r" % (
- self.env.versioning_enabled,))
-
-
-class TestSloWithVersioning(Base):
-
- def setUp(self):
- if 'slo' not in cluster_info:
- raise SkipTest("SLO not enabled")
-
- self.conn = Connection(tf.config)
- self.conn.authenticate()
- self.account = Account(
- self.conn, tf.config.get('account', tf.config['username']))
- self.account.delete_containers()
-
- # create a container with versioning
- self.versions_container = self.account.container(Utils.create_name())
- self.container = self.account.container(Utils.create_name())
- self.segments_container = self.account.container(Utils.create_name())
- if not self.container.create(
- hdrs={'X-Versions-Location': self.versions_container.name}):
- raise ResponseError(self.conn.response)
- if 'versions' not in self.container.info():
- raise SkipTest("Object versioning not enabled")
-
- for cont in (self.versions_container, self.segments_container):
- if not cont.create():
- raise ResponseError(self.conn.response)
-
- # create some segments
- self.seg_info = {}
- for letter, size in (('a', 1024 * 1024),
- ('b', 1024 * 1024)):
- seg_name = letter
- file_item = self.segments_container.file(seg_name)
- file_item.write(letter * size)
- self.seg_info[seg_name] = {
- 'size_bytes': size,
- 'etag': file_item.md5,
- 'path': '/%s/%s' % (self.segments_container.name, seg_name)}
-
- def _create_manifest(self, seg_name):
- # create a manifest in the versioning container
- file_item = self.container.file("my-slo-manifest")
- file_item.write(
- json.dumps([self.seg_info[seg_name]]),
- parms={'multipart-manifest': 'put'})
- return file_item
-
- def _assert_is_manifest(self, file_item, seg_name):
- manifest_body = file_item.read(parms={'multipart-manifest': 'get'})
- resp_headers = dict(file_item.conn.response.getheaders())
- self.assertIn('x-static-large-object', resp_headers)
- self.assertEqual('application/json; charset=utf-8',
- file_item.content_type)
- try:
- manifest = json.loads(manifest_body)
- except ValueError:
- self.fail("GET with multipart-manifest=get got invalid json")
-
- self.assertEqual(1, len(manifest))
- key_map = {'etag': 'hash', 'size_bytes': 'bytes', 'path': 'name'}
- for k_client, k_slo in key_map.items():
- self.assertEqual(self.seg_info[seg_name][k_client],
- manifest[0][k_slo])
-
- def _assert_is_object(self, file_item, seg_name):
- file_contents = file_item.read()
- self.assertEqual(1024 * 1024, len(file_contents))
- self.assertEqual(seg_name, file_contents[0])
- self.assertEqual(seg_name, file_contents[-1])
-
- def tearDown(self):
- # remove versioning to allow simple container delete
- self.container.update_metadata(hdrs={'X-Versions-Location': ''})
- self.account.delete_containers()
-
- def test_slo_manifest_version(self):
- file_item = self._create_manifest('a')
- # sanity check: read the manifest, then the large object
- self._assert_is_manifest(file_item, 'a')
- self._assert_is_object(file_item, 'a')
-
- # upload new manifest
- file_item = self._create_manifest('b')
- # sanity check: read the manifest, then the large object
- self._assert_is_manifest(file_item, 'b')
- self._assert_is_object(file_item, 'b')
-
- versions_list = self.versions_container.files()
- self.assertEqual(1, len(versions_list))
- version_file = self.versions_container.file(versions_list[0])
- # check the version is still a manifest
- self._assert_is_manifest(version_file, 'a')
- self._assert_is_object(version_file, 'a')
-
- # delete the newest manifest
- file_item.delete()
-
- # expect the original manifest file to be restored
- self._assert_is_manifest(file_item, 'a')
- self._assert_is_object(file_item, 'a')
-
-
-class TestTempurlEnv(object):
- tempurl_enabled = None # tri-state: None initially, then True/False
-
- @classmethod
- def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
-
- if cls.tempurl_enabled is None:
- cls.tempurl_enabled = 'tempurl' in cluster_info
- if not cls.tempurl_enabled:
- return
-
- cls.tempurl_key = Utils.create_name()
- cls.tempurl_key2 = Utils.create_name()
-
- cls.account = Account(
- cls.conn, tf.config.get('account', tf.config['username']))
- cls.account.delete_containers()
- cls.account.update_metadata({
- 'temp-url-key': cls.tempurl_key,
- 'temp-url-key-2': cls.tempurl_key2
- })
-
- cls.container = cls.account.container(Utils.create_name())
- if not cls.container.create():
- raise ResponseError(cls.conn.response)
-
- cls.obj = cls.container.file(Utils.create_name())
- cls.obj.write("obj contents")
- cls.other_obj = cls.container.file(Utils.create_name())
- cls.other_obj.write("other obj contents")
-
-
-class TestTempurl(Base):
- env = TestTempurlEnv
- set_up = False
-
- def setUp(self):
- super(TestTempurl, self).setUp()
- if self.env.tempurl_enabled is False:
- raise SkipTest("TempURL not enabled")
- elif self.env.tempurl_enabled is not True:
- # just some sanity checking
- raise Exception(
- "Expected tempurl_enabled to be True/False, got %r" %
- (self.env.tempurl_enabled,))
-
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'GET', expires, self.env.conn.make_path(self.env.obj.path),
- self.env.tempurl_key)
- self.obj_tempurl_parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- def tempurl_sig(self, method, expires, path, key):
- return hmac.new(
- key,
- '%s\n%s\n%s' % (method, expires, urllib.parse.unquote(path)),
- hashlib.sha1).hexdigest()
-
- def test_GET(self):
- contents = self.env.obj.read(
- parms=self.obj_tempurl_parms,
- cfg={'no_auth_token': True})
- self.assertEqual(contents, "obj contents")
-
- # GET tempurls also allow HEAD requests
- self.assertTrue(self.env.obj.info(parms=self.obj_tempurl_parms,
- cfg={'no_auth_token': True}))
-
- def test_GET_with_key_2(self):
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'GET', expires, self.env.conn.make_path(self.env.obj.path),
- self.env.tempurl_key2)
- parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- contents = self.env.obj.read(parms=parms, cfg={'no_auth_token': True})
- self.assertEqual(contents, "obj contents")
-
- def test_GET_DLO_inside_container(self):
- seg1 = self.env.container.file(
- "get-dlo-inside-seg1" + Utils.create_name())
- seg2 = self.env.container.file(
- "get-dlo-inside-seg2" + Utils.create_name())
- seg1.write("one fish two fish ")
- seg2.write("red fish blue fish")
-
- manifest = self.env.container.file("manifest" + Utils.create_name())
- manifest.write(
- '',
- hdrs={"X-Object-Manifest": "%s/get-dlo-inside-seg" %
- (self.env.container.name,)})
-
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'GET', expires, self.env.conn.make_path(manifest.path),
- self.env.tempurl_key)
- parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- contents = manifest.read(parms=parms, cfg={'no_auth_token': True})
- self.assertEqual(contents, "one fish two fish red fish blue fish")
-
- def test_GET_DLO_outside_container(self):
- seg1 = self.env.container.file(
- "get-dlo-outside-seg1" + Utils.create_name())
- seg2 = self.env.container.file(
- "get-dlo-outside-seg2" + Utils.create_name())
- seg1.write("one fish two fish ")
- seg2.write("red fish blue fish")
-
- container2 = self.env.account.container(Utils.create_name())
- container2.create()
-
- manifest = container2.file("manifest" + Utils.create_name())
- manifest.write(
- '',
- hdrs={"X-Object-Manifest": "%s/get-dlo-outside-seg" %
- (self.env.container.name,)})
-
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'GET', expires, self.env.conn.make_path(manifest.path),
- self.env.tempurl_key)
- parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- # cross container tempurl works fine for account tempurl key
- contents = manifest.read(parms=parms, cfg={'no_auth_token': True})
- self.assertEqual(contents, "one fish two fish red fish blue fish")
- self.assert_status([200])
-
- def test_PUT(self):
- new_obj = self.env.container.file(Utils.create_name())
-
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'PUT', expires, self.env.conn.make_path(new_obj.path),
- self.env.tempurl_key)
- put_parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- new_obj.write('new obj contents',
- parms=put_parms, cfg={'no_auth_token': True})
- self.assertEqual(new_obj.read(), "new obj contents")
-
- # PUT tempurls also allow HEAD requests
- self.assertTrue(new_obj.info(parms=put_parms,
- cfg={'no_auth_token': True}))
-
- def test_PUT_manifest_access(self):
- new_obj = self.env.container.file(Utils.create_name())
-
- # give out a signature which allows a PUT to new_obj
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'PUT', expires, self.env.conn.make_path(new_obj.path),
- self.env.tempurl_key)
- put_parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- # try to create manifest pointing to some random container
- try:
- new_obj.write('', {
- 'x-object-manifest': '%s/foo' % 'some_random_container'
- }, parms=put_parms, cfg={'no_auth_token': True})
- except ResponseError as e:
- self.assertEqual(e.status, 400)
- else:
- self.fail('request did not error')
-
- # create some other container
- other_container = self.env.account.container(Utils.create_name())
- if not other_container.create():
- raise ResponseError(self.conn.response)
-
- # try to create manifest pointing to new container
- try:
- new_obj.write('', {
- 'x-object-manifest': '%s/foo' % other_container
- }, parms=put_parms, cfg={'no_auth_token': True})
- except ResponseError as e:
- self.assertEqual(e.status, 400)
- else:
- self.fail('request did not error')
-
- # try again using a tempurl POST to an already created object
- new_obj.write('', {}, parms=put_parms, cfg={'no_auth_token': True})
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'POST', expires, self.env.conn.make_path(new_obj.path),
- self.env.tempurl_key)
- post_parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
- try:
- new_obj.post({'x-object-manifest': '%s/foo' % other_container},
- parms=post_parms, cfg={'no_auth_token': True})
- except ResponseError as e:
- self.assertEqual(e.status, 400)
- else:
- self.fail('request did not error')
-
- def test_HEAD(self):
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'HEAD', expires, self.env.conn.make_path(self.env.obj.path),
- self.env.tempurl_key)
- head_parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- self.assertTrue(self.env.obj.info(parms=head_parms,
- cfg={'no_auth_token': True}))
- # HEAD tempurls don't allow PUT or GET requests, despite the fact that
- # PUT and GET tempurls both allow HEAD requests
- self.assertRaises(ResponseError, self.env.other_obj.read,
- cfg={'no_auth_token': True},
- parms=self.obj_tempurl_parms)
- self.assert_status([401])
-
- self.assertRaises(ResponseError, self.env.other_obj.write,
- 'new contents',
- cfg={'no_auth_token': True},
- parms=self.obj_tempurl_parms)
- self.assert_status([401])
-
- def test_different_object(self):
- contents = self.env.obj.read(
- parms=self.obj_tempurl_parms,
- cfg={'no_auth_token': True})
- self.assertEqual(contents, "obj contents")
-
- self.assertRaises(ResponseError, self.env.other_obj.read,
- cfg={'no_auth_token': True},
- parms=self.obj_tempurl_parms)
- self.assert_status([401])
-
- def test_changing_sig(self):
- contents = self.env.obj.read(
- parms=self.obj_tempurl_parms,
- cfg={'no_auth_token': True})
- self.assertEqual(contents, "obj contents")
-
- parms = self.obj_tempurl_parms.copy()
- if parms['temp_url_sig'][0] == 'a':
- parms['temp_url_sig'] = 'b' + parms['temp_url_sig'][1:]
- else:
- parms['temp_url_sig'] = 'a' + parms['temp_url_sig'][1:]
-
- self.assertRaises(ResponseError, self.env.obj.read,
- cfg={'no_auth_token': True},
- parms=parms)
- self.assert_status([401])
-
- def test_changing_expires(self):
- contents = self.env.obj.read(
- parms=self.obj_tempurl_parms,
- cfg={'no_auth_token': True})
- self.assertEqual(contents, "obj contents")
-
- parms = self.obj_tempurl_parms.copy()
- if parms['temp_url_expires'][-1] == '0':
- parms['temp_url_expires'] = parms['temp_url_expires'][:-1] + '1'
- else:
- parms['temp_url_expires'] = parms['temp_url_expires'][:-1] + '0'
-
- self.assertRaises(ResponseError, self.env.obj.read,
- cfg={'no_auth_token': True},
- parms=parms)
- self.assert_status([401])
-
-
-class TestTempurlUTF8(Base2, TestTempurl):
- set_up = False
-
-
-class TestContainerTempurlEnv(object):
- tempurl_enabled = None # tri-state: None initially, then True/False
-
- @classmethod
- def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
-
- if cls.tempurl_enabled is None:
- cls.tempurl_enabled = 'tempurl' in cluster_info
- if not cls.tempurl_enabled:
- return
-
- cls.tempurl_key = Utils.create_name()
- cls.tempurl_key2 = Utils.create_name()
-
- cls.account = Account(
- cls.conn, tf.config.get('account', tf.config['username']))
- cls.account.delete_containers()
-
- # creating another account and connection
- # for ACL tests
- config2 = deepcopy(tf.config)
- config2['account'] = tf.config['account2']
- config2['username'] = tf.config['username2']
- config2['password'] = tf.config['password2']
- cls.conn2 = Connection(config2)
- cls.conn2.authenticate()
- cls.account2 = Account(
- cls.conn2, config2.get('account', config2['username']))
- cls.account2 = cls.conn2.get_account()
-
- cls.container = cls.account.container(Utils.create_name())
- if not cls.container.create({
- 'x-container-meta-temp-url-key': cls.tempurl_key,
- 'x-container-meta-temp-url-key-2': cls.tempurl_key2,
- 'x-container-read': cls.account2.name}):
- raise ResponseError(cls.conn.response)
-
- cls.obj = cls.container.file(Utils.create_name())
- cls.obj.write("obj contents")
- cls.other_obj = cls.container.file(Utils.create_name())
- cls.other_obj.write("other obj contents")
-
-
-class TestContainerTempurl(Base):
- env = TestContainerTempurlEnv
- set_up = False
-
- def setUp(self):
- super(TestContainerTempurl, self).setUp()
- if self.env.tempurl_enabled is False:
- raise SkipTest("TempURL not enabled")
- elif self.env.tempurl_enabled is not True:
- # just some sanity checking
- raise Exception(
- "Expected tempurl_enabled to be True/False, got %r" %
- (self.env.tempurl_enabled,))
-
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'GET', expires, self.env.conn.make_path(self.env.obj.path),
- self.env.tempurl_key)
- self.obj_tempurl_parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- def tempurl_sig(self, method, expires, path, key):
- return hmac.new(
- key,
- '%s\n%s\n%s' % (method, expires, urllib.parse.unquote(path)),
- hashlib.sha1).hexdigest()
-
- def test_GET(self):
- contents = self.env.obj.read(
- parms=self.obj_tempurl_parms,
- cfg={'no_auth_token': True})
- self.assertEqual(contents, "obj contents")
-
- # GET tempurls also allow HEAD requests
- self.assertTrue(self.env.obj.info(parms=self.obj_tempurl_parms,
- cfg={'no_auth_token': True}))
-
- def test_GET_with_key_2(self):
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'GET', expires, self.env.conn.make_path(self.env.obj.path),
- self.env.tempurl_key2)
- parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- contents = self.env.obj.read(parms=parms, cfg={'no_auth_token': True})
- self.assertEqual(contents, "obj contents")
-
- def test_PUT(self):
- new_obj = self.env.container.file(Utils.create_name())
-
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'PUT', expires, self.env.conn.make_path(new_obj.path),
- self.env.tempurl_key)
- put_parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- new_obj.write('new obj contents',
- parms=put_parms, cfg={'no_auth_token': True})
- self.assertEqual(new_obj.read(), "new obj contents")
-
- # PUT tempurls also allow HEAD requests
- self.assertTrue(new_obj.info(parms=put_parms,
- cfg={'no_auth_token': True}))
-
- def test_HEAD(self):
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'HEAD', expires, self.env.conn.make_path(self.env.obj.path),
- self.env.tempurl_key)
- head_parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- self.assertTrue(self.env.obj.info(parms=head_parms,
- cfg={'no_auth_token': True}))
- # HEAD tempurls don't allow PUT or GET requests, despite the fact that
- # PUT and GET tempurls both allow HEAD requests
- self.assertRaises(ResponseError, self.env.other_obj.read,
- cfg={'no_auth_token': True},
- parms=self.obj_tempurl_parms)
- self.assert_status([401])
-
- self.assertRaises(ResponseError, self.env.other_obj.write,
- 'new contents',
- cfg={'no_auth_token': True},
- parms=self.obj_tempurl_parms)
- self.assert_status([401])
-
- def test_different_object(self):
- contents = self.env.obj.read(
- parms=self.obj_tempurl_parms,
- cfg={'no_auth_token': True})
- self.assertEqual(contents, "obj contents")
-
- self.assertRaises(ResponseError, self.env.other_obj.read,
- cfg={'no_auth_token': True},
- parms=self.obj_tempurl_parms)
- self.assert_status([401])
-
- def test_changing_sig(self):
- contents = self.env.obj.read(
- parms=self.obj_tempurl_parms,
- cfg={'no_auth_token': True})
- self.assertEqual(contents, "obj contents")
-
- parms = self.obj_tempurl_parms.copy()
- if parms['temp_url_sig'][0] == 'a':
- parms['temp_url_sig'] = 'b' + parms['temp_url_sig'][1:]
- else:
- parms['temp_url_sig'] = 'a' + parms['temp_url_sig'][1:]
-
- self.assertRaises(ResponseError, self.env.obj.read,
- cfg={'no_auth_token': True},
- parms=parms)
- self.assert_status([401])
-
- def test_changing_expires(self):
- contents = self.env.obj.read(
- parms=self.obj_tempurl_parms,
- cfg={'no_auth_token': True})
- self.assertEqual(contents, "obj contents")
-
- parms = self.obj_tempurl_parms.copy()
- if parms['temp_url_expires'][-1] == '0':
- parms['temp_url_expires'] = parms['temp_url_expires'][:-1] + '1'
- else:
- parms['temp_url_expires'] = parms['temp_url_expires'][:-1] + '0'
-
- self.assertRaises(ResponseError, self.env.obj.read,
- cfg={'no_auth_token': True},
- parms=parms)
- self.assert_status([401])
-
- @requires_acls
- def test_tempurl_keys_visible_to_account_owner(self):
- if not tf.cluster_info.get('tempauth'):
- raise SkipTest('TEMP AUTH SPECIFIC TEST')
- metadata = self.env.container.info()
- self.assertEqual(metadata.get('tempurl_key'), self.env.tempurl_key)
- self.assertEqual(metadata.get('tempurl_key2'), self.env.tempurl_key2)
-
- @requires_acls
- def test_tempurl_keys_hidden_from_acl_readonly(self):
- if not tf.cluster_info.get('tempauth'):
- raise SkipTest('TEMP AUTH SPECIFIC TEST')
- original_token = self.env.container.conn.storage_token
- self.env.container.conn.storage_token = self.env.conn2.storage_token
- metadata = self.env.container.info()
- self.env.container.conn.storage_token = original_token
-
- self.assertNotIn(
- 'tempurl_key', metadata,
- 'Container TempURL key found, should not be visible '
- 'to readonly ACLs')
- self.assertNotIn(
- 'tempurl_key2', metadata,
- 'Container TempURL key-2 found, should not be visible '
- 'to readonly ACLs')
-
- def test_GET_DLO_inside_container(self):
- seg1 = self.env.container.file(
- "get-dlo-inside-seg1" + Utils.create_name())
- seg2 = self.env.container.file(
- "get-dlo-inside-seg2" + Utils.create_name())
- seg1.write("one fish two fish ")
- seg2.write("red fish blue fish")
-
- manifest = self.env.container.file("manifest" + Utils.create_name())
- manifest.write(
- '',
- hdrs={"X-Object-Manifest": "%s/get-dlo-inside-seg" %
- (self.env.container.name,)})
-
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'GET', expires, self.env.conn.make_path(manifest.path),
- self.env.tempurl_key)
- parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- contents = manifest.read(parms=parms, cfg={'no_auth_token': True})
- self.assertEqual(contents, "one fish two fish red fish blue fish")
-
- def test_GET_DLO_outside_container(self):
- container2 = self.env.account.container(Utils.create_name())
- container2.create()
- seg1 = container2.file(
- "get-dlo-outside-seg1" + Utils.create_name())
- seg2 = container2.file(
- "get-dlo-outside-seg2" + Utils.create_name())
- seg1.write("one fish two fish ")
- seg2.write("red fish blue fish")
-
- manifest = self.env.container.file("manifest" + Utils.create_name())
- manifest.write(
- '',
- hdrs={"X-Object-Manifest": "%s/get-dlo-outside-seg" %
- (container2.name,)})
-
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'GET', expires, self.env.conn.make_path(manifest.path),
- self.env.tempurl_key)
- parms = {'temp_url_sig': sig,
- 'temp_url_expires': str(expires)}
-
- # cross container tempurl does not work for container tempurl key
- try:
- manifest.read(parms=parms, cfg={'no_auth_token': True})
- except ResponseError as e:
- self.assertEqual(e.status, 401)
- else:
- self.fail('request did not error')
- try:
- manifest.info(parms=parms, cfg={'no_auth_token': True})
- except ResponseError as e:
- self.assertEqual(e.status, 401)
- else:
- self.fail('request did not error')
-
-
-class TestContainerTempurlUTF8(Base2, TestContainerTempurl):
- set_up = False
-
-
-class TestSloTempurlEnv(object):
- enabled = None # tri-state: None initially, then True/False
-
- @classmethod
- def setUp(cls):
- cls.conn = Connection(tf.config)
- cls.conn.authenticate()
-
- if cls.enabled is None:
- cls.enabled = 'tempurl' in cluster_info and 'slo' in cluster_info
-
- cls.tempurl_key = Utils.create_name()
-
- cls.account = Account(
- cls.conn, tf.config.get('account', tf.config['username']))
- cls.account.delete_containers()
- cls.account.update_metadata({'temp-url-key': cls.tempurl_key})
-
- cls.manifest_container = cls.account.container(Utils.create_name())
- cls.segments_container = cls.account.container(Utils.create_name())
- if not cls.manifest_container.create():
- raise ResponseError(cls.conn.response)
- if not cls.segments_container.create():
- raise ResponseError(cls.conn.response)
-
- seg1 = cls.segments_container.file(Utils.create_name())
- seg1.write('1' * 1024 * 1024)
-
- seg2 = cls.segments_container.file(Utils.create_name())
- seg2.write('2' * 1024 * 1024)
-
- cls.manifest_data = [{'size_bytes': 1024 * 1024,
- 'etag': seg1.md5,
- 'path': '/%s/%s' % (cls.segments_container.name,
- seg1.name)},
- {'size_bytes': 1024 * 1024,
- 'etag': seg2.md5,
- 'path': '/%s/%s' % (cls.segments_container.name,
- seg2.name)}]
-
- cls.manifest = cls.manifest_container.file(Utils.create_name())
- cls.manifest.write(
- json.dumps(cls.manifest_data),
- parms={'multipart-manifest': 'put'})
-
-
-class TestSloTempurl(Base):
- env = TestSloTempurlEnv
- set_up = False
-
- def setUp(self):
- super(TestSloTempurl, self).setUp()
- if self.env.enabled is False:
- raise SkipTest("TempURL and SLO not both enabled")
- elif self.env.enabled is not True:
- # just some sanity checking
- raise Exception(
- "Expected enabled to be True/False, got %r" %
- (self.env.enabled,))
-
- def tempurl_sig(self, method, expires, path, key):
- return hmac.new(
- key,
- '%s\n%s\n%s' % (method, expires, urllib.parse.unquote(path)),
- hashlib.sha1).hexdigest()
-
- def test_GET(self):
- expires = int(time.time()) + 86400
- sig = self.tempurl_sig(
- 'GET', expires, self.env.conn.make_path(self.env.manifest.path),
- self.env.tempurl_key)
- parms = {'temp_url_sig': sig, 'temp_url_expires': str(expires)}
-
- contents = self.env.manifest.read(
- parms=parms,
- cfg={'no_auth_token': True})
- self.assertEqual(len(contents), 2 * 1024 * 1024)
-
- # GET tempurls also allow HEAD requests
- self.assertTrue(self.env.manifest.info(
- parms=parms, cfg={'no_auth_token': True}))
-
-
-class TestSloTempurlUTF8(Base2, TestSloTempurl):
- set_up = False
+ pass
class TestServiceToken(unittest2.TestCase):
diff --git a/test/object_expirer_functional/test_object_expirer_gluster_swift.py b/test/object_expirer_functional/test_object_expirer_gluster_swift.py
index 279994f..a897d6c 100644
--- a/test/object_expirer_functional/test_object_expirer_gluster_swift.py
+++ b/test/object_expirer_functional/test_object_expirer_gluster_swift.py
@@ -51,6 +51,10 @@ class TestObjectExpirerEnv:
conf = readconf('/etc/swift/object-expirer.conf', 'object-expirer')
cls.expirer = ObjectExpirer(conf)
+ @classmethod
+ def tearDown(cls):
+ pass
+
class TestObjectExpirer(Base):
env = TestObjectExpirerEnv
diff --git a/test/unit/__init__.py b/test/unit/__init__.py
index ee2a262..d9750b7 100644
--- a/test/unit/__init__.py
+++ b/test/unit/__init__.py
@@ -21,6 +21,7 @@ import copy
import logging
import errno
from six.moves import range
+from six import BytesIO
import sys
from contextlib import contextmanager, closing
from collections import defaultdict, Iterable
@@ -29,18 +30,20 @@ from numbers import Number
from tempfile import NamedTemporaryFile
import time
import eventlet
+from eventlet import greenpool, debug as eventlet_debug
from eventlet.green import socket
from tempfile import mkdtemp
from shutil import rmtree
import signal
import json
-
+import random
from swift.common.utils import Timestamp, NOTICE
from test import get_config
from swift.common import utils
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.ring import Ring, RingData
+from swift.obj import server
from hashlib import md5
import logging.handlers
@@ -48,6 +51,7 @@ from six.moves.http_client import HTTPException
from swift.common import storage_policy
from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy,
VALID_EC_TYPES)
+from swift.common import swob
import functools
import six.moves.cPickle as pickle
from gzip import GzipFile
@@ -118,7 +122,7 @@ def patch_policies(thing_or_policies=None, legacy_only=False,
class PatchPolicies(object):
"""
Why not mock.patch? In my case, when used as a decorator on the class it
- seemed to patch setUp at the wrong time (i.e. in setup the global wasn't
+ seemed to patch setUp at the wrong time (i.e. in setUp the global wasn't
patched yet)
"""
@@ -165,41 +169,38 @@ class PatchPolicies(object):
"""
orig_setUp = cls.setUp
- orig_tearDown = cls.tearDown
+
+ def unpatch_cleanup(cls_self):
+ if cls_self._policies_patched:
+ self.__exit__()
+ cls_self._policies_patched = False
def setUp(cls_self):
- self._orig_POLICIES = storage_policy._POLICIES
if not getattr(cls_self, '_policies_patched', False):
- storage_policy._POLICIES = self.policies
- self._setup_rings()
+ self.__enter__()
cls_self._policies_patched = True
-
+ cls_self.addCleanup(unpatch_cleanup, cls_self)
orig_setUp(cls_self)
- def tearDown(cls_self):
- orig_tearDown(cls_self)
- storage_policy._POLICIES = self._orig_POLICIES
-
cls.setUp = setUp
- cls.tearDown = tearDown
return cls
def _patch_method(self, f):
@functools.wraps(f)
def mywrapper(*args, **kwargs):
- self._orig_POLICIES = storage_policy._POLICIES
- try:
- storage_policy._POLICIES = self.policies
- self._setup_rings()
+ with self:
return f(*args, **kwargs)
- finally:
- storage_policy._POLICIES = self._orig_POLICIES
return mywrapper
def __enter__(self):
self._orig_POLICIES = storage_policy._POLICIES
storage_policy._POLICIES = self.policies
+ try:
+ self._setup_rings()
+ except: # noqa
+ self.__exit__()
+ raise
def __exit__(self, *args):
storage_policy._POLICIES = self._orig_POLICIES
@@ -212,17 +213,35 @@ class FakeRing(Ring):
self._base_port = base_port
self.max_more_nodes = max_more_nodes
self._part_shift = 32 - part_power
+ self._init_device_char()
# 9 total nodes (6 more past the initial 3) is the cap, no matter if
# this is set higher, or R^2 for R replicas
self.set_replicas(replicas)
self._reload()
+ def has_changed(self):
+ """
+ The real implementation uses getmtime on the serialized_path attribute,
+ which doesn't exist on our fake and relies on the implementation of
+ _reload which we override. So ... just NOOPE.
+ """
+ return False
+
def _reload(self):
self._rtime = time.time()
+ @property
+ def device_char(self):
+ return next(self._device_char_iter)
+
+ def _init_device_char(self):
+ self._device_char_iter = itertools.cycle(
+ ['sd%s' % chr(ord('a') + x) for x in range(26)])
+
def set_replicas(self, replicas):
self.replicas = replicas
self._devs = []
+ self._init_device_char()
for x in range(self.replicas):
ip = '10.0.0.%s' % x
port = self._base_port + x
@@ -232,7 +251,7 @@ class FakeRing(Ring):
'replication_ip': ip,
'port': port,
'replication_port': port,
- 'device': 'sd' + (chr(ord('a') + x)),
+ 'device': self.device_char,
'zone': x % 3,
'region': x % 2,
'id': x,
@@ -289,9 +308,8 @@ class FabricatedRing(Ring):
self.devices = devices
self.nodes = nodes
self.port = port
- self.replicas = 6
- self.part_power = part_power
- self._part_shift = 32 - self.part_power
+ self.replicas = replicas
+ self._part_shift = 32 - part_power
self._reload()
def _reload(self, *args, **kwargs):
@@ -681,6 +699,16 @@ if utils.config_true_value(
fake_syslog_handler()
+@contextmanager
+def quiet_eventlet_exceptions():
+ orig_state = greenpool.DEBUG
+ eventlet_debug.hub_exceptions(False)
+ try:
+ yield
+ finally:
+ eventlet_debug.hub_exceptions(orig_state)
+
+
class MockTrue(object):
"""
Instances of MockTrue evaluate like True
@@ -998,6 +1026,7 @@ def fake_http_connect(*code_iter, **kwargs):
body_iter = kwargs.get('body_iter', None)
if body_iter:
body_iter = iter(body_iter)
+ unexpected_requests = []
def connect(*args, **ckwargs):
if kwargs.get('slow_connect', False):
@@ -1007,7 +1036,15 @@ def fake_http_connect(*code_iter, **kwargs):
kwargs['give_content_type'](args[6]['Content-Type'])
else:
kwargs['give_content_type']('')
- i, status = next(conn_id_and_code_iter)
+ try:
+ i, status = next(conn_id_and_code_iter)
+ except StopIteration:
+ # the code under test may swallow the StopIteration, so by logging
+ # unexpected requests here we allow the test framework to check for
+ # them after the connect function has been used.
+ unexpected_requests.append((args, kwargs))
+ raise
+
if 'give_connect' in kwargs:
give_conn_fn = kwargs['give_connect']
argspec = inspect.getargspec(give_conn_fn)
@@ -1030,6 +1067,7 @@ def fake_http_connect(*code_iter, **kwargs):
connection_id=i, give_send=kwargs.get('give_send'),
give_expect=kwargs.get('give_expect'))
+ connect.unexpected_requests = unexpected_requests
connect.code_iter = code_iter
return connect
@@ -1059,10 +1097,14 @@ def mocked_http_conn(*args, **kwargs):
left_over_status = list(fake_conn.code_iter)
if left_over_status:
raise AssertionError('left over status %r' % left_over_status)
+ if fake_conn.unexpected_requests:
+ raise AssertionError('unexpected requests %r' %
+ fake_conn.unexpected_requests)
-def make_timestamp_iter():
- return iter(Timestamp(t) for t in itertools.count(int(time.time())))
+def make_timestamp_iter(offset=0):
+ return iter(Timestamp(t)
+ for t in itertools.count(int(time.time()) + offset))
class Timeout(object):
@@ -1091,6 +1133,30 @@ def requires_o_tmpfile_support(func):
return wrapper
+class StubResponse(object):
+
+ def __init__(self, status, body='', headers=None, frag_index=None):
+ self.status = status
+ self.body = body
+ self.readable = BytesIO(body)
+ self.headers = HeaderKeyDict(headers)
+ if frag_index is not None:
+ self.headers['X-Object-Sysmeta-Ec-Frag-Index'] = frag_index
+ fake_reason = ('Fake', 'This response is a lie.')
+ self.reason = swob.RESPONSE_REASONS.get(status, fake_reason)[0]
+
+ def getheader(self, header_name, default=None):
+ return self.headers.get(header_name, default)
+
+ def getheaders(self):
+ if 'Content-Length' not in self.headers:
+ self.headers['Content-Length'] = len(self.body)
+ return self.headers.items()
+
+ def read(self, amt=0):
+ return self.readable.read(amt)
+
+
def encode_frag_archive_bodies(policy, body):
"""
Given a stub body produce a list of complete frag_archive bodies as
@@ -1109,7 +1175,8 @@ def encode_frag_archive_bodies(policy, body):
# encode the buffers into fragment payloads
fragment_payloads = []
for chunk in chunks:
- fragments = policy.pyeclib_driver.encode(chunk)
+ fragments = policy.pyeclib_driver.encode(chunk) \
+ * policy.ec_duplication_factor
if not fragments:
break
fragment_payloads.append(fragments)
@@ -1118,3 +1185,128 @@ def encode_frag_archive_bodies(policy, body):
ec_archive_bodies = [''.join(frags)
for frags in zip(*fragment_payloads)]
return ec_archive_bodies
+
+
+def make_ec_object_stub(test_body, policy, timestamp):
+ segment_size = policy.ec_segment_size
+ test_body = test_body or (
+ 'test' * segment_size)[:-random.randint(1, 1000)]
+ timestamp = timestamp or utils.Timestamp.now()
+ etag = md5(test_body).hexdigest()
+ ec_archive_bodies = encode_frag_archive_bodies(policy, test_body)
+
+ return {
+ 'body': test_body,
+ 'etag': etag,
+ 'frags': ec_archive_bodies,
+ 'timestamp': timestamp
+ }
+
+
+def fake_ec_node_response(node_frags, policy):
+ """
+ Given a list of entries for each node in ring order, where the entries
+ are a dict (or list of dicts) which describes the fragment (or
+ fragments) that are on the node; create a function suitable for use
+ with capture_http_requests that will accept a req object and return a
+    response that will suitably fake the behavior of an object server that
+ had the given fragments on disk at the time.
+
+ :param node_frags: a list. Each item in the list describes the
+ fragments that are on a node; each item is a dict or list of dicts,
+ each dict describing a single fragment; where the item is a list,
+ repeated calls to get_response will return fragments in the order
+ of the list; each dict has keys:
+        - obj: an object stub, as generated by make_ec_object_stub,
+ that defines all of the fragments that compose an object
+ at a specific timestamp.
+ - frag: the index of a fragment to be selected from the object
+ stub
+ - durable (optional): True if the selected fragment is durable
+ :param policy: storage policy to return
+ """
+ node_map = {} # maps node ip and port to node index
+ all_nodes = []
+ call_count = {} # maps node index to get_response call count for node
+
+ def _build_node_map(req, policy):
+ node_key = lambda n: (n['ip'], n['port'])
+ part = utils.split_path(req['path'], 5, 5, True)[1]
+ all_nodes.extend(policy.object_ring.get_part_nodes(part))
+ all_nodes.extend(policy.object_ring.get_more_nodes(part))
+ for i, node in enumerate(all_nodes):
+ node_map[node_key(node)] = i
+ call_count[i] = 0
+
+ # normalize node_frags to a list of fragments for each node even
+ # if there's only one fragment in the dataset provided.
+ for i, frags in enumerate(node_frags):
+ if isinstance(frags, dict):
+ node_frags[i] = [frags]
+
+ def get_response(req):
+ requested_policy = int(
+ req['headers']['X-Backend-Storage-Policy-Index'])
+ if int(policy) != requested_policy:
+            raise AssertionError(
+                "Requested policy doesn't fit the fake response policy")
+ if not node_map:
+ _build_node_map(req, policy)
+
+ try:
+ node_index = node_map[(req['ip'], req['port'])]
+ except KeyError:
+ raise Exception("Couldn't find node %s:%s in %r" % (
+ req['ip'], req['port'], all_nodes))
+ try:
+ frags = node_frags[node_index]
+ except IndexError:
+ raise Exception('Found node %r:%r at index %s - '
+ 'but only got %s stub response nodes' % (
+ req['ip'], req['port'], node_index,
+ len(node_frags)))
+
+ if not frags:
+ return StubResponse(404)
+
+ # determine response fragment (if any) for this call
+ resp_frag = frags[call_count[node_index]]
+ call_count[node_index] += 1
+ frag_prefs = req['headers'].get('X-Backend-Fragment-Preferences')
+ if not (frag_prefs or resp_frag.get('durable', True)):
+ return StubResponse(404)
+
+ # prepare durable timestamp and backend frags header for this node
+ obj_stub = resp_frag['obj']
+ ts2frags = defaultdict(list)
+ durable_timestamp = None
+ for frag in frags:
+ ts_frag = frag['obj']['timestamp']
+ if frag.get('durable', True):
+ durable_timestamp = ts_frag.internal
+ ts2frags[ts_frag].append(frag['frag'])
+
+ try:
+ body = obj_stub['frags'][resp_frag['frag']]
+ except IndexError as err:
+ raise Exception(
+ 'Frag index %s not defined: node index %s, frags %r\n%s' %
+ (resp_frag['frag'], node_index, [f['frag'] for f in frags],
+ err))
+ headers = {
+ 'X-Object-Sysmeta-Ec-Content-Length': len(obj_stub['body']),
+ 'X-Object-Sysmeta-Ec-Etag': obj_stub['etag'],
+ 'X-Object-Sysmeta-Ec-Frag-Index':
+ policy.get_backend_index(resp_frag['frag']),
+ 'X-Backend-Timestamp': obj_stub['timestamp'].internal,
+ 'X-Timestamp': obj_stub['timestamp'].normal,
+ 'X-Backend-Data-Timestamp': obj_stub['timestamp'].internal,
+ 'X-Backend-Fragments':
+ server._make_backend_fragments_header(ts2frags)
+ }
+ if durable_timestamp:
+ headers['X-Backend-Durable-Timestamp'] = durable_timestamp
+
+ return StubResponse(200, body, headers)
+
+ return get_response
diff --git a/test/unit/common/test_diskdir.py b/test/unit/common/test_diskdir.py
index ae9aa6e..5010b31 100644
--- a/test/unit/common/test_diskdir.py
+++ b/test/unit/common/test_diskdir.py
@@ -1265,7 +1265,7 @@ class TestAccountBroker(unittest.TestCase):
listing = broker.list_containers_iter(100, '', None, None,
'', 'text/plain')
self.assertEquals(len(listing), 10)
- for i, (name, o_count, bytes_used, j) in enumerate(listing):
+ for i, (name, o_count, bytes_used, last_modified, j) in enumerate(listing):
self.assertEqual(name, 'lci%d' % i)
self.assertEqual(o_count, 0)
self.assertEqual(bytes_used, 0)
diff --git a/test/unit/obj/test_expirer.py b/test/unit/obj/test_expirer.py
index 4830a90..9849f6d 100644
--- a/test/unit/obj/test_expirer.py
+++ b/test/unit/obj/test_expirer.py
@@ -373,7 +373,7 @@ class TestObjectExpirer(TestCase):
'Pass beginning; 1 possible containers; 2 possible objects',
'Pass completed in 0s; 0 objects expired',
])
- self.assertTrue('error' not in logs)
+ self.assertNotIn('error', logs)
# Reverse test to be sure it still would blow up the way expected.
fake_swift = InternalClient([{'name': str(int(time() - 86400))}])
@@ -414,7 +414,7 @@ class TestObjectExpirer(TestCase):
x = expirer.ObjectExpirer(self.conf, logger=self.logger,
swift=fake_swift)
x.run_once()
- self.assertTrue('error' not in x.logger.all_log_lines())
+ self.assertNotIn('error', x.logger.all_log_lines())
self.assertEqual(x.logger.get_lines_for_level('info'), [
'Pass beginning; 1 possible containers; 2 possible objects',
'Pass completed in 0s; 0 objects expired',
diff --git a/test/unit/proxy/controllers/test_base.py b/test/unit/proxy/controllers/test_base.py
index 1ab0037..8351843 100644
--- a/test/unit/proxy/controllers/test_base.py
+++ b/test/unit/proxy/controllers/test_base.py
@@ -26,8 +26,8 @@ from swift.common import exceptions
from swift.common.utils import split_path
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.http import is_success
-from swift.common.storage_policy import StoragePolicy
-from test.unit import fake_http_connect, FakeRing, FakeMemcache
+from swift.common.storage_policy import StoragePolicy, StoragePolicyCollection
+from test.unit import fake_http_connect, FakeRing, FakeMemcache, PatchPolicies
from swift.proxy import server as proxy_server
from swift.common.request_helpers import (
get_sys_meta_prefix, get_object_transient_sysmeta
@@ -209,7 +209,8 @@ class TestFuncs(unittest.TestCase):
self.assertEqual(info_c['object_count'], 1000)
# Make sure the env cache is set
exp_cached_info_c = {
- k: str(v) if k in ('bytes', 'object_count') else v
+ k: str(v) if k in (
+ 'bytes', 'object_count', 'storage_policy') else v
for k, v in info_c.items()}
self.assertEqual(env['swift.infocache'].get('account/a'),
exp_cached_info_a)
@@ -340,7 +341,7 @@ class TestFuncs(unittest.TestCase):
req = Request.blank("/v1/AUTH_account/cont",
environ={'swift.cache': FakeCache({})})
resp = get_container_info(req.environ, FakeApp())
- self.assertEqual(resp['storage_policy'], '0')
+ self.assertEqual(resp['storage_policy'], 0)
self.assertEqual(resp['bytes'], 6666)
self.assertEqual(resp['object_count'], 1000)
@@ -365,7 +366,7 @@ class TestFuncs(unittest.TestCase):
req = Request.blank("/v1/account/cont",
environ={'swift.cache': FakeCache(cache_stub)})
resp = get_container_info(req.environ, FakeApp())
- self.assertEqual(resp['storage_policy'], '0')
+ self.assertEqual(resp['storage_policy'], 0)
self.assertEqual(resp['bytes'], 3333)
self.assertEqual(resp['object_count'], 10)
self.assertEqual(resp['status'], 404)
@@ -614,6 +615,30 @@ class TestFuncs(unittest.TestCase):
resp,
headers_to_account_info(headers.items(), 200))
+ def test_headers_to_account_info_storage_policies(self):
+ headers = {
+ 'x-account-storage-policy-zero-object-count': '13',
+ 'x-account-storage-policy-zero-container-count': '120',
+ 'x-account-storage-policy-zero-bytes-used': '1002',
+ 'x-account-storage-policy-one-object-count': '10',
+ 'x-account-storage-policy-one-container-count': '20',
+ }
+ spc = StoragePolicyCollection([StoragePolicy(0, 'zero', True),
+ StoragePolicy(1, 'one', False)])
+ with PatchPolicies(spc):
+ resp = headers_to_account_info(headers.items(), 200)
+ self.assertEqual(
+ resp['storage_policies'][0]['object_count'], 13)
+ self.assertEqual(
+ resp['storage_policies'][0]['container_count'], 120)
+ self.assertEqual(
+ resp['storage_policies'][0]['bytes'], 1002)
+ self.assertEqual(
+ resp['storage_policies'][1]['object_count'], 10)
+ self.assertEqual(
+ resp['storage_policies'][1]['container_count'], 20)
+ self.assertEqual(resp['storage_policies'][1]['bytes'], 0)
+
def test_headers_to_object_info_missing(self):
resp = headers_to_object_info({}, 404)
self.assertEqual(resp['status'], 404)
diff --git a/tox.ini b/tox.ini
index 4ff8d89..0ef1940 100644
--- a/tox.ini
+++ b/tox.ini
@@ -12,7 +12,7 @@ setenv = VIRTUAL_ENV={envdir}
NOSE_COVER_BRANCHES=1
NOSE_COVER_PACKAGE=gluster
deps =
- git+https://github.com/openstack/swift.git@2.10.1
+ git+https://github.com/openstack/swift.git@2.15.1
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
# Just having testtools package installed fixes some dependency issue