From 286a1308db72c5cfdd6ce16aff3f291ebce257c2 Mon Sep 17 00:00:00 2001
From: Peter Portante
Date: Thu, 24 Oct 2013 16:15:25 -0400
Subject: Rebase to OpenStack Swift Havana (1.10.0)

Change-Id: I90821230a1a7100c74d97cccc9c445251d0f65e7
Signed-off-by: Peter Portante
Reviewed-on: http://review.gluster.org/6157
Reviewed-by: Luis Pabon
Tested-by: Luis Pabon
---
 gluster/swift/__init__.py                      |    2 +-
 gluster/swift/common/fs_utils.py               |   10 +-
 gluster/swift/common/utils.py                  |   60 +-
 gluster/swift/obj/diskfile.py                  |  470 +++++++------
 gluster/swift/obj/server.py                    |    2 -
 gluster/swift/proxy/server.py                  |    7 +-
 glusterfs-openstack-swift.spec                 |   10 +-
 modules/swift                                  |    2 +-
 test/__init__.py                               |   36 +-
 test/functional/swift_test_client.py           |   97 ++-
 test/functional/tests.py                       |  668 +++++++++----------
 test/functionalnosetests/swift_testing.py      |   42 +-
 test/functionalnosetests/test_account.py       |   43 +-
 test/functionalnosetests/test_container.py     |  149 +++--
 test/functionalnosetests/test_object.py        |  211 +++---
 test/unit/__init__.py                          |  103 ++-
 test/unit/common/test_fs_utils.py              |   73 ++-
 test/unit/obj/test_diskfile.py                 |  431 ++++++------
 test/unit/proxy/controllers/test_account.py    |   24 +-
 test/unit/proxy/controllers/test_base.py       |  160 ++++-
 test/unit/proxy/controllers/test_container.py  |   24 +-
 test/unit/proxy/controllers/test_obj.py        |   64 +-
 test/unit/proxy/test_server.py                 |  904 ++++++++++++++++----------
 tox.ini                                        |    2 +-
 24 files changed, 2129 insertions(+), 1465 deletions(-)

diff --git a/gluster/swift/__init__.py b/gluster/swift/__init__.py
index 4febd57..8861ba6 100644
--- a/gluster/swift/__init__.py
+++ b/gluster/swift/__init__.py
@@ -44,6 +44,6 @@ class PkgInfo(object):
 ###
 ### Change the Package version here
 ###
-_pkginfo = PkgInfo('1.9.1', '0', 'glusterfs-openstack-swift', False)
+_pkginfo = PkgInfo('1.10.0', '0', 'glusterfs-openstack-swift', False)
 __version__ = _pkginfo.pretty_version
 __canonical_version__ = _pkginfo.canonical_version
diff --git a/gluster/swift/common/fs_utils.py b/gluster/swift/common/fs_utils.py
index afc0cfe..8b26fd0 100644
--- a/gluster/swift/common/fs_utils.py
+++ b/gluster/swift/common/fs_utils.py
@@ -32,7 +32,7 @@ class Fake_file(object):
         return 0
 
     def read(self, count):
-        return 0
+        return None
 
     def fileno(self):
         return -1
@@ -265,6 +265,14 @@ def do_fsync(fd):
             err.errno, '%s, os.fsync("%s")' % (err.strerror, fd))
 
 
+def do_fdatasync(fd):
+    try:
+        os.fdatasync(fd)
+    except OSError as err:
+        raise GlusterFileSystemOSError(
+            err.errno, '%s, os.fdatasync("%s")' % (err.strerror, fd))
+
+
 def mkdirs(path):
     """
     Ensures the path is a directory or makes it if not. Errors if the path
diff --git a/gluster/swift/common/utils.py b/gluster/swift/common/utils.py
index 522d307..595a965 100644
--- a/gluster/swift/common/utils.py
+++ b/gluster/swift/common/utils.py
@@ -23,8 +23,9 @@ from hashlib import md5
 from eventlet import sleep
 import cPickle as pickle
 from swift.common.utils import normalize_timestamp
+from gluster.swift.common.exceptions import GlusterFileSystemIOError
 from gluster.swift.common.fs_utils import do_rename, do_fsync, os_path, \
-    do_stat, do_listdir, do_walk, do_rmdir
+    do_stat, do_fstat, do_listdir, do_walk, do_rmdir
 from gluster.swift.common import Glusterfs
 
 X_CONTENT_TYPE = 'Content-Type'
@@ -55,18 +56,6 @@ PICKLE_PROTOCOL = 2
 CHUNK_SIZE = 65536
 
 
-class GlusterFileSystemOSError(OSError):
-    # Having our own class means the name will show up in the stack traces
-    # recorded in the log files.
-    pass
-
-
-class GlusterFileSystemIOError(IOError):
-    # Having our own class means the name will show up in the stack traces
-    # recorded in the log files.
-    pass
-
-
 def read_metadata(path_or_fd):
     """
     Helper function to read the pickled metadata from a File/Directory.
@@ -320,6 +309,23 @@ def get_account_details(acc_path):
     return container_list, container_count
 
 
+def _read_for_etag(fp):
+    etag = md5()
+    while True:
+        chunk = fp.read(CHUNK_SIZE)
+        if chunk:
+            etag.update(chunk)
+            if len(chunk) >= CHUNK_SIZE:
+                # It is likely that we have more data to be read from the
+                # file. Yield the co-routine cooperatively to avoid
+                # consuming the worker during md5sum() calculations on
+                # large files.
+                sleep()
+        else:
+            break
+    return etag.hexdigest()
+
+
 def _get_etag(path):
     """
     FIXME: It would be great to have a translator that returns the md5sum() of
@@ -328,28 +334,24 @@
     Since we don't have that we should yield after each chunk read and
     computed so that we don't consume the worker thread.
     """
-    etag = md5()
-    with open(path, 'rb') as fp:
-        while True:
-            chunk = fp.read(CHUNK_SIZE)
-            if chunk:
-                etag.update(chunk)
-                if len(chunk) >= CHUNK_SIZE:
-                    # It is likely that we have more data to be read from the
-                    # file. Yield the co-routine cooperatively to avoid
-                    # consuming the worker during md5sum() calculations on
-                    # large files.
-                    sleep()
-            else:
-                break
-    return etag.hexdigest()
+    if isinstance(path, int):
+        with os.fdopen(os.dup(path), 'rb') as fp:
+            etag = _read_for_etag(fp)
+        os.lseek(path, 0, os.SEEK_SET)
+    else:
+        with open(path, 'rb') as fp:
+            etag = _read_for_etag(fp)
+    return etag
 
 
 def get_object_metadata(obj_path):
     """
     Return metadata of object.
     """
-    stats = do_stat(obj_path)
+    if isinstance(obj_path, int):
+        stats = do_fstat(obj_path)
+    else:
+        stats = do_stat(obj_path)
     if not stats:
         metadata = {}
     else:
diff --git a/gluster/swift/obj/diskfile.py b/gluster/swift/obj/diskfile.py
index 26852b1..0e0abef 100644
--- a/gluster/swift/obj/diskfile.py
+++ b/gluster/swift/obj/diskfile.py
@@ -32,10 +32,12 @@ from swift.common.utils import TRUE_VALUES, drop_buffer_cache, ThreadPool
 from swift.common.exceptions import DiskFileNotExist, DiskFileError, \
     DiskFileNoSpace, DiskFileDeviceUnavailable
 
-from gluster.swift.common.exceptions import GlusterFileSystemOSError
+from gluster.swift.common.exceptions import GlusterFileSystemOSError, \
+    GlusterFileSystemIOError
 from gluster.swift.common.Glusterfs import mount
 from gluster.swift.common.fs_utils import do_fstat, do_open, do_close, \
-    do_unlink, do_chown, os_path, do_fsync, do_fchown, do_stat, Fake_file
+    do_unlink, do_chown, os_path, do_fsync, do_fchown, do_stat, do_write, \
+    do_fdatasync, do_rename, Fake_file
 from gluster.swift.common.utils import read_metadata, write_metadata, \
     validate_object, create_object_metadata, rmobjdir, dir_is_object, \
     get_object_metadata
@@ -45,7 +47,6 @@ from gluster.swift.common.utils import X_CONTENT_LENGTH, X_CONTENT_TYPE, \
 from ConfigParser import ConfigParser, NoSectionError, NoOptionError
 
 from swift.obj.diskfile import DiskFile as SwiftDiskFile
-from swift.obj.diskfile import DiskWriter as SwiftDiskWriter
 
 # FIXME: Hopefully we'll be able to move to Python 2.7+ where O_CLOEXEC will
 # be back ported. See http://www.python.org/dev/peps/pep-0433/
@@ -278,7 +279,7 @@ def _adjust_metadata(metadata):
     return metadata
 
 
-class DiskWriter(SwiftDiskWriter):
+class DiskWriter(object):
     """
     Encapsulation of the write context for servicing PUT REST API requests.
Serves as the context manager object for DiskFile's writer() @@ -286,6 +287,126 @@ class DiskWriter(SwiftDiskWriter): We just override the put() method for Gluster. """ + def __init__(self, disk_file, fd, tmppath, threadpool): + self.disk_file = disk_file + self.fd = fd + self.tmppath = tmppath + self.upload_size = 0 + self.last_sync = 0 + self.threadpool = threadpool + + def write(self, chunk): + """ + Write a chunk of data into the temporary file. + + :param chunk: the chunk of data to write as a string object + """ + + def _write_entire_chunk(chunk): + while chunk: + written = do_write(self.fd, chunk) + self.upload_size += written + chunk = chunk[written:] + + self.threadpool.run_in_thread(_write_entire_chunk, chunk) + + # For large files sync every 512MB (by default) written + diff = self.upload_size - self.last_sync + if diff >= self.disk_file.bytes_per_sync: + self.threadpool.force_run_in_thread(do_fdatasync, self.fd) + drop_buffer_cache(self.fd, self.last_sync, diff) + self.last_sync = self.upload_size + + def _finalize_put(self, metadata): + # Write out metadata before fsync() to ensure it is also forced to + # disk. + write_metadata(self.fd, metadata) + + # We call fsync() before calling drop_cache() to lower the + # amount of redundant work the drop cache code will perform on + # the pages (now that after fsync the pages will be all + # clean). + do_fsync(self.fd) + + # From the Department of the Redundancy Department, make sure + # we call drop_cache() after fsync() to avoid redundant work + # (pages all clean). + drop_buffer_cache(self.fd, 0, self.upload_size) + + # At this point we know that the object's full directory path + # exists, so we can just rename it directly without using Swift's + # swift.common.utils.renamer(), which makes the directory path and + # adds extra stat() calls. + df = self.disk_file + data_file = os.path.join(df.put_datadir, df._obj) + attempts = 1 + while True: + try: + do_rename(self.tmppath, data_file) + except OSError as err: + if err.errno in (errno.ENOENT, errno.EIO) \ + and attempts < MAX_RENAME_ATTEMPTS: + # FIXME: Why either of these two error conditions is + # happening is unknown at this point. This might be a + # FUSE issue of some sort or a possible race + # condition. So let's sleep on it, and double check + # the environment after a good nap. + _random_sleep() + # Tease out why this error occurred. The man page for + # rename reads: + # "The link named by tmppath does not exist; or, a + # directory component in data_file does not exist; + # or, tmppath or data_file is an empty string." + assert len(self.tmppath) > 0 and len(data_file) > 0 + tpstats = do_stat(self.tmppath) + tfstats = do_fstat(self.fd) + assert tfstats + if not tpstats or tfstats.st_ino != tpstats.st_ino: + # Temporary file name conflict + raise DiskFileError( + 'DiskFile.put(): temporary file, %s, was' + ' already renamed (targeted for %s)' % ( + self.tmppath, data_file)) + else: + # Data file target name now has a bad path! 
+ dfstats = do_stat(df.put_datadir) + if not dfstats: + raise DiskFileError( + 'DiskFile.put(): path to object, %s, no' + ' longer exists (targeted for %s)' % ( + df.put_datadir, + data_file)) + else: + is_dir = stat.S_ISDIR(dfstats.st_mode) + if not is_dir: + raise DiskFileError( + 'DiskFile.put(): path to object, %s,' + ' no longer a directory (targeted for' + ' %s)' % (df.put_datadir, + data_file)) + else: + # Let's retry since everything looks okay + logging.warn( + "DiskFile.put(): os.rename('%s','%s')" + " initially failed (%s) but a" + " stat('%s') following that succeeded:" + " %r" % ( + self.tmppath, data_file, + str(err), df.put_datadir, + dfstats)) + attempts += 1 + continue + else: + raise GlusterFileSystemOSError( + err.errno, "%s, os.rename('%s', '%s')" % ( + err.strerror, self.tmppath, data_file)) + else: + # Success! + break + # Close here so the calling context does not have to perform this + # in a thread. + do_close(self.fd) + def put(self, metadata, extension='.data'): """ Finalize writing the file on disk, and renames it from the temp file @@ -306,120 +427,34 @@ class DiskWriter(SwiftDiskWriter): if not df.data_file: # Does not exist, create it data_file = os.path.join(df._obj_path, df._obj) - _, df.metadata = self.threadpool.force_run_in_thread( + _, df._metadata = self.threadpool.force_run_in_thread( df._create_dir_object, data_file, metadata) df.data_file = os.path.join(df._container_path, data_file) - elif not df.is_dir: + elif not df._is_dir: # Exists, but as a file raise DiskFileError('DiskFile.put(): directory creation failed' ' since the target, %s, already exists as' ' a file' % df.data_file) return - if df._is_dir: - # A pre-existing directory already exists on the file - # system, perhaps gratuitously created when another - # object was created, or created externally to Swift - # REST API servicing (UFO use case). - raise DiskFileError('DiskFile.put(): file creation failed since' - ' the target, %s, already exists as a' - ' directory' % df.data_file) - - def finalize_put(): - # Write out metadata before fsync() to ensure it is also forced to - # disk. - write_metadata(self.fd, metadata) - - # We call fsync() before calling drop_cache() to lower the - # amount of redundant work the drop cache code will perform on - # the pages (now that after fsync the pages will be all - # clean). - do_fsync(self.fd) - # From the Department of the Redundancy Department, make sure - # we call drop_cache() after fsync() to avoid redundant work - # (pages all clean). - drop_buffer_cache(self.fd, 0, self.upload_size) - - # At this point we know that the object's full directory path - # exists, so we can just rename it directly without using Swift's - # swift.common.utils.renamer(), which makes the directory path and - # adds extra stat() calls. - data_file = os.path.join(df.put_datadir, df._obj) - attempts = 1 - while True: - try: - os.rename(self.tmppath, data_file) - except OSError as err: - if err.errno in (errno.ENOENT, errno.EIO) \ - and attempts < MAX_RENAME_ATTEMPTS: - # FIXME: Why either of these two error conditions is - # happening is unknown at this point. This might be a - # FUSE issue of some sort or a possible race - # condition. So let's sleep on it, and double check - # the environment after a good nap. - _random_sleep() - # Tease out why this error occurred. The man page for - # rename reads: - # "The link named by tmppath does not exist; or, a - # directory component in data_file does not exist; - # or, tmppath or data_file is an empty string." 
- assert len(self.tmppath) > 0 and len(data_file) > 0 - tpstats = do_stat(self.tmppath) - tfstats = do_fstat(self.fd) - assert tfstats - if not tpstats or tfstats.st_ino != tpstats.st_ino: - # Temporary file name conflict - raise DiskFileError( - 'DiskFile.put(): temporary file, %s, was' - ' already renamed (targeted for %s)' % ( - self.tmppath, data_file)) - else: - # Data file target name now has a bad path! - dfstats = do_stat(df.put_datadir) - if not dfstats: - raise DiskFileError( - 'DiskFile.put(): path to object, %s, no' - ' longer exists (targeted for %s)' % ( - df.put_datadir, - data_file)) - else: - is_dir = stat.S_ISDIR(dfstats.st_mode) - if not is_dir: - raise DiskFileError( - 'DiskFile.put(): path to object, %s,' - ' no longer a directory (targeted for' - ' %s)' % (df.put_datadir, - data_file)) - else: - # Let's retry since everything looks okay - logging.warn( - "DiskFile.put(): os.rename('%s','%s')" - " initially failed (%s) but a" - " stat('%s') following that succeeded:" - " %r" % ( - self.tmppath, data_file, - str(err), df.put_datadir, - dfstats)) - attempts += 1 - continue - else: - raise GlusterFileSystemOSError( - err.errno, "%s, os.rename('%s', '%s')" % ( - err.strerror, self.tmppath, data_file)) - else: - # Success! - break - # Close here so the calling context does not have to perform this - # in a thread. - do_close(self.fd) - - self.threadpool.force_run_in_thread(finalize_put) + try: + self.threadpool.force_run_in_thread(self._finalize_put, metadata) + except GlusterFileSystemOSError as err: + if err.errno == errno.EISDIR: + # A pre-existing directory already exists on the file + # system, perhaps gratuitously created when another + # object was created, or created externally to Swift + # REST API servicing (UFO use case). 
+ raise DiskFileError('DiskFile.put(): file creation failed' + ' since the target, %s, already exists as' + ' a directory' % df.data_file) + raise # Avoid the unlink() system call as part of the mkstemp context # cleanup self.tmppath = None - df.metadata = metadata + df._metadata = metadata df._filter_metadata() # Mark that it actually exists now @@ -443,7 +478,6 @@ class DiskFile(SwiftDiskFile): :param container: container name for the object :param obj: object name for the object :param logger: logger object for writing out log file messages - :param keep_data_fp: if True, don't close the fp, otherwise close it :param disk_chunk_Size: size of chunks on file reads :param bytes_per_sync: number of bytes between fdatasync calls :param iter_hook: called when __iter__ returns a chunk @@ -456,18 +490,15 @@ class DiskFile(SwiftDiskFile): """ def __init__(self, path, device, partition, account, container, obj, - logger, keep_data_fp=False, - disk_chunk_size=DEFAULT_DISK_CHUNK_SIZE, + logger, disk_chunk_size=DEFAULT_DISK_CHUNK_SIZE, bytes_per_sync=DEFAULT_BYTES_PER_SYNC, iter_hook=None, threadpool=None, obj_dir='objects', mount_check=False, - disallowed_metadata_keys=None, uid=DEFAULT_UID, - gid=DEFAULT_GID): + uid=DEFAULT_UID, gid=DEFAULT_GID): if mount_check and not mount(path, device): raise DiskFileDeviceUnavailable() self.disk_chunk_size = disk_chunk_size self.bytes_per_sync = bytes_per_sync self.iter_hook = iter_hook - self.threadpool = threadpool or ThreadPool(nthreads=0) obj = obj.strip(os.path.sep) if os.path.sep in obj: @@ -491,59 +522,78 @@ class DiskFile(SwiftDiskFile): self.put_datadir = self.datadir self._is_dir = False self.logger = logger - self.metadata = {} - self.meta_file = None + self._metadata = None + # Don't store a value for data_file until we know it exists. + self.data_file = None + self._data_file_size = None self.fp = None self.iter_etag = None self.started_at_0 = False self.read_to_eof = False self.quarantined_dir = None + self.suppress_file_closing = False + self._verify_close = False + self.threadpool = threadpool or ThreadPool(nthreads=0) + # FIXME(portante): this attribute is set after open and affects the + # behavior of the class (i.e. public interface) self.keep_cache = False self.uid = int(uid) self.gid = int(gid) - self.suppress_file_closing = False - # Don't store a value for data_file until we know it exists. - self.data_file = None - data_file = os.path.join(self.put_datadir, self._obj) + def open(self, verify_close=False): + """ + Open the file and read the metadata. + + This method must populate the _metadata attribute. + :param verify_close: force implicit close to verify_file, no effect on + explicit close. 
+ + :raises DiskFileCollision: on md5 collision + """ + data_file = os.path.join(self.put_datadir, self._obj) try: - stats = do_stat(data_file) - except OSError as err: - if err.errno == errno.ENOTDIR: - return + fd = do_open(data_file, os.O_RDONLY | os.O_EXCL) + except GlusterFileSystemOSError as err: + self.logger.exception( + "Error opening file, %s :: %s", data_file, err) else: - if not stats: - return - - self.data_file = data_file - self._is_dir = stat.S_ISDIR(stats.st_mode) - - self.metadata = read_metadata(data_file) - if not self.metadata: - create_object_metadata(data_file) - self.metadata = read_metadata(data_file) - - if not validate_object(self.metadata): - create_object_metadata(data_file) - self.metadata = read_metadata(data_file) - - self._filter_metadata() - - if keep_data_fp: - if not self._is_dir: - # The caller has an assumption that the "fp" field of this - # object is an file object if keep_data_fp is set. However, - # this implementation of the DiskFile object does not need to - # open the file for internal operations. So if the caller - # requests it, we'll just open the file for them. - self.fp = do_open(data_file, 'rb') + try: + stats = do_fstat(fd) + except GlusterFileSystemOSError as err: + self.logger.exception( + "Error stat'ing open file, %s :: %s", data_file, err) else: - self.fp = Fake_file(data_file) + self._is_dir = stat.S_ISDIR(stats.st_mode) + + self.data_file = data_file - def drop_cache(self, fd, offset, length): + self._metadata = read_metadata(fd) + if not self._metadata: + create_object_metadata(fd) + self._metadata = read_metadata(fd) + + if not validate_object(self._metadata): + create_object_metadata(fd) + self._metadata = read_metadata(fd) + + self._filter_metadata() + + if self._is_dir: + # Use a fake file handle to satisfy the super class's + # __iter__ method requirement when dealing with + # directories as objects. + os.close(fd) + self.fp = Fake_file(data_file) + else: + self.fp = os.fdopen(fd, 'rb') + self._verify_close = verify_close + self._metadata = self._metadata or {} + return self + + def _drop_cache(self, fd, offset, length): if fd >= 0: - super(DiskFile, self).drop_cache(fd, offset, length) + super(DiskFile, self)._drop_cache(fd, offset, length) def close(self, verify_file=True): """ @@ -555,12 +605,17 @@ class DiskFile(SwiftDiskFile): if self.fp: do_close(self.fp) self.fp = None + self._metadata = None + self._data_file_size = None + self._verify_close = False def _filter_metadata(self): - if X_TYPE in self.metadata: - self.metadata.pop(X_TYPE) - if X_OBJECT_TYPE in self.metadata: - self.metadata.pop(X_OBJECT_TYPE) + if self._metadata is None: + return + if X_TYPE in self._metadata: + self._metadata.pop(X_TYPE) + if X_OBJECT_TYPE in self._metadata: + self._metadata.pop(X_OBJECT_TYPE) def _create_dir_object(self, dir_path, metadata=None): """ @@ -619,7 +674,7 @@ class DiskFile(SwiftDiskFile): return True, newmd @contextmanager - def writer(self, size=None): + def create(self, size=None): """ Contextmanager to make a temporary file, optionally of a specified initial size. @@ -721,63 +776,68 @@ class DiskFile(SwiftDiskFile): if tombstone: # We don't write tombstone files. So do nothing. 
return - assert self.data_file is not None, \ - "put_metadata: no file to put metadata into" metadata = _adjust_metadata(metadata) - self.threadpool.run_in_thread(write_metadata, self.data_file, metadata) - self.metadata = metadata - self._filter_metadata() + data_file = os.path.join(self.put_datadir, self._obj) + self.threadpool.run_in_thread(write_metadata, data_file, metadata) - def unlinkold(self, timestamp): + def _delete(self): + if self._is_dir: + # Marker, or object, directory. + # + # Delete from the filesystem only if it contains no objects. + # If it does contain objects, then just remove the object + # metadata tag which will make this directory a + # fake-filesystem-only directory and will be deleted when the + # container or parent directory is deleted. + metadata = read_metadata(self.data_file) + if dir_is_object(metadata): + metadata[X_OBJECT_TYPE] = DIR_NON_OBJECT + write_metadata(self.data_file, metadata) + rmobjdir(self.data_file) + else: + # Delete file object + do_unlink(self.data_file) + + # Garbage collection of non-object directories. Now that we + # deleted the file, determine if the current directory and any + # parent directory may be deleted. + dirname = os.path.dirname(self.data_file) + while dirname and dirname != self._container_path: + # Try to remove any directories that are not objects. + if not rmobjdir(dirname): + # If a directory with objects has been found, we can stop + # garabe collection + break + else: + dirname = os.path.dirname(dirname) + + def delete(self, timestamp): """ Remove any older versions of the object file. Any file that has an older timestamp than timestamp will be deleted. :param timestamp: timestamp to compare with each file """ - if not self.metadata or self.metadata[X_TIMESTAMP] >= timestamp: - return - - assert self.data_file, \ - "Have metadata, %r, but no data_file" % self.metadata - - def _unlinkold(): - if self._is_dir: - # Marker, or object, directory. - # - # Delete from the filesystem only if it contains no objects. - # If it does contain objects, then just remove the object - # metadata tag which will make this directory a - # fake-filesystem-only directory and will be deleted when the - # container or parent directory is deleted. - metadata = read_metadata(self.data_file) - if dir_is_object(metadata): - metadata[X_OBJECT_TYPE] = DIR_NON_OBJECT - write_metadata(self.data_file, metadata) - rmobjdir(self.data_file) - else: - # Delete file object - do_unlink(self.data_file) - - # Garbage collection of non-object directories. Now that we - # deleted the file, determine if the current directory and any - # parent directory may be deleted. - dirname = os.path.dirname(self.data_file) - while dirname and dirname != self._container_path: - # Try to remove any directories that are not objects. - if not rmobjdir(dirname): - # If a directory with objects has been found, we can stop - # garabe collection - break - else: - dirname = os.path.dirname(dirname) - - self.threadpool.run_in_thread(_unlinkold) - - self.metadata = {} + timestamp_fl = float(timestamp) + data_file = os.path.join(self.put_datadir, self._obj) + try: + metadata = read_metadata(data_file) + except (GlusterFileSystemIOError, GlusterFileSystemOSError) as err: + if err.errno != errno.ENOENT: + raise + else: + try: + old_ts = float(metadata[X_TIMESTAMP]) >= timestamp_fl + except (KeyError, ValueError): + # If no X-Timestamp to compare against, or the timestamp is + # not a valid float, we'll just delete the object anyways. 
+ old_ts = False + if not old_ts: + self.threadpool.run_in_thread(self._delete) + self._metadata = {} self.data_file = None - def get_data_file_size(self): + def _get_data_file_size(self): """ Returns the os_path.getsize for the file. Raises an exception if this file does not match the Content-Length stored in the metadata, or if @@ -795,12 +855,12 @@ class DiskFile(SwiftDiskFile): if self.data_file: def _old_getsize(): file_size = os_path.getsize(self.data_file) - if X_CONTENT_LENGTH in self.metadata: - metadata_size = int(self.metadata[X_CONTENT_LENGTH]) + if X_CONTENT_LENGTH in self._metadata: + metadata_size = int(self._metadata[X_CONTENT_LENGTH]) if file_size != metadata_size: # FIXME - bit rot detection? - self.metadata[X_CONTENT_LENGTH] = file_size - write_metadata(self.data_file, self.metadata) + self._metadata[X_CONTENT_LENGTH] = file_size + write_metadata(self.data_file, self._metadata) return file_size file_size = self.threadpool.run_in_thread(_old_getsize) return file_size diff --git a/gluster/swift/obj/server.py b/gluster/swift/obj/server.py index bdd7687..433879f 100644 --- a/gluster/swift/obj/server.py +++ b/gluster/swift/obj/server.py @@ -39,8 +39,6 @@ class ObjectController(server.ObjectController): kwargs.setdefault('disk_chunk_size', self.disk_chunk_size) kwargs.setdefault('threadpool', self.threadpools[device]) kwargs.setdefault('obj_dir', server.DATADIR) - kwargs.setdefault('disallowed_metadata_keys', - server.DISALLOWED_HEADERS) return DiskFile(self.devices, device, partition, account, container, obj, self.logger, **kwargs) diff --git a/gluster/swift/proxy/server.py b/gluster/swift/proxy/server.py index 512b9bb..3254409 100644 --- a/gluster/swift/proxy/server.py +++ b/gluster/swift/proxy/server.py @@ -18,11 +18,14 @@ # needs import gluster.swift.common.constraints # noqa -from swift.proxy import server +from swift.proxy.server import Application, mimetypes # noqa +from swift.proxy.controllers import AccountController # noqa +from swift.proxy.controllers import ObjectController # noqa +from swift.proxy.controllers import ContainerController # noqa def app_factory(global_conf, **local_conf): # noqa """paste.deploy app factory for creating WSGI proxy apps.""" conf = global_conf.copy() conf.update(local_conf) - return server.Application(conf) + return Application(conf) diff --git a/glusterfs-openstack-swift.spec b/glusterfs-openstack-swift.spec index c41ddfd..791ad70 100644 --- a/glusterfs-openstack-swift.spec +++ b/glusterfs-openstack-swift.spec @@ -39,11 +39,11 @@ BuildRequires: python-setuptools Requires : memcached Requires : openssl Requires : python -Requires : openstack-swift = 1.9.1 -Requires : openstack-swift-account = 1.9.1 -Requires : openstack-swift-container = 1.9.1 -Requires : openstack-swift-object = 1.9.1 -Requires : openstack-swift-proxy = 1.9.1 +Requires : openstack-swift = 1.10.0 +Requires : openstack-swift-account = 1.10.0 +Requires : openstack-swift-container = 1.10.0 +Requires : openstack-swift-object = 1.10.0 +Requires : openstack-swift-proxy = 1.10.0 Obsoletes: glusterfs-swift-plugin Obsoletes: glusterfs-swift Obsoletes: glusterfs-ufo diff --git a/modules/swift b/modules/swift index 4bd9e45..7accddf 160000 --- a/modules/swift +++ b/modules/swift @@ -1 +1 @@ -Subproject commit 4bd9e4584d31eb37c7e30e555daeb6b90703ee3a +Subproject commit 7accddf1c3f54f67cf29d6eb69e416f798af6e23 diff --git a/test/__init__.py b/test/__init__.py index 9095348..7eb5f47 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -1,13 +1,43 @@ +# Copyright (c) 2010-2012 
OpenStack Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # See http://code.google.com/p/python-nose/issues/detail?id=373 # The code below enables nosetests to work with i18n _() blocks -import __builtin__ import sys import os +try: + from unittest.util import safe_repr +except ImportError: + # Probably py26 + _MAX_LENGTH = 80 -from swift.common.utils import readconf + def safe_repr(obj, short=False): + try: + result = repr(obj) + except Exception: + result = object.__repr__(obj) + if not short or len(result) < _MAX_LENGTH: + return result + return result[:_MAX_LENGTH] + ' [truncated]...' -setattr(__builtin__, '_', lambda x: x) +# make unittests pass on all locale +import swift +setattr(swift, 'gettext_', lambda x: x) + +from swift.common.utils import readconf # Work around what seems to be a Python bug. diff --git a/test/functional/swift_test_client.py b/test/functional/swift_test_client.py index daea902..2a161ac 100644 --- a/test/functional/swift_test_client.py +++ b/test/functional/swift_test_client.py @@ -1,19 +1,4 @@ -# Copyright (c) 2010-2013 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Copyright (c) 2013 Red Hat, Inc. +# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -43,6 +28,8 @@ from nose import SkipTest from xml.dom import minidom from swiftclient import get_auth +from test import safe_repr + class AuthenticationFailed(Exception): pass @@ -146,12 +133,10 @@ class Connection(object): auth_netloc = "%s:%d" % (self.auth_host, self.auth_port) auth_url = auth_scheme + auth_netloc + auth_path - (storage_url, storage_token) = get_auth(auth_url, - auth_user, self.password, - snet=False, - tenant_name=self.account, - auth_version=self.auth_version, - os_options={}) + (storage_url, storage_token) = get_auth( + auth_url, auth_user, self.password, snet=False, + tenant_name=self.account, auth_version=self.auth_version, + os_options={}) if not (storage_url and storage_token): raise AuthenticationFailed() @@ -233,18 +218,22 @@ class Connection(object): self.response = None try_count = 0 + fail_messages = [] while try_count < 5: try_count += 1 try: self.response = try_request() - except httplib.HTTPException: + except httplib.HTTPException as e: + fail_messages.append(safe_repr(e)) continue if self.response.status == 401: + fail_messages.append("Response 401") self.authenticate() continue elif self.response.status == 503: + fail_messages.append("Response 503") if try_count != 5: time.sleep(5) continue @@ -254,7 +243,11 @@ class Connection(object): if self.response: return self.response.status - raise RequestError('Unable to complete http request') + request = "{method} {path} headers: {headers} data: {data}".format( + method=method, path=path, headers=headers, data=data) + raise RequestError('Unable to complete http request: %s. ' + 'Attempts: %s, Failures: %s' % + (request, len(fail_messages), fail_messages)) def put_start(self, path, hdrs={}, parms={}, cfg={}, chunked=False): self.http_connect() @@ -329,21 +322,21 @@ class Account(Base): return Container(self.conn, self.name, container_name) def containers(self, hdrs={}, parms={}, cfg={}): - format = parms.get('format', None) - if format not in [None, 'json', 'xml']: - raise RequestError('Invalid format: %s' % format) - if format is None and 'format' in parms: + format_type = parms.get('format', None) + if format_type not in [None, 'json', 'xml']: + raise RequestError('Invalid format: %s' % format_type) + if format_type is None and 'format' in parms: del parms['format'] status = self.conn.make_request('GET', self.path, hdrs=hdrs, parms=parms, cfg=cfg) if status == 200: - if format == 'json': + if format_type == 'json': conts = json.loads(self.conn.response.read()) for cont in conts: cont['name'] = cont['name'].encode('utf-8') return conts - elif format == 'xml': + elif format_type == 'xml': conts = [] tree = minidom.parseString(self.conn.response.read()) for x in tree.getElementsByTagName('container'): @@ -406,8 +399,8 @@ class Container(Base): def delete_files(self): for f in listing_items(self.files): - file = self.file(f) - if not file.delete(): + file_item = self.file(f) + if not file_item.delete(): return False return listing_empty(self.files) @@ -419,37 +412,39 @@ class Container(Base): return File(self.conn, self.account, self.name, file_name) def files(self, hdrs={}, parms={}, cfg={}): - format = parms.get('format', None) - if format not in [None, 'json', 'xml']: - raise RequestError('Invalid format: %s' % format) - if format is None and 'format' in parms: + format_type = parms.get('format', None) + if format_type not in [None, 'json', 'xml']: + raise RequestError('Invalid format: %s' % format_type) + if format_type is None and 'format' in parms: del parms['format'] status = 
self.conn.make_request('GET', self.path, hdrs=hdrs, parms=parms, cfg=cfg) if status == 200: - if format == 'json': + if format_type == 'json': files = json.loads(self.conn.response.read()) - for file in files: - file['name'] = file['name'].encode('utf-8') - file['content_type'] = file['content_type'].encode('utf-8') + for file_item in files: + file_item['name'] = file_item['name'].encode('utf-8') + file_item['content_type'] = file_item['content_type'].\ + encode('utf-8') return files - elif format == 'xml': + elif format_type == 'xml': files = [] tree = minidom.parseString(self.conn.response.read()) for x in tree.getElementsByTagName('object'): - file = {} + file_item = {} for key in ['name', 'hash', 'bytes', 'content_type', 'last_modified']: - file[key] = x.getElementsByTagName(key)[0].\ + file_item[key] = x.getElementsByTagName(key)[0].\ childNodes[0].nodeValue - files.append(file) + files.append(file_item) - for file in files: - file['name'] = file['name'].encode('utf-8') - file['content_type'] = file['content_type'].encode('utf-8') + for file_item in files: + file_item['name'] = file_item['name'].encode('utf-8') + file_item['content_type'] = file_item['content_type'].\ + encode('utf-8') return files else: content = self.conn.response.read() @@ -616,11 +611,11 @@ class File(Base): callback=None, cfg={}): if size > 0: - range = 'bytes=%d-%d' % (offset, (offset + size) - 1) + range_string = 'bytes=%d-%d' % (offset, (offset + size) - 1) if hdrs: - hdrs['Range'] = range + hdrs['Range'] = range_string else: - hdrs = {'Range': range} + hdrs = {'Range': range_string} status = self.conn.make_request('GET', self.path, hdrs=hdrs, cfg=cfg) @@ -736,7 +731,7 @@ class File(Base): callback(transferred, self.size) self.conn.put_end() - except socket.timeout, err: + except socket.timeout as err: raise err if (self.conn.response.status < 200) or \ diff --git a/test/functional/tests.py b/test/functional/tests.py index dcd8b51..e5ccda2 100644 --- a/test/functional/tests.py +++ b/test/functional/tests.py @@ -1,5 +1,5 @@ #!/usr/bin/python -u -# Copyright (c) 2010-2013 OpenStack, LLC. +# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,20 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# Modifications by Red Hat, Inc. 
from datetime import datetime import os @@ -89,6 +76,7 @@ web_front_end = config.get('web_front_end', 'integral') normalized_urls = config.get('normalized_urls', False) set_object_name_component_length() + def load_constraint(name): c = config[name] if not isinstance(c, int): @@ -143,7 +131,7 @@ def timeout(seconds, method, *args, **kwargs): def run(self): try: self.method(*self.args, **self.kwargs) - except Exception, e: + except Exception as e: self.exception = e t = TimeoutThread(method, *args, **kwargs) @@ -304,23 +292,24 @@ class TestAccount(Base): info = {'bytes': 0} info['count'] = random.randint(10, 30) for i in range(info['count']): - file = container.file(Utils.create_name()) + file_item = container.file(Utils.create_name()) bytes = random.randint(1, 32768) - file.write_random(bytes) + file_item.write_random(bytes) info['bytes'] += bytes container_info[container.name] = info - for format in ['json', 'xml']: - for a in self.env.account.containers(parms={'format': format}): + for format_type in ['json', 'xml']: + for a in self.env.account.containers( + parms={'format': format_type}): self.assert_(a['count'] >= 0) self.assert_(a['bytes'] >= 0) headers = dict(self.env.conn.response.getheaders()) - if format == 'json': + if format_type == 'json': self.assertEquals(headers['content-type'], 'application/json; charset=utf-8') - elif format == 'xml': + elif format_type == 'xml': self.assertEquals(headers['content-type'], 'application/xml; charset=utf-8') @@ -340,8 +329,8 @@ class TestAccount(Base): def testContainerListing(self): a = sorted([c.name for c in self.env.containers]) - for format in [None, 'json', 'xml']: - b = self.env.account.containers(parms={'format': format}) + for format_type in [None, 'json', 'xml']: + b = self.env.account.containers(parms={'format': format_type}) if isinstance(b[0], dict): b = [x['name'] for x in b] @@ -354,27 +343,29 @@ class TestAccount(Base): self.assert_status(401) def testLastContainerMarker(self): - for format in [None, 'json', 'xml']: - containers = self.env.account.containers({'format': format}) + for format_type in [None, 'json', 'xml']: + containers = self.env.account.containers({'format': format_type}) self.assertEquals(len(containers), len(self.env.containers)) self.assert_status(200) containers = self.env.account.containers( - parms={'format': format, 'marker': containers[-1]}) + parms={'format': format_type, 'marker': containers[-1]}) self.assertEquals(len(containers), 0) - if format is None: + if format_type is None: self.assert_status(204) else: self.assert_status(200) def testMarkerLimitContainerList(self): - for format in [None, 'json', 'xml']: + for format_type in [None, 'json', 'xml']: for marker in ['0', 'A', 'I', 'R', 'Z', 'a', 'i', 'r', 'z', 'abc123', 'mnop', 'xyz']: limit = random.randint(2, 9) containers = self.env.account.containers( - parms={'format': format, 'marker': marker, 'limit': limit}) + parms={'format': format_type, + 'marker': marker, + 'limit': limit}) self.assert_(len(containers) <= limit) if containers: if isinstance(containers[0], dict): @@ -382,9 +373,9 @@ class TestAccount(Base): self.assert_(locale.strcoll(containers[0], marker) > 0) def testContainersOrderedByName(self): - for format in [None, 'json', 'xml']: + for format_type in [None, 'json', 'xml']: containers = self.env.account.containers( - parms={'format': format}) + parms={'format': format_type}) if isinstance(containers[0], dict): containers = [x['name'] for x in containers] self.assertEquals(sorted(containers, cmp=locale.strcoll), @@ -410,11 +401,11 
@@ class TestAccountNoContainers(Base): set_up = False def testGetRequest(self): - for format in [None, 'json', 'xml']: + for format_type in [None, 'json', 'xml']: self.assert_(not self.env.account.containers( - parms={'format': format})) + parms={'format': format_type})) - if format is None: + if format_type is None: self.assert_status(204) else: self.assert_status(200) @@ -441,9 +432,9 @@ class TestContainerEnv: cls.file_size = 128 cls.files = list() for x in range(cls.file_count): - file = cls.container.file(Utils.create_name()) - file.write_random(cls.file_size) - cls.files.append(file.name) + file_item = cls.container.file(Utils.create_name()) + file_item.write_random(cls.file_size) + cls.files.append(file_item.name) class TestContainerDev(Base): @@ -475,12 +466,12 @@ class TestContainer(Base): def testFileThenContainerDelete(self): cont = self.env.account.container(Utils.create_name()) self.assert_(cont.create()) - file = cont.file(Utils.create_name()) - self.assert_(file.write_random()) + file_item = cont.file(Utils.create_name()) + self.assert_(file_item.write_random()) - self.assert_(file.delete()) + self.assert_(file_item.delete()) self.assert_status(204) - self.assert_(file.name not in cont.files()) + self.assert_(file_item.name not in cont.files()) self.assert_(cont.delete()) self.assert_status(204) @@ -492,8 +483,8 @@ class TestContainer(Base): files = sorted([Utils.create_name() for x in xrange(10)]) for f in files: - file = cont.file(f) - self.assert_(file.write_random()) + file_item = cont.file(f) + self.assert_(file_item.write_random()) for i in xrange(len(files)): f = files[i] @@ -518,23 +509,23 @@ class TestContainer(Base): prefix_files[prefix] = [] for i in range(prefix_file_count): - file = cont.file(prefix + Utils.create_name()) - file.write() - prefix_files[prefix].append(file.name) + file_item = cont.file(prefix + Utils.create_name()) + file_item.write() + prefix_files[prefix].append(file_item.name) - for format in [None, 'json', 'xml']: + for format_type in [None, 'json', 'xml']: for prefix in prefixs: files = cont.files(parms={'prefix': prefix}) self.assertEquals(files, sorted(prefix_files[prefix])) - for format in [None, 'json', 'xml']: + for format_type in [None, 'json', 'xml']: for prefix in prefixs: files = cont.files(parms={'limit': limit_count, 'prefix': prefix}) self.assertEquals(len(files), limit_count) - for file in files: - self.assert_(file.startswith(prefix)) + for file_item in files: + self.assert_(file_item.startswith(prefix)) def testCreate(self): cont = self.env.account.container(Utils.create_name()) @@ -543,10 +534,10 @@ class TestContainer(Base): self.assert_(cont.name in self.env.account.containers()) def testContainerFileListOnContainerThatDoesNotExist(self): - for format in [None, 'json', 'xml']: + for format_type in [None, 'json', 'xml']: container = self.env.account.container(Utils.create_name()) self.assertRaises(ResponseError, container.files, - parms={'format': format}) + parms={'format': format_type}) self.assert_status(404) def testUtf8Container(self): @@ -606,52 +597,52 @@ class TestContainer(Base): def testDeleteOnContainerWithFiles(self): cont = self.env.account.container(Utils.create_name()) self.assert_(cont.create()) - file = cont.file(Utils.create_name()) - file.write_random(self.env.file_size) - self.assert_(file.name in cont.files()) + file_item = cont.file(Utils.create_name()) + file_item.write_random(self.env.file_size) + self.assert_(file_item.name in cont.files()) self.assert_(not cont.delete()) self.assert_status(409) def 
testFileCreateInContainerThatDoesNotExist(self): - file = File(self.env.conn, self.env.account, Utils.create_name(), - Utils.create_name()) - self.assertRaises(ResponseError, file.write) + file_item = File(self.env.conn, self.env.account, Utils.create_name(), + Utils.create_name()) + self.assertRaises(ResponseError, file_item.write) self.assert_status(404) def testLastFileMarker(self): - for format in [None, 'json', 'xml']: - files = self.env.container.files({'format': format}) + for format_type in [None, 'json', 'xml']: + files = self.env.container.files({'format': format_type}) self.assertEquals(len(files), len(self.env.files)) self.assert_status(200) files = self.env.container.files( - parms={'format': format, 'marker': files[-1]}) + parms={'format': format_type, 'marker': files[-1]}) self.assertEquals(len(files), 0) - if format is None: + if format_type is None: self.assert_status(204) else: self.assert_status(200) def testContainerFileList(self): - for format in [None, 'json', 'xml']: - files = self.env.container.files(parms={'format': format}) + for format_type in [None, 'json', 'xml']: + files = self.env.container.files(parms={'format': format_type}) self.assert_status(200) if isinstance(files[0], dict): files = [x['name'] for x in files] - for file in self.env.files: - self.assert_(file in files) + for file_item in self.env.files: + self.assert_(file_item in files) - for file in files: - self.assert_(file in self.env.files) + for file_item in files: + self.assert_(file_item in self.env.files) def testMarkerLimitFileList(self): - for format in [None, 'json', 'xml']: + for format_type in [None, 'json', 'xml']: for marker in ['0', 'A', 'I', 'R', 'Z', 'a', 'i', 'r', 'z', 'abc123', 'mnop', 'xyz']: limit = random.randint(2, self.env.file_count - 1) - files = self.env.container.files(parms={'format': format, + files = self.env.container.files(parms={'format': format_type, 'marker': marker, 'limit': limit}) @@ -668,8 +659,8 @@ class TestContainer(Base): self.assert_(locale.strcoll(files[0], marker) > 0) def testFileOrder(self): - for format in [None, 'json', 'xml']: - files = self.env.container.files(parms={'format': format}) + for format_type in [None, 'json', 'xml']: + files = self.env.container.files(parms={'format': format_type}) if isinstance(files[0], dict): files = [x['name'] for x in files] self.assertEquals(sorted(files, cmp=locale.strcoll), files) @@ -687,8 +678,8 @@ class TestContainer(Base): self.assert_status(404) def testContainerFileListWithLimit(self): - for format in [None, 'json', 'xml']: - files = self.env.container.files(parms={'format': format, + for format_type in [None, 'json', 'xml']: + files = self.env.container.files(parms={'format': format_type, 'limit': 2}) self.assertEquals(len(files), 2) @@ -707,8 +698,8 @@ class TestContainer(Base): cont = self.env.account.container(Utils.create_name()) self.assertRaises(ResponseError, cont.files) self.assert_(cont.create()) - file = cont.file(Utils.create_name()) - file.write_random() + file_item = cont.file(Utils.create_name()) + file_item.write_random() class TestContainerUTF8(Base2, TestContainer): @@ -775,12 +766,13 @@ class TestContainerPathsEnv: stored_files = set() for f in cls.files: - file = cls.container.file(f) + file_item = cls.container.file(f) if f.endswith('/'): - file.write(hdrs={'Content-Type': 'application/directory'}) + file_item.write(hdrs={'Content-Type': 'application/directory'}) else: - file.write_random(cls.file_size, hdrs={'Content-Type': - 'application/directory'}) + 
file_item.write_random(cls.file_size, + hdrs={'Content-Type': + 'application/directory'}) if (normalized_urls): nfile = '/'.join(filter(None, f.split('/'))) if (f[-1] == '/'): @@ -791,8 +783,6 @@ class TestContainerPathsEnv: cls.stored_files = sorted(stored_files) - - class TestContainerPaths(Base): env = TestContainerPathsEnv set_up = False @@ -805,61 +795,62 @@ class TestContainerPaths(Base): if count > 10: raise ValueError('too deep recursion') - for file in self.env.container.files(parms={'path': path}): - self.assert_(file.startswith(path)) - if file.endswith('/'): - recurse_path(file, count + 1) - found_dirs.append(file) + for file_item in self.env.container.files(parms={'path': path}): + self.assert_(file_item.startswith(path)) + if file_item.endswith('/'): + recurse_path(file_item, count + 1) + found_dirs.append(file_item) else: - found_files.append(file) + found_files.append(file_item) recurse_path('') - for file in self.env.stored_files: - if file.startswith('/'): - self.assert_(file not in found_dirs) - self.assert_(file not in found_files) - elif file.endswith('/'): - self.assert_(file in found_dirs) - self.assert_(file not in found_files) + for file_item in self.env.stored_files: + if file_item.startswith('/'): + self.assert_(file_item not in found_dirs) + self.assert_(file_item not in found_files) + elif file_item.endswith('/'): + self.assert_(file_item in found_dirs) + self.assert_(file_item not in found_files) else: - self.assert_(file in found_files) - self.assert_(file not in found_dirs) + self.assert_(file_item in found_files) + self.assert_(file_item not in found_dirs) found_files = [] found_dirs = [] recurse_path('/') - for file in self.env.stored_files: - if not file.startswith('/'): - self.assert_(file not in found_dirs) - self.assert_(file not in found_files) - elif file.endswith('/'): - self.assert_(file in found_dirs) - self.assert_(file not in found_files) + for file_item in self.env.stored_files: + if not file_item.startswith('/'): + self.assert_(file_item not in found_dirs) + self.assert_(file_item not in found_files) + elif file_item.endswith('/'): + self.assert_(file_item in found_dirs) + self.assert_(file_item not in found_files) else: - self.assert_(file in found_files) - self.assert_(file not in found_dirs) + self.assert_(file_item in found_files) + self.assert_(file_item not in found_dirs) def testContainerListing(self): - for format in (None, 'json', 'xml'): - files = self.env.container.files(parms={'format': format}) + for format_type in (None, 'json', 'xml'): + files = self.env.container.files(parms={'format': format_type}) if isinstance(files[0], dict): files = [str(x['name']) for x in files] self.assertEquals(files, self.env.stored_files) - for format in ('json', 'xml'): - for file in self.env.container.files(parms={'format': format}): - self.assert_(int(file['bytes']) >= 0) - self.assert_('last_modified' in file) - if file['name'].endswith('/'): - self.assertEquals(file['content_type'], + for format_type in ('json', 'xml'): + for file_item in self.env.container.files(parms={'format': + format_type}): + self.assert_(int(file_item['bytes']) >= 0) + self.assert_('last_modified' in file_item) + if file_item['name'].endswith('/'): + self.assertEquals(file_item['content_type'], 'application/directory') def testStructure(self): - def assert_listing(path, list): + def assert_listing(path, file_list): files = self.env.container.files(parms={'path': path}) - self.assertEquals(sorted(list, cmp=locale.strcoll), files) + self.assertEquals(sorted(file_list, 
cmp=locale.strcoll), files) if not normalized_urls: assert_listing('/', ['/dir1/', '/dir2/', '/file1', '/file A']) assert_listing('/dir1', @@ -927,14 +918,14 @@ class TestFile(Base): def testCopy(self): # makes sure to test encoded characters" source_filename = 'dealde%2Fl04 011e%204c8df/flash.png' - file = self.env.container.file(source_filename) + file_item = self.env.container.file(source_filename) metadata = {} for i in range(1): metadata[Utils.create_ascii_name()] = Utils.create_name() - data = file.write_random() - file.sync_metadata(metadata) + data = file_item.write_random() + file_item.sync_metadata(metadata) dest_cont = self.env.account.container(Utils.create_name()) self.assert_(dest_cont.create()) @@ -945,21 +936,21 @@ class TestFile(Base): for prefix in ('', '/'): dest_filename = Utils.create_name() - file = self.env.container.file(source_filename) - file.copy('%s%s' % (prefix, cont), dest_filename) + file_item = self.env.container.file(source_filename) + file_item.copy('%s%s' % (prefix, cont), dest_filename) self.assert_(dest_filename in cont.files()) - file = cont.file(dest_filename) + file_item = cont.file(dest_filename) - self.assert_(data == file.read()) - self.assert_(file.initialize()) - self.assert_(metadata == file.metadata) + self.assert_(data == file_item.read()) + self.assert_(file_item.initialize()) + self.assert_(metadata == file_item.metadata) def testCopy404s(self): source_filename = Utils.create_name() - file = self.env.container.file(source_filename) - file.write_random() + file_item = self.env.container.file(source_filename) + file_item.write_random() dest_cont = self.env.account.container(Utils.create_name()) self.assert_(dest_cont.create()) @@ -967,60 +958,65 @@ class TestFile(Base): for prefix in ('', '/'): # invalid source container source_cont = self.env.account.container(Utils.create_name()) - file = source_cont.file(source_filename) - self.assert_(not file.copy('%s%s' % (prefix, self.env.container), - Utils.create_name())) + file_item = source_cont.file(source_filename) + self.assert_(not file_item.copy( + '%s%s' % (prefix, self.env.container), + Utils.create_name())) self.assert_status(404) - self.assert_(not file.copy('%s%s' % (prefix, dest_cont), - Utils.create_name())) + self.assert_(not file_item.copy('%s%s' % (prefix, dest_cont), + Utils.create_name())) self.assert_status(404) # invalid source object - file = self.env.container.file(Utils.create_name()) - self.assert_(not file.copy('%s%s' % (prefix, self.env.container), - Utils.create_name())) + file_item = self.env.container.file(Utils.create_name()) + self.assert_(not file_item.copy( + '%s%s' % (prefix, self.env.container), + Utils.create_name())) self.assert_status(404) - self.assert_(not file.copy('%s%s' % (prefix, dest_cont), - Utils.create_name())) + self.assert_(not file_item.copy('%s%s' % (prefix, dest_cont), + Utils.create_name())) self.assert_status(404) # invalid destination container - file = self.env.container.file(source_filename) - self.assert_(not file.copy('%s%s' % (prefix, Utils.create_name()), - Utils.create_name())) + file_item = self.env.container.file(source_filename) + self.assert_(not file_item.copy( + '%s%s' % (prefix, Utils.create_name()), + Utils.create_name())) def testCopyNoDestinationHeader(self): source_filename = Utils.create_name() - file = self.env.container.file(source_filename) - file.write_random() + file_item = self.env.container.file(source_filename) + file_item.write_random() - file = self.env.container.file(source_filename) - self.assert_(not 
file.copy(Utils.create_name(), Utils.create_name(), + file_item = self.env.container.file(source_filename) + self.assert_(not file_item.copy(Utils.create_name(), + Utils.create_name(), cfg={'no_destination': True})) self.assert_status(412) def testCopyDestinationSlashProblems(self): source_filename = Utils.create_name() - file = self.env.container.file(source_filename) - file.write_random() + file_item = self.env.container.file(source_filename) + file_item.write_random() # no slash - self.assert_(not file.copy(Utils.create_name(), Utils.create_name(), + self.assert_(not file_item.copy(Utils.create_name(), + Utils.create_name(), cfg={'destination': Utils.create_name()})) self.assert_status(412) def testCopyFromHeader(self): source_filename = Utils.create_name() - file = self.env.container.file(source_filename) + file_item = self.env.container.file(source_filename) metadata = {} for i in range(1): metadata[Utils.create_ascii_name()] = Utils.create_name() - file.metadata = metadata + file_item.metadata = metadata - data = file.write_random() + data = file_item.write_random() dest_cont = self.env.account.container(Utils.create_name()) self.assert_(dest_cont.create()) @@ -1031,35 +1027,35 @@ class TestFile(Base): for prefix in ('', '/'): dest_filename = Utils.create_name() - file = cont.file(dest_filename) - file.write(hdrs={'X-Copy-From': '%s%s/%s' % (prefix, - self.env.container.name, source_filename)}) + file_item = cont.file(dest_filename) + file_item.write(hdrs={'X-Copy-From': '%s%s/%s' % ( + prefix, self.env.container.name, source_filename)}) self.assert_(dest_filename in cont.files()) - file = cont.file(dest_filename) + file_item = cont.file(dest_filename) - self.assert_(data == file.read()) - self.assert_(file.initialize()) - self.assert_(metadata == file.metadata) + self.assert_(data == file_item.read()) + self.assert_(file_item.initialize()) + self.assert_(metadata == file_item.metadata) def testCopyFromHeader404s(self): source_filename = Utils.create_name() - file = self.env.container.file(source_filename) - file.write_random() + file_item = self.env.container.file(source_filename) + file_item.write_random() for prefix in ('', '/'): # invalid source container - file = self.env.container.file(Utils.create_name()) - self.assertRaises(ResponseError, file.write, + file_item = self.env.container.file(Utils.create_name()) + self.assertRaises(ResponseError, file_item.write, hdrs={'X-Copy-From': '%s%s/%s' % (prefix, Utils.create_name(), source_filename)}) self.assert_status(404) # invalid source object - file = self.env.container.file(Utils.create_name()) - self.assertRaises(ResponseError, file.write, + file_item = self.env.container.file(Utils.create_name()) + self.assertRaises(ResponseError, file_item.write, hdrs={'X-Copy-From': '%s%s/%s' % (prefix, self.env.container.name, Utils.create_name())}) @@ -1067,8 +1063,8 @@ class TestFile(Base): # invalid destination container dest_cont = self.env.account.container(Utils.create_name()) - file = dest_cont.file(Utils.create_name()) - self.assertRaises(ResponseError, file.write, + file_item = dest_cont.file(Utils.create_name()) + self.assertRaises(ResponseError, file_item.write, hdrs={'X-Copy-From': '%s%s/%s' % (prefix, self.env.container.name, source_filename)}) @@ -1078,13 +1074,13 @@ class TestFile(Base): limit = load_constraint('max_object_name_length') for l in (1, 10, limit / 2, limit - 1, limit, limit + 1, limit * 2): - file = self.env.container.file(create_limit_filename(l)) + file_item = self.env.container.file(create_limit_filename(l)) if 
l <= limit: - self.assert_(file.write()) + self.assert_(file_item.write()) self.assert_status(201) else: - self.assertRaises(ResponseError, file.write) + self.assertRaises(ResponseError, file_item.write) self.assert_status(400) def testQuestionMarkInName(self): @@ -1095,30 +1091,32 @@ class TestFile(Base): else: file_name = Utils.create_name(6) + '?' + Utils.create_name(6) - file = self.env.container.file(file_name) - self.assert_(file.write(cfg={'no_path_quote': True})) + file_item = self.env.container.file(file_name) + self.assert_(file_item.write(cfg={'no_path_quote': True})) self.assert_(file_name not in self.env.container.files()) self.assert_(file_name.split('?')[0] in self.env.container.files()) def testDeleteThen404s(self): - file = self.env.container.file(Utils.create_name()) - self.assert_(file.write_random()) + file_item = self.env.container.file(Utils.create_name()) + self.assert_(file_item.write_random()) self.assert_status(201) - self.assert_(file.delete()) + self.assert_(file_item.delete()) self.assert_status(204) - file.metadata = {Utils.create_ascii_name(): Utils.create_name()} + file_item.metadata = {Utils.create_ascii_name(): Utils.create_name()} - for method in (file.info, file.read, file.sync_metadata, - file.delete): + for method in (file_item.info, + file_item.read, + file_item.sync_metadata, + file_item.delete): self.assertRaises(ResponseError, method) self.assert_status(404) def testBlankMetadataName(self): - file = self.env.container.file(Utils.create_name()) - file.metadata = {'': Utils.create_name()} - self.assertRaises(ResponseError, file.write_random) + file_item = self.env.container.file(Utils.create_name()) + file_item.metadata = {'': Utils.create_name()} + self.assertRaises(ResponseError, file_item.write_random) self.assert_status(400) def testMetadataNumberLimit(self): @@ -1143,22 +1141,22 @@ class TestFile(Base): size += len(key) + len(val) metadata[key] = val - file = self.env.container.file(Utils.create_name()) - file.metadata = metadata + file_item = self.env.container.file(Utils.create_name()) + file_item.metadata = metadata if i <= number_limit: - self.assert_(file.write()) + self.assert_(file_item.write()) self.assert_status(201) - self.assert_(file.sync_metadata()) + self.assert_(file_item.sync_metadata()) self.assert_status((201, 202)) else: - self.assertRaises(ResponseError, file.write) + self.assertRaises(ResponseError, file_item.write) self.assert_status(400) - file.metadata = {} - self.assert_(file.write()) + file_item.metadata = {} + self.assert_(file_item.write()) self.assert_status(201) - file.metadata = metadata - self.assertRaises(ResponseError, file.sync_metadata) + file_item.metadata = metadata + self.assertRaises(ResponseError, file_item.sync_metadata) self.assert_status(400) def testContentTypeGuessing(self): @@ -1169,8 +1167,8 @@ class TestFile(Base): self.assert_(container.create()) for i in file_types.keys(): - file = container.file(Utils.create_name() + '.' + i) - file.write('', cfg={'no_content_type': True}) + file_item = container.file(Utils.create_name() + '.' 
+ i) + file_item.write('', cfg={'no_content_type': True}) file_types_read = {} for i in container.files(parms={'format': 'json'}): @@ -1181,13 +1179,13 @@ class TestFile(Base): def testRangedGets(self): file_length = 10000 range_size = file_length / 10 - file = self.env.container.file(Utils.create_name()) - data = file.write_random(file_length) + file_item = self.env.container.file(Utils.create_name()) + data = file_item.write_random(file_length) for i in range(0, file_length, range_size): range_string = 'bytes=%d-%d' % (i, i + range_size - 1) hdrs = {'Range': range_string} - self.assert_(data[i: i + range_size] == file.read(hdrs=hdrs), + self.assert_(data[i: i + range_size] == file_item.read(hdrs=hdrs), range_string) range_string = 'bytes=-%d' % (i) @@ -1198,45 +1196,45 @@ class TestFile(Base): # least one suffix-byte-range-spec with a NON-ZERO # suffix-length, then the byte-range-set is satisfiable. # Otherwise, the byte-range-set is unsatisfiable. - self.assertRaises(ResponseError, file.read, hdrs=hdrs) + self.assertRaises(ResponseError, file_item.read, hdrs=hdrs) self.assert_status(416) else: - self.assertEquals(file.read(hdrs=hdrs), data[-i:]) + self.assertEquals(file_item.read(hdrs=hdrs), data[-i:]) range_string = 'bytes=%d-' % (i) hdrs = {'Range': range_string} - self.assert_(file.read(hdrs=hdrs) == data[i - file_length:], + self.assert_(file_item.read(hdrs=hdrs) == data[i - file_length:], range_string) range_string = 'bytes=%d-%d' % (file_length + 1000, file_length + 2000) hdrs = {'Range': range_string} - self.assertRaises(ResponseError, file.read, hdrs=hdrs) + self.assertRaises(ResponseError, file_item.read, hdrs=hdrs) self.assert_status(416) range_string = 'bytes=%d-%d' % (file_length - 1000, file_length + 2000) hdrs = {'Range': range_string} - self.assert_(file.read(hdrs=hdrs) == data[-1000:], range_string) + self.assert_(file_item.read(hdrs=hdrs) == data[-1000:], range_string) hdrs = {'Range': '0-4'} - self.assert_(file.read(hdrs=hdrs) == data, range_string) + self.assert_(file_item.read(hdrs=hdrs) == data, range_string) # RFC 2616 14.35.1 # "If the entity is shorter than the specified suffix-length, the # entire entity-body is used." range_string = 'bytes=-%d' % (file_length + 10) hdrs = {'Range': range_string} - self.assert_(file.read(hdrs=hdrs) == data, range_string) + self.assert_(file_item.read(hdrs=hdrs) == data, range_string) def testRangedGetsWithLWSinHeader(self): #Skip this test until webob 1.2 can tolerate LWS in Range header. 
file_length = 10000 - file = self.env.container.file(Utils.create_name()) - data = file.write_random(file_length) + file_item = self.env.container.file(Utils.create_name()) + data = file_item.write_random(file_length) for r in ('BYTES=0-999', 'bytes = 0-999', 'BYTES = 0 - 999', 'bytes = 0 - 999', 'bytes=0 - 999', 'bytes=0-999 '): - self.assert_(file.read(hdrs={'Range': r}) == data[0:1000]) + self.assert_(file_item.read(hdrs={'Range': r}) == data[0:1000]) def testFileSizeLimit(self): limit = load_constraint('max_file_size') @@ -1245,39 +1243,40 @@ class TestFile(Base): for i in (limit - 100, limit - 10, limit - 1, limit, limit + 1, limit + 10, limit + 100): - file = self.env.container.file(Utils.create_name()) + file_item = self.env.container.file(Utils.create_name()) if i <= limit: - self.assert_(timeout(tsecs, file.write, + self.assert_(timeout(tsecs, file_item.write, cfg={'set_content_length': i})) else: self.assertRaises(ResponseError, timeout, tsecs, - file.write, cfg={'set_content_length': i}) + file_item.write, + cfg={'set_content_length': i}) def testNoContentLengthForPut(self): - file = self.env.container.file(Utils.create_name()) - self.assertRaises(ResponseError, file.write, 'testing', + file_item = self.env.container.file(Utils.create_name()) + self.assertRaises(ResponseError, file_item.write, 'testing', cfg={'no_content_length': True}) self.assert_status(411) def testDelete(self): - file = self.env.container.file(Utils.create_name()) - file.write_random(self.env.file_size) + file_item = self.env.container.file(Utils.create_name()) + file_item.write_random(self.env.file_size) - self.assert_(file.name in self.env.container.files()) - self.assert_(file.delete()) - self.assert_(file.name not in self.env.container.files()) + self.assert_(file_item.name in self.env.container.files()) + self.assert_(file_item.delete()) + self.assert_(file_item.name not in self.env.container.files()) def testBadHeaders(self): file_length = 100 # no content type on puts should be ok - file = self.env.container.file(Utils.create_name()) - file.write_random(file_length, cfg={'no_content_type': True}) + file_item = self.env.container.file(Utils.create_name()) + file_item.write_random(file_length, cfg={'no_content_type': True}) self.assert_status(201) # content length x - self.assertRaises(ResponseError, file.write_random, file_length, + self.assertRaises(ResponseError, file_item.write_random, file_length, hdrs={'Content-Length': 'X'}, cfg={'no_content_length': True}) self.assert_status(400) @@ -1290,7 +1289,7 @@ class TestFile(Base): self.assert_status(405) # bad range headers - self.assert_(len(file.read(hdrs={'Range': 'parsecs=8-12'})) == + self.assert_(len(file_item.read(hdrs={'Range': 'parsecs=8-12'})) == file_length) self.assert_status(200) @@ -1304,49 +1303,49 @@ class TestFile(Base): for l in lengths: metadata = {'a' * l[0]: 'b' * l[1]} - file = self.env.container.file(Utils.create_name()) - file.metadata = metadata + file_item = self.env.container.file(Utils.create_name()) + file_item.metadata = metadata if l[0] <= key_limit and l[1] <= value_limit: - self.assert_(file.write()) + self.assert_(file_item.write()) self.assert_status(201) - self.assert_(file.sync_metadata()) + self.assert_(file_item.sync_metadata()) else: - self.assertRaises(ResponseError, file.write) + self.assertRaises(ResponseError, file_item.write) self.assert_status(400) - file.metadata = {} - self.assert_(file.write()) + file_item.metadata = {} + self.assert_(file_item.write()) self.assert_status(201) - file.metadata = metadata - 
self.assertRaises(ResponseError, file.sync_metadata) + file_item.metadata = metadata + self.assertRaises(ResponseError, file_item.sync_metadata) self.assert_status(400) def testEtagWayoff(self): - file = self.env.container.file(Utils.create_name()) + file_item = self.env.container.file(Utils.create_name()) hdrs = {'etag': 'reallylonganddefinitelynotavalidetagvalue'} - self.assertRaises(ResponseError, file.write_random, hdrs=hdrs) + self.assertRaises(ResponseError, file_item.write_random, hdrs=hdrs) self.assert_status(422) def testFileCreate(self): for i in range(10): - file = self.env.container.file(Utils.create_name()) - data = file.write_random() + file_item = self.env.container.file(Utils.create_name()) + data = file_item.write_random() self.assert_status(201) - self.assert_(data == file.read()) + self.assert_(data == file_item.read()) self.assert_status(200) def testHead(self): file_name = Utils.create_name() content_type = Utils.create_name() - file = self.env.container.file(file_name) - file.content_type = content_type - file.write_random(self.env.file_size) + file_item = self.env.container.file(file_name) + file_item.content_type = content_type + file_item.write_random(self.env.file_size) - md5 = file.md5 + md5 = file_item.md5 - file = self.env.container.file(file_name) - info = file.info() + file_item = self.env.container.file(file_name) + info = file_item.info() self.assert_status(200) self.assertEquals(info['content_length'], self.env.file_size) @@ -1356,83 +1355,83 @@ class TestFile(Base): def testDeleteOfFileThatDoesNotExist(self): # in container that exists - file = self.env.container.file(Utils.create_name()) - self.assertRaises(ResponseError, file.delete) + file_item = self.env.container.file(Utils.create_name()) + self.assertRaises(ResponseError, file_item.delete) self.assert_status(404) # in container that does not exist container = self.env.account.container(Utils.create_name()) - file = container.file(Utils.create_name()) - self.assertRaises(ResponseError, file.delete) + file_item = container.file(Utils.create_name()) + self.assertRaises(ResponseError, file_item.delete) self.assert_status(404) def testHeadOnFileThatDoesNotExist(self): # in container that exists - file = self.env.container.file(Utils.create_name()) - self.assertRaises(ResponseError, file.info) + file_item = self.env.container.file(Utils.create_name()) + self.assertRaises(ResponseError, file_item.info) self.assert_status(404) # in container that does not exist container = self.env.account.container(Utils.create_name()) - file = container.file(Utils.create_name()) - self.assertRaises(ResponseError, file.info) + file_item = container.file(Utils.create_name()) + self.assertRaises(ResponseError, file_item.info) self.assert_status(404) def testMetadataOnPost(self): - file = self.env.container.file(Utils.create_name()) - file.write_random(self.env.file_size) + file_item = self.env.container.file(Utils.create_name()) + file_item.write_random(self.env.file_size) for i in range(10): metadata = {} - for i in range(10): + for j in range(10): metadata[Utils.create_ascii_name()] = Utils.create_name() - file.metadata = metadata - self.assert_(file.sync_metadata()) + file_item.metadata = metadata + self.assert_(file_item.sync_metadata()) self.assert_status((201, 202)) - file = self.env.container.file(file.name) - self.assert_(file.initialize()) + file_item = self.env.container.file(file_item.name) + self.assert_(file_item.initialize()) self.assert_status(200) - self.assertEquals(file.metadata, metadata) + 
self.assertEquals(file_item.metadata, metadata) def testGetContentType(self): file_name = Utils.create_name() content_type = Utils.create_name() - file = self.env.container.file(file_name) - file.content_type = content_type - file.write_random() + file_item = self.env.container.file(file_name) + file_item.content_type = content_type + file_item.write_random() - file = self.env.container.file(file_name) - file.read() + file_item = self.env.container.file(file_name) + file_item.read() - self.assertEquals(content_type, file.content_type) + self.assertEquals(content_type, file_item.content_type) def testGetOnFileThatDoesNotExist(self): # in container that exists - file = self.env.container.file(Utils.create_name()) - self.assertRaises(ResponseError, file.read) + file_item = self.env.container.file(Utils.create_name()) + self.assertRaises(ResponseError, file_item.read) self.assert_status(404) # in container that does not exist container = self.env.account.container(Utils.create_name()) - file = container.file(Utils.create_name()) - self.assertRaises(ResponseError, file.read) + file_item = container.file(Utils.create_name()) + self.assertRaises(ResponseError, file_item.read) self.assert_status(404) def testPostOnFileThatDoesNotExist(self): # in container that exists - file = self.env.container.file(Utils.create_name()) - file.metadata['Field'] = 'Value' - self.assertRaises(ResponseError, file.sync_metadata) + file_item = self.env.container.file(Utils.create_name()) + file_item.metadata['Field'] = 'Value' + self.assertRaises(ResponseError, file_item.sync_metadata) self.assert_status(404) # in container that does not exist container = self.env.account.container(Utils.create_name()) - file = container.file(Utils.create_name()) - file.metadata['Field'] = 'Value' - self.assertRaises(ResponseError, file.sync_metadata) + file_item = container.file(Utils.create_name()) + file_item.metadata['Field'] = 'Value' + self.assertRaises(ResponseError, file_item.sync_metadata) self.assert_status(404) def testMetadataOnPut(self): @@ -1441,14 +1440,14 @@ class TestFile(Base): for j in range(10): metadata[Utils.create_ascii_name()] = Utils.create_name() - file = self.env.container.file(Utils.create_name()) - file.metadata = metadata - file.write_random(self.env.file_size) + file_item = self.env.container.file(Utils.create_name()) + file_item.metadata = metadata + file_item.write_random(self.env.file_size) - file = self.env.container.file(file.name) - self.assert_(file.initialize()) + file_item = self.env.container.file(file_item.name) + self.assert_(file_item.initialize()) self.assert_status(200) - self.assertEquals(file.metadata, metadata) + self.assertEquals(file_item.metadata, metadata) def testSerialization(self): container = self.env.account.container(Utils.create_name()) @@ -1461,28 +1460,29 @@ class TestFile(Base): write_time = time.time() for f in files: - file = container.file(f['name']) - file.content_type = f['content_type'] - file.write_random(f['bytes']) + file_item = container.file(f['name']) + file_item.content_type = f['content_type'] + file_item.write_random(f['bytes']) - f['hash'] = file.md5 + f['hash'] = file_item.md5 f['json'] = False f['xml'] = False write_time = time.time() - write_time - for format in ['json', 'xml']: - for file in container.files(parms={'format': format}): + for format_type in ['json', 'xml']: + for file_item in container.files(parms={'format': format_type}): found = False for f in files: - if f['name'] != file['name']: + if f['name'] != file_item['name']: continue - 
self.assertEquals(file['content_type'], + self.assertEquals(file_item['content_type'], f['content_type']) - self.assertEquals(int(file['bytes']), f['bytes']) + self.assertEquals(int(file_item['bytes']), f['bytes']) - d = datetime.strptime(file['last_modified'].split('.')[0], - "%Y-%m-%dT%H:%M:%S") + d = datetime.strptime( + file_item['last_modified'].split('.')[0], + "%Y-%m-%dT%H:%M:%S") lm = time.mktime(d.timetuple()) if 'last_modified' in f: @@ -1490,17 +1490,17 @@ class TestFile(Base): else: f['last_modified'] = lm - f[format] = True + f[format_type] = True found = True self.assert_(found, 'Unexpected file %s found in ' - '%s listing' % (file['name'], format)) + '%s listing' % (file_item['name'], format_type)) headers = dict(self.env.conn.response.getheaders()) - if format == 'json': + if format_type == 'json': self.assertEquals(headers['content-type'], 'application/json; charset=utf-8') - elif format == 'xml': + elif format_type == 'xml': self.assertEquals(headers['content-type'], 'application/xml; charset=utf-8') @@ -1510,35 +1510,35 @@ class TestFile(Base): 'modified times should be less than time to write files') for f in files: - for format in ['json', 'xml']: - self.assert_(f[format], 'File %s not found in %s listing' - % (f['name'], format)) + for format_type in ['json', 'xml']: + self.assert_(f[format_type], 'File %s not found in %s listing' + % (f['name'], format_type)) def testStackedOverwrite(self): - file = self.env.container.file(Utils.create_name()) + file_item = self.env.container.file(Utils.create_name()) for i in range(1, 11): - data = file.write_random(512) - file.write(data) + data = file_item.write_random(512) + file_item.write(data) - self.assert_(file.read() == data) + self.assert_(file_item.read() == data) def testTooLongName(self): - file = self.env.container.file('x' * 1025) - self.assertRaises(ResponseError, file.write) + file_item = self.env.container.file('x' * 1025) + self.assertRaises(ResponseError, file_item.write) self.assert_status(400) def testZeroByteFile(self): - file = self.env.container.file(Utils.create_name()) + file_item = self.env.container.file(Utils.create_name()) - self.assert_(file.write('')) - self.assert_(file.name in self.env.container.files()) - self.assert_(file.read() == '') + self.assert_(file_item.write('')) + self.assert_(file_item.name in self.env.container.files()) + self.assert_(file_item.read() == '') def testEtagResponse(self): - file = self.env.container.file(Utils.create_name()) + file_item = self.env.container.file(Utils.create_name()) - data = StringIO.StringIO(file.write_random(512)) + data = StringIO.StringIO(file_item.write_random(512)) etag = File.compute_md5sum(data) headers = dict(self.env.conn.response.getheaders()) @@ -1554,15 +1554,15 @@ class TestFile(Base): etag = File.compute_md5sum(data) for i in (1, 10, 100, 1000): - file = self.env.container.file(Utils.create_name()) + file_item = self.env.container.file(Utils.create_name()) for j in chunks(data, i): - file.chunked_write(j) + file_item.chunked_write(j) - self.assert_(file.chunked_write()) - self.assert_(data == file.read()) + self.assert_(file_item.chunked_write()) + self.assert_(data == file_item.read()) - info = file.info() + info = file_item.info() self.assertEquals(etag, info['etag']) @@ -1588,9 +1588,9 @@ class TestFileComparisonEnv: cls.file_size = 128 cls.files = list() for x in range(cls.file_count): - file = cls.container.file(Utils.create_name()) - file.write_random(cls.file_size) - cls.files.append(file) + file_item = 
cls.container.file(Utils.create_name()) + file_item.write_random(cls.file_size) + cls.files.append(file_item) cls.time_old = time.asctime(time.localtime(time.time() - 86400)) cls.time_new = time.asctime(time.localtime(time.time() + 86400)) @@ -1601,55 +1601,55 @@ class TestFileComparison(Base): set_up = False def testIfMatch(self): - for file in self.env.files: - hdrs = {'If-Match': file.md5} - self.assert_(file.read(hdrs=hdrs)) + for file_item in self.env.files: + hdrs = {'If-Match': file_item.md5} + self.assert_(file_item.read(hdrs=hdrs)) hdrs = {'If-Match': 'bogus'} - self.assertRaises(ResponseError, file.read, hdrs=hdrs) + self.assertRaises(ResponseError, file_item.read, hdrs=hdrs) self.assert_status(412) def testIfNoneMatch(self): - for file in self.env.files: + for file_item in self.env.files: hdrs = {'If-None-Match': 'bogus'} - self.assert_(file.read(hdrs=hdrs)) + self.assert_(file_item.read(hdrs=hdrs)) - hdrs = {'If-None-Match': file.md5} - self.assertRaises(ResponseError, file.read, hdrs=hdrs) + hdrs = {'If-None-Match': file_item.md5} + self.assertRaises(ResponseError, file_item.read, hdrs=hdrs) self.assert_status(304) def testIfModifiedSince(self): - for file in self.env.files: + for file_item in self.env.files: hdrs = {'If-Modified-Since': self.env.time_old} - self.assert_(file.read(hdrs=hdrs)) + self.assert_(file_item.read(hdrs=hdrs)) hdrs = {'If-Modified-Since': self.env.time_new} - self.assertRaises(ResponseError, file.read, hdrs=hdrs) + self.assertRaises(ResponseError, file_item.read, hdrs=hdrs) self.assert_status(304) def testIfUnmodifiedSince(self): - for file in self.env.files: + for file_item in self.env.files: hdrs = {'If-Unmodified-Since': self.env.time_new} - self.assert_(file.read(hdrs=hdrs)) + self.assert_(file_item.read(hdrs=hdrs)) hdrs = {'If-Unmodified-Since': self.env.time_old} - self.assertRaises(ResponseError, file.read, hdrs=hdrs) + self.assertRaises(ResponseError, file_item.read, hdrs=hdrs) self.assert_status(412) def testIfMatchAndUnmodified(self): - for file in self.env.files: - hdrs = {'If-Match': file.md5, + for file_item in self.env.files: + hdrs = {'If-Match': file_item.md5, 'If-Unmodified-Since': self.env.time_new} - self.assert_(file.read(hdrs=hdrs)) + self.assert_(file_item.read(hdrs=hdrs)) hdrs = {'If-Match': 'bogus', 'If-Unmodified-Since': self.env.time_new} - self.assertRaises(ResponseError, file.read, hdrs=hdrs) + self.assertRaises(ResponseError, file_item.read, hdrs=hdrs) self.assert_status(412) - hdrs = {'If-Match': file.md5, + hdrs = {'If-Match': file_item.md5, 'If-Unmodified-Since': self.env.time_old} - self.assertRaises(ResponseError, file.read, hdrs=hdrs) + self.assertRaises(ResponseError, file_item.read, hdrs=hdrs) self.assert_status(412) diff --git a/test/functionalnosetests/swift_testing.py b/test/functionalnosetests/swift_testing.py index c49d9cd..50abc8e 100644 --- a/test/functionalnosetests/swift_testing.py +++ b/test/functionalnosetests/swift_testing.py @@ -1,19 +1,4 @@ -# Copyright (c) 2010-2013 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# Copyright (c) 2013 Red Hat, Inc. +# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from httplib import HTTPException import os import socket import sys @@ -35,7 +21,7 @@ from time import sleep from test import get_config -from swiftclient import get_auth, http_connection, HTTPException +from swiftclient import get_auth, http_connection conf = get_config('func_test') web_front_end = conf.get('web_front_end', 'integral') @@ -57,8 +43,8 @@ if conf: if 'auth_prefix' not in conf: conf['auth_prefix'] = '/' try: - swift_test_auth += \ - '://%(auth_host)s:%(auth_port)s%(auth_prefix)s' % conf + suffix = '://%(auth_host)s:%(auth_port)s%(auth_prefix)s' % conf + swift_test_auth += suffix except KeyError: pass # skip @@ -71,17 +57,17 @@ if conf: swift_test_user[0] = '%(username)s' % conf swift_test_key[0] = conf['password'] try: - swift_test_user[1] = '%s%s' % \ - ('%s:' % conf['account2'] if 'account2' in conf else '', + swift_test_user[1] = '%s%s' % ( + '%s:' % conf['account2'] if 'account2' in conf else '', conf['username2']) swift_test_key[1] = conf['password2'] - except KeyError, err: + except KeyError as err: pass # old conf, no second account tests can be run try: swift_test_user[2] = '%s%s' % ('%s:' % conf['account'] if 'account' in conf else '', conf['username3']) swift_test_key[2] = conf['password3'] - except KeyError, err: + except KeyError as err: pass # old conf, no third account tests can be run for _ in range(3): @@ -99,7 +85,8 @@ if conf: swift_test_key[2] = conf['password3'] for _ in range(3): - swift_test_perm[_] = swift_test_tenant[_] + ':' + swift_test_user[_] + swift_test_perm[_] = swift_test_tenant[_] + ':' \ + + swift_test_user[_] skip = not all([swift_test_auth, swift_test_user[0], swift_test_key[0]]) if skip: @@ -108,12 +95,12 @@ if skip: skip2 = not all([not skip, swift_test_user[1], swift_test_key[1]]) if not skip and skip2: print >>sys.stderr, \ - 'SKIPPING SECOND ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM' + 'SKIPPING SECOND ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM' skip3 = not all([not skip, swift_test_user[2], swift_test_key[2]]) if not skip and skip3: print >>sys.stderr, \ - 'SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM' + 'SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM' class AuthError(Exception): @@ -160,7 +147,8 @@ def retry(func, *args, **kwargs): parsed[use_account], conn[use_account] = \ http_connection(url[use_account]) return func(url[use_account], token[use_account], - parsed[use_account], conn[use_account], *args, **kwargs) + parsed[use_account], conn[use_account], + *args, **kwargs) except (socket.error, HTTPException): if attempts > retries: raise diff --git a/test/functionalnosetests/test_account.py b/test/functionalnosetests/test_account.py index d28ff2f..b2f743f 100755 --- a/test/functionalnosetests/test_account.py +++ b/test/functionalnosetests/test_account.py @@ -1,6 +1,6 @@ #!/usr/bin/python -# Copyright (c) 2010-2013 OpenStack, LLC. +# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -15,22 +15,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Copyright (c) 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - import unittest from nose import SkipTest @@ -45,16 +29,20 @@ class TestAccount(unittest.TestCase): def test_metadata(self): if skip: raise SkipTest + def post(url, token, parsed, conn, value): conn.request('POST', parsed.path, '', - {'X-Auth-Token': token, 'X-Account-Meta-Test': value}) + {'X-Auth-Token': token, 'X-Account-Meta-Test': value}) return check_response(conn) + def head(url, token, parsed, conn): conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token}) return check_response(conn) + def get(url, token, parsed, conn): conn.request('GET', parsed.path, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(post, '') resp.read() self.assertEquals(resp.status, 204) @@ -121,13 +109,16 @@ class TestAccount(unittest.TestCase): def test_multi_metadata(self): if skip: raise SkipTest + def post(url, token, parsed, conn, name, value): conn.request('POST', parsed.path, '', {'X-Auth-Token': token, name: value}) return check_response(conn) + def head(url, token, parsed, conn): conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(post, 'X-Account-Meta-One', '1') resp.read() self.assertEquals(resp.status, 204) @@ -147,26 +138,30 @@ class TestAccount(unittest.TestCase): def test_bad_metadata(self): if skip: raise SkipTest + def post(url, token, parsed, conn, extra_headers): headers = {'X-Auth-Token': token} headers.update(extra_headers) conn.request('POST', parsed.path, '', headers) return check_response(conn) + resp = retry(post, - {'X-Account-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'}) + {'X-Account-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'}) resp.read() self.assertEquals(resp.status, 204) - resp = retry(post, - {'X-Account-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'}) + resp = retry( + post, + {'X-Account-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'}) resp.read() self.assertEquals(resp.status, 400) resp = retry(post, - {'X-Account-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH}) + {'X-Account-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH}) resp.read() self.assertEquals(resp.status, 204) - resp = retry(post, - {'X-Account-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)}) + resp = retry( + post, + {'X-Account-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)}) resp.read() self.assertEquals(resp.status, 400) diff --git a/test/functionalnosetests/test_container.py b/test/functionalnosetests/test_container.py index af78a7a..15f7fc1 100755 --- a/test/functionalnosetests/test_container.py +++ b/test/functionalnosetests/test_container.py @@ -1,21 +1,6 @@ #!/usr/bin/python -# Copyright (c) 2010-2013 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Copyright (c) 2013 Red Hat, Inc. +# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -39,7 +24,7 @@ from swift.common.constraints import MAX_META_COUNT, MAX_META_NAME_LENGTH, \ MAX_META_OVERALL_SIZE, MAX_META_VALUE_LENGTH from swift_testing import check_response, retry, skip, skip2, skip3, \ - swift_test_perm, web_front_end + swift_test_perm, web_front_end class TestContainer(unittest.TestCase): @@ -48,10 +33,12 @@ class TestContainer(unittest.TestCase): if skip: raise SkipTest self.name = uuid4().hex + def put(url, token, parsed, conn): conn.request('PUT', parsed.path + '/' + self.name, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(put) resp.read() self.assertEquals(resp.status, 201) @@ -59,15 +46,18 @@ class TestContainer(unittest.TestCase): def tearDown(self): if skip: raise SkipTest + def get(url, token, parsed, conn): conn.request('GET', parsed.path + '/' + self.name + '?format=json', '', {'X-Auth-Token': token}) return check_response(conn) + def delete(url, token, parsed, conn, obj): conn.request('DELETE', '/'.join([parsed.path, self.name, obj['name']]), '', {'X-Auth-Token': token}) return check_response(conn) + while True: resp = retry(get) body = resp.read() @@ -79,10 +69,12 @@ class TestContainer(unittest.TestCase): resp = retry(delete, obj) resp.read() self.assertEquals(resp.status, 204) + def delete(url, token, parsed, conn): conn.request('DELETE', parsed.path + '/' + self.name, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(delete) resp.read() self.assertEquals(resp.status, 204) @@ -90,14 +82,17 @@ class TestContainer(unittest.TestCase): def test_multi_metadata(self): if skip: raise SkipTest + def post(url, token, parsed, conn, name, value): conn.request('POST', parsed.path + '/' + self.name, '', - {'X-Auth-Token': token, name: value}) + {'X-Auth-Token': token, name: value}) return check_response(conn) + def head(url, token, parsed, conn): conn.request('HEAD', parsed.path + '/' + self.name, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(post, 'X-Container-Meta-One', '1') resp.read() self.assertEquals(resp.status, 204) @@ -154,27 +149,33 @@ class TestContainer(unittest.TestCase): resp.read() self.assert_(resp.status in (200, 204), resp.status) self.assertEquals(resp.getheader(uni_key.encode('utf-8')), - uni_value.encode('utf-8')) + uni_value.encode('utf-8')) def test_PUT_metadata(self): if skip: raise SkipTest + def put(url, token, parsed, conn, name, value): conn.request('PUT', parsed.path + '/' + name, '', - {'X-Auth-Token': token, 'X-Container-Meta-Test': value}) + {'X-Auth-Token': token, + 'X-Container-Meta-Test': value}) return check_response(conn) + def head(url, token, parsed, conn, name): conn.request('HEAD', parsed.path + '/' + name, '', {'X-Auth-Token': token}) return check_response(conn) + def get(url, token, parsed, conn, name): conn.request('GET', parsed.path + '/' + name, '', {'X-Auth-Token': token}) return check_response(conn) + def 
delete(url, token, parsed, conn, name): conn.request('DELETE', parsed.path + '/' + name, '', {'X-Auth-Token': token}) return check_response(conn) + name = uuid4().hex resp = retry(put, name, 'Value') resp.read() @@ -210,18 +211,23 @@ class TestContainer(unittest.TestCase): def test_POST_metadata(self): if skip: raise SkipTest + def post(url, token, parsed, conn, value): conn.request('POST', parsed.path + '/' + self.name, '', - {'X-Auth-Token': token, 'X-Container-Meta-Test': value}) + {'X-Auth-Token': token, + 'X-Container-Meta-Test': value}) return check_response(conn) + def head(url, token, parsed, conn): conn.request('HEAD', parsed.path + '/' + self.name, '', {'X-Auth-Token': token}) return check_response(conn) + def get(url, token, parsed, conn): conn.request('GET', parsed.path + '/' + self.name, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(head) resp.read() self.assert_(resp.status in (200, 204), resp.status) @@ -245,26 +251,31 @@ class TestContainer(unittest.TestCase): def test_PUT_bad_metadata(self): if skip: raise SkipTest + def put(url, token, parsed, conn, name, extra_headers): headers = {'X-Auth-Token': token} headers.update(extra_headers) conn.request('PUT', parsed.path + '/' + name, '', headers) return check_response(conn) + def delete(url, token, parsed, conn, name): conn.request('DELETE', parsed.path + '/' + name, '', {'X-Auth-Token': token}) return check_response(conn) + name = uuid4().hex - resp = retry(put, name, - {'X-Container-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'}) + resp = retry( + put, name, + {'X-Container-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'}) resp.read() self.assertEquals(resp.status, 201) resp = retry(delete, name) resp.read() self.assertEquals(resp.status, 204) name = uuid4().hex - resp = retry(put, name, - {'X-Container-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'}) + resp = retry( + put, name, + {'X-Container-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'}) resp.read() self.assertEquals(resp.status, 400) resp = retry(delete, name) @@ -272,16 +283,18 @@ class TestContainer(unittest.TestCase): self.assertEquals(resp.status, 404) name = uuid4().hex - resp = retry(put, name, - {'X-Container-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH}) + resp = retry( + put, name, + {'X-Container-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH}) resp.read() self.assertEquals(resp.status, 201) resp = retry(delete, name) resp.read() self.assertEquals(resp.status, 204) name = uuid4().hex - resp = retry(put, name, - {'X-Container-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)}) + resp = retry( + put, name, + {'X-Container-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)}) resp.read() self.assertEquals(resp.status, 400) resp = retry(delete, name) @@ -340,26 +353,32 @@ class TestContainer(unittest.TestCase): def test_POST_bad_metadata(self): if skip: raise SkipTest + def post(url, token, parsed, conn, extra_headers): headers = {'X-Auth-Token': token} headers.update(extra_headers) conn.request('POST', parsed.path + '/' + self.name, '', headers) return check_response(conn) - resp = retry(post, - {'X-Container-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'}) + + resp = retry( + post, + {'X-Container-Meta-' + ('k' * MAX_META_NAME_LENGTH): 'v'}) resp.read() self.assertEquals(resp.status, 204) - resp = retry(post, - {'X-Container-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'}) + resp = retry( + post, + {'X-Container-Meta-' + ('k' * (MAX_META_NAME_LENGTH + 1)): 'v'}) resp.read() self.assertEquals(resp.status, 400) - resp = retry(post, - 
{'X-Container-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH}) + resp = retry( + post, + {'X-Container-Meta-Too-Long': 'k' * MAX_META_VALUE_LENGTH}) resp.read() self.assertEquals(resp.status, 204) - resp = retry(post, - {'X-Container-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)}) + resp = retry( + post, + {'X-Container-Meta-Too-Long': 'k' * (MAX_META_VALUE_LENGTH + 1)}) resp.read() self.assertEquals(resp.status, 400) @@ -399,36 +418,42 @@ class TestContainer(unittest.TestCase): def test_public_container(self): if skip: raise SkipTest + def get(url, token, parsed, conn): conn.request('GET', parsed.path + '/' + self.name) return check_response(conn) + try: resp = retry(get) raise Exception('Should not have been able to GET') - except Exception, err: + except Exception as err: self.assert_(str(err).startswith('No result after '), err) + def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.name, '', {'X-Auth-Token': token, 'X-Container-Read': '.r:*,.rlistings'}) return check_response(conn) + resp = retry(post) resp.read() self.assertEquals(resp.status, 204) resp = retry(get) resp.read() self.assertEquals(resp.status, 204) + def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.name, '', {'X-Auth-Token': token, 'X-Container-Read': ''}) return check_response(conn) + resp = retry(post) resp.read() self.assertEquals(resp.status, 204) try: resp = retry(get) raise Exception('Should not have been able to GET') - except Exception, err: + except Exception as err: self.assert_(str(err).startswith('No result after '), err) def test_cross_account_container(self): @@ -436,27 +461,34 @@ class TestContainer(unittest.TestCase): raise SkipTest # Obtain the first account's string first_account = ['unknown'] + def get1(url, token, parsed, conn): first_account[0] = parsed.path conn.request('HEAD', parsed.path + '/' + self.name, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(get1) resp.read() + # Ensure we can't access the container with the second account def get2(url, token, parsed, conn): conn.request('GET', first_account[0] + '/' + self.name, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(get2, use_account=2) resp.read() self.assertEquals(resp.status, 403) + # Make the container accessible by the second account def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.name, '', - {'X-Auth-Token': token, 'X-Container-Read': swift_test_perm[1], - 'X-Container-Write': swift_test_perm[1]}) + {'X-Auth-Token': token, + 'X-Container-Read': swift_test_perm[1], + 'X-Container-Write': swift_test_perm[1]}) return check_response(conn) + resp = retry(post) resp.read() self.assertEquals(resp.status, 204) @@ -464,12 +496,14 @@ class TestContainer(unittest.TestCase): resp = retry(get2, use_account=2) resp.read() self.assertEquals(resp.status, 204) + # Make the container private again def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.name, '', {'X-Auth-Token': token, 'X-Container-Read': '', 'X-Container-Write': ''}) return check_response(conn) + resp = retry(post) resp.read() self.assertEquals(resp.status, 204) @@ -483,27 +517,33 @@ class TestContainer(unittest.TestCase): raise SkipTest # Obtain the first account's string first_account = ['unknown'] + def get1(url, token, parsed, conn): first_account[0] = parsed.path conn.request('HEAD', parsed.path + '/' + self.name, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(get1) resp.read() + # 
Ensure we can't access the container with the second account def get2(url, token, parsed, conn): conn.request('GET', first_account[0] + '/' + self.name, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(get2, use_account=2) resp.read() self.assertEquals(resp.status, 403) + # Make the container completely public def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.name, '', {'X-Auth-Token': token, 'X-Container-Read': '.r:*,.rlistings'}) return check_response(conn) + resp = retry(post) resp.read() self.assertEquals(resp.status, 204) @@ -511,20 +551,24 @@ class TestContainer(unittest.TestCase): resp = retry(get2, use_account=2) resp.read() self.assertEquals(resp.status, 204) + # But we shouldn't be able to write with the second account def put2(url, token, parsed, conn): conn.request('PUT', first_account[0] + '/' + self.name + '/object', 'test object', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(put2, use_account=2) resp.read() self.assertEquals(resp.status, 403) + # Now make the container also writeable by the second account def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.name, '', - {'X-Auth-Token': token, - 'X-Container-Write': swift_test_perm[1]}) + {'X-Auth-Token': token, + 'X-Container-Write': swift_test_perm[1]}) return check_response(conn) + resp = retry(post) resp.read() self.assertEquals(resp.status, 204) @@ -542,26 +586,33 @@ class TestContainer(unittest.TestCase): raise SkipTest # Obtain the first account's string first_account = ['unknown'] + def get1(url, token, parsed, conn): first_account[0] = parsed.path conn.request('HEAD', parsed.path + '/' + self.name, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(get1) resp.read() + # Ensure we can't access the container with the third account def get3(url, token, parsed, conn): conn.request('GET', first_account[0] + '/' + self.name, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(get3, use_account=3) resp.read() self.assertEquals(resp.status, 403) + # Make the container accessible by the third account def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.name, '', - {'X-Auth-Token': token, 'X-Container-Read': swift_test_perm[2]}) + {'X-Auth-Token': token, + 'X-Container-Read': swift_test_perm[2]}) return check_response(conn) + resp = retry(post) resp.read() self.assertEquals(resp.status, 204) @@ -569,20 +620,24 @@ class TestContainer(unittest.TestCase): resp = retry(get3, use_account=3) resp.read() self.assertEquals(resp.status, 204) + # But we shouldn't be able to write with the third account def put3(url, token, parsed, conn): conn.request('PUT', first_account[0] + '/' + self.name + '/object', 'test object', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(put3, use_account=3) resp.read() self.assertEquals(resp.status, 403) + # Now make the container also writeable by the third account def post(url, token, parsed, conn): conn.request('POST', parsed.path + '/' + self.name, '', {'X-Auth-Token': token, 'X-Container-Write': swift_test_perm[2]}) return check_response(conn) + resp = retry(post) resp.read() self.assertEquals(resp.status, 204) @@ -601,9 +656,10 @@ class TestContainer(unittest.TestCase): def put(url, token, parsed, conn): container_name = 'X' * 2048 - conn.request('PUT', '%s/%s' % (parsed.path, - container_name), 'there', {'X-Auth-Token': token}) + conn.request('PUT', '%s/%s' % (parsed.path, container_name), + 'there', 
{'X-Auth-Token': token}) return check_response(conn) + resp = retry(put) resp.read() self.assertEquals(resp.status, 400) @@ -618,6 +674,7 @@ class TestContainer(unittest.TestCase): conn.request('PUT', '%s/abc%%00def' % parsed.path, '', {'X-Auth-Token': token}) return check_response(conn) + resp = retry(put) if (web_front_end == 'apache2'): self.assertEquals(resp.status, 404) diff --git a/test/functionalnosetests/test_object.py b/test/functionalnosetests/test_object.py index 3972aaf..97cd8d0 100755 --- a/test/functionalnosetests/test_object.py +++ b/test/functionalnosetests/test_object.py @@ -1,21 +1,6 @@ #!/usr/bin/python -# Copyright (c) 2010-2013 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Copyright (c) 2013 Red Hat, Inc. +# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -55,8 +40,9 @@ class TestObject(unittest.TestCase): self.obj = uuid4().hex def put(url, token, parsed, conn): - conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container, - self.obj), 'test', {'X-Auth-Token': token}) + conn.request('PUT', '%s/%s/%s' % ( + parsed.path, self.container, self.obj), 'test', + {'X-Auth-Token': token}) return check_response(conn) resp = retry(put) resp.read() @@ -182,7 +168,7 @@ class TestObject(unittest.TestCase): try: resp = retry(get) raise Exception('Should not have been able to GET') - except Exception, err: + except Exception as err: self.assert_(str(err).startswith('No result after ')) def post(url, token, parsed, conn): @@ -207,7 +193,7 @@ class TestObject(unittest.TestCase): try: resp = retry(get) raise Exception('Should not have been able to GET') - except Exception, err: + except Exception as err: self.assert_(str(err).startswith('No result after ')) def test_private_object(self): @@ -216,9 +202,9 @@ class TestObject(unittest.TestCase): # Ensure we can't access the object with the third account def get(url, token, parsed, conn): - conn.request('GET', '%s/%s/%s' % (parsed.path, self.container, - self.obj), '', - {'X-Auth-Token': token}) + conn.request('GET', '%s/%s/%s' % ( + parsed.path, self.container, self.obj), '', + {'X-Auth-Token': token}) return check_response(conn) resp = retry(get, use_account=3) resp.read() @@ -228,11 +214,11 @@ class TestObject(unittest.TestCase): shared_container = uuid4().hex def put(url, token, parsed, conn): - conn.request('PUT', '%s/%s' % (parsed.path, - shared_container), '', - {'X-Auth-Token': token, - 'X-Container-Read': swift_test_perm[2], - 'X-Container-Write': swift_test_perm[2]}) + conn.request('PUT', '%s/%s' % ( + parsed.path, shared_container), '', + {'X-Auth-Token': token, + 'X-Container-Read': swift_test_perm[2], + 'X-Container-Write': swift_test_perm[2]}) return check_response(conn) resp = retry(put) resp.read() @@ -240,13 +226,11 @@ class TestObject(unittest.TestCase): # verify third account can not copy from private container def copy(url, token, parsed, conn): - 
conn.request('PUT', '%s/%s/%s' % (parsed.path, - shared_container, - 'private_object'), - '', {'X-Auth-Token': token, - 'Content-Length': '0', - 'X-Copy-From': '%s/%s' % (self.container, - self.obj)}) + conn.request('PUT', '%s/%s/%s' % ( + parsed.path, shared_container, 'private_object'), '', + {'X-Auth-Token': token, + 'Content-Length': '0', + 'X-Copy-From': '%s/%s' % (self.container, self.obj)}) return check_response(conn) resp = retry(copy, use_account=3) resp.read() @@ -254,8 +238,9 @@ class TestObject(unittest.TestCase): # verify third account can write "obj1" to shared container def put(url, token, parsed, conn): - conn.request('PUT', '%s/%s/%s' % (parsed.path, shared_container, - 'obj1'), 'test', {'X-Auth-Token': token}) + conn.request('PUT', '%s/%s/%s' % ( + parsed.path, shared_container, 'obj1'), 'test', + {'X-Auth-Token': token}) return check_response(conn) resp = retry(put, use_account=3) resp.read() @@ -263,12 +248,10 @@ class TestObject(unittest.TestCase): # verify third account can copy "obj1" to shared container def copy2(url, token, parsed, conn): - conn.request('COPY', '%s/%s/%s' % (parsed.path, - shared_container, - 'obj1'), - '', {'X-Auth-Token': token, - 'Destination': '%s/%s' % (shared_container, - 'obj1')}) + conn.request('COPY', '%s/%s/%s' % ( + parsed.path, shared_container, 'obj1'), '', + {'X-Auth-Token': token, + 'Destination': '%s/%s' % (shared_container, 'obj1')}) return check_response(conn) resp = retry(copy2, use_account=3) resp.read() @@ -276,12 +259,11 @@ class TestObject(unittest.TestCase): # verify third account STILL can not copy from private container def copy3(url, token, parsed, conn): - conn.request('COPY', '%s/%s/%s' % (parsed.path, - self.container, - self.obj), - '', {'X-Auth-Token': token, - 'Destination': '%s/%s' % (shared_container, - 'private_object')}) + conn.request('COPY', '%s/%s/%s' % ( + parsed.path, self.container, self.obj), '', + {'X-Auth-Token': token, + 'Destination': '%s/%s' % (shared_container, + 'private_object')}) return check_response(conn) resp = retry(copy3, use_account=3) resp.read() @@ -289,8 +271,9 @@ class TestObject(unittest.TestCase): # clean up "obj1" def delete(url, token, parsed, conn): - conn.request('DELETE', '%s/%s/%s' % (parsed.path, shared_container, - 'obj1'), '', {'X-Auth-Token': token}) + conn.request('DELETE', '%s/%s/%s' % ( + parsed.path, shared_container, 'obj1'), '', + {'X-Auth-Token': token}) return check_response(conn) resp = retry(delete) resp.read() @@ -316,8 +299,8 @@ class TestObject(unittest.TestCase): # Upload the first set of segments def put(url, token, parsed, conn, objnum): - conn.request('PUT', '%s/%s/segments1/%s' % (parsed.path, - self.container, str(objnum)), segments1[objnum], + conn.request('PUT', '%s/%s/segments1/%s' % ( + parsed.path, self.container, str(objnum)), segments1[objnum], {'X-Auth-Token': token}) return check_response(conn) for objnum in xrange(len(segments1)): @@ -327,10 +310,11 @@ class TestObject(unittest.TestCase): # Upload the manifest def put(url, token, parsed, conn): - conn.request('PUT', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token, - 'X-Object-Manifest': '%s/segments1/' % self.container, - 'Content-Type': 'text/jibberish', 'Content-Length': '0'}) + conn.request('PUT', '%s/%s/manifest' % ( + parsed.path, self.container), '', { + 'X-Auth-Token': token, + 'X-Object-Manifest': '%s/segments1/' % self.container, + 'Content-Type': 'text/jibberish', 'Content-Length': '0'}) return check_response(conn) resp = retry(put) resp.read() @@ -338,8 
+322,8 @@ class TestObject(unittest.TestCase): # Get the manifest (should get all the segments as the body) def get(url, token, parsed, conn): - conn.request('GET', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token}) + conn.request('GET', '%s/%s/manifest' % ( + parsed.path, self.container), '', {'X-Auth-Token': token}) return check_response(conn) resp = retry(get) self.assertEquals(resp.read(), ''.join(segments1)) @@ -348,9 +332,9 @@ class TestObject(unittest.TestCase): # Get with a range at the start of the second segment def get(url, token, parsed, conn): - conn.request('GET', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token, 'Range': - 'bytes=3-'}) + conn.request('GET', '%s/%s/manifest' % ( + parsed.path, self.container), '', { + 'X-Auth-Token': token, 'Range': 'bytes=3-'}) return check_response(conn) resp = retry(get) self.assertEquals(resp.read(), ''.join(segments1[1:])) @@ -358,9 +342,9 @@ class TestObject(unittest.TestCase): # Get with a range in the middle of the second segment def get(url, token, parsed, conn): - conn.request('GET', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token, 'Range': - 'bytes=5-'}) + conn.request('GET', '%s/%s/manifest' % ( + parsed.path, self.container), '', { + 'X-Auth-Token': token, 'Range': 'bytes=5-'}) return check_response(conn) resp = retry(get) self.assertEquals(resp.read(), ''.join(segments1)[5:]) @@ -368,9 +352,9 @@ class TestObject(unittest.TestCase): # Get with a full start and stop range def get(url, token, parsed, conn): - conn.request('GET', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token, 'Range': - 'bytes=5-10'}) + conn.request('GET', '%s/%s/manifest' % ( + parsed.path, self.container), '', { + 'X-Auth-Token': token, 'Range': 'bytes=5-10'}) return check_response(conn) resp = retry(get) self.assertEquals(resp.read(), ''.join(segments1)[5:11]) @@ -378,8 +362,8 @@ class TestObject(unittest.TestCase): # Upload the second set of segments def put(url, token, parsed, conn, objnum): - conn.request('PUT', '%s/%s/segments2/%s' % (parsed.path, - self.container, str(objnum)), segments2[objnum], + conn.request('PUT', '%s/%s/segments2/%s' % ( + parsed.path, self.container, str(objnum)), segments2[objnum], {'X-Auth-Token': token}) return check_response(conn) for objnum in xrange(len(segments2)): @@ -389,8 +373,8 @@ class TestObject(unittest.TestCase): # Get the manifest (should still be the first segments of course) def get(url, token, parsed, conn): - conn.request('GET', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token}) + conn.request('GET', '%s/%s/manifest' % ( + parsed.path, self.container), '', {'X-Auth-Token': token}) return check_response(conn) resp = retry(get) self.assertEquals(resp.read(), ''.join(segments1)) @@ -398,10 +382,11 @@ class TestObject(unittest.TestCase): # Update the manifest def put(url, token, parsed, conn): - conn.request('PUT', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token, - 'X-Object-Manifest': '%s/segments2/' % self.container, - 'Content-Length': '0'}) + conn.request('PUT', '%s/%s/manifest' % ( + parsed.path, self.container), '', { + 'X-Auth-Token': token, + 'X-Object-Manifest': '%s/segments2/' % self.container, + 'Content-Length': '0'}) return check_response(conn) resp = retry(put) resp.read() @@ -409,8 +394,8 @@ class TestObject(unittest.TestCase): # Get the manifest (should be the second set of segments now) def get(url, token, parsed, conn): 
- conn.request('GET', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token}) + conn.request('GET', '%s/%s/manifest' % ( + parsed.path, self.container), '', {'X-Auth-Token': token}) return check_response(conn) resp = retry(get) self.assertEquals(resp.read(), ''.join(segments2)) @@ -420,8 +405,8 @@ class TestObject(unittest.TestCase): # Ensure we can't access the manifest with the third account def get(url, token, parsed, conn): - conn.request('GET', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token}) + conn.request('GET', '%s/%s/manifest' % ( + parsed.path, self.container), '', {'X-Auth-Token': token}) return check_response(conn) resp = retry(get, use_account=3) resp.read() @@ -430,8 +415,8 @@ class TestObject(unittest.TestCase): # Grant access to the third account def post(url, token, parsed, conn): conn.request('POST', '%s/%s' % (parsed.path, self.container), - '', {'X-Auth-Token': token, - 'X-Container-Read': swift_test_perm[2]}) + '', {'X-Auth-Token': token, + 'X-Container-Read': swift_test_perm[2]}) return check_response(conn) resp = retry(post) resp.read() @@ -439,8 +424,8 @@ class TestObject(unittest.TestCase): # The third account should be able to get the manifest now def get(url, token, parsed, conn): - conn.request('GET', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token}) + conn.request('GET', '%s/%s/manifest' % ( + parsed.path, self.container), '', {'X-Auth-Token': token}) return check_response(conn) resp = retry(get, use_account=3) self.assertEquals(resp.read(), ''.join(segments2)) @@ -459,8 +444,8 @@ class TestObject(unittest.TestCase): # Upload the third set of segments in the other container def put(url, token, parsed, conn, objnum): - conn.request('PUT', '%s/%s/segments3/%s' % (parsed.path, - acontainer, str(objnum)), segments3[objnum], + conn.request('PUT', '%s/%s/segments3/%s' % ( + parsed.path, acontainer, str(objnum)), segments3[objnum], {'X-Auth-Token': token}) return check_response(conn) for objnum in xrange(len(segments3)): @@ -470,10 +455,11 @@ class TestObject(unittest.TestCase): # Update the manifest def put(url, token, parsed, conn): - conn.request('PUT', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token, - 'X-Object-Manifest': '%s/segments3/' % acontainer, - 'Content-Length': '0'}) + conn.request('PUT', '%s/%s/manifest' % ( + parsed.path, self.container), '', + {'X-Auth-Token': token, + 'X-Object-Manifest': '%s/segments3/' % acontainer, + 'Content-Length': '0'}) return check_response(conn) resp = retry(put) resp.read() @@ -481,8 +467,8 @@ class TestObject(unittest.TestCase): # Get the manifest to ensure it's the third set of segments def get(url, token, parsed, conn): - conn.request('GET', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token}) + conn.request('GET', '%s/%s/manifest' % ( + parsed.path, self.container), '', {'X-Auth-Token': token}) return check_response(conn) resp = retry(get) self.assertEquals(resp.read(), ''.join(segments3)) @@ -495,8 +481,8 @@ class TestObject(unittest.TestCase): # manifest itself is not). 
def get(url, token, parsed, conn): - conn.request('GET', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token}) + conn.request('GET', '%s/%s/manifest' % ( + parsed.path, self.container), '', {'X-Auth-Token': token}) return check_response(conn) resp = retry(get, use_account=3) resp.read() @@ -505,8 +491,8 @@ class TestObject(unittest.TestCase): # Grant access to the third account def post(url, token, parsed, conn): conn.request('POST', '%s/%s' % (parsed.path, acontainer), - '', {'X-Auth-Token': token, - 'X-Container-Read': swift_test_perm[2]}) + '', {'X-Auth-Token': token, + 'X-Container-Read': swift_test_perm[2]}) return check_response(conn) resp = retry(post) resp.read() @@ -514,8 +500,8 @@ class TestObject(unittest.TestCase): # The third account should be able to get the manifest now def get(url, token, parsed, conn): - conn.request('GET', '%s/%s/manifest' % (parsed.path, - self.container), '', {'X-Auth-Token': token}) + conn.request('GET', '%s/%s/manifest' % ( + parsed.path, self.container), '', {'X-Auth-Token': token}) return check_response(conn) resp = retry(get, use_account=3) self.assertEquals(resp.read(), ''.join(segments3)) @@ -523,7 +509,8 @@ class TestObject(unittest.TestCase): # Delete the manifest def delete(url, token, parsed, conn, objnum): - conn.request('DELETE', '%s/%s/manifest' % (parsed.path, + conn.request('DELETE', '%s/%s/manifest' % ( + parsed.path, self.container), '', {'X-Auth-Token': token}) return check_response(conn) resp = retry(delete, objnum) @@ -532,8 +519,9 @@ class TestObject(unittest.TestCase): # Delete the third set of segments def delete(url, token, parsed, conn, objnum): - conn.request('DELETE', '%s/%s/segments3/%s' % (parsed.path, - acontainer, str(objnum)), '', {'X-Auth-Token': token}) + conn.request('DELETE', '%s/%s/segments3/%s' % ( + parsed.path, acontainer, str(objnum)), '', + {'X-Auth-Token': token}) return check_response(conn) for objnum in xrange(len(segments3)): resp = retry(delete, objnum) @@ -542,8 +530,9 @@ class TestObject(unittest.TestCase): # Delete the second set of segments def delete(url, token, parsed, conn, objnum): - conn.request('DELETE', '%s/%s/segments2/%s' % (parsed.path, - self.container, str(objnum)), '', {'X-Auth-Token': token}) + conn.request('DELETE', '%s/%s/segments2/%s' % ( + parsed.path, self.container, str(objnum)), '', + {'X-Auth-Token': token}) return check_response(conn) for objnum in xrange(len(segments2)): resp = retry(delete, objnum) @@ -552,8 +541,9 @@ class TestObject(unittest.TestCase): # Delete the first set of segments def delete(url, token, parsed, conn, objnum): - conn.request('DELETE', '%s/%s/segments1/%s' % (parsed.path, - self.container, str(objnum)), '', {'X-Auth-Token': token}) + conn.request('DELETE', '%s/%s/segments1/%s' % ( + parsed.path, self.container, str(objnum)), '', + {'X-Auth-Token': token}) return check_response(conn) for objnum in xrange(len(segments1)): resp = retry(delete, objnum) @@ -563,7 +553,7 @@ class TestObject(unittest.TestCase): # Delete the extra container def delete(url, token, parsed, conn): conn.request('DELETE', '%s/%s' % (parsed.path, acontainer), '', - {'X-Auth-Token': token}) + {'X-Auth-Token': token}) return check_response(conn) resp = retry(delete) resp.read() @@ -574,8 +564,8 @@ class TestObject(unittest.TestCase): raise SkipTest def put(url, token, parsed, conn): - conn.request('PUT', '%s/%s/hi' % (parsed.path, - self.container), 'there', {'X-Auth-Token': token}) + conn.request('PUT', '%s/%s/hi' % (parsed.path, self.container), + 'there', 
{'X-Auth-Token': token}) return check_response(conn) resp = retry(put) resp.read() @@ -583,7 +573,7 @@ class TestObject(unittest.TestCase): def delete(url, token, parsed, conn): conn.request('DELETE', '%s/%s/hi' % (parsed.path, self.container), - '', {'X-Auth-Token': token}) + '', {'X-Auth-Token': token}) return check_response(conn) resp = retry(delete) resp.read() @@ -596,7 +586,8 @@ class TestObject(unittest.TestCase): raise SkipTest def put(url, token, parsed, conn): - conn.request('PUT', '%s/%s/abc%%00def' % (parsed.path, + conn.request('PUT', '%s/%s/abc%%00def' % ( + parsed.path, self.container), 'test', {'X-Auth-Token': token}) return check_response(conn) resp = retry(put) diff --git a/test/unit/__init__.py b/test/unit/__init__.py index 04895b4..76e09c1 100644 --- a/test/unit/__init__.py +++ b/test/unit/__init__.py @@ -1,3 +1,18 @@ +# Copyright (c) 2010-2012 OpenStack Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Swift tests """ import os @@ -8,6 +23,7 @@ from sys import exc_info from contextlib import contextmanager from collections import defaultdict from tempfile import NamedTemporaryFile +import time from eventlet.green import socket from tempfile import mkdtemp from shutil import rmtree @@ -142,13 +158,24 @@ def tmpfile(content): xattr_data = {} -def _get_inode(fd): - if not isinstance(fd, int): - try: - fd = fd.fileno() - except AttributeError: - return os.stat(fd).st_ino - return os.fstat(fd).st_ino +def _get_inode(fd_or_name): + try: + if isinstance(fd_or_name, int): + fd = fd_or_name + else: + try: + fd = fd_or_name.fileno() + except AttributeError: + fd = None + if fd is None: + ino = os.stat(fd_or_name).st_ino + else: + ino = os.fstat(fd).st_ino + except OSError as err: + ioerr = IOError() + ioerr.errno = err.errno + raise ioerr + return ino def _setxattr(fd, k, v): @@ -199,27 +226,57 @@ class NullLoggingHandler(logging.Handler): pass -class FakeLogger(object): +class UnmockTimeModule(object): + """ + Even if a test mocks time.time - you can restore unmolested behavior in a + another module who imports time directly by monkey patching it's imported + reference to the module with an instance of this class + """ + + _orig_time = time.time + + def __getattribute__(self, name): + if name == 'time': + return UnmockTimeModule._orig_time + return getattr(time, name) + + +# logging.LogRecord.__init__ calls time.time +logging.time = UnmockTimeModule() + + +class FakeLogger(logging.Logger): # a thread safe logger def __init__(self, *args, **kwargs): self._clear() + self.name = 'swift.unit.fake_logger' self.level = logging.NOTSET if 'facility' in kwargs: self.facility = kwargs['facility'] def _clear(self): self.log_dict = defaultdict(list) + self.lines_dict = defaultdict(list) def _store_in(store_name): def stub_fn(self, *args, **kwargs): self.log_dict[store_name].append((args, kwargs)) return stub_fn - error = _store_in('error') - info = _store_in('info') - warning = _store_in('warning') - debug = _store_in('debug') + def _store_and_log_in(store_name): + def 
stub_fn(self, *args, **kwargs): + self.log_dict[store_name].append((args, kwargs)) + self._log(store_name, args[0], args[1:], **kwargs) + return stub_fn + + def get_lines_for_level(self, level): + return self.lines_dict[level] + + error = _store_and_log_in('error') + info = _store_and_log_in('info') + warning = _store_and_log_in('warning') + debug = _store_and_log_in('debug') def exception(self, *args, **kwargs): self.log_dict['exception'].append((args, kwargs, str(exc_info()[1]))) @@ -267,7 +324,13 @@ class FakeLogger(object): pass def handle(self, record): - pass + try: + line = record.getMessage() + except TypeError: + print 'WARNING: unable to format log message %r %% %r' % ( + record.msg, record.args) + raise + self.lines_dict[record.levelno].append(line) def flush(self): pass @@ -354,11 +417,13 @@ def mock(update): else: deletes.append((module, attr)) setattr(module, attr, value) - yield True - for module, attr, value in returns: - setattr(module, attr, value) - for module, attr in deletes: - delattr(module, attr) + try: + yield True + finally: + for module, attr, value in returns: + setattr(module, attr, value) + for module, attr in deletes: + delattr(module, attr) def fake_http_connect(*code_iter, **kwargs): @@ -466,6 +531,8 @@ def fake_http_connect(*code_iter, **kwargs): body_iter = iter(body_iter) def connect(*args, **ckwargs): + if kwargs.get('slow_connect', False): + sleep(0.1) if 'give_content_type' in kwargs: if len(args) >= 7 and 'Content-Type' in args[6]: kwargs['give_content_type'](args[6]['Content-Type']) diff --git a/test/unit/common/test_fs_utils.py b/test/unit/common/test_fs_utils.py index 02c6ecb..19fc2df 100644 --- a/test/unit/common/test_fs_utils.py +++ b/test/unit/common/test_fs_utils.py @@ -30,8 +30,22 @@ from gluster.swift.common.exceptions import NotDirectoryError, \ def mock_os_fsync(fd): return True -def mock_tpool_execute(func, *args, **kwargs): - func(*args, **kwargs) +def mock_os_fdatasync(fd): + return True + + +class TestFakefile(unittest.TestCase): + """ Tests for common.fs_utils.Fake_file """ + + def test_Fake_file(self): + path = "/tmp/bar" + ff = fs.Fake_file(path) + self.assertEqual(path, ff.path) + self.assertEqual(0, ff.tell()) + self.assertEqual(None, ff.read(50)) + self.assertEqual(-1, ff.fileno()) + self.assertEqual(None, ff.close()) + class TestFsUtils(unittest.TestCase): """ Tests for common.fs_utils """ @@ -688,9 +702,8 @@ class TestFsUtils(unittest.TestCase): fd, tmpfile = mkstemp(dir=tmpdir) try: os.write(fd, 'test') - with patch('eventlet.tpool.execute', mock_tpool_execute): - with patch('os.fsync', mock_os_fsync): - assert fs.do_fsync(fd) is None + with patch('os.fsync', mock_os_fsync): + assert fs.do_fsync(fd) is None except GlusterFileSystemOSError as ose: self.fail('Opening a temporary file failed with %s' %ose.strerror) else: @@ -704,15 +717,47 @@ class TestFsUtils(unittest.TestCase): try: fd, tmpfile = mkstemp(dir=tmpdir) os.write(fd, 'test') - with patch('eventlet.tpool.execute', mock_tpool_execute): - with patch('os.fsync', mock_os_fsync): - assert fs.do_fsync(fd) is None + with patch('os.fsync', mock_os_fsync): + assert fs.do_fsync(fd) is None + os.close(fd) + try: + fs.do_fsync(fd) + except GlusterFileSystemOSError: + pass + else: + self.fail("Expected GlusterFileSystemOSError") + finally: + shutil.rmtree(tmpdir) + + def test_do_fdatasync(self): + tmpdir = mkdtemp() + try: + fd, tmpfile = mkstemp(dir=tmpdir) + try: + os.write(fd, 'test') + with patch('os.fdatasync', mock_os_fdatasync): + assert fs.do_fdatasync(fd) is None + 
except GlusterFileSystemOSError as ose: + self.fail('Opening a temporary file failed with %s' %ose.strerror) + else: os.close(fd) - try: - fs.do_fsync(fd) - except GlusterFileSystemOSError: - pass - else: - self.fail("Expected GlusterFileSystemOSError") + finally: + shutil.rmtree(tmpdir) + + + def test_do_fdatasync_err(self): + tmpdir = mkdtemp() + try: + fd, tmpfile = mkstemp(dir=tmpdir) + os.write(fd, 'test') + with patch('os.fdatasync', mock_os_fdatasync): + assert fs.do_fdatasync(fd) is None + os.close(fd) + try: + fs.do_fdatasync(fd) + except GlusterFileSystemOSError: + pass + else: + self.fail("Expected GlusterFileSystemOSError") finally: shutil.rmtree(tmpdir) diff --git a/test/unit/obj/test_diskfile.py b/test/unit/obj/test_diskfile.py index 7475105..27ff51e 100644 --- a/test/unit/obj/test_diskfile.py +++ b/test/unit/obj/test_diskfile.py @@ -43,18 +43,35 @@ from test.unit import FakeLogger _metadata = {} -def _mock_read_metadata(filename): +def _mapit(filename_or_fd): + if isinstance(filename_or_fd, int): + statmeth = os.fstat + else: + statmeth = os.lstat + try: + stats = statmeth(filename_or_fd) + except OSError as err: + if err.errno == errno.ENOENT: + raise GlusterFileSystemOSError( + err.errno, '%s, os.fstat(%s)' % (err.strerror, filename_or_fd)) + raise + return stats.st_ino + + +def _mock_read_metadata(filename_or_fd): global _metadata - if filename in _metadata: - md = _metadata[filename] + ino = _mapit(filename_or_fd) + if ino in _metadata: + md = _metadata[ino].copy() else: md = {} return md -def _mock_write_metadata(filename, metadata): +def _mock_write_metadata(filename_or_fd, metadata): global _metadata - _metadata[filename] = metadata + ino = _mapit(filename_or_fd) + _metadata[ino] = metadata.copy() def _mock_clear_metadata(): @@ -127,8 +144,7 @@ class TestDiskFile(unittest.TestCase): assert gdf.logger == self.lg assert gdf.uid == DEFAULT_UID assert gdf.gid == DEFAULT_GID - assert gdf.metadata == {} - assert gdf.meta_file is None + assert gdf._metadata == None assert gdf.data_file is None assert gdf.fp is None assert gdf.iter_etag is None @@ -146,7 +162,7 @@ class TestDiskFile(unittest.TestCase): assert gdf.datadir == os.path.join(self.td, "vol0", "bar", "b", "a") assert gdf.device_path == os.path.join(self.td, "vol0") - def test_constructor_no_metadata(self): + def test_open_no_metadata(self): the_path = os.path.join(self.td, "vol0", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) @@ -164,12 +180,13 @@ class TestDiskFile(unittest.TestCase): 'Content-Type': 'application/octet-stream'} gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" - assert gdf.data_file == the_file - assert not gdf._is_dir - assert gdf.fp is None - assert gdf.metadata == exp_md + with gdf.open(): + assert gdf.data_file == the_file + assert not gdf._is_dir + assert gdf.fp is not None + assert gdf._metadata == exp_md - def test_constructor_existing_metadata(self): + def test_open_existing_metadata(self): the_path = os.path.join(self.td, "vol0", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) @@ -182,37 +199,39 @@ class TestDiskFile(unittest.TestCase): 'ETag': 'etag', 'X-Timestamp': 'ts', 'Content-Type': 'application/loctet-stream'} - _metadata[the_file] = ini_md + _metadata[_mapit(the_file)] = ini_md exp_md = ini_md.copy() del exp_md['X-Type'] del exp_md['X-Object-Type'] gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" - assert gdf.data_file == the_file - assert not gdf._is_dir - assert gdf.fp 
is None - assert gdf.metadata == exp_md + with gdf.open(): + assert gdf.data_file == the_file + assert not gdf._is_dir + assert gdf.fp is not None + assert gdf._metadata == exp_md - def test_constructor_invalid_existing_metadata(self): + def test_open_invalid_existing_metadata(self): the_path = os.path.join(self.td, "vol0", "bar") the_file = os.path.join(the_path, "z") + os.makedirs(the_path) + with open(the_file, "wb") as fd: + fd.write("1234") inv_md = { 'Content-Length': 5, 'ETag': 'etag', 'X-Timestamp': 'ts', 'Content-Type': 'application/loctet-stream'} - _metadata[the_file] = inv_md - os.makedirs(the_path) - with open(the_file, "wb") as fd: - fd.write("1234") + _metadata[_mapit(the_file)] = inv_md gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" - assert gdf.data_file == the_file - assert not gdf._is_dir - assert gdf.fp is None - assert gdf.metadata != inv_md + with gdf.open(): + assert gdf.data_file == the_file + assert not gdf._is_dir + assert gdf.fp is not None + assert gdf._metadata != inv_md - def test_constructor_isdir(self): + def test_open_isdir(self): the_path = os.path.join(self.td, "vol0", "bar") the_dir = os.path.join(the_path, "d") os.makedirs(the_dir) @@ -223,29 +242,16 @@ class TestDiskFile(unittest.TestCase): 'ETag': 'etag', 'X-Timestamp': 'ts', 'Content-Type': 'application/loctet-stream'} - _metadata[the_dir] = ini_md + _metadata[_mapit(the_dir)] = ini_md exp_md = ini_md.copy() del exp_md['X-Type'] del exp_md['X-Object-Type'] - gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d", - keep_data_fp=True) + gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d") assert gdf._obj == "d" - assert gdf.data_file == the_dir - assert gdf._is_dir - assert gdf.metadata == exp_md - - def test_constructor_keep_data_fp(self): - the_path = os.path.join(self.td, "vol0", "bar") - the_file = os.path.join(the_path, "z") - os.makedirs(the_path) - with open(the_file, "wb") as fd: - fd.write("1234") - gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z", - keep_data_fp=True) - assert gdf._obj == "z" - assert gdf.data_file == the_file - assert not gdf._is_dir - assert gdf.fp is not None + with gdf.open(): + assert gdf.data_file == the_dir + assert gdf._is_dir + assert gdf._metadata == exp_md def test_constructor_chunk_size(self): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z", @@ -258,8 +264,7 @@ class TestDiskFile(unittest.TestCase): assert gdf.iter_hook == 'hook' def test_close_no_open_fp(self): - gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z", - keep_data_fp=True) + gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") gdf._is_dir = False self.called = False @@ -276,28 +281,32 @@ class TestDiskFile(unittest.TestCase): the_dir = "dir" self.called = False os.makedirs(os.path.join(the_cont, the_dir)) - gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir", - keep_data_fp=True) - - ret = isinstance(gdf.fp, Fake_file) - self.assertTrue(ret) + gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir") + with gdf.open(): + ret = isinstance(gdf.fp, Fake_file) + self.assertTrue(ret) - # Get a File descriptor - fd = gdf.fp + # Get a "Fake_file" pointer + ffp = gdf.fp - # This expected to call Fake_file interfaces - ret = fd.tell() - self.assertEqual(ret , 0) + # This expected to call Fake_file interfaces + ret = ffp.tell() + self.assertEqual(ret, 0) - ret = fd.read(1) - self.assertEqual(ret , 0) + ret = ffp.read(1) + self.assertEqual(ret, None) - ret = fd.fileno() - self.assertEqual(ret, -1) + ret = 
ffp.fileno() + self.assertEqual(ret, -1) - ret = fd.close() - self.assertFalse(self.called) + def our_do_close(ffp): + self.called = True + with mock.patch("gluster.swift.obj.diskfile.do_close", + our_do_close): + ret = ffp.close() + self.assertEqual(ret, None) + self.assertFalse(self.called) def test_close_file_object(self): the_cont = os.path.join(self.td, "vol0", "bar") @@ -306,22 +315,23 @@ class TestDiskFile(unittest.TestCase): os.makedirs(the_cont) with open(the_file, "wb") as fd: fd.write("1234") - gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z", - keep_data_fp=True) + gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") def our_do_close(fp): self.called = True with mock.patch("gluster.swift.obj.diskfile.do_close", our_do_close): - gdf.close() + with gdf.open(): + assert not self.called assert self.called def test_is_deleted(self): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") - assert gdf.is_deleted() - gdf.data_file = os.path.join(self.td, "bar") - assert not gdf.is_deleted() + with gdf.open(): + assert gdf.is_deleted() + gdf.data_file = os.path.join(self.td, "bar") + assert not gdf.is_deleted() def test_create_dir_object_no_md(self): the_cont = os.path.join(self.td, "vol0", "bar") @@ -334,7 +344,7 @@ class TestDiskFile(unittest.TestCase): gdf._create_dir_object(the_dir) full_dir_path = os.path.join(the_cont, the_dir) assert os.path.isdir(full_dir_path) - assert full_dir_path not in _metadata + assert _mapit(full_dir_path) not in _metadata def test_create_dir_object_with_md(self): the_cont = os.path.join(self.td, "vol0", "bar") @@ -349,7 +359,7 @@ class TestDiskFile(unittest.TestCase): gdf._create_dir_object(the_dir, dir_md) full_dir_path = os.path.join(the_cont, the_dir) assert os.path.isdir(full_dir_path) - assert full_dir_path in _metadata + assert _mapit(full_dir_path) in _metadata def test_create_dir_object_exists(self): the_path = os.path.join(self.td, "vol0", "bar") @@ -371,7 +381,7 @@ class TestDiskFile(unittest.TestCase): DiskFileError, gdf._create_dir_object, the_dir) gluster.swift.obj.diskfile.do_chown = dc self.assertFalse(os.path.isdir(the_dir)) - self.assertFalse(the_dir in _metadata) + self.assertFalse(_mapit(the_dir) in _metadata) def test_create_dir_object_do_stat_failure(self): the_path = os.path.join(self.td, "vol0", "bar") @@ -393,25 +403,26 @@ class TestDiskFile(unittest.TestCase): DiskFileError, gdf._create_dir_object, the_dir) gluster.swift.obj.diskfile.do_chown = dc self.assertFalse(os.path.isdir(the_dir)) - self.assertFalse(the_dir in _metadata) + self.assertFalse(_mapit(the_dir) in _metadata) def test_put_metadata(self): - the_path = os.path.join(self.td, "vol0", "bar") - the_dir = os.path.join(the_path, "z") + the_dir = os.path.join(self.td, "vol0", "bar", "z") os.makedirs(the_dir) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") md = {'Content-Type': 'application/octet-stream', 'a': 'b'} gdf.put_metadata(md.copy()) - assert gdf.metadata == md, "gdf.metadata = %r, md = %r" % ( - gdf.metadata, md) - assert _metadata[the_dir] == md + assert gdf._metadata is None + fmd = _metadata[_mapit(the_dir)] + md.update({'X-Object-Type': 'file', 'X-Type': 'Object'}) + assert fmd == md, "on-disk md = %r, md = %r" % (fmd, md) def test_put_w_tombstone(self): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") - assert gdf.metadata == {} + assert gdf._metadata == None gdf.put_metadata({'x': '1'}, tombstone=True) - assert gdf.metadata == {} + assert gdf._metadata is None + assert _metadata == {} def 
test_put_w_meta_file(self): the_path = os.path.join(self.td, "vol0", "bar") @@ -420,11 +431,13 @@ class TestDiskFile(unittest.TestCase): with open(the_file, "wb") as fd: fd.write("1234") gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") - newmd = gdf.metadata.copy() - newmd['X-Object-Meta-test'] = '1234' + with gdf.open(): + newmd = gdf.get_metadata().copy() + newmd['X-Object-Meta-test'] = '1234' gdf.put_metadata(newmd) - assert gdf.metadata == newmd - assert _metadata[the_file] == newmd + assert gdf._metadata is None + fmd = _metadata[_mapit(the_file)] + assert fmd == newmd, "on-disk md = %r, newmd = %r" % (fmd, newmd) def test_put_w_meta_file_no_content_type(self): the_path = os.path.join(self.td, "vol0", "bar") @@ -433,67 +446,72 @@ class TestDiskFile(unittest.TestCase): with open(the_file, "wb") as fd: fd.write("1234") gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") - newmd = gdf.metadata.copy() - newmd['Content-Type'] = '' - newmd['X-Object-Meta-test'] = '1234' + with gdf.open(): + newmd = gdf.get_metadata().copy() + newmd['Content-Type'] = '' + newmd['X-Object-Meta-test'] = '1234' gdf.put_metadata(newmd) - assert gdf.metadata == newmd - assert _metadata[the_file] == newmd + assert gdf._metadata is None + fmd = _metadata[_mapit(the_file)] + assert fmd == newmd, "on-disk md = %r, newmd = %r" % (fmd, newmd) def test_put_w_meta_dir(self): the_path = os.path.join(self.td, "vol0", "bar") the_dir = os.path.join(the_path, "dir") os.makedirs(the_dir) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir") - newmd = gdf.metadata.copy() - newmd['X-Object-Meta-test'] = '1234' + with gdf.open(): + newmd = gdf.get_metadata().copy() + newmd['X-Object-Meta-test'] = '1234' gdf.put_metadata(newmd) - assert gdf.metadata == newmd - assert _metadata[the_dir] == newmd + assert gdf._metadata is None + fmd = _metadata[_mapit(the_dir)] + assert fmd == newmd, "on-disk md = %r, newmd = %r" % (fmd, newmd) def test_put_w_marker_dir(self): the_path = os.path.join(self.td, "vol0", "bar") the_dir = os.path.join(the_path, "dir") os.makedirs(the_dir) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir") - newmd = gdf.metadata.copy() - newmd['X-Object-Meta-test'] = '1234' + with gdf.open(): + newmd = gdf.get_metadata().copy() + newmd['X-Object-Meta-test'] = '1234' gdf.put_metadata(newmd) - assert gdf.metadata == newmd - assert _metadata[the_dir] == newmd + assert gdf._metadata is None + fmd = _metadata[_mapit(the_dir)] + assert fmd == newmd, "on-disk md = %r, newmd = %r" % (fmd, newmd) def test_put_w_marker_dir_create(self): the_cont = os.path.join(self.td, "vol0", "bar") the_dir = os.path.join(the_cont, "dir") os.makedirs(the_cont) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir") - assert gdf.metadata == {} + assert gdf._metadata == None newmd = { 'ETag': 'etag', 'X-Timestamp': 'ts', 'Content-Type': 'application/directory'} - with gdf.writer() as dw: - dw.put(newmd, extension='.dir') - assert gdf.data_file == the_dir - for key, val in newmd.items(): - assert gdf.metadata[key] == val - assert _metadata[the_dir][key] == val - assert gdf.metadata[X_OBJECT_TYPE] == DIR_OBJECT - assert _metadata[the_dir][X_OBJECT_TYPE] == DIR_OBJECT + with gdf.create() as dw: + dw.put(newmd.copy(), extension='.dir') + with gdf.open(): + assert gdf.data_file == the_dir + for key, val in newmd.items(): + assert gdf._metadata[key] == val + assert _metadata[_mapit(the_dir)][key] == val + assert X_OBJECT_TYPE not in gdf._metadata, "md = %r" % gdf._metadata + assert 
_metadata[_mapit(the_dir)][X_OBJECT_TYPE] == DIR_OBJECT def test_put_is_dir(self): the_path = os.path.join(self.td, "vol0", "bar") the_dir = os.path.join(the_path, "dir") os.makedirs(the_dir) gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir") - origmd = gdf.metadata.copy() - origfmd = _metadata[the_dir] - newmd = gdf.metadata.copy() # FIXME: This is a hack to get to the code-path; it is not clear # how this can happen normally. - newmd['Content-Type'] = '' - newmd['X-Object-Meta-test'] = '1234' - with gdf.writer() as dw: + newmd = { + 'Content-Type': '', + 'X-Object-Meta-test': '1234'} + with gdf.create() as dw: try: dw.put(newmd, extension='.data') except DiskFileError: @@ -501,8 +519,6 @@ class TestDiskFile(unittest.TestCase): else: self.fail("Expected to encounter" " 'already-exists-as-dir' exception") - assert gdf.metadata == origmd - assert _metadata[the_dir] == origfmd def test_put(self): the_cont = os.path.join(self.td, "vol0", "bar") @@ -525,7 +541,7 @@ class TestDiskFile(unittest.TestCase): 'Content-Length': '5', } - with gdf.writer() as dw: + with gdf.create() as dw: assert dw.tmppath is not None tmppath = dw.tmppath dw.write(body) @@ -561,7 +577,7 @@ class TestDiskFile(unittest.TestCase): with mock.patch("os.open", mock_open): try: - with gdf.writer() as dw: + with gdf.create() as dw: assert dw.tmppath is not None dw.write(body) dw.put(metadata) @@ -601,7 +617,7 @@ class TestDiskFile(unittest.TestCase): with mock.patch("gluster.swift.obj.diskfile.sleep", mock_sleep): with mock.patch("os.rename", mock_rename): try: - with gdf.writer() as dw: + with gdf.create() as dw: assert dw.tmppath is not None dw.write(body) dw.put(metadata) @@ -631,7 +647,7 @@ class TestDiskFile(unittest.TestCase): 'Content-Length': '5', } - with gdf.writer() as dw: + with gdf.create() as dw: assert dw.tmppath is not None tmppath = dw.tmppath dw.write(body) @@ -642,32 +658,32 @@ class TestDiskFile(unittest.TestCase): assert os.path.exists(gdf.data_file) assert not os.path.exists(tmppath) - def test_unlinkold_no_metadata(self): + def test_delete_no_metadata(self): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") - assert gdf.metadata == {} + assert gdf._metadata == None _saved_rmobjdir = gluster.swift.obj.diskfile.rmobjdir gluster.swift.obj.diskfile.rmobjdir = _mock_rmobjdir try: - gdf.unlinkold(None) + gdf.delete(1.0) except MockException as exp: self.fail(str(exp)) finally: gluster.swift.obj.diskfile.rmobjdir = _saved_rmobjdir - def test_unlinkold_same_timestamp(self): + def test_delete_same_timestamp(self): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") - assert gdf.metadata == {} - gdf.metadata['X-Timestamp'] = 1 + assert gdf._metadata == None + gdf._metadata = {'X-Timestamp': 1} _saved_rmobjdir = gluster.swift.obj.diskfile.rmobjdir gluster.swift.obj.diskfile.rmobjdir = _mock_rmobjdir try: - gdf.unlinkold(1) + gdf.delete(1) except MockException as exp: self.fail(str(exp)) finally: gluster.swift.obj.diskfile.rmobjdir = _saved_rmobjdir - def test_unlinkold_file(self): + def test_delete_file(self): the_path = os.path.join(self.td, "vol0", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) @@ -675,15 +691,14 @@ class TestDiskFile(unittest.TestCase): fd.write("1234") gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" - assert gdf.data_file == the_file - assert not gdf._is_dir - - later = float(gdf.metadata['X-Timestamp']) + 1 - gdf.unlinkold(normalize_timestamp(later)) + with gdf.open(): + later = 
float(gdf.get_metadata()['X-Timestamp']) + 1 + assert gdf.data_file == the_file + gdf.delete(normalize_timestamp(later)) assert os.path.isdir(gdf.datadir) assert not os.path.exists(os.path.join(gdf.datadir, gdf._obj)) - def test_unlinkold_file_not_found(self): + def test_delete_file_not_found(self): the_path = os.path.join(self.td, "vol0", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) @@ -691,18 +706,19 @@ class TestDiskFile(unittest.TestCase): fd.write("1234") gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" - assert gdf.data_file == the_file - assert not gdf._is_dir + with gdf.open(): + later = float(gdf._metadata['X-Timestamp']) + 1 + assert gdf.data_file == the_file + assert not gdf._is_dir # Handle the case the file is not in the directory listing. os.unlink(the_file) - later = float(gdf.metadata['X-Timestamp']) + 1 - gdf.unlinkold(normalize_timestamp(later)) + gdf.delete(normalize_timestamp(later)) assert os.path.isdir(gdf.datadir) assert not os.path.exists(os.path.join(gdf.datadir, gdf._obj)) - def test_unlinkold_file_unlink_error(self): + def test_delete_file_unlink_error(self): the_path = os.path.join(self.td, "vol0", "bar") the_file = os.path.join(the_path, "z") os.makedirs(the_path) @@ -710,10 +726,10 @@ class TestDiskFile(unittest.TestCase): fd.write("1234") gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" - assert gdf.data_file == the_file - assert not gdf._is_dir - - later = float(gdf.metadata['X-Timestamp']) + 1 + with gdf.open(): + assert gdf.data_file == the_file + assert not gdf._is_dir + later = float(gdf._metadata['X-Timestamp']) + 1 def _mock_os_unlink_eacces_err(f): raise OSError(errno.EACCES, os.strerror(errno.EACCES)) @@ -725,7 +741,7 @@ class TestDiskFile(unittest.TestCase): # Handle the case os_unlink() raises an OSError with patch("os.unlink", _mock_os_unlink_eacces_err): try: - gdf.unlinkold(normalize_timestamp(later)) + gdf.delete(normalize_timestamp(later)) except OSError as e: assert e.errno == errno.EACCES else: @@ -736,17 +752,17 @@ class TestDiskFile(unittest.TestCase): assert os.path.isdir(gdf.datadir) assert os.path.exists(os.path.join(gdf.datadir, gdf._obj)) - def test_unlinkold_is_dir(self): + def test_delete_is_dir(self): the_path = os.path.join(self.td, "vol0", "bar") the_dir = os.path.join(the_path, "d") os.makedirs(the_dir) - gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d", - keep_data_fp=True) - assert gdf.data_file == the_dir - assert gdf._is_dir - - later = float(gdf.metadata['X-Timestamp']) + 1 - gdf.unlinkold(normalize_timestamp(later)) + gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d") + assert gdf._obj == "d" + with gdf.open(): + assert gdf.data_file == the_dir + assert gdf._is_dir + later = float(gdf._metadata['X-Timestamp']) + 1 + gdf.delete(normalize_timestamp(later)) assert os.path.isdir(gdf.datadir) assert not os.path.exists(os.path.join(gdf.datadir, gdf._obj)) @@ -758,9 +774,10 @@ class TestDiskFile(unittest.TestCase): fd.write("1234") gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" - assert gdf.data_file == the_file - assert not gdf._is_dir - assert 4 == gdf.get_data_file_size() + with gdf.open(): + assert gdf.data_file == the_file + assert not gdf._is_dir + assert 4 == gdf.get_data_file_size() def test_get_data_file_size_md_restored(self): the_path = os.path.join(self.td, "vol0", "bar") @@ -770,12 +787,13 @@ class TestDiskFile(unittest.TestCase): fd.write("1234") gdf = 
self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" - assert gdf.data_file == the_file - assert not gdf._is_dir - assert 4 == gdf.metadata['Content-Length'] - gdf.metadata['Content-Length'] = 3 - assert 4 == gdf.get_data_file_size() - assert 4 == gdf.metadata['Content-Length'] + with gdf.open(): + assert gdf.data_file == the_file + assert not gdf._is_dir + assert 4 == gdf._metadata['Content-Length'] + gdf._metadata['Content-Length'] = 3 + assert 4 == gdf.get_data_file_size() + assert 4 == gdf._metadata['Content-Length'] def test_get_data_file_size_dne(self): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", @@ -795,15 +813,16 @@ class TestDiskFile(unittest.TestCase): fd.write("1234") gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" - assert gdf.data_file == the_file - assert not gdf._is_dir - gdf.data_file = gdf.data_file + ".dne" - try: - gdf.get_data_file_size() - except DiskFileNotExist: - pass - else: - self.fail("Expected DiskFileNotExist exception") + with gdf.open(): + assert gdf.data_file == the_file + assert not gdf._is_dir + gdf.data_file = gdf.data_file + ".dne" + try: + gdf.get_data_file_size() + except DiskFileNotExist: + pass + else: + self.fail("Expected DiskFileNotExist exception") def test_get_data_file_size_os_err(self): the_path = os.path.join(self.td, "vol0", "bar") @@ -813,55 +832,57 @@ class TestDiskFile(unittest.TestCase): fd.write("1234") gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") assert gdf._obj == "z" - assert gdf.data_file == the_file - assert not gdf._is_dir - stats = os.stat(the_path) - try: - os.chmod(the_path, 0) + with gdf.open(): + assert gdf.data_file == the_file + assert not gdf._is_dir + stats = os.stat(the_path) + try: + os.chmod(the_path, 0) - def _mock_getsize_eaccess_err(f): - raise OSError(errno.EACCES, os.strerror(errno.EACCES)) + def _mock_getsize_eaccess_err(f): + raise OSError(errno.EACCES, os.strerror(errno.EACCES)) - with patch("os.path.getsize", _mock_getsize_eaccess_err): - try: - gdf.get_data_file_size() - except OSError as err: - assert err.errno == errno.EACCES - else: - self.fail("Expected OSError exception") - finally: - os.chmod(the_path, stats.st_mode) + with patch("os.path.getsize", _mock_getsize_eaccess_err): + try: + gdf.get_data_file_size() + except OSError as err: + assert err.errno == errno.EACCES + else: + self.fail("Expected OSError exception") + finally: + os.chmod(the_path, stats.st_mode) def test_get_data_file_size_dir(self): the_path = os.path.join(self.td, "vol0", "bar") the_dir = os.path.join(the_path, "d") os.makedirs(the_dir) - gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d", - keep_data_fp=True) + gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "d") assert gdf._obj == "d" - assert gdf.data_file == the_dir - assert gdf._is_dir - assert 0 == gdf.get_data_file_size() + with gdf.open(): + assert gdf.data_file == the_dir + assert gdf._is_dir + assert 0 == gdf.get_data_file_size() def test_filter_metadata(self): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "z") - assert gdf.metadata == {} + assert gdf._metadata == None gdf._filter_metadata() - assert gdf.metadata == {} + assert gdf._metadata == None - gdf.metadata[X_TYPE] = 'a' - gdf.metadata[X_OBJECT_TYPE] = 'b' - gdf.metadata['foobar'] = 'c' + gdf._metadata = {} + gdf._metadata[X_TYPE] = 'a' + gdf._metadata[X_OBJECT_TYPE] = 'b' + gdf._metadata['foobar'] = 'c' gdf._filter_metadata() - assert X_TYPE not in gdf.metadata - assert X_OBJECT_TYPE not in 
gdf.metadata - assert 'foobar' in gdf.metadata + assert X_TYPE not in gdf._metadata + assert X_OBJECT_TYPE not in gdf._metadata + assert 'foobar' in gdf._metadata - def test_writer(self): + def test_create(self): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir/z") saved_tmppath = '' saved_fd = None - with gdf.writer() as dw: + with gdf.create() as dw: assert gdf.datadir == os.path.join(self.td, "vol0", "bar", "dir") assert os.path.isdir(gdf.datadir) saved_tmppath = dw.tmppath @@ -881,10 +902,10 @@ class TestDiskFile(unittest.TestCase): self.fail("Exception expected") assert not os.path.exists(saved_tmppath) - def test_writer_err_on_close(self): + def test_create_err_on_close(self): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir/z") saved_tmppath = '' - with gdf.writer() as dw: + with gdf.create() as dw: assert gdf.datadir == os.path.join(self.td, "vol0", "bar", "dir") assert os.path.isdir(gdf.datadir) saved_tmppath = dw.tmppath @@ -896,10 +917,10 @@ class TestDiskFile(unittest.TestCase): os.close(dw.fd) assert not os.path.exists(saved_tmppath) - def test_writer_err_on_unlink(self): + def test_create_err_on_unlink(self): gdf = self._get_diskfile("vol0", "p57", "ufo47", "bar", "dir/z") saved_tmppath = '' - with gdf.writer() as dw: + with gdf.create() as dw: assert gdf.datadir == os.path.join(self.td, "vol0", "bar", "dir") assert os.path.isdir(gdf.datadir) saved_tmppath = dw.tmppath diff --git a/test/unit/proxy/controllers/test_account.py b/test/unit/proxy/controllers/test_account.py index 4d67d65..394ada7 100644 --- a/test/unit/proxy/controllers/test_account.py +++ b/test/unit/proxy/controllers/test_account.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010-2012 OpenStack, LLC. +# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -40,6 +40,28 @@ class TestAccountController(unittest.TestCase): self.assertEqual(headers_to_account_info(resp.headers), resp.environ['swift.account/AUTH_bob']) + def test_swift_owner(self): + owner_headers = { + 'x-account-meta-temp-url-key': 'value', + 'x-account-meta-temp-url-key-2': 'value'} + controller = proxy_server.AccountController(self.app, 'a') + + req = Request.blank('/a') + with mock.patch('swift.proxy.controllers.base.http_connect', + fake_http_connect(200, 200, headers=owner_headers)): + resp = controller.HEAD(req) + self.assertEquals(2, resp.status_int // 100) + for key in owner_headers: + self.assertTrue(key not in resp.headers) + + req = Request.blank('/a', environ={'swift_owner': True}) + with mock.patch('swift.proxy.controllers.base.http_connect', + fake_http_connect(200, 200, headers=owner_headers)): + resp = controller.HEAD(req) + self.assertEquals(2, resp.status_int // 100) + for key in owner_headers: + self.assertTrue(key in resp.headers) + if __name__ == '__main__': unittest.main() diff --git a/test/unit/proxy/controllers/test_base.py b/test/unit/proxy/controllers/test_base.py index 02692eb..8214b98 100644 --- a/test/unit/proxy/controllers/test_base.py +++ b/test/unit/proxy/controllers/test_base.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010-2012 OpenStack, LLC. +# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -16,9 +16,9 @@ import unittest from mock import patch from swift.proxy.controllers.base import headers_to_container_info, \ - headers_to_account_info, get_container_info, get_container_memcache_key, \ - get_account_info, get_account_memcache_key, _get_cache_key, get_info, \ - Controller + headers_to_account_info, headers_to_object_info, get_container_info, \ + get_container_memcache_key, get_account_info, get_account_memcache_key, \ + get_object_env_key, _get_cache_key, get_info, get_object_info, Controller from swift.common.swob import Request from swift.common.utils import split_path from test.unit import fake_http_connect, FakeRing, FakeMemcache @@ -29,12 +29,18 @@ FakeResponse_status_int = 201 class FakeResponse(object): - def __init__(self, headers, env, account, container): + def __init__(self, headers, env, account, container, obj): self.headers = headers self.status_int = FakeResponse_status_int self.environ = env - cache_key, env_key = _get_cache_key(account, container) - if container: + if obj: + env_key = get_object_env_key(account, container, obj) + else: + cache_key, env_key = _get_cache_key(account, container) + + if account and container and obj: + info = headers_to_object_info(headers, FakeResponse_status_int) + elif account and container: info = headers_to_container_info(headers, FakeResponse_status_int) else: info = headers_to_account_info(headers, FakeResponse_status_int) @@ -42,18 +48,27 @@ class FakeResponse(object): class FakeRequest(object): - def __init__(self, env, path): + def __init__(self, env, path, swift_source=None): self.environ = env (version, account, container, obj) = split_path(path, 2, 4, True) self.account = account self.container = container - stype = container and 'container' or 'account' - self.headers = {'x-%s-object-count' % (stype): 1000, - 'x-%s-bytes-used' % (stype): 6666} + self.obj = obj + if obj: + stype = 'object' + self.headers = {'content-length': 5555, + 'content-type': 'text/plain'} + else: + stype = container and 'container' or 'account' + self.headers = {'x-%s-object-count' % (stype): 1000, + 'x-%s-bytes-used' % (stype): 6666} + if swift_source: + meta = 'x-%s-meta-fakerequest-swift-source' % stype + self.headers[meta] = swift_source def get_response(self, app): return FakeResponse(self.headers, self.environ, self.account, - self.container) + self.container, self.obj) class FakeCache(object): @@ -73,6 +88,21 @@ class TestFuncs(unittest.TestCase): def test_GETorHEAD_base(self): base = Controller(self.app) + req = Request.blank('/a/c/o/with/slashes') + with patch('swift.proxy.controllers.base.' + 'http_connect', fake_http_connect(200)): + resp = base.GETorHEAD_base(req, 'object', FakeRing(), 'part', + '/a/c/o/with/slashes') + self.assertTrue('swift.object/a/c/o/with/slashes' in resp.environ) + self.assertEqual( + resp.environ['swift.object/a/c/o/with/slashes']['status'], 200) + req = Request.blank('/a/c/o') + with patch('swift.proxy.controllers.base.' + 'http_connect', fake_http_connect(200)): + resp = base.GETorHEAD_base(req, 'object', FakeRing(), 'part', + '/a/c/o') + self.assertTrue('swift.object/a/c/o' in resp.environ) + self.assertEqual(resp.environ['swift.object/a/c/o']['status'], 200) req = Request.blank('/a/c') with patch('swift.proxy.controllers.base.' 
'http_connect', fake_http_connect(200)): @@ -101,7 +131,7 @@ class TestFuncs(unittest.TestCase): self.assertEquals(info_a['bytes'], 6666) self.assertEquals(info_a['total_object_count'], 1000) # Make sure the env cache is set - self.assertEquals(env, {'swift.account/a': info_a}) + self.assertEquals(env.get('swift.account/a'), info_a) # Do an env cached call to account info_a = get_info(None, env, 'a') @@ -110,7 +140,7 @@ class TestFuncs(unittest.TestCase): self.assertEquals(info_a['bytes'], 6666) self.assertEquals(info_a['total_object_count'], 1000) # Make sure the env cache is set - self.assertEquals(env, {'swift.account/a': info_a}) + self.assertEquals(env.get('swift.account/a'), info_a) # This time do env cached call to account and non cached to container with patch('swift.proxy.controllers.base.' @@ -121,11 +151,12 @@ class TestFuncs(unittest.TestCase): self.assertEquals(info_c['bytes'], 6666) self.assertEquals(info_c['object_count'], 1000) # Make sure the env cache is set - self.assertEquals(env['swift.account/a'], info_a) - self.assertEquals(env['swift.container/a/c'], info_c) + self.assertEquals(env.get('swift.account/a'), info_a) + self.assertEquals(env.get('swift.container/a/c'), info_c) - # This time do a non cached call to account than non cached to container - env = {} # abandon previous call to env + # This time do a non cached call to account than non cached to + # container + env = {} # abandon previous call to env with patch('swift.proxy.controllers.base.' '_prepare_pre_auth_info_request', FakeRequest): info_c = get_info(None, env, 'a', 'c') @@ -134,10 +165,11 @@ class TestFuncs(unittest.TestCase): self.assertEquals(info_c['bytes'], 6666) self.assertEquals(info_c['object_count'], 1000) # Make sure the env cache is set - self.assertEquals(env['swift.account/a'], info_a) - self.assertEquals(env['swift.container/a/c'], info_c) + self.assertEquals(env.get('swift.account/a'), info_a) + self.assertEquals(env.get('swift.container/a/c'), info_c) - # This time do an env cached call to container while account is not cached + # This time do an env cached call to container while account is not + # cached del(env['swift.account/a']) info_c = get_info(None, env, 'a', 'c') # Check that you got proper info @@ -145,7 +177,7 @@ class TestFuncs(unittest.TestCase): self.assertEquals(info_c['bytes'], 6666) self.assertEquals(info_c['object_count'], 1000) # Make sure the env cache is set and account still not cached - self.assertEquals(env, {'swift.container/a/c': info_c}) + self.assertEquals(env.get('swift.container/a/c'), info_c) # Do a non cached call to account not found with ret_not_found env = {} @@ -161,7 +193,7 @@ class TestFuncs(unittest.TestCase): self.assertEquals(info_a['bytes'], 6666) self.assertEquals(info_a['total_object_count'], 1000) # Make sure the env cache is set - self.assertEquals(env, {'swift.account/a': info_a}) + self.assertEquals(env.get('swift.account/a'), info_a) # Do a cached call to account not found with ret_not_found info_a = get_info(None, env, 'a', ret_not_found=True) @@ -170,7 +202,7 @@ class TestFuncs(unittest.TestCase): self.assertEquals(info_a['bytes'], 6666) self.assertEquals(info_a['total_object_count'], 1000) # Make sure the env cache is set - self.assertEquals(env, {'swift.account/a': info_a}) + self.assertEquals(env.get('swift.account/a'), info_a) # Do a non cached call to account not found without ret_not_found env = {} @@ -191,6 +223,21 @@ class TestFuncs(unittest.TestCase): self.assertEquals(info_a, None) 
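# A small sketch of the environ caching these get_info tests assert on: the
# proxy stores account and container info in the WSGI environ under the key
# names checked above ("swift.account/<acct>" and
# "swift.container/<acct>/<cont>"), so later lookups in the same request can
# reuse it without another backend HEAD.  The helper name below is
# hypothetical; only the key layout comes from the tests.
def cached_container_info(environ, account, container):
    # Returns the dict stored by the proxy (keys such as 'status', 'bytes',
    # 'object_count', 'meta') or None if nothing was cached this request.
    env_key = 'swift.container/%s/%s' % (account, container)
    return environ.get(env_key)

# Example: info = cached_container_info(req.environ, 'a', 'c')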
self.assertEquals(env['swift.account/a']['status'], 404) + def test_get_container_info_swift_source(self): + req = Request.blank("/v1/a/c", environ={'swift.cache': FakeCache({})}) + with patch('swift.proxy.controllers.base.' + '_prepare_pre_auth_info_request', FakeRequest): + resp = get_container_info(req.environ, 'app', swift_source='MC') + self.assertEquals(resp['meta']['fakerequest-swift-source'], 'MC') + + def test_get_object_info_swift_source(self): + req = Request.blank("/v1/a/c/o", + environ={'swift.cache': FakeCache({})}) + with patch('swift.proxy.controllers.base.' + '_prepare_pre_auth_info_request', FakeRequest): + resp = get_object_info(req.environ, 'app', swift_source='LU') + self.assertEquals(resp['meta']['fakerequest-swift-source'], 'LU') + def test_get_container_info_no_cache(self): req = Request.blank("/v1/AUTH_account/cont", environ={'swift.cache': FakeCache({})}) @@ -217,11 +264,18 @@ class TestFuncs(unittest.TestCase): cache_key = get_container_memcache_key("account", "cont") env_key = 'swift.%s' % cache_key req = Request.blank("/v1/account/cont", - environ={ env_key: {'bytes': 3867}, - 'swift.cache': FakeCache({})}) + environ={env_key: {'bytes': 3867}, + 'swift.cache': FakeCache({})}) resp = get_container_info(req.environ, 'xxx') self.assertEquals(resp['bytes'], 3867) + def test_get_account_info_swift_source(self): + req = Request.blank("/v1/a", environ={'swift.cache': FakeCache({})}) + with patch('swift.proxy.controllers.base.' + '_prepare_pre_auth_info_request', FakeRequest): + resp = get_account_info(req.environ, 'a', swift_source='MC') + self.assertEquals(resp['meta']['fakerequest-swift-source'], 'MC') + def test_get_account_info_no_cache(self): req = Request.blank("/v1/AUTH_account", environ={'swift.cache': FakeCache({})}) @@ -266,11 +320,33 @@ class TestFuncs(unittest.TestCase): cache_key = get_account_memcache_key("account") env_key = 'swift.%s' % cache_key req = Request.blank("/v1/account", - environ={ env_key: {'bytes': 3867}, - 'swift.cache': FakeCache({})}) + environ={env_key: {'bytes': 3867}, + 'swift.cache': FakeCache({})}) resp = get_account_info(req.environ, 'xxx') self.assertEquals(resp['bytes'], 3867) + def test_get_object_info_env(self): + cached = {'status': 200, + 'length': 3333, + 'type': 'application/json', + 'meta': {}} + env_key = get_object_env_key("account", "cont", "obj") + req = Request.blank("/v1/account/cont/obj", + environ={env_key: cached, + 'swift.cache': FakeCache({})}) + resp = get_object_info(req.environ, 'xxx') + self.assertEquals(resp['length'], 3333) + self.assertEquals(resp['type'], 'application/json') + + def test_get_object_info_no_env(self): + req = Request.blank("/v1/account/cont/obj", + environ={'swift.cache': FakeCache({})}) + with patch('swift.proxy.controllers.base.' 
+ '_prepare_pre_auth_info_request', FakeRequest): + resp = get_object_info(req.environ, 'xxx') + self.assertEquals(resp['length'], 5555) + self.assertEquals(resp['type'], 'text/plain') + def test_headers_to_container_info_missing(self): resp = headers_to_container_info({}, 404) self.assertEquals(resp['status'], 404) @@ -329,3 +405,31 @@ class TestFuncs(unittest.TestCase): self.assertEquals( resp, headers_to_account_info(headers.items(), 200)) + + def test_headers_to_object_info_missing(self): + resp = headers_to_object_info({}, 404) + self.assertEquals(resp['status'], 404) + self.assertEquals(resp['length'], None) + self.assertEquals(resp['etag'], None) + + def test_headers_to_object_info_meta(self): + headers = {'X-Object-Meta-Whatevs': 14, + 'x-object-meta-somethingelse': 0} + resp = headers_to_object_info(headers.items(), 200) + self.assertEquals(len(resp['meta']), 2) + self.assertEquals(resp['meta']['whatevs'], 14) + self.assertEquals(resp['meta']['somethingelse'], 0) + + def test_headers_to_object_info_values(self): + headers = { + 'content-length': '1024', + 'content-type': 'application/json', + } + resp = headers_to_object_info(headers.items(), 200) + self.assertEquals(resp['length'], '1024') + self.assertEquals(resp['type'], 'application/json') + + headers['x-unused-header'] = 'blahblahblah' + self.assertEquals( + resp, + headers_to_object_info(headers.items(), 200)) diff --git a/test/unit/proxy/controllers/test_container.py b/test/unit/proxy/controllers/test_container.py index c2e9483..63e6b0e 100644 --- a/test/unit/proxy/controllers/test_container.py +++ b/test/unit/proxy/controllers/test_container.py @@ -1,4 +1,4 @@ -# Copyright (c) 2010-2012 OpenStack, LLC. +# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -40,6 +40,28 @@ class TestContainerController(unittest.TestCase): self.assertEqual(headers_to_container_info(resp.headers), resp.environ['swift.container/a/c']) + def test_swift_owner(self): + owner_headers = { + 'x-container-read': 'value', 'x-container-write': 'value', + 'x-container-sync-key': 'value', 'x-container-sync-to': 'value'} + controller = proxy_server.ContainerController(self.app, 'a', 'c') + + req = Request.blank('/a/c') + with mock.patch('swift.proxy.controllers.base.http_connect', + fake_http_connect(200, 200, headers=owner_headers)): + resp = controller.HEAD(req) + self.assertEquals(2, resp.status_int // 100) + for key in owner_headers: + self.assertTrue(key not in resp.headers) + + req = Request.blank('/a/c', environ={'swift_owner': True}) + with mock.patch('swift.proxy.controllers.base.http_connect', + fake_http_connect(200, 200, headers=owner_headers)): + resp = controller.HEAD(req) + self.assertEquals(2, resp.status_int // 100) + for key in owner_headers: + self.assertTrue(key in resp.headers) + if __name__ == '__main__': unittest.main() diff --git a/test/unit/proxy/controllers/test_obj.py b/test/unit/proxy/controllers/test_obj.py index e4af789..cae62b0 100755 --- a/test/unit/proxy/controllers/test_obj.py +++ b/test/unit/proxy/controllers/test_obj.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright (c) 2010-2012 OpenStack, LLC. +# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,9 +15,28 @@ # limitations under the License. 
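# A brief sketch of the behavior the new test_swift_owner cases assert:
# privileged headers (X-Container-Read, X-Container-Write,
# X-Container-Sync-Key, X-Container-Sync-To, and the account temp-url keys)
# are only echoed back to the client when the auth middleware has set
# environ['swift_owner'] = True.  The filter below is illustrative only;
# 'req' stands for any swob/WSGI request object.
OWNER_HEADERS = ('x-container-read', 'x-container-write',
                 'x-container-sync-key', 'x-container-sync-to')

def visible_headers(req, resp_headers):
    # Owners see everything; other callers get the owner-only headers dropped.
    if req.environ.get('swift_owner'):
        return dict(resp_headers)
    return dict((k, v) for k, v in resp_headers.items()
                if k.lower() not in OWNER_HEADERS)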
import unittest +from contextlib import contextmanager +import mock + +import swift from swift.proxy import server as proxy_server -from test.unit import FakeRing, FakeMemcache +from test.unit import FakeRing, FakeMemcache, fake_http_connect + + +@contextmanager +def set_http_connect(*args, **kwargs): + old_connect = swift.proxy.controllers.base.http_connect + new_connect = fake_http_connect(*args, **kwargs) + swift.proxy.controllers.base.http_connect = new_connect + swift.proxy.controllers.obj.http_connect = new_connect + swift.proxy.controllers.account.http_connect = new_connect + swift.proxy.controllers.container.http_connect = new_connect + yield new_connect + swift.proxy.controllers.base.http_connect = old_connect + swift.proxy.controllers.obj.http_connect = old_connect + swift.proxy.controllers.account.http_connect = old_connect + swift.proxy.controllers.container.http_connect = old_connect class TestObjControllerWriteAffinity(unittest.TestCase): @@ -44,7 +63,8 @@ class TestObjControllerWriteAffinity(unittest.TestCase): def test_iter_nodes_local_first_moves_locals_first(self): controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') - self.app.write_affinity_is_local_fn = (lambda node: node['region'] == 1) + self.app.write_affinity_is_local_fn = ( + lambda node: node['region'] == 1) self.app.write_affinity_node_count = lambda ring: 4 all_nodes = self.app.object_ring.get_part_nodes(1) @@ -59,6 +79,44 @@ class TestObjControllerWriteAffinity(unittest.TestCase): # we don't skip any nodes self.assertEqual(sorted(all_nodes), sorted(local_first_nodes)) + def test_connect_put_node_timeout(self): + controller = proxy_server.ObjectController(self.app, 'a', 'c', 'o') + self.app.conn_timeout = 0.1 + with set_http_connect(200, slow_connect=True): + nodes = [dict(ip='', port='', device='')] + res = controller._connect_put_node(nodes, '', '', {}, ('', '')) + self.assertTrue(res is None) + + +class TestObjController(unittest.TestCase): + + def test_PUT_log_info(self): + # mock out enough to get to the area of the code we want to test + with mock.patch('swift.proxy.controllers.obj.check_object_creation', + mock.MagicMock(return_value=None)): + app = mock.MagicMock() + app.container_ring.get_nodes.return_value = (1, [2]) + app.object_ring.get_nodes.return_value = (1, [2]) + controller = proxy_server.ObjectController(app, 'a', 'c', 'o') + controller.container_info = mock.MagicMock(return_value={ + 'partition': 1, + 'nodes': [{}], + 'write_acl': None, + 'sync_key': None, + 'versions': None}) + # and now test that we add the header to log_info + req = swift.common.swob.Request.blank('/v1/a/c/o') + req.headers['x-copy-from'] = 'somewhere' + controller.PUT(req) + self.assertEquals( + req.environ.get('swift.log_info'), ['x-copy-from:somewhere']) + # and then check that we don't do that for originating POSTs + req = swift.common.swob.Request.blank('/v1/a/c/o') + req.method = 'POST' + req.headers['x-copy-from'] = 'elsewhere' + controller.PUT(req) + self.assertEquals(req.environ.get('swift.log_info'), None) + if __name__ == '__main__': unittest.main() diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 488f253..490e8fc 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1,19 +1,4 @@ -# Copyright (c) 2010-2013 OpenStack, LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Copyright (c) 2013 Red Hat, Inc. +# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -34,16 +19,17 @@ import logging import os import sys import unittest -from nose import SkipTest import urlparse -import signal +from nose import SkipTest from contextlib import contextmanager, nested, closing from gzip import GzipFile from shutil import rmtree +import gc import time from urllib import quote from hashlib import md5 from tempfile import mkdtemp +import weakref import mock from eventlet import sleep, spawn, wsgi, listen @@ -53,18 +39,18 @@ import gluster.swift.common.Glusterfs as gfs gfs.RUN_DIR = mkdtemp() from test.unit import connect_tcp, readuntil2crlfs, FakeLogger, \ - fake_http_connect, FakeRing, FakeMemcache -from gluster.swift.proxy.server import server as proxy_server -from gluster.swift.obj import server as object_server + fake_http_connect, FakeRing, FakeMemcache, DebugLogger +from gluster.swift.proxy import server as proxy_server from gluster.swift.account import server as account_server from gluster.swift.container import server as container_server +from gluster.swift.obj import server as object_server from swift.common import ring from swift.common.exceptions import ChunkReadTimeout, SegmentError from swift.common.constraints import MAX_META_NAME_LENGTH, \ MAX_META_VALUE_LENGTH, MAX_META_COUNT, MAX_META_OVERALL_SIZE, \ MAX_FILE_SIZE, MAX_ACCOUNT_NAME_LENGTH, MAX_CONTAINER_NAME_LENGTH from swift.common import utils -from swift.common.utils import mkdirs, normalize_timestamp, NullLogger +from swift.common.utils import mkdirs, normalize_timestamp from swift.common.wsgi import monkey_patch_mimetools from swift.proxy.controllers.obj import SegmentedIterable from swift.proxy.controllers.base import get_container_memcache_key, \ @@ -78,22 +64,15 @@ logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) STATIC_TIME = time.time() -_request_instances = 0 +_request_instances = weakref.WeakKeyDictionary() _test_coros = _test_servers = _test_sockets = _orig_container_listing_limit = \ _testdir = None def request_init(self, *args, **kwargs): - global _request_instances self._orig_init(*args, **kwargs) - _request_instances += 1 - -def request_del(self): - global _request_instances - if self._orig_del: - self._orig_del() - _request_instances -= 1 + _request_instances[self] = None def setup(): @@ -102,8 +81,6 @@ def setup(): _orig_container_listing_limit, _test_coros Request._orig_init = Request.__init__ Request.__init__ = request_init - Request._orig_del = getattr(Request, '__del__', None) - Request.__del__ = request_del monkey_patch_mimetools() # Since we're starting up a lot here, we're going to test more than # just chunked puts; we're also going to test parts of @@ -132,8 +109,8 @@ def setup(): obj2lis = listen(('localhost', 0)) _test_sockets = \ (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) - with closing(GzipFile(os.path.join(_testdir, 'account.ring.gz'), 'wb')) \ - as f: + account_ring_path = 
os.path.join(_testdir, 'account.ring.gz') + with closing(GzipFile(account_ring_path, 'wb')) as f: pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', 'port': acc1lis.getsockname()[1]}, @@ -143,8 +120,8 @@ def setup(): {'id': 1, 'zone': 1, 'device': 'a', 'ip': '127.0.0.1', 'port': acc2lis.getsockname()[1]}], 30), f) - with closing(GzipFile(os.path.join(_testdir, 'container.ring.gz'), 'wb')) \ - as f: + container_ring_path = os.path.join(_testdir, 'container.ring.gz') + with closing(GzipFile(container_ring_path, 'wb')) as f: pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', 'port': con1lis.getsockname()[1]}, @@ -154,8 +131,8 @@ def setup(): {'id': 1, 'zone': 1, 'device': 'a', 'ip': '127.0.0.1', 'port': con2lis.getsockname()[1]}], 30), f) - with closing(GzipFile(os.path.join(_testdir, 'object.ring.gz'), 'wb')) \ - as f: + object_ring_path = os.path.join(_testdir, 'object.ring.gz') + with closing(GzipFile(object_ring_path, 'wb')) as f: pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]], [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1', 'port': obj1lis.getsockname()[1]}, @@ -174,9 +151,6 @@ def setup(): obj2srv = object_server.ObjectController(conf) _test_servers = \ (prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv, obj2srv) - # Use DebugLogger() when trying to figure out what failed in the spawned - # servers. - from test.unit import DebugLogger nl = DebugLogger() prospa = spawn(wsgi.server, prolis, prosrv, nl) acc1spa = spawn(wsgi.server, acc1lis, acc1srv, nl) @@ -212,7 +186,8 @@ def setup(): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - assert headers[:len(exp)] == exp, "Expected '%s', encountered '%s'" % (exp, headers[:len(exp)]) + assert headers[:len(exp)] == exp, "Expected '%s', encountered '%s'" % ( + exp, headers[:len(exp)]) def teardown(): @@ -222,8 +197,6 @@ def teardown(): _orig_container_listing_limit rmtree(os.path.dirname(_testdir)) Request.__init__ = Request._orig_init - if Request._orig_del: - Request.__del__ = Request._orig_del def sortHeaderNames(headerNames): @@ -391,7 +364,7 @@ class TestController(unittest.TestCase): # 'container_count' changed from 0 to None cache_key = get_account_memcache_key(self.account) account_info = {'status': 404, - 'container_count': None, # internally keep None + 'container_count': None, # internally keep None 'total_object_count': None, 'bytes': None, 'meta': {}} @@ -459,8 +432,8 @@ class TestController(unittest.TestCase): with save_globals(): headers = {'x-container-read': self.read_acl, 'x-container-write': self.write_acl} - set_http_connect(200, # account_info is found - 200, headers=headers) # container_info is found + set_http_connect(200, # account_info is found + 200, headers=headers) # container_info is found ret = self.controller.container_info( self.account, self.container, self.request) self.check_container_info_return(ret) @@ -482,8 +455,8 @@ class TestController(unittest.TestCase): return True, True, 0 with save_globals(): - set_http_connect(503, 204, # account_info found - 504, 404, 404) # container_info 'NotFound' + set_http_connect(503, 204, # account_info found + 504, 404, 404) # container_info 'NotFound' ret = self.controller.container_info( self.account, self.container, self.request) self.check_container_info_return(ret, True) @@ -499,7 +472,7 @@ class TestController(unittest.TestCase): self.account, self.container, self.request) self.check_container_info_return(ret, 
True) - set_http_connect(503, 404, 404)# account_info 'NotFound' + set_http_connect(503, 404, 404) # account_info 'NotFound' ret = self.controller.container_info( self.account, self.container, self.request) self.check_container_info_return(ret, True) @@ -720,41 +693,31 @@ class TestObjectController(unittest.TestCase): self.assertEquals(res.status_int, expected) def test_GET_newest_large_file(self): - calls = [0] - - def handler(_junk1, _junk2): - calls[0] += 1 - - old_handler = signal.signal(signal.SIGPIPE, handler) - try: - prolis = _test_sockets[0] - prosrv = _test_servers[0] - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - obj = 'a' * (1024 * 1024) - path = '/v1/a/c/o.large' - fd.write('PUT %s HTTP/1.1\r\n' - 'Host: localhost\r\n' - 'Connection: close\r\n' - 'X-Storage-Token: t\r\n' - 'Content-Length: %s\r\n' - 'Content-Type: application/octet-stream\r\n' - '\r\n%s' % (path, str(len(obj)), obj)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - req = Request.blank(path, - environ={'REQUEST_METHOD': 'GET'}, - headers={'Content-Type': - 'application/octet-stream', - 'X-Newest': 'true'}) - res = req.get_response(prosrv) - self.assertEqual(res.status_int, 200) - self.assertEqual(res.body, obj) - self.assertEqual(calls[0], 0) - finally: - signal.signal(signal.SIGPIPE, old_handler) + prolis = _test_sockets[0] + prosrv = _test_servers[0] + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + obj = 'a' * (1024 * 1024) + path = '/v1/a/c/o.large' + fd.write('PUT %s HTTP/1.1\r\n' + 'Host: localhost\r\n' + 'Connection: close\r\n' + 'X-Storage-Token: t\r\n' + 'Content-Length: %s\r\n' + 'Content-Type: application/octet-stream\r\n' + '\r\n%s' % (path, str(len(obj)), obj)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + req = Request.blank(path, + environ={'REQUEST_METHOD': 'GET'}, + headers={'Content-Type': + 'application/octet-stream', + 'X-Newest': 'true'}) + res = req.get_response(prosrv) + self.assertEqual(res.status_int, 200) + self.assertEqual(res.body, obj) def test_PUT_expect_header_zero_content_length(self): test_errors = [] @@ -860,8 +823,8 @@ class TestObjectController(unittest.TestCase): proxy_server.ObjectController(self.app, 'a', 'c', 'o.jpg') controller.error_limit( self.app.object_ring.get_part_nodes(1)[0], 'test') - set_http_connect(200, 200, # account, container - 201, 201, 201, # 3 working backends + set_http_connect(200, 200, # account, container + 201, 201, 201, # 3 working backends give_connect=test_connect) req = Request.blank('/a/c/o.jpg', {}) req.content_length = 1 @@ -874,7 +837,7 @@ class TestObjectController(unittest.TestCase): # this is kind of a hokey test, but in FakeRing, the port is even when # the region is 0, and odd when the region is 1, so this test asserts # that we wrote to 2 nodes in region 0, then went to 1 non-r0 node. 
- self.assertEqual(0, written_to[0][1] % 2) # it's (ip, port, device) + self.assertEqual(0, written_to[0][1] % 2) # it's (ip, port, device) self.assertEqual(0, written_to[1][1] % 2) self.assertNotEqual(0, written_to[2][1] % 2) @@ -1085,7 +1048,7 @@ class TestObjectController(unittest.TestCase): response_bodies = ( '', # HEAD /a '', # HEAD /a/c - '', # GET manifest + simplejson.dumps([]), # GET manifest simplejson.dumps([])) # GET empty listing with save_globals(): @@ -1279,8 +1242,8 @@ class TestObjectController(unittest.TestCase): "content_type": "application/octet-stream"}] json_listing = simplejson.dumps(listing) response_bodies = ( - '', # HEAD /a - '', # HEAD /a/c + '', # HEAD /a + '', # HEAD /a/c json_listing) # GET manifest with save_globals(): controller = proxy_server.ObjectController( @@ -1432,7 +1395,7 @@ class TestObjectController(unittest.TestCase): {"hash": "8681fb3ada2715c8754706ee5f23d4f8", "last_modified": "2012-11-08T04:05:37.846710", "bytes": 4, - "name": "/d2/sub_manifest", + "name": u"/d2/sub_manifest \u2661", "sub_slo": True, "content_type": "application/octet-stream"}, {"hash": "419af6d362a14b7a789ba1c7e772bbae", "last_modified": "2012-11-08T04:05:37.866820", @@ -1455,8 +1418,8 @@ class TestObjectController(unittest.TestCase): '', # HEAD /a '', # HEAD /a/c simplejson.dumps(listing), # GET manifest - 'Aa', # GET seg01 simplejson.dumps(sub_listing), # GET sub_manifest + 'Aa', # GET seg01 'Bb', # GET seg02 'Cc', # GET seg03 'Dd') # GET seg04 @@ -1478,12 +1441,12 @@ class TestObjectController(unittest.TestCase): 200, # HEAD /a 200, # HEAD /a/c 200, # GET listing1 - 200, # GET seg01 200, # GET sub listing1 + 200, # GET seg01 200, # GET seg02 200, # GET seg03 200, # GET seg04 - headers=[{}, {}, slob_headers, {}, slob_headers, {}, {}, {}], + headers=[{}, {}, slob_headers, slob_headers, {}, {}, {}, {}], body_iter=response_bodies, give_connect=capture_requested_paths) req = Request.blank('/a/c/manifest') @@ -1496,7 +1459,8 @@ class TestObjectController(unittest.TestCase): requested, [['HEAD', '/a', {}], ['HEAD', '/a/c', {}], - ['GET', '/a/c/manifest', {}]]) + ['GET', '/a/c/manifest', {}], + ['GET', '/a/d2/sub_manifest \xe2\x99\xa1', {}]]) # iterating over body will retrieve manifest and sub manifest's # objects self.assertEqual(resp.body, 'AaBbCcDd') @@ -1505,12 +1469,116 @@ class TestObjectController(unittest.TestCase): [['HEAD', '/a', {}], ['HEAD', '/a/c', {}], ['GET', '/a/c/manifest', {}], + ['GET', '/a/d2/sub_manifest \xe2\x99\xa1', {}], ['GET', '/a/d1/seg01', {}], - ['GET', '/a/d2/sub_manifest', {}], ['GET', '/a/d1/seg02', {}], ['GET', '/a/d2/seg03', {}], ['GET', '/a/d1/seg04', {}]]) + def test_GET_nested_manifest_slo_with_range(self): + """ + Original whole slo is Aa1234Bb where 1234 is a sub-manifests. 
I'm + pulling out 34Bb + """ + listing = [{"hash": "98568d540134639be4655198a36614a4", # Aa + "last_modified": "2012-11-08T04:05:37.866820", + "bytes": 2, + "name": "/d1/seg01", + "content_type": "application/octet-stream"}, + {"hash": "7b4b0ffa275d404bdc2fc6384916714f", # SubManifest1 + "last_modified": "2012-11-08T04:05:37.866820", + "bytes": 4, "sub_slo": True, + "name": "/d2/subManifest01", + "content_type": "application/octet-stream"}, + {"hash": "d526f1c8ef6c1e4e980e2b8471352d23", # Bb + "last_modified": "2012-11-08T04:05:37.866820", + "bytes": 2, + "name": "/d1/seg02", + "content_type": "application/octet-stream"}] + + sublisting = [{"hash": "c20ad4d76fe97759aa27a0c99bff6710", # 12 + "last_modified": "2012-11-08T04:05:37.866820", + "bytes": 2, + "name": "/d2/subSeg01", + "content_type": "application/octet-stream"}, + {"hash": "e369853df766fa44e1ed0ff613f563bd", # 34 + "last_modified": "2012-11-08T04:05:37.866820", + "bytes": 2, + "name": "/d2/subSeg02", + "content_type": "application/octet-stream"}] + + response_bodies = ( + '', # HEAD /a + '', # HEAD /a/c + simplejson.dumps(listing)[1:1], # GET incomplete manifest + simplejson.dumps(listing), # GET complete manifest + simplejson.dumps(sublisting), # GET complete submanifest + '34', # GET subseg02 + 'Bb') # GET seg02 + etag_iter = ['', '', '', '', '', + 'e369853df766fa44e1ed0ff613f563bd', # subSeg02 + 'd526f1c8ef6c1e4e980e2b8471352d23'] # seg02 + headers = [{}, {}, + {'X-Static-Large-Object': 'True', + 'content-type': 'text/html; swift_bytes=4'}, + {'X-Static-Large-Object': 'True', + 'content-type': 'text/html; swift_bytes=4'}, + {'X-Static-Large-Object': 'True', + 'content-type': 'text/html; swift_bytes=4'}, + {}, {}] + self.assertTrue(len(response_bodies) == len(etag_iter) == len(headers)) + with save_globals(): + controller = proxy_server.ObjectController( + self.app, 'a', 'c', 'manifest') + + requested = [] + + def capture_requested_paths(ipaddr, port, device, partition, + method, path, headers=None, + query_string=None): + qs_dict = dict(urlparse.parse_qsl(query_string or '')) + requested.append([method, path, qs_dict]) + + set_http_connect( + 200, # HEAD /a + 200, # HEAD /a/c + 206, # GET incomplete listing + 200, # GET complete listing + 200, # GET complete sublisting + 200, # GET subSeg02 + 200, # GET seg02 + headers=headers, + etags=etag_iter, + body_iter=response_bodies, + give_connect=capture_requested_paths) + + req = Request.blank('/a/c/manifest') + req.range = 'bytes=4-7' + resp = controller.GET(req) + got_called = [False, ] + + def fake_start_response(*args, **kwargs): + got_called[0] = True + self.assertTrue(args[0].startswith('206')) + + app_iter = resp(req.environ, fake_start_response) + resp_body = ''.join(app_iter) # read in entire resp + self.assertEqual(resp.status_int, 206) + self.assertEqual(resp_body, '34Bb') + self.assertTrue(got_called[0]) + self.assertEqual(resp.content_length, 4) + self.assertEqual(resp.content_type, 'text/html') + + self.assertEqual( + requested, + [['HEAD', '/a', {}], + ['HEAD', '/a/c', {}], + ['GET', '/a/c/manifest', {}], # for incomplete manifest + ['GET', '/a/c/manifest', {}], + ['GET', '/a/d2/subManifest01', {}], + ['GET', '/a/d2/subSeg02', {}], + ['GET', '/a/d1/seg02', {}]]) + def test_GET_bad_404_manifest_slo(self): listing = [{"hash": "98568d540134639be4655198a36614a4", "last_modified": "2012-11-08T04:05:37.866820", @@ -1777,16 +1845,14 @@ class TestObjectController(unittest.TestCase): def test_POST(self): with save_globals(): self.app.object_post_as_copy = False - controller = 
proxy_server.ObjectController(self.app, 'account', - 'container', 'object') def test_status_map(statuses, expected): set_http_connect(*statuses) self.app.memcache.store = {} - req = Request.blank('/a/c/o', {}, + req = Request.blank('/v1/a/c/o', {}, method='POST', headers={'Content-Type': 'foo/bar'}) self.app.update_request(req) - res = controller.POST(req) + res = req.get_response(self.app) expected = str(expected) self.assertEquals(res.status[:len(expected)], expected) test_status_map((200, 200, 202, 202, 202), 202) @@ -1799,16 +1865,13 @@ class TestObjectController(unittest.TestCase): def test_POST_as_copy(self): with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - def test_status_map(statuses, expected): set_http_connect(*statuses) self.app.memcache.store = {} - req = Request.blank('/a/c/o', {}, + req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, headers={'Content-Type': 'foo/bar'}) self.app.update_request(req) - res = controller.POST(req) + res = req.get_response(self.app) expected = str(expected) self.assertEquals(res.status[:len(expected)], expected) test_status_map((200, 200, 200, 200, 200, 202, 202, 202), 202) @@ -1821,15 +1884,12 @@ class TestObjectController(unittest.TestCase): def test_DELETE(self): with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - def test_status_map(statuses, expected): set_http_connect(*statuses) self.app.memcache.store = {} - req = Request.blank('/a/c/o', {}) + req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'DELETE'}) self.app.update_request(req) - res = controller.DELETE(req) + res = req.get_response(self.app) self.assertEquals(res.status[:len(str(expected))], str(expected)) test_status_map((200, 200, 204, 204, 204), 204) @@ -1841,15 +1901,12 @@ class TestObjectController(unittest.TestCase): def test_HEAD(self): with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - def test_status_map(statuses, expected): set_http_connect(*statuses) self.app.memcache.store = {} - req = Request.blank('/a/c/o', {}) + req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'}) self.app.update_request(req) - res = controller.HEAD(req) + res = req.get_response(self.app) self.assertEquals(res.status[:len(str(expected))], str(expected)) if expected < 400: @@ -1867,16 +1924,14 @@ class TestObjectController(unittest.TestCase): def test_HEAD_newest(self): with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - def test_status_map(statuses, expected, timestamps, expected_timestamp): set_http_connect(*statuses, timestamps=timestamps) self.app.memcache.store = {} - req = Request.blank('/a/c/o', {}, headers={'x-newest': 'true'}) + req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'}, + headers={'x-newest': 'true'}) self.app.update_request(req) - res = controller.HEAD(req) + res = req.get_response(self.app) self.assertEquals(res.status[:len(str(expected))], str(expected)) self.assertEquals(res.headers.get('last-modified'), @@ -1898,16 +1953,14 @@ class TestObjectController(unittest.TestCase): def test_GET_newest(self): with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - def test_status_map(statuses, expected, timestamps, expected_timestamp): set_http_connect(*statuses, timestamps=timestamps) self.app.memcache.store = {} - req = Request.blank('/a/c/o', {}, 
headers={'x-newest': 'true'}) + req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'GET'}, + headers={'x-newest': 'true'}) self.app.update_request(req) - res = controller.GET(req) + res = req.get_response(self.app) self.assertEquals(res.status[:len(str(expected))], str(expected)) self.assertEquals(res.headers.get('last-modified'), @@ -1927,16 +1980,13 @@ class TestObjectController(unittest.TestCase): None, '1'), '1') with save_globals(): - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') - def test_status_map(statuses, expected, timestamps, expected_timestamp): set_http_connect(*statuses, timestamps=timestamps) self.app.memcache.store = {} - req = Request.blank('/a/c/o', {}) + req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'}) self.app.update_request(req) - res = controller.HEAD(req) + res = req.get_response(self.app) self.assertEquals(res.status[:len(str(expected))], str(expected)) self.assertEquals(res.headers.get('last-modified'), @@ -1957,123 +2007,118 @@ class TestObjectController(unittest.TestCase): with save_globals(): limit = MAX_META_VALUE_LENGTH self.app.object_post_as_copy = False - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') + proxy_server.ObjectController(self.app, 'account', + 'container', 'object') set_http_connect(200, 200, 202, 202, 202) # acct cont obj obj obj - req = Request.blank('/a/c/o', {}, headers={ - 'Content-Type': 'foo/bar', - 'X-Object-Meta-Foo': 'x' * limit}) + req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, + headers={'Content-Type': 'foo/bar', + 'X-Object-Meta-Foo': 'x' * limit}) self.app.update_request(req) - res = controller.POST(req) + res = req.get_response(self.app) self.assertEquals(res.status_int, 202) set_http_connect(202, 202, 202) - req = Request.blank('/a/c/o', {}, headers={ - 'Content-Type': 'foo/bar', - 'X-Object-Meta-Foo': 'x' * (limit + 1)}) + req = Request.blank( + '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, + headers={'Content-Type': 'foo/bar', + 'X-Object-Meta-Foo': 'x' * (limit + 1)}) self.app.update_request(req) - res = controller.POST(req) + res = req.get_response(self.app) self.assertEquals(res.status_int, 400) def test_POST_as_copy_meta_val_len(self): with save_globals(): limit = MAX_META_VALUE_LENGTH - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') set_http_connect(200, 200, 200, 200, 200, 202, 202, 202) # acct cont objc objc objc obj obj obj - req = Request.blank('/a/c/o', {}, headers={ - 'Content-Type': 'foo/bar', - 'X-Object-Meta-Foo': 'x' * limit}) + req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, + headers={'Content-Type': 'foo/bar', + 'X-Object-Meta-Foo': 'x' * limit}) self.app.update_request(req) - res = controller.POST(req) + res = req.get_response(self.app) self.assertEquals(res.status_int, 202) set_http_connect(202, 202, 202) - req = Request.blank('/a/c/o', {}, headers={ - 'Content-Type': 'foo/bar', - 'X-Object-Meta-Foo': 'x' * (limit + 1)}) + req = Request.blank( + '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, + headers={'Content-Type': 'foo/bar', + 'X-Object-Meta-Foo': 'x' * (limit + 1)}) self.app.update_request(req) - res = controller.POST(req) + res = req.get_response(self.app) self.assertEquals(res.status_int, 400) def test_POST_meta_key_len(self): with save_globals(): limit = MAX_META_NAME_LENGTH self.app.object_post_as_copy = False - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') set_http_connect(200, 200, 202, 202, 202) # 
acct cont obj obj obj - req = Request.blank('/a/c/o', {}, headers={ - 'Content-Type': 'foo/bar', - ('X-Object-Meta-' + 'x' * limit): 'x'}) + req = Request.blank( + '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, + headers={'Content-Type': 'foo/bar', + ('X-Object-Meta-' + 'x' * limit): 'x'}) self.app.update_request(req) - res = controller.POST(req) + res = req.get_response(self.app) self.assertEquals(res.status_int, 202) set_http_connect(202, 202, 202) - req = Request.blank('/a/c/o', {}, headers={ - 'Content-Type': 'foo/bar', - ('X-Object-Meta-' + 'x' * (limit + 1)): 'x'}) + req = Request.blank( + '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, + headers={'Content-Type': 'foo/bar', + ('X-Object-Meta-' + 'x' * (limit + 1)): 'x'}) self.app.update_request(req) - res = controller.POST(req) + res = req.get_response(self.app) self.assertEquals(res.status_int, 400) def test_POST_as_copy_meta_key_len(self): with save_globals(): limit = MAX_META_NAME_LENGTH - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') set_http_connect(200, 200, 200, 200, 200, 202, 202, 202) # acct cont objc objc objc obj obj obj - req = Request.blank('/a/c/o', {}, headers={ - 'Content-Type': 'foo/bar', - ('X-Object-Meta-' + 'x' * limit): 'x'}) + req = Request.blank( + '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, + headers={'Content-Type': 'foo/bar', + ('X-Object-Meta-' + 'x' * limit): 'x'}) self.app.update_request(req) - res = controller.POST(req) + res = req.get_response(self.app) self.assertEquals(res.status_int, 202) set_http_connect(202, 202, 202) - req = Request.blank('/a/c/o', {}, headers={ - 'Content-Type': 'foo/bar', - ('X-Object-Meta-' + 'x' * (limit + 1)): 'x'}) + req = Request.blank( + '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, + headers={'Content-Type': 'foo/bar', + ('X-Object-Meta-' + 'x' * (limit + 1)): 'x'}) self.app.update_request(req) - res = controller.POST(req) + res = req.get_response(self.app) self.assertEquals(res.status_int, 400) def test_POST_meta_count(self): with save_globals(): limit = MAX_META_COUNT - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') headers = dict( (('X-Object-Meta-' + str(i), 'a') for i in xrange(limit + 1))) headers.update({'Content-Type': 'foo/bar'}) set_http_connect(202, 202, 202) - req = Request.blank('/a/c/o', {}, headers=headers) + req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, + headers=headers) self.app.update_request(req) - res = controller.POST(req) + res = req.get_response(self.app) self.assertEquals(res.status_int, 400) def test_POST_meta_size(self): with save_globals(): limit = MAX_META_OVERALL_SIZE - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') count = limit / 256 # enough to cause the limit to be reached headers = dict( (('X-Object-Meta-' + str(i), 'a' * 256) for i in xrange(count + 1))) headers.update({'Content-Type': 'foo/bar'}) set_http_connect(202, 202, 202) - req = Request.blank('/a/c/o', {}, headers=headers) + req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, + headers=headers) self.app.update_request(req) - res = controller.POST(req) + res = req.get_response(self.app) self.assertEquals(res.status_int, 400) def test_PUT_not_autodetect_content_type(self): with save_globals(): - controller = proxy_server.ObjectController( - self.app, 'a', 'c', 'o.html') - headers = {'Content-Type': 'something/right', 'Content-Length': 0} it_worked = [] @@ -2086,17 +2131,15 @@ class TestObjectController(unittest.TestCase): set_http_connect(204, 204, 201, 
201, 201, give_connect=verify_content_type) - req = Request.blank('/a/c/o.html', {}, headers=headers) + req = Request.blank('/v1/a/c/o.html', {'REQUEST_METHOD': 'PUT'}, + headers=headers) self.app.update_request(req) - res = controller.PUT(req) + req.get_response(self.app) self.assertNotEquals(it_worked, []) self.assertTrue(all(it_worked)) def test_PUT_autodetect_content_type(self): with save_globals(): - controller = proxy_server.ObjectController( - self.app, 'a', 'c', 'o.html') - headers = {'Content-Type': 'something/wrong', 'Content-Length': 0, 'X-Detect-Content-Type': 'True'} it_worked = [] @@ -2110,9 +2153,10 @@ class TestObjectController(unittest.TestCase): set_http_connect(204, 204, 201, 201, 201, give_connect=verify_content_type) - req = Request.blank('/a/c/o.html', {}, headers=headers) + req = Request.blank('/v1/a/c/o.html', {'REQUEST_METHOD': 'PUT'}, + headers=headers) self.app.update_request(req) - res = controller.PUT(req) + req.get_response(self.app) self.assertNotEquals(it_worked, []) self.assertTrue(all(it_worked)) @@ -2143,20 +2187,18 @@ class TestObjectController(unittest.TestCase): return ' ' return '' - req = Request.blank('/a/c/o', + req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()}, headers={'Content-Length': '4', 'Content-Type': 'text/plain'}) self.app.update_request(req) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') set_http_connect(200, 200, 201, 201, 201) # acct cont obj obj obj - resp = controller.PUT(req) + resp = req.get_response(self.app) self.assertEquals(resp.status_int, 201) self.app.client_timeout = 0.1 - req = Request.blank('/a/c/o', + req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()}, headers={'Content-Length': '4', @@ -2164,7 +2206,7 @@ class TestObjectController(unittest.TestCase): self.app.update_request(req) set_http_connect(201, 201, 201) # obj obj obj - resp = controller.PUT(req) + resp = req.get_response(self.app) self.assertEquals(resp.status_int, 408) def test_client_disconnect(self): @@ -2190,17 +2232,15 @@ class TestObjectController(unittest.TestCase): def read(self, size=-1): raise Exception('Disconnected') - req = Request.blank('/a/c/o', + req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT', 'wsgi.input': SlowBody()}, headers={'Content-Length': '4', 'Content-Type': 'text/plain'}) self.app.update_request(req) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') set_http_connect(200, 200, 201, 201, 201) # acct cont obj obj obj - resp = controller.PUT(req) + resp = req.get_response(self.app) self.assertEquals(resp.status_int, 499) def test_node_read_timeout(self): @@ -2217,13 +2257,11 @@ class TestObjectController(unittest.TestCase): for dev in self.app.object_ring.devs.values(): dev['ip'] = '127.0.0.1' dev['port'] = 1 - req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'GET'}) + req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'}) self.app.update_request(req) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') set_http_connect(200, 200, 200, slow=True) req.sent_size = 0 - resp = controller.GET(req) + resp = req.get_response(self.app) got_exc = False try: resp.body @@ -2232,7 +2270,7 @@ class TestObjectController(unittest.TestCase): self.assert_(not got_exc) self.app.node_timeout = 0.1 set_http_connect(200, 200, 200, slow=True) - resp = controller.GET(req) + resp = req.get_response(self.app) got_exc = 
False try: resp.body @@ -2254,26 +2292,24 @@ class TestObjectController(unittest.TestCase): for dev in self.app.object_ring.devs.values(): dev['ip'] = '127.0.0.1' dev['port'] = 1 - req = Request.blank('/a/c/o', + req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '4', 'Content-Type': 'text/plain'}, body=' ') self.app.update_request(req) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') set_http_connect(200, 200, 201, 201, 201, slow=True) - resp = controller.PUT(req) + resp = req.get_response(self.app) self.assertEquals(resp.status_int, 201) self.app.node_timeout = 0.1 set_http_connect(201, 201, 201, slow=True) - req = Request.blank('/a/c/o', + req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'Content-Length': '4', 'Content-Type': 'text/plain'}, body=' ') self.app.update_request(req) - resp = controller.PUT(req) + resp = req.get_response(self.app) self.assertEquals(resp.status_int, 503) def test_iter_nodes(self): @@ -2422,20 +2458,18 @@ class TestObjectController(unittest.TestCase): def test_proxy_passes_content_type(self): with save_globals(): - req = Request.blank('/a/c/o', environ={'REQUEST_METHOD': 'GET'}) + req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'}) self.app.update_request(req) - controller = proxy_server.ObjectController(self.app, 'account', - 'container', 'object') set_http_connect(200, 200, 200) - resp = controller.GET(req) + resp = req.get_response(self.app) self.assertEquals(resp.status_int, 200) self.assertEquals(resp.content_type, 'x-application/test') set_http_connect(200, 200, 200) - resp = controller.GET(req) + resp = req.get_response(self.app) self.assertEquals(resp.status_int, 200) self.assertEquals(resp.content_length, 0) set_http_connect(200, 200, 200, slow=True) - resp = controller.GET(req) + resp = req.get_response(self.app) self.assertEquals(resp.status_int, 200) self.assertEquals(resp.content_length, 4) @@ -2865,7 +2899,7 @@ class TestObjectController(unittest.TestCase): copy_from_obj_body = LargeResponseBody() set_http_connect(200, 200, 200, 200, 200, 201, 201, 201, - body=copy_from_obj_body) + body=copy_from_obj_body) self.app.memcache.store = {} resp = controller.PUT(req) self.assertEquals(resp.status_int, 413) @@ -2995,7 +3029,7 @@ class TestObjectController(unittest.TestCase): copy_from_obj_body = LargeResponseBody() set_http_connect(200, 200, 200, 200, 200, 201, 201, 201, - body=copy_from_obj_body) + body=copy_from_obj_body) self.app.memcache.store = {} resp = controller.COPY(req) self.assertEquals(resp.status_int, 413) @@ -3347,16 +3381,24 @@ class TestObjectController(unittest.TestCase): body = fd.read() self.assertEquals(body, 'oh hai123456789abcdef') - def test_version_manifest(self): + def test_version_manifest(self, oc='versions', vc='vers', o='name'): versions_to_create = 3 # Create a container for our versioned object testing (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) = _test_sockets sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('PUT /v1/a/versions HTTP/1.1\r\nHost: localhost\r\n' + pre = quote('%03x' % len(o)) + osub = '%s.sub' % o + presub = quote('%03x' % len(osub)) + osub = quote(osub) + presub = quote(presub) + oc = quote(oc) + vc = quote(vc) + fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\nX-Versions-Location: vers\r\n\r\n') + 'Content-Length: 0\r\nX-Versions-Location: 
%s\r\n\r\n' + % (oc, vc)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' @@ -3364,19 +3406,19 @@ class TestObjectController(unittest.TestCase): # check that the header was set sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('GET /v1/a/versions HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n\r\n\r\n') + fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n\r\n\r\n' % oc) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response self.assertEquals(headers[:len(exp)], exp) - self.assert_('X-Versions-Location: vers' in headers) + self.assert_('X-Versions-Location: %s' % vc in headers) # make the container for the object versions sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('PUT /v1/a/vers HTTP/1.1\r\nHost: localhost\r\n' + fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n') + 'Content-Length: 0\r\n\r\n' % vc) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' @@ -3384,10 +3426,10 @@ class TestObjectController(unittest.TestCase): # Create the versioned file sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: ' + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' 'localhost\r\nConnection: close\r\nX-Storage-Token: ' 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n' - 'X-Object-Meta-Foo: barbaz\r\n\r\n00000\r\n') + 'X-Object-Meta-Foo: barbaz\r\n\r\n00000\r\n' % (oc, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' @@ -3397,10 +3439,10 @@ class TestObjectController(unittest.TestCase): sleep(.01) # guarantee that the timestamp changes sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: ' + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' 'localhost\r\nConnection: close\r\nX-Storage-Token: ' 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish%s' - '\r\n\r\n%05d\r\n' % (segment, segment)) + '\r\n\r\n%05d\r\n' % (oc, o, segment, segment)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' @@ -3408,9 +3450,9 @@ class TestObjectController(unittest.TestCase): # Ensure retrieving the manifest file gets the latest version sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('GET /v1/a/versions/name HTTP/1.1\r\nHost: ' + fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: ' 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n' - '\r\n') + '\r\n' % (oc, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' @@ -3422,8 +3464,9 @@ class TestObjectController(unittest.TestCase): # Ensure we have the right number of versions saved sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('GET /v1/a/vers?prefix=004name/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n') + fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' + % (vc, pre, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' @@ -3434,18 +3477,19 @@ class TestObjectController(unittest.TestCase): # copy a version and make sure the version info is stripped sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('COPY /v1/a/versions/name 
HTTP/1.1\r\nHost: ' + fd.write('COPY /v1/a/%s/%s HTTP/1.1\r\nHost: ' 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\nDestination: versions/copied_name\r\n' - 'Content-Length: 0\r\n\r\n') + 't\r\nDestination: %s/copied_name\r\n' + 'Content-Length: 0\r\n\r\n' % (oc, o, oc)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response to the COPY self.assertEquals(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('GET /v1/a/versions/copied_name HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n') + fd.write('GET /v1/a/%s/copied_name HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' + % oc) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' @@ -3455,18 +3499,19 @@ class TestObjectController(unittest.TestCase): # post and make sure it's updated sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('POST /v1/a/versions/name HTTP/1.1\r\nHost: ' + fd.write('POST /v1/a/%s/%s HTTP/1.1\r\nHost: ' 'localhost\r\nConnection: close\r\nX-Auth-Token: ' 't\r\nContent-Type: foo/bar\r\nContent-Length: 0\r\n' - 'X-Object-Meta-Bar: foo\r\n\r\n') + 'X-Object-Meta-Bar: foo\r\n\r\n' % (oc, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response to the POST self.assertEquals(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('GET /v1/a/versions/name HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n') + fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' + % (oc, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' @@ -3479,8 +3524,9 @@ class TestObjectController(unittest.TestCase): for segment in xrange(versions_to_create - 1, 0, -1): sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('DELETE /v1/a/versions/name HTTP/1.1\r\nHost: localhost\r' - '\nConnection: close\r\nX-Storage-Token: t\r\n\r\n') + fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r' + '\nConnection: close\r\nX-Storage-Token: t\r\n\r\n' + % (oc, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response @@ -3488,8 +3534,9 @@ class TestObjectController(unittest.TestCase): # Ensure retrieving the manifest file gets the latest version sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('GET /v1/a/versions/name HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n\r\n') + fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Auth-Token: t\r\n\r\n' + % (oc, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' @@ -3501,9 +3548,9 @@ class TestObjectController(unittest.TestCase): # Ensure we have the right number of versions saved sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('GET /v1/a/vers?prefix=004name/ HTTP/1.1\r\nHost: ' + fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r' - '\n') + '\n' % (vc, pre, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response @@ -3515,8 +3562,9 @@ class TestObjectController(unittest.TestCase): # Ensure we have no saved versions sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - 
fd.write('GET /v1/a/vers?prefix=004name/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n') + fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' + % (vc, pre, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 204 No Content' @@ -3524,8 +3572,8 @@ class TestObjectController(unittest.TestCase): # delete the last verision sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('DELETE /v1/a/versions/name HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n\r\n') + fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % (oc, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response @@ -3533,8 +3581,9 @@ class TestObjectController(unittest.TestCase): # Ensure it's all gone sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('GET /v1/a/versions/name HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n') + fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' + % (oc, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 404' @@ -3543,10 +3592,11 @@ class TestObjectController(unittest.TestCase): # make sure manifest files don't get versioned sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('PUT /v1/a/versions/name HTTP/1.1\r\nHost: ' + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' 'localhost\r\nConnection: close\r\nX-Storage-Token: ' 't\r\nContent-Length: 0\r\nContent-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\nX-Object-Manifest: vers/foo_\r\n\r\n') + 'Foo: barbaz\r\nX-Object-Manifest: %s/foo_\r\n\r\n' + % (oc, vc, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' @@ -3554,55 +3604,68 @@ class TestObjectController(unittest.TestCase): # Ensure we have no saved versions sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('GET /v1/a/vers?prefix=004name/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n') + fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' + % (vc, pre, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 204 No Content' self.assertEquals(headers[:len(exp)], exp) - # DELETE v1/a/c/dir shouldn't delete v1/a/c/dir/sub versions + # DELETE v1/a/c/obj shouldn't delete v1/a/c/obj/sub versions + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n' + 'Foo: barbaz\r\n\r\n00000\r\n' % (oc, o)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEquals(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('PUT /v1/a/versions/dir HTTP/1.1\r\nHost: ' + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 0\r\nContent-Type: application/directory\r\n\r\n') + 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n' + 'Foo: barbaz\r\n\r\n00001\r\n' % (oc, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' self.assertEquals(headers[:len(exp)], exp) 
sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('PUT /v1/a/versions/dir/sub HTTP/1.1\r\nHost: ' + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' 'localhost\r\nConnection: close\r\nX-Storage-Token: ' 't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\n\r\nsub1\r\n') + 'Foo: barbaz\r\n\r\nsub1\r\n' % (oc, osub)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' self.assertEquals(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('PUT /v1/a/versions/dir/sub HTTP/1.1\r\nHost: ' + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' 'localhost\r\nConnection: close\r\nX-Storage-Token: ' 't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\n\r\nsub2\r\n') + 'Foo: barbaz\r\n\r\nsub2\r\n' % (oc, osub)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' self.assertEquals(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('DELETE /v1/a/versions/dir HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n\r\n') + fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % (oc, o)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response self.assertEquals(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('GET /v1/a/vers?prefix=007dir/sub/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n') + fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' + % (vc, presub, osub)) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response @@ -3614,9 +3677,9 @@ class TestObjectController(unittest.TestCase): # Check for when the versions target container doesn't exist sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('PUT /v1/a/whoops HTTP/1.1\r\nHost: localhost\r\n' + fd.write('PUT /v1/a/%swhoops HTTP/1.1\r\nHost: localhost\r\n' 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\nX-Versions-Location: none\r\n\r\n') + 'Content-Length: 0\r\nX-Versions-Location: none\r\n\r\n' % oc) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' @@ -3624,9 +3687,9 @@ class TestObjectController(unittest.TestCase): # Create the versioned file sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('PUT /v1/a/whoops/foo HTTP/1.1\r\nHost: ' + fd.write('PUT /v1/a/%swhoops/foo HTTP/1.1\r\nHost: ' 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\n\r\n00000\r\n') + 't\r\nContent-Length: 5\r\n\r\n00000\r\n' % oc) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' @@ -3634,9 +3697,9 @@ class TestObjectController(unittest.TestCase): # Create another version sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() - fd.write('PUT /v1/a/whoops/foo HTTP/1.1\r\nHost: ' + fd.write('PUT /v1/a/%swhoops/foo HTTP/1.1\r\nHost: ' 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\n\r\n00001\r\n') + 't\r\nContent-Length: 5\r\n\r\n00001\r\n' % oc) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 412' @@ -3644,13 +3707,102 @@ class TestObjectController(unittest.TestCase): # Delete the object sock = connect_tcp(('localhost', 
prolis.getsockname()[1])) fd = sock.makefile() - fd.write('DELETE /v1/a/whoops/foo HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n\r\n') + fd.write('DELETE /v1/a/%swhoops/foo HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % oc) fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx response self.assertEquals(headers[:len(exp)], exp) + def test_version_manifest_utf8(self): + oc = '0_oc_non_ascii\xc2\xa3' + vc = '0_vc_non_ascii\xc2\xa3' + o = '0_o_non_ascii\xc2\xa3' + self.test_version_manifest(oc, vc, o) + + def test_version_manifest_utf8_container(self): + oc = '1_oc_non_ascii\xc2\xa3' + vc = '1_vc_ascii' + o = '1_o_ascii' + self.test_version_manifest(oc, vc, o) + + def test_version_manifest_utf8_version_container(self): + oc = '2_oc_ascii' + vc = '2_vc_non_ascii\xc2\xa3' + o = '2_o_ascii' + self.test_version_manifest(oc, vc, o) + + def test_version_manifest_utf8_containers(self): + oc = '3_oc_non_ascii\xc2\xa3' + vc = '3_vc_non_ascii\xc2\xa3' + o = '3_o_ascii' + self.test_version_manifest(oc, vc, o) + + def test_version_manifest_utf8_object(self): + oc = '4_oc_ascii' + vc = '4_vc_ascii' + o = '4_o_non_ascii\xc2\xa3' + self.test_version_manifest(oc, vc, o) + + def test_version_manifest_utf8_version_container_utf_object(self): + oc = '5_oc_ascii' + vc = '5_vc_non_ascii\xc2\xa3' + o = '5_o_non_ascii\xc2\xa3' + self.test_version_manifest(oc, vc, o) + + def test_version_manifest_utf8_container_utf_object(self): + oc = '6_oc_non_ascii\xc2\xa3' + vc = '6_vc_ascii' + o = '6_o_non_ascii\xc2\xa3' + self.test_version_manifest(oc, vc, o) + + def test_conditional_range_get(self): + (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis) = \ + _test_sockets + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + + # make a container + fd = sock.makefile() + fd.write('PUT /v1/a/con HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n' + 'Content-Length: 0\r\n\r\n') + fd.flush() + exp = 'HTTP/1.1 201' + headers = readuntil2crlfs(fd) + self.assertEquals(headers[:len(exp)], exp) + + # put an object in it + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/con/o HTTP/1.1\r\n' + 'Host: localhost\r\n' + 'Connection: close\r\n' + 'X-Storage-Token: t\r\n' + 'Content-Length: 10\r\n' + 'Content-Type: text/plain\r\n' + '\r\n' + 'abcdefghij\r\n') + fd.flush() + exp = 'HTTP/1.1 201' + headers = readuntil2crlfs(fd) + self.assertEquals(headers[:len(exp)], exp) + + # request with both If-None-Match and Range + etag = md5("abcdefghij").hexdigest() + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/con/o HTTP/1.1\r\n' + + 'Host: localhost\r\n' + + 'Connection: close\r\n' + + 'X-Storage-Token: t\r\n' + + 'If-None-Match: "' + etag + '"\r\n' + + 'Range: bytes=3-8\r\n' + + '\r\n') + fd.flush() + exp = 'HTTP/1.1 304' + headers = readuntil2crlfs(fd) + self.assertEquals(headers[:len(exp)], exp) + def test_chunked_put_lobjects_with_nonzero_size_manifest_file(self): raise SkipTest("Not until we support pure object requests") # Create a container for our segmented/manifest object testing @@ -4408,7 +4560,6 @@ class TestObjectController(unittest.TestCase): self.assertTrue('X-Delete-At in past' in resp.body) def test_leak_1(self): - global _request_instances prolis = _test_sockets[0] prosrv = _test_servers[0] obj_len = prosrv.client_chunk_size * 2 @@ -4426,8 +4577,11 @@ class 
TestObjectController(unittest.TestCase): headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' self.assertEqual(headers[:len(exp)], exp) - # Remember Request instance count - before_request_instances = _request_instances + # Remember Request instance count, make sure the GC is run for pythons + # without reference counting. + for i in xrange(4): + gc.collect() + before_request_instances = len(_request_instances) # GET test file, but disconnect early sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -4443,7 +4597,10 @@ class TestObjectController(unittest.TestCase): fd.read(1) fd.close() sock.close() - self.assertEquals(before_request_instances, _request_instances) + # Make sure the GC is run again for pythons without reference counting + for i in xrange(4): + gc.collect() + self.assertEquals(before_request_instances, len(_request_instances)) def test_OPTIONS(self): with save_globals(): @@ -4630,7 +4787,8 @@ class TestObjectController(unittest.TestCase): seen_headers = self._gather_x_container_headers( controller.PUT, req, 200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT - self.assertEqual(seen_headers, [ + self.assertEqual( + seen_headers, [ {'X-Container-Host': '10.0.0.0:1000', 'X-Container-Partition': '1', 'X-Container-Device': 'sda'}, @@ -4651,7 +4809,8 @@ class TestObjectController(unittest.TestCase): controller.PUT, req, 200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT - self.assertEqual(seen_headers, [ + self.assertEqual( + seen_headers, [ {'X-Container-Host': '10.0.0.0:1000', 'X-Container-Partition': '1', 'X-Container-Device': 'sda'}, @@ -4672,7 +4831,8 @@ class TestObjectController(unittest.TestCase): controller.PUT, req, 200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT - self.assertEqual(seen_headers, [ + self.assertEqual( + seen_headers, [ {'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003', 'X-Container-Partition': '1', 'X-Container-Device': 'sda,sdd'}, @@ -4694,7 +4854,8 @@ class TestObjectController(unittest.TestCase): controller.POST, req, 200, 200, 200, 200, 200) # HEAD HEAD POST POST POST - self.assertEqual(seen_headers, [ + self.assertEqual( + seen_headers, [ {'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003', 'X-Container-Partition': '1', 'X-Container-Device': 'sda,sdd'}, @@ -4859,8 +5020,9 @@ class TestContainerController(unittest.TestCase): self.assertEquals(res.headers['x-works'], 'yes') if c_expected: self.assertTrue('swift.container/a/c' in res.environ) - self.assertEquals(res.environ['swift.container/a/c']['status'], - c_expected) + self.assertEquals( + res.environ['swift.container/a/c']['status'], + c_expected) else: self.assertTrue('swift.container/a/c' not in res.environ) if a_expected: @@ -4882,8 +5044,9 @@ class TestContainerController(unittest.TestCase): self.assertEquals(res.headers['x-works'], 'yes') if c_expected: self.assertTrue('swift.container/a/c' in res.environ) - self.assertEquals(res.environ['swift.container/a/c']['status'], - c_expected) + self.assertEquals( + res.environ['swift.container/a/c']['status'], + c_expected) else: self.assertTrue('swift.container/a/c' not in res.environ) if a_expected: @@ -4909,7 +5072,8 @@ class TestContainerController(unittest.TestCase): # In all the following tests cache 404 for account # return 404 (as account is not found) and dont cache container test_status_map((404, 404, 404), 404, None, 404) - self.app.account_autocreate = True # This should make no difference + # This should make no difference + self.app.account_autocreate = True test_status_map((404, 404, 404), 404, None, 404) def 
test_PUT(self): @@ -4934,41 +5098,41 @@ class TestContainerController(unittest.TestCase): self.assertFalse(self.app.account_autocreate) test_status_map((404, 404, 404), 404, missing_container=True) self.app.account_autocreate = True - #fail to retrieve account info + # fail to retrieve account info test_status_map( - (503, 503, 503), # account_info fails on 503 - 404, missing_container=True) + (503, 503, 503), # account_info fails on 503 + 404, missing_container=True) # account fail after creation test_status_map( - (404, 404, 404, # account_info fails on 404 - 201, 201, 201, # PUT account - 404, 404, 404), # account_info fail - 404, missing_container=True) + (404, 404, 404, # account_info fails on 404 + 201, 201, 201, # PUT account + 404, 404, 404), # account_info fail + 404, missing_container=True) test_status_map( - (503, 503, 404, # account_info fails on 404 - 503, 503, 503, # PUT account - 503, 503, 404), # account_info fail - 404, missing_container=True) - #put fails + (503, 503, 404, # account_info fails on 404 + 503, 503, 503, # PUT account + 503, 503, 404), # account_info fail + 404, missing_container=True) + # put fails test_status_map( - (404, 404, 404, # account_info fails on 404 - 201, 201, 201, # PUT account - 200, # account_info success - 503, 503, 201), # put container fail - 503, missing_container=True) + (404, 404, 404, # account_info fails on 404 + 201, 201, 201, # PUT account + 200, # account_info success + 503, 503, 201), # put container fail + 503, missing_container=True) # all goes according to plan test_status_map( - (404, 404, 404, # account_info fails on 404 - 201, 201, 201, # PUT account - 200, # account_info success - 201, 201, 201), # put container success - 201, missing_container=True) + (404, 404, 404, # account_info fails on 404 + 201, 201, 201, # PUT account + 200, # account_info success + 201, 201, 201), # put container success + 201, missing_container=True) test_status_map( - (503, 404, 404, # account_info fails on 404 - 503, 201, 201, # PUT account - 503, 200, # account_info success - 503, 201, 201), # put container success - 201, missing_container=True) + (503, 404, 404, # account_info fails on 404 + 503, 201, 201, # PUT account + 503, 200, # account_info success + 503, 201, 201), # put container success + 201, missing_container=True) def test_POST(self): with save_globals(): @@ -5382,7 +5546,8 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/a/c') self.app.update_request(req) res = controller.GET(req) - self.assertEquals(res.environ['swift.container/a/c']['status'], 204) + self.assertEquals( + res.environ['swift.container/a/c']['status'], 204) self.assertEquals(res.content_length, 0) self.assertTrue('transfer-encoding' not in res.headers) @@ -5586,7 +5751,7 @@ class TestContainerController(unittest.TestCase): self.assertEquals(expected_exposed, exposed) def _gather_x_account_headers(self, controller_call, req, *connect_args, - **kwargs): + **kwargs): seen_headers = [] to_capture = ('X-Account-Partition', 'X-Account-Host', 'X-Account-Device') @@ -5736,9 +5901,10 @@ class TestAccountController(unittest.TestCase): # Access-Control-Request-Method headers) self.app.allow_account_management = False controller = proxy_server.AccountController(self.app, 'account') - req = Request.blank('/account', {'REQUEST_METHOD': 'OPTIONS'}, + req = Request.blank( + '/account', {'REQUEST_METHOD': 'OPTIONS'}, headers={'Origin': 'http://foo.com', - 'Access-Control-Request-Method': 'GET'}) + 'Access-Control-Request-Method': 'GET'}) req.content_length 
= 0 resp = controller.OPTIONS(req) self.assertEquals(200, resp.status_int) @@ -5753,7 +5919,6 @@ class TestAccountController(unittest.TestCase): req.content_length = 0 resp = controller.OPTIONS(req) self.assertEquals(200, resp.status_int) - print resp.headers['Allow'] for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split(): self.assertTrue( verb in resp.headers['Allow']) @@ -5847,16 +6012,19 @@ class TestAccountController(unittest.TestCase): # first test with autocreate being False self.assertFalse(self.app.account_autocreate) self.assert_status_map(controller.POST, - (404, 404, 404), 404) + (404, 404, 404), 404) # next turn it on and test account being created than updated controller.app.account_autocreate = True - self.assert_status_map(controller.POST, + self.assert_status_map( + controller.POST, (404, 404, 404, 202, 202, 202, 201, 201, 201), 201) # account_info PUT account POST account - self.assert_status_map(controller.POST, - (404, 404, 503, 201, 201, 503, 204, 204, 504), 204) + self.assert_status_map( + controller.POST, + (404, 404, 503, 201, 201, 503, 204, 204, 504), 204) # what if create fails - self.assert_status_map(controller.POST, + self.assert_status_map( + controller.POST, (404, 404, 404, 403, 403, 403, 400, 400, 400), 400) def test_connection_refused(self): @@ -6121,20 +6289,21 @@ class TestAccountControllerFakeGetResponse(unittest.TestCase): have to match the responses for empty accounts that really exist. """ def setUp(self): - self.app = proxy_server.Application(None, FakeMemcache(), + conf = {'account_autocreate': 'yes'} + self.app = proxy_server.Application(conf, FakeMemcache(), account_ring=FakeRing(), container_ring=FakeRing(), object_ring=FakeRing) self.app.memcache = FakeMemcacheReturnsNone() - self.controller = proxy_server.AccountController(self.app, 'acc') - self.controller.app.account_autocreate = True def test_GET_autocreate_accept_json(self): with save_globals(): - set_http_connect(404) # however many backends we ask, they all 404 - req = Request.blank('/a', headers={'Accept': 'application/json'}) - - resp = self.controller.GET(req) + set_http_connect(*([404] * 100)) # nonexistent: all backends 404 + req = Request.blank( + '/v1/a', headers={'Accept': 'application/json'}, + environ={'REQUEST_METHOD': 'GET', + 'PATH_INFO': '/v1/a'}) + resp = req.get_response(self.app) self.assertEqual(200, resp.status_int) self.assertEqual('application/json; charset=utf-8', resp.headers['Content-Type']) @@ -6142,10 +6311,12 @@ class TestAccountControllerFakeGetResponse(unittest.TestCase): def test_GET_autocreate_format_json(self): with save_globals(): - set_http_connect(404) # however many backends we ask, they all 404 - req = Request.blank('/a?format=json') - - resp = self.controller.GET(req) + set_http_connect(*([404] * 100)) # nonexistent: all backends 404 + req = Request.blank('/v1/a?format=json', + environ={'REQUEST_METHOD': 'GET', + 'PATH_INFO': '/v1/a', + 'QUERY_STRING': 'format=json'}) + resp = req.get_response(self.app) self.assertEqual(200, resp.status_int) self.assertEqual('application/json; charset=utf-8', resp.headers['Content-Type']) @@ -6153,30 +6324,54 @@ class TestAccountControllerFakeGetResponse(unittest.TestCase): def test_GET_autocreate_accept_xml(self): with save_globals(): - set_http_connect(404) # however many backends we ask, they all 404 - req = Request.blank('/a', headers={"Accept": "text/xml"}) + set_http_connect(*([404] * 100)) # nonexistent: all backends 404 + req = Request.blank('/v1/a', headers={"Accept": "text/xml"}, + environ={'REQUEST_METHOD': 
'GET', + 'PATH_INFO': '/v1/a'}) - resp = self.controller.GET(req) + resp = req.get_response(self.app) self.assertEqual(200, resp.status_int) + self.assertEqual('text/xml; charset=utf-8', resp.headers['Content-Type']) empty_xml_listing = ('\n' - '\n') + '\n') self.assertEqual(empty_xml_listing, resp.body) def test_GET_autocreate_format_xml(self): with save_globals(): - set_http_connect(404) # however many backends we ask, they all 404 - req = Request.blank('/a?format=xml') - - resp = self.controller.GET(req) + set_http_connect(*([404] * 100)) # nonexistent: all backends 404 + req = Request.blank('/v1/a?format=xml', + environ={'REQUEST_METHOD': 'GET', + 'PATH_INFO': '/v1/a', + 'QUERY_STRING': 'format=xml'}) + resp = req.get_response(self.app) self.assertEqual(200, resp.status_int) self.assertEqual('application/xml; charset=utf-8', resp.headers['Content-Type']) empty_xml_listing = ('\n' - '\n') + '\n') self.assertEqual(empty_xml_listing, resp.body) + def test_GET_autocreate_accept_unknown(self): + with save_globals(): + set_http_connect(*([404] * 100)) # nonexistent: all backends 404 + req = Request.blank('/v1/a', headers={"Accept": "mystery/meat"}, + environ={'REQUEST_METHOD': 'GET', + 'PATH_INFO': '/v1/a'}) + resp = req.get_response(self.app) + self.assertEqual(406, resp.status_int) + + def test_GET_autocreate_format_invalid_utf8(self): + with save_globals(): + set_http_connect(*([404] * 100)) # nonexistent: all backends 404 + req = Request.blank('/v1/a?format=\xff\xfe', + environ={'REQUEST_METHOD': 'GET', + 'PATH_INFO': '/v1/a', + 'QUERY_STRING': 'format=\xff\xfe'}) + resp = req.get_response(self.app) + self.assertEqual(400, resp.status_int) + class FakeObjectController(object): @@ -6350,7 +6545,8 @@ class TestSegmentedIterable(unittest.TestCase): segit.ratelimit_index = 0 segit.listing.next() segit._load_next_segment() - self.assertEquals(self.controller.GETorHEAD_base_args[-1][4], '/a/lc/o2') + self.assertEquals( + self.controller.GETorHEAD_base_args[-1][4], '/a/lc/o2') data = ''.join(segit.segment_iter) self.assertEquals(data, '22') @@ -6362,9 +6558,11 @@ class TestSegmentedIterable(unittest.TestCase): segit.listing.next() segit.seek = 1 segit._load_next_segment() - self.assertEquals(self.controller.GETorHEAD_base_args[-1][4], '/a/lc/o2') - self.assertEquals(str(self.controller.GETorHEAD_base_args[-1][0].range), - 'bytes=1-') + self.assertEquals( + self.controller.GETorHEAD_base_args[-1][4], '/a/lc/o2') + self.assertEquals( + str(self.controller.GETorHEAD_base_args[-1][0].range), + 'bytes=1-') data = ''.join(segit.segment_iter) self.assertEquals(data, '2') diff --git a/tox.ini b/tox.ini index 61ecb9b..f1deb7e 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ setenv = VIRTUAL_ENV={envdir} NOSE_OPENSTACK_SHOW_ELAPSED=1 NOSE_OPENSTACK_STDOUT=1 deps = - https://launchpad.net/swift/havana/1.9.1/+download/swift-1.9.1.tar.gz + https://launchpad.net/swift/havana/1.10.0/+download/swift-1.10.0.tar.gz --download-cache={homedir}/.pipcache -r{toxinidir}/tools/test-requires changedir = {toxinidir}/test/unit -- cgit