diff options
Diffstat (limited to 'gluster/swift/common')
-rw-r--r--  gluster/swift/common/fs_utils.py | 10
-rw-r--r--  gluster/swift/common/utils.py | 60
2 files changed, 40 insertions(+), 30 deletions(-)
diff --git a/gluster/swift/common/fs_utils.py b/gluster/swift/common/fs_utils.py index afc0cfe..8b26fd0 100644 --- a/gluster/swift/common/fs_utils.py +++ b/gluster/swift/common/fs_utils.py @@ -32,7 +32,7 @@ class Fake_file(object): return 0 def read(self, count): - return 0 + return None def fileno(self): return -1 @@ -265,6 +265,14 @@ def do_fsync(fd): err.errno, '%s, os.fsync("%s")' % (err.strerror, fd)) +def do_fdatasync(fd): + try: + os.fdatasync(fd) + except OSError as err: + raise GlusterFileSystemOSError( + err.errno, '%s, os.fdatasync("%s")' % (err.strerror, fd)) + + def mkdirs(path): """ Ensures the path is a directory or makes it if not. Errors if the path diff --git a/gluster/swift/common/utils.py b/gluster/swift/common/utils.py index 522d307..595a965 100644 --- a/gluster/swift/common/utils.py +++ b/gluster/swift/common/utils.py @@ -23,8 +23,9 @@ from hashlib import md5 from eventlet import sleep import cPickle as pickle from swift.common.utils import normalize_timestamp +from gluster.swift.common.exceptions import GlusterFileSystemIOError from gluster.swift.common.fs_utils import do_rename, do_fsync, os_path, \ - do_stat, do_listdir, do_walk, do_rmdir + do_stat, do_fstat, do_listdir, do_walk, do_rmdir from gluster.swift.common import Glusterfs X_CONTENT_TYPE = 'Content-Type' @@ -55,18 +56,6 @@ PICKLE_PROTOCOL = 2 CHUNK_SIZE = 65536 -class GlusterFileSystemOSError(OSError): - # Having our own class means the name will show up in the stack traces - # recorded in the log files. - pass - - -class GlusterFileSystemIOError(IOError): - # Having our own class means the name will show up in the stack traces - # recorded in the log files. - pass - - def read_metadata(path_or_fd): """ Helper function to read the pickled metadata from a File/Directory. 
@@ -320,6 +309,23 @@ def get_account_details(acc_path): return container_list, container_count +def _read_for_etag(fp): + etag = md5() + while True: + chunk = fp.read(CHUNK_SIZE) + if chunk: + etag.update(chunk) + if len(chunk) >= CHUNK_SIZE: + # It is likely that we have more data to be read from the + # file. Yield the co-routine cooperatively to avoid + # consuming the worker during md5sum() calculations on + # large files. + sleep() + else: + break + return etag.hexdigest() + + def _get_etag(path): """ FIXME: It would be great to have a translator that returns the md5sum() of @@ -328,28 +334,24 @@ def _get_etag(path): Since we don't have that we should yield after each chunk read and computed so that we don't consume the worker thread. """ - etag = md5() - with open(path, 'rb') as fp: - while True: - chunk = fp.read(CHUNK_SIZE) - if chunk: - etag.update(chunk) - if len(chunk) >= CHUNK_SIZE: - # It is likely that we have more data to be read from the - # file. Yield the co-routine cooperatively to avoid - # consuming the worker during md5sum() calculations on - # large files. - sleep() - else: - break - return etag.hexdigest() + if isinstance(path, int): + with os.fdopen(os.dup(path), 'rb') as fp: + etag = _read_for_etag(fp) + os.lseek(path, 0, os.SEEK_SET) + else: + with open(path, 'rb') as fp: + etag = _read_for_etag(fp) + return etag def get_object_metadata(obj_path): """ Return metadata of object. """ - stats = do_stat(obj_path) + if isinstance(obj_path, int): + stats = do_fstat(obj_path) + else: + stats = do_stat(obj_path) if not stats: metadata = {} else: |