author    Aravinda VK <avishwan@redhat.com>    2015-04-30 12:28:17 +0530
committer Vijay Bellur <vbellur@redhat.com>    2015-05-08 21:59:10 -0700
commit    e88837ed0ff68093912c2b8e996c5851c53674ca (patch)
tree      854c30520331099685b29e28e8b8dd15fa357d3a /tools/glusterfind/src/main.py
parent    2676c402bc47ee89b763393e496a013e82d76e54 (diff)
tools/glusterfind: GFID to Path conversion using Changelog
Records fop information collected from Changelogs in an sqlite database. This is only a working database and is not required after processing: during post-processing the output file is generated by reading these database files. This applies only to an incremental run. When a changelog is parsed, all the details are saved in the db. GFID to Path conversion is done using the Changelog data for those files whose information is available there. For the remaining failures it tries to convert GFID to Path using the pgfid, and if that is not found, GFID to Path conversion falls back to find.

BUG: 1201284
Change-Id: I53f168860dae15a0149004835e67f97aebd822be
Signed-off-by: Aravinda VK <avishwan@redhat.com>
Reviewed-on: http://review.gluster.org/10463
Reviewed-by: Kotresh HR <khiremat@redhat.com>
Tested-by: Gluster Build System <jenkins@build.gluster.com>
Reviewed-by: Vijay Bellur <vbellur@redhat.com>
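The commit message describes a three-step GFID to Path fallback: first the path recorded in the Changelog sqlite working db, then a pgfid-based lookup, and finally a brick-wide find. The sketch below only illustrates that ordering; the helper names, the table name, and the .glusterfs hardlink layout used here are assumptions for illustration, not code from this patch.

# Hypothetical sketch of the GFID -> Path fallback order described above.
# None of these helpers exist in glusterfind under these names.
import subprocess


def path_from_db(gfid, db_conn):
    # 1. Preferred: path already recorded in the Changelog sqlite working db
    #    (hypothetical table/column names).
    cur = db_conn.execute("SELECT path FROM gfidpath WHERE gfid = ?", (gfid,))
    row = cur.fetchone()
    return row[0] if row else None


def path_from_pgfid(gfid, brick):
    # 2. Fallback: resolve via the parent GFID (pgfid); details omitted here.
    return None


def path_from_find(gfid, brick):
    # 3. Last resort: crawl the brick with find, matching the backend
    #    .glusterfs/<aa>/<bb>/<gfid> hardlink (assumed layout). Expensive,
    #    so only used when the cheaper lookups fail.
    backend = "%s/.glusterfs/%s/%s/%s" % (brick, gfid[0:2], gfid[2:4], gfid)
    out = subprocess.run(["find", brick, "-samefile", backend],
                         capture_output=True, text=True)
    for line in out.stdout.split("\n"):
        if line and ".glusterfs/" not in line:
            return line
    return None


def gfid_to_path(gfid, db_conn, brick):
    # Try the cheap lookups first, fall back to the expensive crawl.
    return (path_from_db(gfid, db_conn)
            or path_from_pgfid(gfid, brick)
            or path_from_find(gfid, brick))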
Diffstat (limited to 'tools/glusterfind/src/main.py')
-rw-r--r--   tools/glusterfind/src/main.py   |   34
1 file changed, 28 insertions(+), 6 deletions(-)
diff --git a/tools/glusterfind/src/main.py b/tools/glusterfind/src/main.py
index 089a3aec3c5..d9936eebde1 100644
--- a/tools/glusterfind/src/main.py
+++ b/tools/glusterfind/src/main.py
@@ -20,9 +20,9 @@ import shutil
from utils import execute, is_host_local, mkdirp, fail
from utils import setup_logger, human_time, handle_rm_error
-from utils import get_changelog_rollover_time, cache_output
+from utils import get_changelog_rollover_time, cache_output, create_file
import conf
-
+from changelogdata import OutputMerger
PROG_DESCRIPTION = """
GlusterFS Incremental API
@@ -235,6 +235,9 @@ def _get_args():
help="Regenerate outfile, discard the outfile "
"generated from last pre command",
action="store_true")
+ parser_pre.add_argument("-N", "--only-namespace-changes",
+ help="List only namespace changes",
+ action="store_true")
# post <SESSION> <VOLUME>
parser_post = subparsers.add_parser('post')
@@ -377,10 +380,29 @@ def mode_pre(session_dir, args):
run_cmd_nodes("pre", args, start=start)
# Merger
- cmd = ["sort", "-u"] + node_outfiles + ["-o", args.outfile]
- execute(cmd,
- exit_msg="Failed to merge output files "
- "collected from nodes", logger=logger)
+ if args.full:
+ cmd = ["sort", "-u"] + node_outfiles + ["-o", args.outfile]
+ execute(cmd,
+ exit_msg="Failed to merge output files "
+ "collected from nodes", logger=logger)
+ else:
+ # Read each Changelogs db and generate finaldb
+ create_file(args.outfile, exit_on_err=True, logger=logger)
+ outfilemerger = OutputMerger(args.outfile + ".db", node_outfiles)
+
+ with open(args.outfile, "a") as f:
+ for row in outfilemerger.get():
+ # Multiple paths in case of Hardlinks
+ paths = row[1].split(",")
+ for p in paths:
+ if p == "":
+ continue
+ f.write("%s %s %s\n" % (row[0], p, row[2]))
+
+ try:
+ os.remove(args.outfile + ".db")
+ except (IOError, OSError):
+ pass
run_cmd_nodes("cleanup", args)