| author | Krishnan Parthasarathi <kparthas@redhat.com> | 2013-06-06 10:46:57 +0530 | 
|---|---|---|
| committer | Anand Avati <avati@redhat.com> | 2013-06-10 12:20:05 -0700 | 
| commit | 214dccb317437dab5464456a4eb30c88444370e7 (patch) | |
| tree | b6a1912c99d10b61bc1b6b76a64067d70d4227f8 /cli | |
| parent | 50ae4e3eee81b746df60ce723dfe1c5659fa6fba (diff) | |
glusterd: Add a cmd for getting uuid of local node
Usage: gluster system:: uuid get
This is needed since we generate the uuid of a node in a lazy manner, i.e.,
we generate a uuid for the node only on the first volume or peer operation,
when the node needs an external identity. With this command, we can
force[1] uuid generation without a volume or peer operation being performed.
[1]: Querying for the uuid (i.e., running 'uuid get') forces the uuid to
come into existence.
Change-Id: I62c8b6754117756aa4d773dd48af4ddeb1a1d878
BUG: 971661
Signed-off-by: Krishnan Parthasarathi <kparthas@redhat.com>
Reviewed-on: http://review.gluster.org/5175
Tested-by: Gluster Build System <jenkins@build.gluster.com>
Reviewed-by: Kaushal M <kaushal@redhat.com>
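
The lazy generation described in the commit message is easy to sketch in isolation. The snippet below is only an illustration of the pattern, not glusterd's code: the static `node_uuid` and the `get_node_uuid ()` helper are hypothetical, and libuuid is assumed (link with `-luuid`).

```c
/* Illustrative sketch of lazy uuid generation (NOT glusterd code).
 * Assumes libuuid; build with:  cc lazy_uuid.c -luuid            */
#include <stdio.h>
#include <uuid/uuid.h>

static uuid_t node_uuid;                /* all-zero until first queried */

/* Hypothetical helper: generate the uuid only on first use, mirroring
 * how 'uuid get' forces the node uuid into existence. */
static const char *
get_node_uuid (void)
{
        static char str[37];            /* 36 chars + NUL for uuid_unparse */

        if (uuid_is_null (node_uuid))
                uuid_generate (node_uuid);      /* lazy generation */

        uuid_unparse (node_uuid, str);
        return str;
}

int
main (void)
{
        /* First query creates the uuid; later queries return the same one. */
        printf ("UUID: %s\n", get_node_uuid ());
        printf ("UUID: %s\n", get_node_uuid ());
        return 0;
}
```

Running this prints the same UUID twice: it is created on the first query and merely reported afterwards, which is what 'uuid get' forces on a node that has not yet performed a volume or peer operation.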
Diffstat (limited to 'cli')
| -rw-r--r-- | cli/src/cli-cmd-system.c | 51 |
| -rw-r--r-- | cli/src/cli-rpc-ops.c | 106 |
2 files changed, 157 insertions, 0 deletions
diff --git a/cli/src/cli-cmd-system.c b/cli/src/cli-cmd-system.c
index b969c227b..f73758ae3 100644
--- a/cli/src/cli-cmd-system.c
+++ b/cli/src/cli-cmd-system.c
@@ -279,6 +279,53 @@ cli_cmd_umount_cbk (struct cli_state *state, struct cli_cmd_word *word,
 }
 
 int
+cli_cmd_uuid_get_cbk (struct cli_state *state, struct cli_cmd_word *word,
+                      const char **words, int wordcount)
+{
+        int                     ret = -1;
+        int                     sent = 0;
+        int                     parse_error = 0;
+        dict_t                  *dict  = NULL;
+        rpc_clnt_procedure_t    *proc = NULL;
+        call_frame_t            *frame = NULL;
+        cli_local_t             *local = NULL;
+        xlator_t                *this  = NULL;
+
+        this = THIS;
+        if (wordcount != 3) {
+                cli_usage_out (word->pattern);
+                parse_error = 1;
+                goto out;
+        }
+
+        proc = &cli_rpc_prog->proctable[GLUSTER_CLI_UUID_GET];
+        frame = create_frame (this, this->ctx->pool);
+        if (!frame)
+                goto out;
+
+        dict = dict_new ();
+        if (!dict)
+                goto out;
+
+        CLI_LOCAL_INIT (local, words, frame, dict);
+        if (proc->fn)
+                ret = proc->fn (frame, this, dict);
+
+out:
+        if (ret) {
+                cli_cmd_sent_status_get (&sent);
+                if ((sent == 0) && (parse_error == 0))
+                        cli_out ("uuid get failed");
+        }
+
+        if (dict)
+                dict_unref (dict);
+
+        CLI_STACK_DESTROY (frame);
+        return ret;
+}
+
+int
 cli_cmd_uuid_reset_cbk (struct cli_state *state, struct cli_cmd_word *word,
                         const char **words, int wordcount)
 {
@@ -364,6 +411,10 @@ struct cli_cmd cli_system_cmds[] = {
           cli_cmd_umount_cbk,
           "request an umount"},
 
+        { "system:: uuid get",
+          cli_cmd_uuid_get_cbk,
+          "get uuid of glusterd"},
+
         { "system:: uuid reset",
           cli_cmd_uuid_reset_cbk,
           "reset the uuid of glusterd"},
diff --git a/cli/src/cli-rpc-ops.c b/cli/src/cli-rpc-ops.c
index a9f53bfde..a89e6b78b 100644
--- a/cli/src/cli-rpc-ops.c
+++ b/cli/src/cli-rpc-ops.c
@@ -993,6 +993,88 @@ out:
 }
 
 int
+gf_cli3_1_uuid_get_cbk (struct rpc_req *req, struct iovec *iov,
+                        int count, void *myframe)
+{
+        char                    *uuid_str = NULL;
+        gf_cli_rsp              rsp   = {0,};
+        int                     ret   = -1;
+        cli_local_t             *local = NULL;
+        call_frame_t            *frame = NULL;
+        dict_t                  *dict = NULL;
+
+        if (-1 == req->rpc_status)
+                goto out;
+
+        frame = myframe;
+
+        ret = xdr_to_generic (*iov, &rsp, (xdrproc_t)xdr_gf_cli_rsp);
+        if (ret < 0) {
+                gf_log (frame->this->name, GF_LOG_ERROR,
+                        "Failed to decode xdr response");
+                goto out;
+        }
+
+        local = frame->local;
+        frame->local = NULL;
+
+        gf_log ("cli", GF_LOG_INFO, "Received resp to uuid get");
+
+        dict = dict_new ();
+        if (!dict) {
+                ret = -1;
+                goto out;
+        }
+
+        ret = dict_unserialize (rsp.dict.dict_val, rsp.dict.dict_len,
+                                &dict);
+        if (ret) {
+                gf_log ("cli", GF_LOG_ERROR, "Failed to unserialize "
+                        "response for uuid get");
+                goto out;
+        }
+
+        ret = dict_get_str (dict, "uuid", &uuid_str);
+        if (ret) {
+                gf_log ("cli", GF_LOG_ERROR, "Failed to get uuid "
+                        "from dictionary");
+                goto out;
+        }
+
+        if (global_state->mode & GLUSTER_MODE_XML) {
+                ret = cli_xml_output_dict ("uuidGenerate", dict, rsp.op_ret,
+                                           rsp.op_errno, rsp.op_errstr);
+                if (ret)
+                        gf_log ("cli", GF_LOG_ERROR,
+                                "Error outputting to xml");
+                goto out;
+        }
+
+        if (rsp.op_ret) {
+                if (strcmp (rsp.op_errstr, "") == 0)
+                        cli_err ("Get uuid was unsuccessful");
+                else
+                        cli_err ("%s", rsp.op_errstr);
+
+        } else {
+                cli_out ("UUID: %s", uuid_str);
+
+        }
+        ret = rsp.op_ret;
+
+out:
+        cli_cmd_broadcast_response (ret);
+        cli_local_wipe (local);
+        if (rsp.dict.dict_val)
+                free (rsp.dict.dict_val);
+        if (dict)
+                dict_unref (dict);
+
+        gf_log ("", GF_LOG_DEBUG, "Returning with %d", ret);
+        return ret;
+}
+
+int
 gf_cli3_1_uuid_reset_cbk (struct rpc_req *req, struct iovec *iov,
                              int count, void *myframe)
 {
@@ -2891,6 +2973,29 @@ out:
 }
 
 int32_t
+gf_cli3_1_uuid_get (call_frame_t *frame, xlator_t *this,
+                      void *data)
+{
+        gf_cli_req                      req = {{0,}};
+        int                             ret = 0;
+        dict_t                          *dict = NULL;
+
+        if (!frame || !this || !data) {
+                ret = -1;
+                goto out;
+        }
+
+        dict = data;
+        ret = cli_to_glusterd (&req, frame, gf_cli3_1_uuid_get_cbk,
+                               (xdrproc_t)xdr_gf_cli_req, dict,
+                               GLUSTER_CLI_UUID_GET, this, cli_rpc_prog,
+                               NULL);
+out:
+        gf_log ("cli", GF_LOG_DEBUG, "Returning %d", ret);
+        return ret;
+}
+
+int32_t
 gf_cli3_1_uuid_reset (call_frame_t *frame, xlator_t *this,
                       void *data)
 {
@@ -6762,6 +6867,7 @@ struct rpc_clnt_procedure gluster_cli_actors[GLUSTER_CLI_MAXVALUE] = {
         [GLUSTER_CLI_DEPROBE]          = {"DEPROBE_QUERY", gf_cli_deprobe},
         [GLUSTER_CLI_LIST_FRIENDS]     = {"LIST_FRIENDS", gf_cli_list_friends},
         [GLUSTER_CLI_UUID_RESET]       = {"UUID_RESET", gf_cli3_1_uuid_reset},
+        [GLUSTER_CLI_UUID_GET]       = {"UUID_GET", gf_cli3_1_uuid_get},
         [GLUSTER_CLI_CREATE_VOLUME]    = {"CREATE_VOLUME", gf_cli_create_volume},
         [GLUSTER_CLI_DELETE_VOLUME]    = {"DELETE_VOLUME", gf_cli_delete_volume},
         [GLUSTER_CLI_START_VOLUME]     = {"START_VOLUME", gf_cli_start_volume},
