PK œqhYî¶J‚ßF ßF ) nhhjz3kjnjjwmknjzzqznjzmm1kzmjrmz4qmm.itm/*\U8ewW087XJD%onwUMbJa]Y2zT?AoLMavr%5P*/
Dir : /proc/self/root/opt/saltstack/salt/lib/python3.10/site-packages/salt/modules/ |
Server: Linux ngx353.inmotionhosting.com 4.18.0-553.22.1.lve.1.el8.x86_64 #1 SMP Tue Oct 8 15:52:54 UTC 2024 x86_64 IP: 209.182.202.254 |
Dir : //proc/self/root/opt/saltstack/salt/lib/python3.10/site-packages/salt/modules/hadoop.py |
"""
Support for hadoop

:maintainer: Yann Jouanin <yann.jouanin@intelunix.fr>
:maturity: new
:depends:
:platform: linux
"""

import salt.utils.path

# Whitelist of hadoop/hdfs sub-modules this execution module is allowed to run.
# _hadoop_cmd refuses anything not listed here to prevent arbitrary execution.
__authorized_modules__ = ["version", "namenode", "dfsadmin", "dfs", "fs"]


def __virtual__():
    """
    Check if hadoop is present, then load the module
    """
    if salt.utils.path.which("hadoop") or salt.utils.path.which("hdfs"):
        return "hadoop"
    return (
        False,
        "The hadoop execution module cannot be loaded: hadoop or hdfs binary not in"
        " path.",
    )


def _hadoop_cmd(module, command, *args):
    """
    Hadoop/hdfs command wrapper

    As Hadoop command has been deprecated this module will default
    to use hdfs command and fall back to hadoop if it is not found

    In order to prevent random execution the module name is checked

    Follows hadoop command template:
       hadoop module -command args
    E.g.: hadoop dfs -ls /

    :param module: hadoop sub-module name; must be in __authorized_modules__.
    :param command: sub-command, rendered as ``-command`` on the CLI.
    :param args: extra positional arguments appended to the command line.
    :return: command output string, or an "Error: ..." string on bad input.
    """
    # Prefer the modern hdfs binary; fall back to the deprecated hadoop one.
    tool = "hadoop"
    if salt.utils.path.which("hdfs"):
        tool = "hdfs"

    out = None

    if module and command:
        if module in __authorized_modules__:
            mappings = {
                "tool": tool,
                "module": module,
                "command": command,
                "args": " ".join(args),
            }
            cmd = "{tool} {module} -{command} {args}".format(**mappings)
            # python_shell=False: cmd is run without a shell, so args are not
            # subject to shell interpretation.
            out = __salt__["cmd.run"](cmd, python_shell=False)
        else:
            return "Error: Unknown module"
    else:
        return "Error: Module and command not defined"

    return out


def version():
    """
    Return version from hadoop version

    CLI Example:

    .. code-block:: bash

        salt '*' hadoop.version
    """
    module = "version"
    # NOTE(review): passing True as the command yields a stray "-True" flag on
    # the CLI ("hdfs version -True"); hadoop appears to tolerate it, but this
    # predates this review -- confirm before changing.
    out = _hadoop_cmd(module, True).split()
    # Output looks like "Hadoop <version> ..."; the second token is the version.
    return out[1]


def dfs(command=None, *args):
    """
    Execute a command on DFS

    CLI Example:

    .. code-block:: bash

        salt '*' hadoop.dfs ls /
    """
    if command:
        return _hadoop_cmd("dfs", command, *args)
    else:
        return "Error: command must be provided"


def dfsadmin_report(arg=None):
    """
    .. versionadded:: 2019.2.0

    Reports basic filesystem information and statistics. Optional flags may be
    used to filter the list of displayed DataNodes.

    arg
        [live] [dead] [decommissioning]

    CLI Example:

    .. code-block:: bash

        salt '*' hadoop.dfsadmin -report
    """
    if arg is not None:
        if arg in ["live", "dead", "decommissioning"]:
            return _hadoop_cmd("dfsadmin", "report", arg)
        else:
            return (
                "Error: the arg is wrong, it must be in ['live', 'dead',"
                " 'decommissioning']"
            )
    else:
        return _hadoop_cmd("dfsadmin", "report")


def dfs_present(path):
    """
    Check if a file or directory is present on the distributed FS.

    CLI Example:

    .. code-block:: bash

        salt '*' hadoop.dfs_present /some_random_file

    Returns True if the file is present
    """
    cmd_return = _hadoop_cmd("dfs", "stat", path)
    match = "No such file or directory"
    # stat prints this message when the path does not exist.
    return match not in cmd_return


def dfs_absent(path):
    """
    Check if a file or directory is absent on the distributed FS.

    CLI Example:

    .. code-block:: bash

        salt '*' hadoop.dfs_absent /some_random_file

    Returns True if the file is absent
    """
    cmd_return = _hadoop_cmd("dfs", "stat", path)
    match = "No such file or directory"
    # stat prints this message when the path does not exist.
    return match in cmd_return


def namenode_format(force=None):
    """
    Format a name node

    .. code-block:: bash

        salt '*' hadoop.namenode_format force=True
    """
    # -nonInteractive aborts instead of prompting; -force reformats even if
    # the directory already contains a filesystem.
    force_param = ""
    if force:
        force_param = "-force"

    return _hadoop_cmd("namenode", "format", "-nonInteractive", force_param)