org.apache.hadoop.hdfs.server.datanode.DataNode.setStartupOption()方法的使用及代码示例

x33g5p2x  于2022-01-18 转载在 其他  
字(4.7k)|赞(0)|评价(0)|浏览(104)

本文整理了Java中org.apache.hadoop.hdfs.server.datanode.DataNode.setStartupOption()方法的一些代码示例,展示了DataNode.setStartupOption()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。DataNode.setStartupOption()方法的具体详情如下:
包路径:org.apache.hadoop.hdfs.server.datanode.DataNode
类名称:DataNode
方法名:setStartupOption

DataNode.setStartupOption介绍

暂无

代码示例

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs

/**
 * Parses the command line arguments, validates them, and records the chosen
 * startup option in the configuration.
 *
 * At most one startup command may be supplied; the deprecated -r/--rack
 * options are rejected with an error message.
 *
 * @param args command line arguments; may be {@code null} or empty
 * @param conf configuration that receives the parsed {@link StartupOption}
 * @return false if the passed arguments are incorrect
 */
@VisibleForTesting
static boolean parseArguments(String args[], Configuration conf) {
  StartupOption option = StartupOption.REGULAR;
  int consumed = 0;
  if (args != null && args.length > 0) {
    final String firstArg = args[consumed++];
    if ("-r".equalsIgnoreCase(firstArg) || "--rack".equalsIgnoreCase(firstArg)) {
      LOG.error("-r, --rack arguments are not supported anymore. RackID " +
        "resolution is handled by the NameNode.");
      return false;
    }
    if (StartupOption.ROLLBACK.getName().equalsIgnoreCase(firstArg)) {
      option = StartupOption.ROLLBACK;
    } else if (StartupOption.REGULAR.getName().equalsIgnoreCase(firstArg)) {
      option = StartupOption.REGULAR;
    } else {
      return false;
    }
  }
  setStartupOption(conf, option);
  // Any token beyond the single recognized command makes the invocation invalid.
  return args == null || consumed == args.length;
}

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/**
 * Parses the command line arguments, validates them, and records the chosen
 * startup option in the configuration.
 *
 * @param args command line arguments; may be {@code null} or empty
 * @param conf configuration that receives the parsed {@link StartupOption}
 * @return false if the passed arguments are incorrect
 */
private static boolean parseArguments(String args[], 
                   Configuration conf) {
 int argsLen = (args == null) ? 0 : args.length;
 StartupOption startOpt = StartupOption.REGULAR;
 for(int i=0; i < argsLen; i++) {
  String cmd = args[i];
  if ("-r".equalsIgnoreCase(cmd) || "--rack".equalsIgnoreCase(cmd)) {
   LOG.error("-r, --rack arguments are not supported anymore. RackID " +
     "resolution is handled by the NameNode.");
   // Report the failure to the caller instead of calling System.exit(-1):
   // killing the JVM from library parsing code breaks embedding and tests,
   // and the method's contract already signals bad arguments via false.
   // This matches the behavior of later Hadoop versions of this method.
   return false;
  } else if ("-rollback".equalsIgnoreCase(cmd)) {
   startOpt = StartupOption.ROLLBACK;
  } else if ("-regular".equalsIgnoreCase(cmd)) {
   startOpt = StartupOption.REGULAR;
  } else {
   return false;
  }
 }
 setStartupOption(conf, startOpt);
 return true;
}

代码示例来源:origin: io.fabric8/fabric-hadoop

/**
 * Walks the command line tokens, validates each one, and stores the resulting
 * startup option in the configuration.
 *
 * The deprecated -r/--rack options terminate the process with an error.
 *
 * @param args command line arguments; may be {@code null} or empty
 * @param conf configuration that receives the parsed {@link StartupOption}
 * @return false if the passed arguments are incorrect
 */
private static boolean parseArguments(String args[], 
                   Configuration conf) {
 StartupOption parsed = StartupOption.REGULAR;
 final int total = (args == null) ? 0 : args.length;
 for (int idx = 0; idx < total; idx++) {
  final String token = args[idx];
  if ("-r".equalsIgnoreCase(token) || "--rack".equalsIgnoreCase(token)) {
   LOG.error("-r, --rack arguments are not supported anymore. RackID " +
     "resolution is handled by the NameNode.");
   System.exit(-1);
  } else if ("-rollback".equalsIgnoreCase(token)) {
   parsed = StartupOption.ROLLBACK;
  } else if ("-regular".equalsIgnoreCase(token)) {
   parsed = StartupOption.REGULAR;
  } else {
   return false;
  }
 }
 setStartupOption(conf, parsed);
 return true;
}

代码示例来源:origin: com.facebook.hadoop/hadoop-core

return false;
setStartupOption(conf, startOpt);
return true;

代码示例来源:origin: ch.cern.hadoop/hadoop-hdfs

/**
 * Parses and verifies the command line arguments, then records the selected
 * startup option in the configuration.
 *
 * Exactly one startup command is accepted; the obsolete -r/--rack flags are
 * rejected with an error message.
 *
 * @param args command line arguments; may be {@code null} or empty
 * @param conf configuration that receives the parsed {@link StartupOption}
 * @return false if the passed arguments are incorrect
 */
@VisibleForTesting
static boolean parseArguments(String args[], Configuration conf) {
  // No arguments at all: default to a regular start.
  if (args == null || args.length == 0) {
    setStartupOption(conf, StartupOption.REGULAR);
    return true;
  }
  final String cmd = args[0];
  if ("-r".equalsIgnoreCase(cmd) || "--rack".equalsIgnoreCase(cmd)) {
    LOG.error("-r, --rack arguments are not supported anymore. RackID " +
      "resolution is handled by the NameNode.");
    return false;
  }
  final StartupOption chosen;
  if (StartupOption.ROLLBACK.getName().equalsIgnoreCase(cmd)) {
    chosen = StartupOption.ROLLBACK;
  } else if (StartupOption.REGULAR.getName().equalsIgnoreCase(cmd)) {
    chosen = StartupOption.REGULAR;
  } else {
    return false;
  }
  setStartupOption(conf, chosen);
  // More than one command on the line is an error.
  return args.length == 1;
}

代码示例来源:origin: io.prestosql.hadoop/hadoop-apache

/**
 * Validates the command line arguments and writes the selected startup option
 * into the configuration.
 *
 * Only a single startup command is permitted; the removed -r/--rack flags
 * produce an error.
 *
 * @param args command line arguments; may be {@code null} or empty
 * @param conf configuration that receives the parsed {@link StartupOption}
 * @return false if the passed arguments are incorrect
 */
@VisibleForTesting
static boolean parseArguments(String args[], Configuration conf) {
  StartupOption selected = StartupOption.REGULAR;
  int cursor = 0;
  if (args != null && args.length != 0) {
    final String command = args[cursor++];
    if ("-r".equalsIgnoreCase(command) || "--rack".equalsIgnoreCase(command)) {
      LOG.error("-r, --rack arguments are not supported anymore. RackID " +
        "resolution is handled by the NameNode.");
      return false;
    }
    if (StartupOption.ROLLBACK.getName().equalsIgnoreCase(command)) {
      selected = StartupOption.ROLLBACK;
    } else if (StartupOption.REGULAR.getName().equalsIgnoreCase(command)) {
      selected = StartupOption.REGULAR;
    } else {
      return false;
    }
  }
  setStartupOption(conf, selected);
  // Reject the invocation when extra tokens follow the single command.
  return args == null || cursor == args.length;
}

相关文章

DataNode类方法