本文整理了Java中org.apache.hadoop.fs.FileUtil.canExecute()
方法的一些代码示例,展示了FileUtil.canExecute()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。FileUtil.canExecute()
方法的具体详情如下:
包路径:org.apache.hadoop.fs.FileUtil
类名称:FileUtil
方法名:canExecute
[英]Platform independent implementation for File#canExecute()
[中]文件#canExecute()的平台独立实现
代码示例来源:origin: org.apache.hadoop/hadoop-common
/**
 * Determines whether the node health monitoring service should be started.
 * It should run only when all of the following hold:
 *
 * <ol>
 * <li>The path to the node health check script is non-empty</li>
 * <li>The script file exists (and is executable by this process)</li>
 * </ol>
 *
 * @param healthScript path to the node health check script; may be null
 * @return true if node health monitoring service can be started.
 */
public static boolean shouldRun(String healthScript) {
  if (healthScript != null && !healthScript.trim().isEmpty()) {
    File scriptFile = new File(healthScript);
    return scriptFile.exists() && FileUtil.canExecute(scriptFile);
  }
  return false;
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
/**
 * Verifies that the current running process can read, write, and execute
 * the given directory, using File-based permission checks.
 *
 * @param dir directory to validate
 * @throws DiskErrorException if dir is not a directory, or is not
 *         readable, writable, or executable by this process
 */
private static void checkAccessByFileMethods(File dir)
    throws DiskErrorException {
  final String path = dir.toString();
  if (!dir.isDirectory()) {
    throw new DiskErrorException("Not a directory: " + path);
  }
  if (!FileUtil.canRead(dir)) {
    throw new DiskErrorException("Directory is not readable: " + path);
  }
  if (!FileUtil.canWrite(dir)) {
    throw new DiskErrorException("Directory is not writable: " + path);
  }
  if (!FileUtil.canExecute(dir)) {
    throw new DiskErrorException("Directory is not executable: " + path);
  }
}
代码示例来源:origin: io.hops/hadoop-common
/**
 * Decides whether the node health monitoring service should be started.
 * All of the following must be true:
 *
 * <ol>
 * <li>The node health check script path is non-empty</li>
 * <li>The node health check script file exists</li>
 * </ol>
 *
 * @param healthScript candidate path to the health check script
 * @return true if node health monitoring service can be started.
 */
public static boolean shouldRun(String healthScript) {
  boolean configured =
      healthScript != null && !healthScript.trim().isEmpty();
  if (!configured) {
    return false;
  }
  File script = new File(healthScript);
  return script.exists() && FileUtil.canExecute(script);
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-yarn-server-nodemanager
/**
 * Decides whether the node health monitoring service should be started,
 * based on the configured health check script. Returns true only when:
 *
 * <ol>
 * <li>{@code NM_HEALTH_CHECK_SCRIPT_PATH} is set to a non-empty value</li>
 * <li>The configured script file exists and is executable</li>
 * </ol>
 *
 * @param conf configuration to read the script path from
 * @return true if node health monitoring service can be started.
 */
public static boolean shouldRun(Configuration conf) {
  String scriptPath =
      conf.get(YarnConfiguration.NM_HEALTH_CHECK_SCRIPT_PATH);
  if (scriptPath == null || scriptPath.trim().isEmpty()) {
    return false;
  }
  File script = new File(scriptPath);
  return script.exists() && FileUtil.canExecute(script);
}
代码示例来源:origin: ch.cern.hadoop/hadoop-yarn-server-nodemanager
/**
 * Checks whether the node health monitoring service can be started.
 * Both conditions must hold:
 *
 * <ol>
 * <li>{@code NM_HEALTH_CHECK_SCRIPT_PATH} is configured and non-blank</li>
 * <li>The script file exists (and is executable by this process)</li>
 * </ol>
 *
 * @param conf source of the health check script path
 * @return true if node health monitoring service can be started.
 */
public static boolean shouldRun(Configuration conf) {
  String configuredPath =
      conf.get(YarnConfiguration.NM_HEALTH_CHECK_SCRIPT_PATH);
  if (configuredPath != null && !configuredPath.trim().isEmpty()) {
    File scriptFile = new File(configuredPath);
    return scriptFile.exists() && FileUtil.canExecute(scriptFile);
  }
  return false;
}
代码示例来源:origin: org.apache.hadoop/hadoop-yarn-server-nodemanager
/**
 * Verifies that the node descriptors fetching script is configured and fit
 * to run. The configuration is considered valid only when:
 *
 * <ol>
 * <li>The script path is non-empty</li>
 * <li>The script file exists and is executable</li>
 * </ol>
 *
 * @param scriptPath configured path of the script; may be null
 * @throws IOException if the script is missing, empty, or not executable
 */
protected void verifyConfiguredScript(String scriptPath)
    throws IOException {
  boolean valid = false;
  if (scriptPath != null && !scriptPath.trim().isEmpty()) {
    File script = new File(scriptPath);
    valid = script.exists() && FileUtil.canExecute(script);
  }
  if (!valid) {
    throw new IOException(
        "Node descriptors provider script \"" + scriptPath
            + "\" is not configured properly. Please check whether"
            + " the script path exists, owner and the access rights"
            + " are suitable for NM process to execute it");
  }
}
代码示例来源:origin: io.hops/hadoop-yarn-server-nodemanager
/**
 * Verifies that the node labels fetching script is configured and fit to
 * run. The configuration is considered valid only when:
 *
 * <ol>
 * <li>The node labels fetch script path is non-empty</li>
 * <li>The script file exists and is executable</li>
 * </ol>
 *
 * @throws IOException if the script is missing, empty, or not executable
 */
private void verifyConfiguredScript()
    throws IOException {
  boolean valid = false;
  if (nodeLabelsScriptPath != null
      && !nodeLabelsScriptPath.trim().isEmpty()) {
    File script = new File(nodeLabelsScriptPath);
    valid = script.exists() && FileUtil.canExecute(script);
  }
  if (!valid) {
    throw new IOException(
        "Distributed Node labels provider script \"" + nodeLabelsScriptPath
            + "\" is not configured properly. Please check whether the script "
            + "path exists, owner and the access rights are suitable for NM "
            + "process to execute it");
  }
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-common
/**
 * Verifies that the current running process can read, write, and execute
 * the given directory via File-based permission checks.
 *
 * @param dir directory to validate
 * @throws DiskErrorException if dir is not readable, not writable, or not
 *         executable by this process
 */
private static void checkAccessByFileMethods(File dir)
    throws DiskErrorException {
  final String p = dir.toString();
  if (!FileUtil.canRead(dir)) {
    throw new DiskErrorException("Directory is not readable: " + p);
  }
  if (!FileUtil.canWrite(dir)) {
    throw new DiskErrorException("Directory is not writable: " + p);
  }
  if (!FileUtil.canExecute(dir)) {
    throw new DiskErrorException("Directory is not executable: " + p);
  }
}
}
代码示例来源:origin: io.hops/hadoop-common
/**
 * Checks that this process has full (read/write/execute) access to the
 * given directory, using File-object permission checks.
 *
 * @param dir directory to validate
 * @throws DiskErrorException if dir is not a directory, or lacks read,
 *         write, or execute permission for this process
 */
private static void checkAccessByFileMethods(File dir)
    throws DiskErrorException {
  if (!dir.isDirectory()) {
    throw new DiskErrorException("Not a directory: " + dir.toString());
  }
  // Probe each permission in turn; report the first one that is missing.
  if (!FileUtil.canRead(dir)) {
    throw new DiskErrorException(
        "Directory is not readable: " + dir.toString());
  }
  if (!FileUtil.canWrite(dir)) {
    throw new DiskErrorException(
        "Directory is not writable: " + dir.toString());
  }
  if (!FileUtil.canExecute(dir)) {
    throw new DiskErrorException(
        "Directory is not executable: " + dir.toString());
  }
}
代码示例来源:origin: ch.cern.hadoop/hadoop-common
/**
 * Ensures the current process holds read, write, and execute permission on
 * the given directory, checking via the File API.
 *
 * @param dir directory to validate
 * @throws DiskErrorException if any of the three permissions is missing
 */
private static void checkAccessByFileMethods(File dir)
    throws DiskErrorException {
  String dirName = dir.toString();
  if (!FileUtil.canRead(dir)) {
    throw new DiskErrorException("Directory is not readable: " + dirName);
  }
  if (!FileUtil.canWrite(dir)) {
    throw new DiskErrorException("Directory is not writable: " + dirName);
  }
  if (!FileUtil.canExecute(dir)) {
    throw new DiskErrorException("Directory is not executable: " + dirName);
  }
}
}
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
/**
 * Confirms that this process can read, write, and execute the supplied
 * directory, using File-object permission checks.
 *
 * @param dir directory to validate
 * @throws DiskErrorException when read, write, or execute access is denied
 */
private static void checkAccessByFileMethods(File dir)
    throws DiskErrorException {
  if (!FileUtil.canRead(dir)) {
    throw new DiskErrorException(
        "Directory is not readable: " + dir.toString());
  }
  if (!FileUtil.canWrite(dir)) {
    throw new DiskErrorException(
        "Directory is not writable: " + dir.toString());
  }
  if (!FileUtil.canExecute(dir)) {
    throw new DiskErrorException(
        "Directory is not executable: " + dir.toString());
  }
}
}
代码示例来源:origin: ch.cern.hadoop/hadoop-hdfs
/**
 * Builds a debug string that can help diagnose why a given directory might
 * have a permissions error in the context of a test case. For the path and
 * each of its ancestors it records the absolute path plus ls-style
 * "drwx" permission flags as seen by this process.
 *
 * @param path starting file; the walk follows getParentFile() up to the root
 * @return multi-line diagnostic string, one entry per path component
 */
private String createPermissionsDiagnosisString(File path) {
  StringBuilder sb = new StringBuilder();
  while (path != null) {
    // Chain appends rather than mixing '+' concatenation into the
    // StringBuilder calls, which allocates a throwaway intermediate String.
    sb.append("path '").append(path).append("': ").append("\n");
    sb.append("\tabsolute:").append(path.getAbsolutePath()).append("\n");
    sb.append("\tpermissions: ");
    sb.append(path.isDirectory() ? "d" : "-");
    sb.append(FileUtil.canRead(path) ? "r" : "-");
    sb.append(FileUtil.canWrite(path) ? "w" : "-");
    sb.append(FileUtil.canExecute(path) ? "x" : "-");
    sb.append("\n");
    path = path.getParentFile();
  }
  return sb.toString();
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core
if (!FileUtil.canExecute(new File(executable))) {
代码示例来源:origin: io.hops/hadoop-mapreduce-client-core
if (!FileUtil.canExecute(new File(executable))) {
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core
if (!FileUtil.canExecute(new File(executable))) {
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
if (!FileUtil.canExecute(new File(executable))) {
内容来源于网络,如有侵权,请联系作者删除!