Usage of the org.apache.hadoop.fs.FileUtil.copy() method, with code examples


This article collects Java code examples of the org.apache.hadoop.fs.FileUtil.copy() method and shows how FileUtil.copy() is used in practice. The examples are taken from selected open-source projects found on platforms such as GitHub, Stack Overflow and Maven, and should serve as useful references. The method is located as follows:
Package: org.apache.hadoop.fs
Class: FileUtil
Method: copy

About FileUtil.copy

Copy local files to a FileSystem. (This is the Javadoc of one overload; as the examples below show, FileUtil.copy also has overloads for copying between two FileSystems, from a FileSystem to a local java.io.File, and for an array of source paths.)
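
For orientation, here is a minimal, self-contained sketch of the most common overload, FileUtil.copy(srcFS, src, dstFS, dst, deleteSource, overwrite, conf). The paths and cluster configuration below are placeholders and are not taken from any of the projects quoted later:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

public class FileUtilCopyDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Source: the local filesystem; destination: whatever fs.defaultFS points to (e.g. HDFS).
    FileSystem localFs = FileSystem.getLocal(conf);
    FileSystem dstFs = FileSystem.get(conf);
    Path src = new Path("/tmp/example.txt");        // placeholder local file
    Path dst = new Path("/user/demo/example.txt");  // placeholder destination path
    // deleteSource=false keeps the local file; overwrite=true replaces an existing destination.
    boolean ok = FileUtil.copy(localFs, src, dstFs, dst, false, true, conf);
    System.out.println("copy succeeded: " + ok);
  }
}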

Code examples

Code example source: org.apache.hadoop/hadoop-common

/** Copy FileSystem files to local files. */
public static boolean copy(FileSystem srcFS, Path src,
              File dst, boolean deleteSource,
              Configuration conf) throws IOException {
 FileStatus filestatus = srcFS.getFileStatus(src);
 return copy(srcFS, filestatus, dst, deleteSource, conf);
}

Code example source: org.apache.hadoop/hadoop-common

/** Copy files between FileSystems. */
public static boolean copy(FileSystem srcFS, Path src,
              FileSystem dstFS, Path dst,
              boolean deleteSource,
              boolean overwrite,
              Configuration conf) throws IOException {
 FileStatus fileStatus = srcFS.getFileStatus(src);
 return copy(srcFS, fileStatus, dstFS, dst, deleteSource, overwrite, conf);
}

Code example source: org.apache.hadoop/hadoop-common

/**
 * The src file is on the local disk.  Add it to the filesystem at
 * the given dst name.
 * delSrc indicates if the source should be removed
 * @param delSrc whether to delete the src
 * @param overwrite whether to overwrite an existing file
 * @param src path
 * @param dst path
 * @throws IOException IO failure
 */
public void copyFromLocalFile(boolean delSrc, boolean overwrite,
               Path src, Path dst)
 throws IOException {
 Configuration conf = getConf();
 FileUtil.copy(getLocal(conf), src, this, dst, delSrc, overwrite, conf);
}
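
From the caller's side this operation is usually invoked directly on a FileSystem instance. A minimal sketch with placeholder paths (imports as in the introductory sketch above):

FileSystem fs = FileSystem.get(new Configuration());
// delSrc=false keeps the local file; overwrite=true replaces the destination if it already exists.
fs.copyFromLocalFile(false, true, new Path("/tmp/input.csv"), new Path("/data/input.csv"));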

Code example source: org.apache.hadoop/hadoop-common

/**
 * The src files are on the local disk.  Add it to the filesystem at
 * the given dst name.
 * delSrc indicates if the source should be removed
 * @param delSrc whether to delete the src
 * @param overwrite whether to overwrite an existing file
 * @param srcs array of paths which are source
 * @param dst path
 * @throws IOException IO failure
 */
public void copyFromLocalFile(boolean delSrc, boolean overwrite,
               Path[] srcs, Path dst)
 throws IOException {
 Configuration conf = getConf();
 FileUtil.copy(getLocal(conf), srcs, this, dst, delSrc, overwrite, conf);
}

Code example source: org.apache.hadoop/hadoop-common

@Override
public void copyToLocalFile(boolean delSrc, Path src, Path dst)
 throws IOException {
 FileUtil.copy(this, src, this, dst, delSrc, getConf());
}

Code example source: org.apache.hadoop/hadoop-common

@Override
public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
 throws IOException {
 FileUtil.copy(this, src, this, dst, delSrc, getConf());
}

Code example source: apache/hbase

private static List<Path> copyFiles(FileSystem srcFS, Path src, FileSystem dstFS, Path dst,
   Configuration conf, ExecutorService pool, List<Future<Void>> futures) throws IOException {
  List<Path> traversedPaths = new ArrayList<>();
  traversedPaths.add(dst);
  FileStatus currentFileStatus = srcFS.getFileStatus(src);
  if (currentFileStatus.isDirectory()) {
   if (!dstFS.mkdirs(dst)) {
    throw new IOException("create dir failed: " + dst);
   }
   FileStatus[] subPaths = srcFS.listStatus(src);
   for (FileStatus subPath : subPaths) {
    traversedPaths.addAll(copyFiles(srcFS, subPath.getPath(), dstFS,
     new Path(dst, subPath.getPath().getName()), conf, pool, futures));
   }
  } else {
   Future<Void> future = pool.submit(() -> {
    FileUtil.copy(srcFS, src, dstFS, dst, false, false, conf);
    return null;
   });
   futures.add(future);
  }
  return traversedPaths;
 }

Code example source: apache/hive

private void doRegularCopyOnce(FileSystem sourceFs, List<Path> srcList, FileSystem destinationFs,
  Path destination, boolean usePrivilegedUser) throws IOException {
 /*
  * Even for a regular copy we have to use the same user permissions that distCp will use, since
  * the hive-server user might be different from the super user required to copy the relevant files.
  */
 final Path[] paths = srcList.toArray(new Path[] {});
 if (usePrivilegedUser) {
  final Path finalDestination = destination;
  UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(
    copyAsUser, UserGroupInformation.getLoginUser());
  try {
   proxyUser.doAs((PrivilegedExceptionAction<Boolean>) () -> {
    FileUtil
      .copy(sourceFs, paths, destinationFs, finalDestination, false, true, hiveConf);
    return true;
   });
  } catch (InterruptedException e) {
   throw new IOException(e);
  }
 } else {
  FileUtil.copy(sourceFs, paths, destinationFs, destination, false, true, hiveConf);
 }
}

Code example source: org.apache.hadoop/hadoop-common

/** Copy FileSystem files to local files. */
private static boolean copy(FileSystem srcFS, FileStatus srcStatus,
              File dst, boolean deleteSource,
              Configuration conf) throws IOException {
 Path src = srcStatus.getPath();
 if (srcStatus.isDirectory()) {
  if (!dst.mkdirs()) {
   return false;
  }
  FileStatus contents[] = srcFS.listStatus(src);
  for (int i = 0; i < contents.length; i++) {
   copy(srcFS, contents[i],
      new File(dst, contents[i].getPath().getName()),
      deleteSource, conf);
  }
 } else {
  InputStream in = srcFS.open(src);
  IOUtils.copyBytes(in, new FileOutputStream(dst), conf);
 }
 if (deleteSource) {
  return srcFS.delete(src, true);
 } else {
  return true;
 }
}
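
The public counterpart of this helper copies from a FileSystem down to a local java.io.File. A minimal usage sketch with placeholder paths (imports as in the introductory sketch, plus java.io.File):

Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(conf);
// Recursively copies /user/demo/output from the cluster to the local directory /tmp/output;
// deleteSource=false leaves the source files on the cluster.
boolean ok = FileUtil.copy(fs, new Path("/user/demo/output"), new File("/tmp/output"), false, conf);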

Code example source: apache/kylin

@Override
void prepareForWrite(String workingDir, boolean isGlobal) throws IOException {
  if (!fileSystem.exists(basePath)) {
    logger.info("Global dict at {} doesn't exist, create a new one", basePath);
    fileSystem.mkdirs(basePath);
  }
  migrateOldLayout();
  logger.trace("Prepare to write Global dict at {}, isGlobal={}", workingDir, isGlobal);
  Path working = new Path(workingDir);
  if (fileSystem.exists(working)) {
    fileSystem.delete(working, true);
    logger.trace("Working directory {} exits, delete it first", working);
  }
  // When building the dict, copy all data into the working dir and work on it there, so that a sudden server crash does not corrupt the existing data
  Long[] versions = listAllVersions();
  if (versions.length > 0 && isGlobal) {
    Path latestVersion = getVersionDir(versions[versions.length - 1]);
    FileUtil.copy(fileSystem, latestVersion, fileSystem, working, false, true, conf);
  } else {
    fileSystem.mkdirs(working);
  }
}

Code example source: org.apache.hadoop/hadoop-common

@Override
public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
 throws IOException {
 Configuration conf = getConf();
 FileUtil.copy(getLocal(conf), src, this, dst, delSrc, conf);
}

Code example source: org.apache.hadoop/hadoop-common

/**
 * The src file is under FS, and the dst is on the local disk.
 * Copy it from FS control to the local dst name.
 */
@Override
public void copyToLocalFile(boolean delSrc, Path src, Path dst)
 throws IOException {
 Configuration conf = getConf();
 FileUtil.copy(this, src, getLocal(conf), dst, delSrc, conf);
}

Code example source: org.apache.hadoop/hadoop-common

@Override
public boolean rename(Path src, Path dst) throws IOException {
 // Attempt rename using Java API.
 File srcFile = pathToFile(src);
 File dstFile = pathToFile(dst);
 if (srcFile.renameTo(dstFile)) {
  return true;
 }
 // Else try POSIX style rename on Windows only
 if (Shell.WINDOWS &&
   handleEmptyDstDirectoryOnWindows(src, srcFile, dst, dstFile)) {
  return true;
 }
 // The fallback behavior accomplishes the rename by a full copy.
 if (LOG.isDebugEnabled()) {
  LOG.debug("Falling through to a copy of " + src + " to " + dst);
 }
 return FileUtil.copy(this, src, this, dst, true, getConf());
}
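
Note the deleteSource flag in the last call: the fallback passes true, so the copy effectively becomes a move, with the source removed once its contents have been copied to the destination.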

Code example source: org.apache.hadoop/hadoop-common

/**
 * copies the file in the har filesystem to a local file.
 */
@Override
public void copyToLocalFile(boolean delSrc, Path src, Path dst) 
 throws IOException {
 FileUtil.copy(this, src, getLocal(getConf()), dst, false, getConf());
}

Code example source: apache/kylin

@Override
public String copyToAnotherMeta(KylinConfig srcConfig, KylinConfig dstConfig) throws IOException {
  if (baseDir.contains("resources/SegmentDict")) {
    logger.info("SegmentAppendTrieDict needn't to copy");
    return baseDir;
  }
  checkArgument(baseDir.startsWith(srcConfig.getHdfsWorkingDirectory()),
      "Please check why current directory {} doesn't belong to source working directory {}", baseDir,
      srcConfig.getHdfsWorkingDirectory());
  final String dstBaseDir = baseDir.replaceFirst(srcConfig.getHdfsWorkingDirectory(),
      dstConfig.getHdfsWorkingDirectory());
  Long[] versions = listAllVersions();
  if (versions.length == 0) { // empty dict, nothing to copy
    return dstBaseDir;
  }
  Path srcVersionDir = getVersionDir(versions[versions.length - 1]);
  Path dstVersionDir = new Path(srcVersionDir.toString().replaceFirst(srcConfig.getHdfsWorkingDirectory(),
      dstConfig.getHdfsWorkingDirectory()));
  FileSystem dstFS = dstVersionDir.getFileSystem(conf);
  if (dstFS.exists(dstVersionDir)) {
    dstFS.delete(dstVersionDir, true);
  }
  FileUtil.copy(fileSystem, srcVersionDir, dstFS, dstVersionDir, false, true, conf);
  return dstBaseDir;
}

Code example source: apache/incubator-pinot

/**
 * Note that this method copies within a cluster. If you want to copy outside the cluster, you will
 * need to create a new configuration and filesystem. Keeps files if copy/move is partial.
 */
@Override
public boolean copy(URI srcUri, URI dstUri)
  throws IOException {
 Path source = new Path(srcUri);
 Path target = new Path(dstUri);
 RemoteIterator<LocatedFileStatus> sourceFiles = _hadoopFS.listFiles(source, true);
 if (sourceFiles != null) {
  while (sourceFiles.hasNext()) {
   boolean succeeded =
     FileUtil.copy(_hadoopFS, sourceFiles.next().getPath(), _hadoopFS, target, true, _hadoopConf);
   if (!succeeded) {
    return false;
   }
  }
 }
 return true;
}
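
As the comment in the snippet notes, copying to a different cluster requires a separate Configuration and FileSystem for the target. A hypothetical sketch; the remote URI and paths are assumptions and not part of the Pinot code (uses java.net.URI):

Configuration remoteConf = new Configuration();
FileSystem remoteFs = FileSystem.get(URI.create("hdfs://other-cluster:8020"), remoteConf);
// Copy a path from the local cluster to the remote one; deleteSource=false, overwrite=true.
FileUtil.copy(_hadoopFS, new Path("/segments/table1/seg0"), remoteFs,
    new Path("/segments/table1/seg0"), false, true, remoteConf);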

Code example source: apache/ignite

/**
 * Copy files from one FS to another.
 *
 * @param msg Info message to display after copying finishes.
 * @param srcFs Source file system.
 * @param src Source path to copy from.
 * @param destFs Destination file system.
 * @param dest Destination path to copy to.
 * @throws IOException If failed.
 */
private void copy(String msg, FileSystem srcFs, Path src, FileSystem destFs, Path dest) throws IOException {
  assert destFs.delete(dest, true) || !destFs.exists(dest) : "Failed to remove: " + dest;
  destFs.mkdirs(dest);
  Configuration conf = new Configuration(true);
  long time = System.currentTimeMillis();
  FileUtil.copy(srcFs, src, destFs, dest, false, true, conf);
  time = System.currentTimeMillis() - time;
  info("Copying finished, " + msg + " [time=" + time + "ms, src=" + src + ", dest=" + dest + ']');
}

Code example source: voldemort/voldemort

@Override
public void addDisabledNode(int nodeId,
              String storeName,
              long storeVersion) throws Exception {
  if (!lockAcquired) {
    throw new VoldemortException("HdfsFailedFetchLock.addDisabledNode() called while the lock is not acquired!");
  }
  int attempts = 1;
  boolean success = false;
  while (!success && attempts <= maxAttempts) {
    FSDataOutputStream outputStream = null;
    try {
      String nodeIdDir = NODE_ID_DIR_PREFIX + nodeId;
      String failedJobDir = clusterDir + "/" + nodeIdDir + "/" + storeName + "/" + storeVersion;
      Path failedJobFile = new Path(failedJobDir, getUniqueFileName());
      FileUtil.copy(this.fileSystem, this.lockFile, this.fileSystem, failedJobFile, false, true, new Configuration());
      success = true;
    }  catch (IOException e) {
      handleIOException(e, ADD_DISABLED_NODE, attempts);
      attempts++;
    } finally {
      IOUtils.closeQuietly(outputStream);
    }
  }
  if (!success) {
    throw new VoldemortException(exceptionMessage(ADD_DISABLED_NODE));
  }
}

Code example source: apache/hbase

"the destination store. Copying file over to destination filesystem.");
Path tmpPath = createTempName();
FileUtil.copy(realSrcFs, srcPath, fs, tmpPath, false, conf);
LOG.info("Copied " + srcPath + " to temporary path on destination filesystem: " + tmpPath);
srcPath = tmpPath;

Code example source: apache/hive

copied = FileUtil.copy(srcFS, src, dstFS, dst, deleteSource, overwrite, conf);
