Usage and code examples of the org.apache.hadoop.hdfs.server.common.Util.stringAsURI() method


This article collects a number of Java code examples for the org.apache.hadoop.hdfs.server.common.Util.stringAsURI() method and shows how it is used. The examples are taken from selected projects on platforms such as GitHub, Stack Overflow, and Maven, so they should make useful references. Details of the Util.stringAsURI() method:
Package path: org.apache.hadoop.hdfs.server.common.Util
Class name: Util
Method name: stringAsURI

About Util.stringAsURI

Interprets the passed string as a URI. In case of error it assumes the specified string is a file.
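
A minimal usage sketch (not taken from the sources below): it assumes hadoop-hdfs is on the classpath, and the path strings are hypothetical, chosen only to illustrate the behavior described above, namely that a well-formed URI is parsed as-is while a plain path without a scheme is assumed to be a local file.

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.hdfs.server.common.Util;

public class StringAsURIDemo {
 public static void main(String[] args) throws IOException {
  // A string that is already a valid URI is parsed directly.
  URI fromUri = Util.stringAsURI("file:///tmp/hadoop/name");   // hypothetical value
  System.out.println(fromUri.getScheme() + " -> " + fromUri.getPath());

  // A plain path carries no scheme, so it is assumed to name a local file
  // and comes back as a file: URI, per the description above.
  URI fromPath = Util.stringAsURI("/tmp/hadoop/name");         // hypothetical value
  System.out.println(fromPath.getScheme() + " -> " + fromPath.getPath());
 }
}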

Code examples

Code example source (origin): org.apache.hadoop/hadoop-hdfs

/**
 * Converts a collection of strings into a collection of URIs.
 * @param names collection of strings to convert to URIs
 * @return collection of URIs
 */
public static List<URI> stringCollectionAsURIs(
                Collection<String> names) {
 List<URI> uris = new ArrayList<>(names.size());
 for(String name : names) {
  try {
   uris.add(stringAsURI(name));
  } catch (IOException e) {
   LOG.error("Error while processing URI: " + name, e);
  }
 }
 return uris;
}
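
As a rough usage sketch (assuming the hadoop-hdfs variant shown above, which returns a List<URI>; the directory strings are made up for illustration), a caller could hand stringCollectionAsURIs() a list of configured storage directories and get back the corresponding URIs:

import java.net.URI;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hdfs.server.common.Util;

public class StringCollectionDemo {
 public static void main(String[] args) {
  // Hypothetical storage-directory strings, e.g. read from a configuration value.
  List<String> dirs = Arrays.asList("/data/1/dfs/name", "file:///data/2/dfs/name");

  // Each entry is converted with stringAsURI(); entries that fail are logged and skipped.
  List<URI> uris = Util.stringCollectionAsURIs(dirs);
  uris.forEach(System.out::println);
 }
}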

Code example source (origin): org.apache.hadoop/hadoop-hdfs-test

/**
 * Test for a relative path, os independent
 * @throws IOException 
 */
public void testRelativePathAsURI() throws IOException {
 URI u = Util.stringAsURI(RELATIVE_FILE_PATH);
 LOG.info("Uri: " + u);
 assertNotNull(u);
}
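
Presumably (an assumption based on the method description, not something the quoted test asserts), a relative path also carries no scheme, so it is treated as a file and converted to a file: URI. A hypothetical illustration:

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.hdfs.server.common.Util;

public class RelativePathDemo {
 public static void main(String[] args) throws IOException {
  // Hypothetical relative path; the test's RELATIVE_FILE_PATH constant is defined elsewhere.
  URI u = Util.stringAsURI("data/dfs/name");
  // The test above only checks that the result is non-null.
  System.out.println(u);
 }
}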

Code example source (origin): ch.cern.hadoop/hadoop-hdfs

/**
  * Converts a collection of strings into a collection of URIs.
  * @param names collection of strings to convert to URIs
  * @return collection of URIs
  */
 public static List<URI> stringCollectionAsURIs(
                 Collection<String> names) {
  List<URI> uris = new ArrayList<URI>(names.size());
  for(String name : names) {
   try {
    uris.add(stringAsURI(name));
   } catch (IOException e) {
    LOG.error("Error while processing URI: " + name, e);
   }
  }
  return uris;
 }

Code example source (origin): com.facebook.hadoop/hadoop-core

/**
  * Converts a collection of strings into a collection of URIs.
  * @param names collection of strings to convert to URIs
  * @return collection of URIs
  */
 public static Collection<URI> stringCollectionAsURIs(
                 Collection<String> names) {
  Collection<URI> uris = new ArrayList<URI>(names.size());
  for(String name : names) {
   try {
    uris.add(stringAsURI(name));
   } catch (IOException e) {
    LOG.error("Error while processing URI: " + name, e);
   }
  }
  return uris;
 }

Code example source (origin): io.prestosql.hadoop/hadoop-apache

/**
  * Converts a collection of strings into a collection of URIs.
  * @param names collection of strings to convert to URIs
  * @return collection of URIs
  */
 public static List<URI> stringCollectionAsURIs(
                 Collection<String> names) {
  List<URI> uris = new ArrayList<URI>(names.size());
  for(String name : names) {
   try {
    uris.add(stringAsURI(name));
   } catch (IOException e) {
    LOG.error("Error while processing URI: " + name, e);
   }
  }
  return uris;
 }

Code example source (origin): org.apache.hadoop/hadoop-hdfs-test

/**
 * Test for an OS dependent absolute paths.
 * @throws IOException 
 */
public void testAbsolutePathAsURI() throws IOException {
 URI u = null;
 u = Util.stringAsURI(ABSOLUTE_PATH_WINDOWS);
 assertNotNull(
   "Uri should not be null for Windows path" + ABSOLUTE_PATH_WINDOWS, u);
 assertEquals(URI_FILE_SCHEMA, u.getScheme());
 u = Util.stringAsURI(ABSOLUTE_PATH_UNIX);
 assertNotNull("Uri should not be null for Unix path" + ABSOLUTE_PATH_UNIX, u);
 assertEquals(URI_FILE_SCHEMA, u.getScheme());
}

Code example source (origin): org.apache.hadoop/hadoop-hdfs-test

/**
  * Test for a URI
  * @throws IOException 
  */
 public void testURI() throws IOException {
  LOG.info("Testing correct Unix URI: " + URI_UNIX);
  URI u = Util.stringAsURI(URI_UNIX);
  LOG.info("Uri: " + u);    
  assertNotNull("Uri should not be null at this point", u);
  assertEquals(URI_FILE_SCHEMA, u.getScheme());
  assertEquals(URI_PATH_UNIX, u.getPath());

  LOG.info("Testing correct windows URI: " + URI_WINDOWS);
  u = Util.stringAsURI(URI_WINDOWS);
  LOG.info("Uri: " + u);
  assertNotNull("Uri should not be null at this point", u);
  assertEquals(URI_FILE_SCHEMA, u.getScheme());
  assertEquals(URI_PATH_WINDOWS.replace("%20", " "), u.getPath());
 }
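
The replace("%20", " ") in the last assertion reflects standard java.net.URI behavior: getPath() returns the decoded path, so a percent-encoded space in the URI string comes back as a literal space. A small standalone illustration (the URI string here is made up):

import java.net.URI;
import java.net.URISyntaxException;

public class UriPathDecodingDemo {
 public static void main(String[] args) throws URISyntaxException {
  // Hypothetical URI containing a percent-encoded space.
  URI u = new URI("file:///C:/Program%20Files/hadoop");
  // getPath() decodes percent-escapes, so "%20" becomes a space.
  System.out.println(u.getPath()); // prints "/C:/Program Files/hadoop"
 }
}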

Code example source (origin): ch.cern.hadoop/hadoop-hdfs

/**
 * Test for a relative path, os independent
 * @throws IOException 
 */
@Test
public void testRelativePathAsURI() throws IOException {
 URI u = Util.stringAsURI(RELATIVE_FILE_PATH);
 LOG.info("Uri: " + u);
 assertNotNull(u);
}

Code example source (origin): ch.cern.hadoop/hadoop-hdfs

/**
 * Test for an OS dependent absolute paths.
 * @throws IOException 
 */
@Test
public void testAbsolutePathAsURI() throws IOException {
 URI u = null;
 u = Util.stringAsURI(ABSOLUTE_PATH_WINDOWS);
 assertNotNull(
   "Uri should not be null for Windows path" + ABSOLUTE_PATH_WINDOWS, u);
 assertEquals(URI_FILE_SCHEMA, u.getScheme());
 u = Util.stringAsURI(ABSOLUTE_PATH_UNIX);
 assertNotNull("Uri should not be null for Unix path" + ABSOLUTE_PATH_UNIX, u);
 assertEquals(URI_FILE_SCHEMA, u.getScheme());
}

Code example source (origin): ch.cern.hadoop/hadoop-hdfs

/**
  * Test for a URI
  * @throws IOException 
  */
 @Test
 public void testURI() throws IOException {
  LOG.info("Testing correct Unix URI: " + URI_UNIX);
  URI u = Util.stringAsURI(URI_UNIX);
  LOG.info("Uri: " + u);    
  assertNotNull("Uri should not be null at this point", u);
  assertEquals(URI_FILE_SCHEMA, u.getScheme());
  assertEquals(URI_PATH_UNIX, u.getPath());

  LOG.info("Testing correct windows URI: " + URI_WINDOWS);
  u = Util.stringAsURI(URI_WINDOWS);
  LOG.info("Uri: " + u);
  assertNotNull("Uri should not be null at this point", u);
  assertEquals(URI_FILE_SCHEMA, u.getScheme());
  assertEquals(URI_PATH_WINDOWS.replace("%20", " "), u.getPath());
 }
