org.apache.flink.configuration.Configuration.keySet()方法的使用及代码示例

x33g5p2x  于2022-01-18 转载在 其他  
字(8.5k)|赞(0)|评价(0)|浏览(276)

本文整理了Java中org.apache.flink.configuration.Configuration.keySet()方法的一些代码示例,展示了Configuration.keySet()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Configuration.keySet()方法的具体详情如下:
包路径:org.apache.flink.configuration.Configuration
类名称:Configuration
方法名:keySet

Configuration.keySet介绍

[英]Returns the keys of all key/value pairs stored inside this configuration object.
[中]返回存储在此配置对象中的所有键/值对的键。

代码示例

代码示例来源:origin: apache/flink

/**
 * Returns the keys visible through this delegating view: with no prefix the
 * backing configuration's keys are returned directly; otherwise only backing
 * keys starting with the prefix are returned, with the prefix stripped.
 */
@Override
public Set<String> keySet() {
  // No prefix configured: this view mirrors the backing configuration.
  if (this.prefix == null) {
    return this.backingConfig.keySet();
  }

  final Set<String> strippedKeys = new HashSet<>();
  final int cut = this.prefix.length();
  for (final String fullKey : this.backingConfig.keySet()) {
    if (fullKey.startsWith(this.prefix)) {
      // Expose the key relative to this view's prefix.
      strippedKeys.add(fullKey.substring(cut));
    }
  }
  return strippedKeys;
}

代码示例来源:origin: apache/flink

/**
 * Builds a fresh Hadoop configuration from all Flink config entries whose
 * keys start with one of the recognized Flink config prefixes; the prefix is
 * replaced by the Hadoop config prefix and values are passed through
 * {@code fixHadoopConfig}.
 */
private org.apache.hadoop.conf.Configuration loadHadoopConfigFromFlink() {
  final org.apache.hadoop.conf.Configuration hadoopConfig =
      new org.apache.hadoop.conf.Configuration();
  for (final String flinkKey : flinkConfig.keySet()) {
    for (final String candidatePrefix : flinkConfigPrefixes) {
      if (!flinkKey.startsWith(candidatePrefix)) {
        continue;
      }
      // Re-root the key under the Hadoop config prefix.
      final String hadoopKey = hadoopConfigPrefix + flinkKey.substring(candidatePrefix.length());
      final String hadoopValue = fixHadoopConfig(flinkKey, flinkConfig.getString(flinkKey, null));
      hadoopConfig.set(hadoopKey, hadoopValue);
      LOG.debug("Adding Flink config entry for {} as {} to Hadoop config", flinkKey, hadoopKey);
    }
  }
  return hadoopConfig;
}

代码示例来源:origin: apache/flink

/**
 * Builds a Hadoop configuration from Flink config entries whose keys start
 * with one of {@code FLINK_CONFIG_PREFIXES}; keys listed in
 * {@code CONFIG_KEYS_TO_SHADE} get {@code FLINK_SHADING_PREFIX} prepended to
 * their value. Returns an empty Hadoop configuration if no Flink config is set.
 */
@VisibleForTesting
  org.apache.hadoop.conf.Configuration getHadoopConfiguration() {
    final org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
    if (flinkConfig == null) {
      return conf;
    }

    // read all configuration with prefix 'FLINK_CONFIG_PREFIXES'
    for (final String flinkKey : flinkConfig.keySet()) {
      for (final String candidatePrefix : FLINK_CONFIG_PREFIXES) {
        if (!flinkKey.startsWith(candidatePrefix)) {
          continue;
        }
        final String value = flinkConfig.getString(flinkKey, null);
        // Shaded keys get the shading prefix prepended; all others copy verbatim.
        if (CONFIG_KEYS_TO_SHADE.contains(flinkKey)) {
          conf.set(flinkKey, FLINK_SHADING_PREFIX + value);
        } else {
          conf.set(flinkKey, value);
        }

        LOG.debug("Adding Flink config entry for {} as {} to Hadoop config", flinkKey, conf.get(flinkKey));
      }
    }
    return conf;
  }
}

代码示例来源:origin: apache/flink

for (String key : flinkConfig.keySet()) {
  if (key.startsWith(CONFIG_PREFIX)) {
    String value = flinkConfig.getString(key, null);

代码示例来源:origin: apache/flink

finalConf = new org.apache.hadoop.conf.Configuration(hadoopConf);
for (String key : extraUserConf.keySet()) {
  finalConf.set(key, extraUserConf.getString(key, null));

代码示例来源:origin: apache/flink

assertEquals(6, conf.keySet().size());

代码示例来源:origin: apache/flink

/**
 * A DelegatingConfiguration constructed with a null prefix must expose
 * exactly the key set of its backing configuration.
 */
@Test
public void testDelegationConfigurationWithNullPrefix() {
  Configuration backingConf = new Configuration();
  backingConf.setValueInternal("test-key", "value");

  DelegatingConfiguration configuration = new DelegatingConfiguration(
      backingConf, null);

  // JUnit convention is assertEquals(expected, actual); the backing key set
  // is the expectation, the delegating view is what is under test.
  assertEquals(backingConf.keySet(), configuration.keySet());
}

代码示例来源:origin: apache/flink

/**
 * Verifies Configuration#removeConfig: removal by primary key, removal via a
 * deprecated key, and a no-op removal when no key of the option is present.
 */
@Test
  public void testRemove(){
    Configuration cfg = new Configuration();
    cfg.setInteger("a", 1);
    cfg.setInteger("b", 2);

    // Present under its primary key "a".
    ConfigOption<Integer> validOption = ConfigOptions
      .key("a")
      .defaultValue(-1);

    // Present only under the deprecated key "b".
    ConfigOption<Integer> deprecatedOption = ConfigOptions
      .key("c")
      .defaultValue(-1)
      .withDeprecatedKeys("d", "b");

    // No key of this option (primary or deprecated) exists in cfg.
    ConfigOption<Integer> nonExistingOption = ConfigOptions
      .key("e")
      .defaultValue(-1)
      .withDeprecatedKeys("f", "g", "j");

    // JUnit convention is assertEquals(message, expected, actual); the
    // original had expected and actual swapped, which produces misleading
    // failure messages.
    assertEquals("Wrong expectation about size", 2, cfg.keySet().size());
    assertTrue("Expected 'validOption' is removed", cfg.removeConfig(validOption));
    assertEquals("Wrong expectation about size", 1, cfg.keySet().size());
    assertTrue("Expected 'existedOption' is removed", cfg.removeConfig(deprecatedOption));
    assertEquals("Wrong expectation about size", 0, cfg.keySet().size());
    assertFalse("Expected 'unexistedOption' is not removed", cfg.removeConfig(nonExistingOption));
  }
}

代码示例来源:origin: apache/flink

assertEquals(orig.keySet(), copy.keySet());
assertEquals(orig.hashCode(), copy.hashCode());

代码示例来源:origin: org.apache.flink/flink-core

/**
 * Returns the keys of this delegating configuration: if no prefix is set,
 * the backing configuration's key set is returned unchanged; otherwise only
 * backing keys starting with the prefix are returned, with the prefix
 * stripped so keys are relative to this view.
 */
@Override
public Set<String> keySet() {
  // Without a prefix this view is equivalent to the backing config.
  if (this.prefix == null) {
    return this.backingConfig.keySet();
  }
  final HashSet<String> set = new HashSet<>();
  int prefixLen = this.prefix.length();
  for (String key : this.backingConfig.keySet()) {
    if (key.startsWith(prefix)) {
      // Strip the prefix so callers see keys in this view's namespace.
      set.add(key.substring(prefixLen));
    }
  }
  return set;
}

代码示例来源:origin: com.alibaba.blink/flink-core

/**
 * Returns the key set of this delegating view. Without a prefix this is the
 * backing configuration's key set; with a prefix, only matching backing keys
 * are included, each with the prefix removed.
 */
@Override
public Set<String> keySet() {
  if (this.prefix == null) {
    // Nothing to strip: expose the backing keys as-is.
    return this.backingConfig.keySet();
  }

  final HashSet<String> relativeKeys = new HashSet<>();
  for (final String backingKey : this.backingConfig.keySet()) {
    if (!backingKey.startsWith(this.prefix)) {
      continue;
    }
    relativeKeys.add(backingKey.substring(this.prefix.length()));
  }
  return relativeKeys;
}

代码示例来源:origin: dataArtisans/cascading-flink

/**
 * Copies every entry of the given Flink configuration into a new Hadoop
 * configuration.
 *
 * @param flinkConfig the Flink configuration to convert; may be null
 * @return a new Hadoop configuration with the same entries, or null if the
 *         input was null
 */
public static Configuration toHadoopConfig(org.apache.flink.configuration.Configuration flinkConfig) {
  if (flinkConfig == null) {
    return null;
  }
  final Configuration hadoopConfig = new Configuration();
  for (final String configKey : flinkConfig.keySet()) {
    hadoopConfig.set(configKey, flinkConfig.getString(configKey, null));
  }
  return hadoopConfig;
}

代码示例来源:origin: org.apache.flink/flink-runtime_2.11

/**
 * Writes a Flink YAML config file from a Flink Configuration object.
 *
 * <p>The file is written in UTF-8 explicitly rather than the platform
 * default charset, so the output is identical across environments.
 *
 * @param cfg The Flink config
 * @param file The File to write to
 * @throws IOException if the file cannot be created or written
 */
public static void writeConfiguration(Configuration cfg, File file) throws IOException {
  // PrintWriter(File, String) creates the file with the given encoding;
  // UnsupportedEncodingException is an IOException, so the signature is
  // unchanged.
  try (PrintWriter out = new PrintWriter(file, "UTF-8")) {
    for (String key : cfg.keySet()) {
      String value = cfg.getString(key, null);
      out.print(key);
      out.print(": ");
      out.println(value);
    }
  }
}

代码示例来源:origin: com.alibaba.blink/flink-runtime

/**
 * Writes a Flink YAML config file from a Flink Configuration object.
 *
 * <p>Uses an explicit UTF-8 encoding instead of {@code FileWriter}'s
 * platform default charset, so the written file is environment-independent.
 *
 * @param cfg The Flink config
 * @param file The File to write to
 * @throws IOException if the file cannot be created or written
 */
public static void writeConfiguration(Configuration cfg, File file) throws IOException {
  // UnsupportedEncodingException extends IOException, so the declared
  // throws clause still covers this constructor.
  try (PrintWriter out = new PrintWriter(file, "UTF-8"))
  {
    for (String key : cfg.keySet()) {
      String value = cfg.getString(key, null);
      out.print(key);
      out.print(": ");
      out.println(value);
    }
  }
}

代码示例来源:origin: org.apache.flink/flink-runtime

/**
 * Writes a Flink YAML config file from a Flink Configuration object.
 *
 * <p>Encodes the output as UTF-8 explicitly; the original {@code FileWriter}
 * used the platform default charset, which can vary between machines.
 *
 * @param cfg The Flink config
 * @param file The File to write to
 * @throws IOException if the file cannot be created or written
 */
public static void writeConfiguration(Configuration cfg, File file) throws IOException {
  // UnsupportedEncodingException is a subclass of IOException, so the
  // method signature is unchanged.
  try (PrintWriter out = new PrintWriter(file, "UTF-8")) {
    for (String key : cfg.keySet()) {
      String value = cfg.getString(key, null);
      out.print(key);
      out.print(": ");
      out.println(value);
    }
  }
}

代码示例来源:origin: org.apache.flink/flink-runtime_2.10

/**
 * Writes a Flink YAML config file from a Flink Configuration object.
 *
 * <p>Writes UTF-8 explicitly rather than relying on the platform default
 * charset of {@code FileWriter}, for reproducible output.
 *
 * @param cfg The Flink config
 * @param file The File to write to
 * @throws IOException if the file cannot be created or written
 */
public static void writeConfiguration(Configuration cfg, File file) throws IOException {
  // The UnsupportedEncodingException thrown by this constructor is an
  // IOException, so callers are unaffected.
  try (PrintWriter out = new PrintWriter(file, "UTF-8"))
  {
    for (String key : cfg.keySet()) {
      String value = cfg.getString(key, null);
      out.print(key);
      out.print(": ");
      out.println(value);
    }
  }
}

代码示例来源:origin: com.alibaba.blink/flink-runtime

/**
 * Copies every entry of {@code parameters} into the task config as a stub
 * parameter, namespacing each key with the operator ID
 * ({@code "<operatorID>.<key>"}).
 */
public static void setStubParameters(TaskConfig config, OperatorID operatorID, Configuration parameters) {
    for (final String parameterKey : parameters.keySet()) {
      final String value = parameters.getString(parameterKey, null);
      config.setStubParameter(operatorID + "." + parameterKey, value);
    }
  }
}

代码示例来源:origin: org.apache.flink/flink-runtime_2.11

/**
 * Creates a ClusterConfigurationInfo view of the given configuration,
 * replacing values of sensitive keys with a hidden placeholder so secrets
 * are never exposed.
 */
public static ClusterConfigurationInfo from(Configuration config) {
  final ClusterConfigurationInfo clusterConfig =
      new ClusterConfigurationInfo(config.keySet().size());
  for (final String configKey : config.keySet()) {
    final String rawValue = config.getString(configKey, null);
    // Mask key values which contain sensitive information
    final boolean sensitive = rawValue != null && GlobalConfiguration.isSensitive(configKey);
    final String displayValue = sensitive ? GlobalConfiguration.HIDDEN_CONTENT : rawValue;
    clusterConfig.add(new ClusterConfigurationInfoEntry(configKey, displayValue));
  }
  return clusterConfig;
}

代码示例来源:origin: org.apache.flink/flink-runtime

/**
 * Builds a ClusterConfigurationInfo from the given configuration. Values of
 * keys classified as sensitive by GlobalConfiguration are replaced with
 * GlobalConfiguration.HIDDEN_CONTENT before being added.
 */
public static ClusterConfigurationInfo from(Configuration config) {
  // Pre-size the result with the number of config entries.
  ClusterConfigurationInfo clusterConfig = new ClusterConfigurationInfo(config.keySet().size());
  for (String key : config.keySet()) {
    String value = config.getString(key, null);
    // Mask key values which contain sensitive information
    if (value != null && GlobalConfiguration.isSensitive(key)) {
      value = GlobalConfiguration.HIDDEN_CONTENT;
    }
    clusterConfig.add(new ClusterConfigurationInfoEntry(key, value));
  }
  return clusterConfig;
}

代码示例来源:origin: com.alibaba.blink/flink-runtime

/**
 * Converts a Configuration into a ClusterConfigurationInfo, masking the
 * values of sensitive keys with a placeholder.
 */
public static ClusterConfigurationInfo from(Configuration config) {
  final int entryCount = config.keySet().size();
  final ClusterConfigurationInfo clusterConfig = new ClusterConfigurationInfo(entryCount);
  for (final String key : config.keySet()) {
    String shownValue = config.getString(key, null);
    if (shownValue != null && GlobalConfiguration.isSensitive(key)) {
      // Never leak credentials or secrets through the cluster info endpoint.
      shownValue = GlobalConfiguration.HIDDEN_CONTENT;
    }
    clusterConfig.add(new ClusterConfigurationInfoEntry(key, shownValue));
  }
  return clusterConfig;
}

相关文章