Usage of org.apache.gobblin.configuration.State.getProperties() with code examples


This article collects Java code examples of the org.apache.gobblin.configuration.State.getProperties() method and shows how it is used in practice. The examples are drawn from selected open-source projects on platforms such as GitHub, Stack Overflow, and Maven, and should serve as a useful reference. Details of State.getProperties() are as follows:
Package: org.apache.gobblin.configuration.State
Class: State
Method: getProperties

About State.getProperties

Returns a copy of the underlying Properties object.
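
Before the project examples, here is a minimal, self-contained sketch of the method in isolation. It is not taken from any of the projects below: it assumes only gobblin-api on the classpath, and the class name and property key are made up for illustration. Because getProperties() returns a copy, edits to the returned Properties should not flow back into the State.

import java.util.Properties;

import org.apache.gobblin.configuration.State;

public class StateGetPropertiesSketch {
 public static void main(String[] args) {
  State state = new State();
  state.setProp("writer.fs.uri", "hdfs://localhost:8020");

  // getProperties() hands back a copy of the underlying key/value pairs.
  Properties snapshot = state.getProperties();
  System.out.println(snapshot.getProperty("writer.fs.uri")); // hdfs://localhost:8020

  // Mutating the copy leaves the original State untouched (per the description above).
  snapshot.setProperty("writer.fs.uri", "file:///tmp");
  System.out.println(state.getProp("writer.fs.uri"));        // still hdfs://localhost:8020
 }
}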

Code examples

Code example from: apache/incubator-gobblin

/**
 * Compare an existing state and a new {@link State} to ensure that the existing {@link State} contains all entries in the new
 * {@link State}, and update {@link #result} accordingly.
 */
protected void checkExistingIsSuperstate(State existingState, State newState) {
 checkExistingIsSuperset(existingState.getProperties().entrySet(), newState.getProperties().entrySet());
}

Code example from: apache/incubator-gobblin

@Override
public Properties getProperties() {
 Properties props = new Properties();
 props.putAll(this.jobState.getProperties());
 props.putAll(this.workUnit.getProperties());
 props.putAll(super.getProperties());
 return props;
}

Code example from: apache/incubator-gobblin

public State(State otherState) {
 this.commonProperties = otherState.getCommonProperties();
 this.specProperties = new Properties();
 this.specProperties.putAll(otherState.getProperties());
 for (Object key : this.commonProperties.keySet()) {
  if (this.specProperties.containsKey(key) && this.commonProperties.get(key).equals(this.specProperties.get(key))) {
   this.specProperties.remove(key);
  }
 }
}

Code example from: apache/incubator-gobblin

@SuppressWarnings("unchecked")
public T withProps(State props) {
 this.props = new State(props.getProperties());
 return (T) this;
}

Code example from: apache/incubator-gobblin

/**
 * Get an instance of {@link HiveRegister}.
 *
 * @param props A {@link State} object. To get a specific implementation of {@link HiveRegister},
 * specify property {@link #HIVE_REGISTER_TYPE} as the class name. Otherwise, {@link #DEFAULT_HIVE_REGISTER_TYPE}
 * will be returned. This {@link State} object is also used to instantiate the {@link HiveRegister} object.
 */
public static HiveRegister get(State props) {
 Optional<String> metastoreUri =
   Optional.fromNullable(props.getProperties().getProperty(HIVE_METASTORE_URI_KEY));
 return get(props, metastoreUri);
}

Code example from: apache/incubator-gobblin

public static Comparator<Requestor<SimpleDatasetRequest>>  createRequestorComparator(State state) throws IOException {
 TreeMap<Integer, Pattern> tiers = Maps.newTreeMap();
 Matcher matcher;
 for (Map.Entry<Object, Object> entry : state.getProperties().entrySet()) {
  if (entry.getKey() instanceof String && entry.getValue() instanceof String
    && (matcher = TIER_PATTERN.matcher((String) entry.getKey())).matches()) {
   int tier = Integer.parseInt(matcher.group(1));
   String regex = (String)entry.getValue();
   tiers.put(tier, Pattern.compile(regex));
  }
 }
 return new SimpleDatasetHierarchicalPrioritizer.TierComparator(tiers);
}

Code example from: apache/incubator-gobblin

private void setTopicsFromConfigStore(State state) {
 Set<String> blacklistTopicsFromConfigStore = new HashSet<>();
 Set<String> whitelistTopicsFromConfigStore = new HashSet<>();
 ConfigStoreUtils.setTopicsFromConfigStore(state.getProperties(), blacklistTopicsFromConfigStore,
   whitelistTopicsFromConfigStore, MRCompactor.COMPACTION_BLACKLIST, MRCompactor.COMPACTION_WHITELIST);
 this.blacklist.addAll(DatasetFilterUtils.getPatternsFromStrings(new ArrayList<>(blacklistTopicsFromConfigStore)));
 this.whitelist.addAll(DatasetFilterUtils.getPatternsFromStrings(new ArrayList<>(whitelistTopicsFromConfigStore)));
}

Code example from: apache/incubator-gobblin

private RecordWriter getWriter() throws IOException {
 try {
  HiveOutputFormat<?, ?> outputFormat = HiveOutputFormat.class
    .cast(Class.forName(this.properties.getProp(HiveWritableHdfsDataWriterBuilder.WRITER_OUTPUT_FORMAT_CLASS))
      .newInstance());
  @SuppressWarnings("unchecked")
  Class<? extends Writable> writableClass = (Class<? extends Writable>) Class
    .forName(this.properties.getProp(HiveWritableHdfsDataWriterBuilder.WRITER_WRITABLE_CLASS));
  return outputFormat.getHiveRecordWriter(new JobConf(), this.stagingFile, writableClass, true,
    this.properties.getProperties(), null);
 } catch (Throwable t) {
  throw new IOException(String.format("Failed to create writer"), t);
 }
}

Code example from: apache/incubator-gobblin

public static HiveUnitUpdateProvider create(State state) {
 try {
  return (HiveUnitUpdateProvider) GobblinConstructorUtils.invokeFirstConstructor(Class.forName(state.getProp(
    OPTIONAL_HIVE_UNIT_UPDATE_PROVIDER_CLASS_KEY, DEFAULT_HIVE_UNIT_UPDATE_PROVIDER_CLASS)),
    ImmutableList.<Object>of(getFileSystem(state.getProperties())), ImmutableList.of());
 } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException
   | ClassNotFoundException | IOException e) {
  throw new RuntimeException(e);
 }
}

Code example from: apache/incubator-gobblin

/**
 * @param state
 * @throws IllegalArgumentException If job commit policy is not COMMIT_ON_FULL_SUCCESS or is not on PUBLISH_DATA_AT_JOB_LEVEL
 */
private void validate(State state) {
 JobCommitPolicy jobCommitPolicy = JobCommitPolicy.getCommitPolicy(this.getState().getProperties());
 if (JobCommitPolicy.COMMIT_ON_FULL_SUCCESS != jobCommitPolicy) {
  throw new IllegalArgumentException(this.getClass().getSimpleName()
    + " won't publish as already commited by task. Job commit policy " + jobCommitPolicy);
 }
 if (!state.getPropAsBoolean(ConfigurationKeys.PUBLISH_DATA_AT_JOB_LEVEL,
   ConfigurationKeys.DEFAULT_PUBLISH_DATA_AT_JOB_LEVEL)) {
  throw new IllegalArgumentException(this.getClass().getSimpleName() + " won't publish as "
    + ConfigurationKeys.PUBLISH_DATA_AT_JOB_LEVEL + " is set as false");
 }
}

Code example from: apache/incubator-gobblin

// Excerpt from a concurrency test: the enclosing call that creates this anonymous
// Runnable is truncated in the original extract. Each worker repeatedly reads
// properties from a shared State and collects any exceptions thrown.
@Override
 public void run() {
  for (int i = 0; i < 1000; i++) {
   try {
    state.getProperties().get(Integer.toString(i));
   } catch (Throwable t) {
    exceptions.add(t);
   }
  }
 }
});

Code example from: apache/incubator-gobblin

private GobblinMetrics initializeMetrics() {
 ImmutableList.Builder<Tag<?>> tags = ImmutableList.builder();
 tags.addAll(this.tags);
 tags.addAll(Tag.fromMap(ClusterNameTags.getClusterNameTags()));
 GobblinMetrics gobblinMetrics =
   GobblinMetrics.get(this.state.getProp(ConfigurationKeys.JOB_NAME_KEY), null, tags.build());
 gobblinMetrics.startMetricReporting(this.state.getProperties());
 return gobblinMetrics;
}

Code example from: apache/incubator-gobblin

AsyncHttpWriterBuilder<D, RQ, RP> fromState(State state) {
 if (!(state instanceof WorkUnitState)) {
  throw new IllegalStateException(String.format("AsyncHttpWriterBuilder requires a %s on construction.", WorkUnitState.class.getSimpleName()));
 }
 this.state = (WorkUnitState) state;
 this.metricContext = Instrumented.getMetricContext(this.state, AsyncHttpWriter.class);
 this.broker = this.state.getTaskBroker();
 Config config = ConfigBuilder.create().loadProps(state.getProperties(), CONF_PREFIX).build();
 config = config.withFallback(FALLBACK);
 this.maxOutstandingWrites = config.getInt(MAX_OUTSTANDING_WRITES);
 this.maxAttempts = config.getInt(MAX_ATTEMPTS);
 return fromConfig(config);
}

Code example from: apache/incubator-gobblin

/**
 * Whether data should be committed by the job (as opposed to being commited by the tasks).
 * Data should be committed by the job if either {@link ConfigurationKeys#JOB_COMMIT_POLICY_KEY} is set to "full",
 * or {@link ConfigurationKeys#PUBLISH_DATA_AT_JOB_LEVEL} is set to true.
 */
private static boolean shouldCommitDataInJob(State state) {
 boolean jobCommitPolicyIsFull =
   JobCommitPolicy.getCommitPolicy(state.getProperties()) == JobCommitPolicy.COMMIT_ON_FULL_SUCCESS;
 boolean publishDataAtJobLevel = state.getPropAsBoolean(ConfigurationKeys.PUBLISH_DATA_AT_JOB_LEVEL,
   ConfigurationKeys.DEFAULT_PUBLISH_DATA_AT_JOB_LEVEL);
 boolean jobDataPublisherSpecified =
   !Strings.isNullOrEmpty(state.getProp(ConfigurationKeys.JOB_DATA_PUBLISHER_TYPE));
 return jobCommitPolicyIsFull || publishDataAtJobLevel || jobDataPublisherSpecified;
}

Code example from: apache/incubator-gobblin

private void addStateToWorkunit(State s, WorkUnitState wuState) {
 for (Map.Entry<Object, Object> prop : s.getProperties().entrySet()) {
  wuState.setProp((String) prop.getKey(), prop.getValue());
 }
}

Code example from: apache/incubator-gobblin

@Override
 public DataWriter<QueryBasedHiveConversionEntity> build() throws IOException {
  try {
   return new HiveQueryExecutionWriter(HiveJdbcConnector.newConnectorWithProps(this.destination.getProperties().getProperties()),
     this.destination.getProperties());
  } catch (SQLException e) {
   throw new RuntimeException(e);
  }
 }
}

Code example from: apache/incubator-gobblin

private HiveDataset getHiveDataset(String tableString, FileSystem fs, State state) throws IOException {
 try {
  HiveMetastoreClientPool pool = HiveMetastoreClientPool.get(state.getProperties(),
    Optional.fromNullable(state.getProp(HIVE_METASTORE_URI_KEY)));
  List<String> tokens = Splitter.on(".").splitToList(tableString);
  DbAndTable sourceDbAndTable = new DbAndTable(tokens.get(0), tokens.get(1));
  try (AutoReturnableObject<IMetaStoreClient> client = pool.getClient()) {
   Table sourceTable = new Table(client.get().getTable(sourceDbAndTable.getDb(), sourceDbAndTable.getTable()));
   return new HiveDataset(fs, pool, sourceTable, ConfigUtils.propertiesToConfig(state.getProperties()));
  }
 } catch (TException exc) {
  throw new RuntimeException(exc);
 }
}

Code example from: apache/incubator-gobblin

public B fromState(State state) {
 this.state = state;
 Config config = ConfigBuilder.create().loadProps(state.getProperties(), CONF_PREFIX).build();
 fromConfig(config);
 return typedSelf();
}

Code example from: apache/incubator-gobblin

public HiveBaseExtractor(WorkUnitState state) throws IOException {
 if (Boolean.valueOf(state.getPropAsBoolean(PartitionLevelWatermarker.IS_WATERMARK_WORKUNIT_KEY))) {
  return;
 }
 this.hiveWorkUnit = new HiveWorkUnit(state.getWorkunit());
 this.hiveDataset = hiveWorkUnit.getHiveDataset();
 this.dbName = hiveDataset.getDbAndTable().getDb();
 this.tableName = hiveDataset.getDbAndTable().getTable();
 this.pool = HiveMetastoreClientPool.get(state.getJobState().getProperties(),
   Optional.fromNullable(state.getJobState().getProp(HiveDatasetFinder.HIVE_METASTORE_URI_KEY)));
}

Code example from: apache/incubator-gobblin

public HiveMaterializerQueryGenerator(WorkUnitState workUnitState) throws IOException {
 this.fs = HiveSource.getSourceFs(workUnitState);
 this.pool = HiveMetastoreClientPool.get(workUnitState.getJobState().getProperties(),
   Optional.fromNullable(workUnitState.getJobState().getProp(HiveDatasetFinder.HIVE_METASTORE_URI_KEY)));
 this.workUnitState = workUnitState;
 this.workUnit = new HiveWorkUnit(workUnitState.getWorkunit());
 this.outputTableMetadata = HiveMaterializer.parseStageableTableMetadata(this.workUnit);
 this.outputDatabaseName = outputTableMetadata.getDestinationDbName();
 this.outputTableName = outputTableMetadata.getDestinationTableName();
 this.outputDataLocation = HiveConverterUtils.getOutputDataLocation(outputTableMetadata.getDestinationDataPath());
 this.destinationTableMeta = HiveConverterUtils.getDestinationTableMeta(this.outputTableMetadata.getDestinationDbName(),
   this.outputTableMetadata.getDestinationTableName(), workUnitState.getProperties()).getLeft();
 this.stagingTableName = HiveConverterUtils.getStagingTableName(this.outputTableMetadata.getDestinationStagingTableName());
 this.stagingDataLocation = HiveConverterUtils.getStagingDataLocation(this.outputTableMetadata.getDestinationDataPath(), this.stagingTableName);
}
