Usage of com.google.common.collect.Maps.newTreeMap() with code examples


This article collects a number of code examples for the Java method com.google.common.collect.Maps.newTreeMap() and shows how Maps.newTreeMap() is used in practice. The examples are taken from selected open-source projects found on platforms such as GitHub, Stack Overflow, and Maven, so they should serve as useful references. Details of Maps.newTreeMap() are as follows:
Package path: com.google.common.collect.Maps
Class name: Maps
Method name: newTreeMap

Introduction to Maps.newTreeMap

Creates a mutable, empty TreeMap instance using the natural ordering of its elements.

Note: if mutability is not required, use ImmutableSortedMap#of() instead.

Code Examples

Code example source: opentripplanner/OpenTripPlanner

/**
 * Just an example of fetching status information from the graph updater manager to expose it in a web service.
 * More useful stuff should be added later.
 */
public Map<Integer, String> getUpdaterDescriptions () {
  Map<Integer, String> ret = Maps.newTreeMap();
  int i = 0;
  for (GraphUpdater updater : updaterList) {
    ret.put(i++, updater.toString());
  }
  return ret;
}

Code example source: apache/hive

private Map<String, JsonNode> parseAsJson(byte[] value) throws IOException {
 JsonNode document = mapper.readValue(value, JsonNode.class);
 //Hive Column names are case insensitive.
 Map<String, JsonNode> documentMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
 document.fields().forEachRemaining(field -> documentMap.put(field.getKey().toLowerCase(), field.getValue()));
 return documentMap;
}

Code example source: spotify/helios

private void listThreads() {
 final Set<Thread> threads = Thread.getAllStackTraces().keySet();
 final Map<String, Thread> sorted = Maps.newTreeMap();
 for (final Thread t : threads) {
  final ThreadGroup tg = t.getThreadGroup();
  if (t.isAlive() && (tg == null || !tg.getName().equals("system"))) {
   sorted.put(t.getName(), t);
  }
 }
 log.info("= THREADS " + Strings.repeat("=", 70));
 for (final Thread t : sorted.values()) {
  final ThreadGroup tg = t.getThreadGroup();
  log.info("{}: \"{}\" ({}{})", t.getId(), t.getName(),
    (tg == null ? "" : tg.getName() + " "),
    (t.isDaemon() ? "daemon" : ""));
 }
 log.info(Strings.repeat("=", 80));
}

Code example source: apache/flume

/**
 * Helper function for {@link #coalesceIncrements} to increment a counter
 * value in the passed data structure.
 *
 * @param counters  Nested data structure containing the counters.
 * @param row       Row key to increment.
 * @param family    Column family to increment.
 * @param qualifier Column qualifier to increment.
 * @param count     Amount to increment by.
 */
private void incrementCounter(
  Map<byte[], Map<byte[], NavigableMap<byte[], Long>>> counters,
  byte[] row, byte[] family, byte[] qualifier, Long count) {
 Map<byte[], NavigableMap<byte[], Long>> families = counters.get(row);
 if (families == null) {
  families = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
  counters.put(row, families);
 }
 NavigableMap<byte[], Long> qualifiers = families.get(family);
 if (qualifiers == null) {
  qualifiers = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
  families.put(family, qualifiers);
 }
 Long existingValue = qualifiers.get(qualifier);
 if (existingValue == null) {
  qualifiers.put(qualifier, count);
 } else {
  qualifiers.put(qualifier, existingValue + count);
 }
}
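One detail worth noting in the Flume example above: byte[] does not implement Comparable, so a TreeMap keyed by byte arrays only compiles with the Comparator overload, and HBase's Bytes.BYTES_COMPARATOR supplies a content-based ordering. Below is a minimal sketch of that idea, assuming the HBase client utilities are on the classpath; the keys and counts are illustrative only.

import java.nio.charset.StandardCharsets;
import java.util.NavigableMap;

import com.google.common.collect.Maps;
import org.apache.hadoop.hbase.util.Bytes;

public class ByteKeyCounterSketch {
  public static void main(String[] args) {
    // byte[] has no natural ordering, so Maps.newTreeMap() without a comparator would not compile here.
    NavigableMap<byte[], Long> counters = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    byte[] qualifier = "q1".getBytes(StandardCharsets.UTF_8);
    counters.merge(qualifier, 1L, Long::sum); // same effect as the null-check-and-add in incrementCounter
    counters.merge(qualifier, 2L, Long::sum);
    System.out.println(counters.get(qualifier)); // prints 3
  }
}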

Code example source: Graylog2/graylog2-server

public DateHistogramResult(HistogramAggregation result, String originalQuery, String builtQuery, Searches.DateHistogramInterval interval, long tookMs) {
  super(originalQuery, builtQuery, tookMs);
  this.result = Maps.newTreeMap();
  for (HistogramAggregation.Histogram histogram : result.getBuckets()) {
    final DateTime keyAsDate = new DateTime(histogram.getKey());
    this.result.put(keyAsDate.getMillis() / 1000L, histogram.getCount());
  }
  this.interval = interval;
}

Code example source: SonarSource/sonarqube

private void findAvailableXMLFiles() {
 if (contributingPluginKeyToClassLoader == null) {
  contributingPluginKeyToClassLoader = Maps.newTreeMap();
  // Add default model
  contributingPluginKeyToClassLoader.put(DEFAULT_MODEL, getClass().getClassLoader());
  for (PluginInfo pluginInfo : pluginRepository.getPluginInfos()) {
   String pluginKey = pluginInfo.getKey();
   Plugin plugin = pluginRepository.getPluginInstance(pluginKey);
   ClassLoader classLoader = plugin.getClass().getClassLoader();
   if (classLoader.getResource(getXMLFilePath(pluginKey)) != null) {
    contributingPluginKeyToClassLoader.put(pluginKey, classLoader);
   }
  }
 }
 contributingPluginKeyToClassLoader = Collections.unmodifiableMap(contributingPluginKeyToClassLoader);
}

Code example source: twitter/ambrose

/**
 * Constructs DAGNodes for each Hive MR task
 */
private void createNodeIdToDAGNode() {
 // creates DAGNodes: each node represents a MR job
 nodeIdToDAGNode = Maps.newTreeMap();
 for (Task<MapredWork> task : allTasks) {
  DAGNode<Job> dagNode = asDAGNode(task);
  nodeIdToDAGNode.put(dagNode.getName(), dagNode);
 }
 // get job dependencies
 Map<String, List<String>> nodeIdToDependencies = getNodeIdToDependencies();
 // wire DAGNodes
 for (Map.Entry<String, List<String>> entry : nodeIdToDependencies.entrySet()) {
  String nodeId = entry.getKey();
  List<String> successorIds = entry.getValue();
  DAGNode<Job> dagNode = nodeIdToDAGNode.get(nodeId);
  List<DAGNode<? extends Job>> dagSuccessors = Lists.newArrayListWithCapacity(successorIds.size());
  for (String sId : successorIds) {
   DAGNode<Job> successor = nodeIdToDAGNode.get(sId);
   dagSuccessors.add(successor);
  }
  dagNode.setSuccessors(dagSuccessors);
 }
}

Code example source: apache/kylin

public static Map<String, Integer> getValidEncodings() {
  if (factoryMap == null)
    initFactoryMap();
  Map<String, Integer> result = Maps.newTreeMap();
  for (Pair<String, Integer> p : factoryMap.keySet()) {
    if (result.containsKey(p.getFirst())) {
      if (result.get(p.getFirst()) > p.getSecond()) {
        continue;//skip small versions
      }
    }
    result.put(p.getFirst(), p.getSecond());
  }
  result.put(DictionaryDimEnc.ENCODING_NAME, 1);
  return result;
}

Code example source: Graylog2/graylog2-server

public TermsHistogramResult(@Nullable DateHistogramAggregation result, String originalQuery, String builtQuery, long size, long tookMs, Searches.DateHistogramInterval interval, List<String> fields) {
  super(originalQuery, builtQuery, tookMs);
  this.size = size;
  this.interval = interval;
  this.result = Maps.newTreeMap();
  this.terms = new HashSet<>();
  if (result != null) {
    for (DateHistogramAggregation.DateHistogram histogram : result.getBuckets()) {
      final DateTime keyAsDate = new DateTime(histogram.getKey());
      final TermsAggregation termsAggregation = histogram.getFilterAggregation(Searches.AGG_FILTER).getTermsAggregation(Searches.AGG_TERMS);
      final MissingAggregation missingAgregation = histogram.getMissingAggregation("missing");
      final TermsResult termsResult = new TermsResult(termsAggregation, missingAgregation.getMissing(), histogram.getCount(), "", "", tookMs, fields);
      this.terms.addAll(termsResult.getTerms().keySet());
      this.result.put(keyAsDate.getMillis() / 1000L, termsResult);
    }
  }
}

Code example source: EngineHub/WorldEdit

Map<Property<?>, Object> valueMap = Maps.newTreeMap(Comparator.comparing(Property::getName));
BlockState stateMaker = new BlockState(blockType);
for (int i = 0; i < valueList.size(); i++) {
  Property<?> property = properties.get(i);
  Object value = valueList.get(i);
  valueMap.put(property, value);
  stateMaker.setState(property, value);
}
stateMap.put(valueMap, stateMaker);
stateMap.put(new LinkedHashMap<>(), new BlockState(blockType));

Code example source: Graylog2/graylog2-server

final Map<Long, Map<String, Number>> results = Maps.newTreeMap();
for (HistogramAggregation.Histogram b : histogramAggregation.getBuckets()) {
  final ImmutableMap.Builder<String, Number> resultMap = ImmutableMap.builder();
  results.put(timestamp, resultMap.build());
}

Code example source: java-json-tools/json-schema-validator

@Test
public void onlyRelevantValidatorsAreBuilt()
  throws ProcessingException
{
  final Map<String, JsonNode> digests = Maps.newTreeMap();
  digests.put(K1, JacksonUtils.nodeFactory().nullNode());
  final SchemaDigest digest = new SchemaDigest(null, digests);
  final ProcessingReport report = mock(ProcessingReport.class);
  final ValidatorList context
    = validatorBuilder.process(report, digest);
  final List<KeywordValidator> list = Lists.newArrayList(context);
  assertEquals(list.size(), 1);
  assertSame(list.get(0).getClass(), Keyword1.class);
}

Code example source: voldemort/voldemort

fetchDirs.put(node.getId(), executor.submit(new Callable<String>() {
Map<Node, Response> fetchResponseMap = Maps.newTreeMap();
boolean fetchErrors = false;
  try {
    String response = val.get();
    fetchResponseMap.put(node, new Response(response));
  } catch (Exception e) {
    if (e.getCause() instanceof UnauthorizedStoreException) {
      fetchResponseMap.put(node, new Response(e));
      failedNodes.add(node);

Code example source: java-json-tools/json-schema-validator

@Test
public void allRelevantValidatorsAreBuilt()
  throws ProcessingException
{
  final Map<String, JsonNode> digests = Maps.newTreeMap();
  digests.put(K1, JacksonUtils.nodeFactory().nullNode());
  digests.put(K2, JacksonUtils.nodeFactory().nullNode());
  final SchemaDigest digest = new SchemaDigest(null, digests);
  final ProcessingReport report = mock(ProcessingReport.class);
  final ValidatorList context
    = validatorBuilder.process(report, digest);
  final List<KeywordValidator> list = Lists.newArrayList(context);
  assertEquals(list.size(), 2);
  assertSame(list.get(0).getClass(), Keyword1.class);
  assertSame(list.get(1).getClass(), Keyword2.class);
}

Code example source: java-json-tools/json-schema-validator

@Test
public void challengedConstructorRaisesAnException()
{
  final Map<String, JsonNode> digests = Maps.newTreeMap();
  digests.put(K1, JacksonUtils.nodeFactory().nullNode());
  digests.put(CHALLENGED, JacksonUtils.nodeFactory().nullNode());
  final SchemaDigest digest = new SchemaDigest(null, digests);
  final ProcessingReport report = mock(ProcessingReport.class);
  try {
    validatorBuilder.process(report, digest);
    fail("No exception thrown??");
  } catch (ProcessingException ignored) {
  }
}

Code example source: apache/pulsar

MessageImpl(String topic, BatchMessageIdImpl batchMessageIdImpl, MessageMetadata msgMetadata,
      PulsarApi.SingleMessageMetadata singleMessageMetadata, ByteBuf payload,
      Optional<EncryptionContext> encryptionCtx, ClientCnx cnx, Schema<T> schema, int redeliveryCount) {
  this.msgMetadataBuilder = MessageMetadata.newBuilder(msgMetadata);
  this.messageId = batchMessageIdImpl;
  this.topic = topic;
  this.cnx = cnx;
  this.redeliveryCount = redeliveryCount;
  this.payload = Unpooled.copiedBuffer(payload);
  this.encryptionCtx = encryptionCtx;
  if (singleMessageMetadata.getPropertiesCount() > 0) {
    Map<String, String> properties = Maps.newTreeMap();
    for (KeyValue entry : singleMessageMetadata.getPropertiesList()) {
      properties.put(entry.getKey(), entry.getValue());
    }
    this.properties = Collections.unmodifiableMap(properties);
  } else {
    properties = Collections.emptyMap();
  }
  if (singleMessageMetadata.hasPartitionKey()) {
    msgMetadataBuilder.setPartitionKeyB64Encoded(singleMessageMetadata.getPartitionKeyB64Encoded());
    msgMetadataBuilder.setPartitionKey(singleMessageMetadata.getPartitionKey());
  }
  if (singleMessageMetadata.hasEventTime()) {
    msgMetadataBuilder.setEventTime(singleMessageMetadata.getEventTime());
  }
  this.schema = schema;
}

Code example source: apache/kylin

set = Sets.newHashSet();
    set.add(real);
    models.put(m, set);
    costs.put(m, cost);
  } else {
    set.add(real);
    RealizationCost curCost = costs.get(m);
    if (cost.compareTo(curCost) < 0)
      costs.put(m, cost);
TreeMap<DataModelDesc, Set<IRealization>> result = Maps.newTreeMap(new Comparator<DataModelDesc>() {
  @Override
  public int compare(DataModelDesc o1, DataModelDesc o2) {

Code example source: apache/hive

exprFields.put(r.toString(), RelOptUtil.InputFinder.bits(r));
  allExprsDigests.add(r.toString());
  exprFields.put(r.toString(), RelOptUtil.InputFinder.bits(r));
  allExprsDigests.add(r.toString());
equivalence = Maps.newTreeMap();
equalityPredicates = new HashSet<>();
for (int i = 0; i < nSysFields + nFieldsLeft + nFieldsRight; i++) {

Code example source: voldemort/voldemort

TreeMap<Node, AdminStoreSwapper.Response> toSwap = Maps.newTreeMap();
for(int nodeId = 0; nodeId < NUM_NODES; nodeId++) {
  if(nodeId != 1) {
  versionToNode.put(nodeId,
           adminClient.readonlyOps.getROCurrentVersion(nodeId,
                                 Lists.newArrayList(STORE_NAME))

Code example source: palantir/atlasdb

Map<Integer, MutationMap> mutationMaps = Maps.newTreeMap();
for (long ts : Ordering.natural().immutableSortedCopy(cellVersions.getValue())) {
  if (!mutationMaps.containsKey(mapIndex)) {
    mutationMaps.put(mapIndex, new MutationMap());
  }
}
