本文整理了 Java 中 scala.collection.mutable.Map 类的一些代码示例,展示了 Map 类的具体用法。这些代码示例主要来源于 Github/Stackoverflow/Maven 等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Map 类的具体详情如下:
包路径:scala.collection.mutable.Map
类名称:Map
类说明:暂无
代码示例来源:origin: goldmansachs/gs-collections
@Benchmark
public void get()
{
    // Copy hot fields into locals so the benchmark loop does not re-read `this`.
    int count = this.size;
    String[] keys = this.elements;
    Map<String, String> map = this.scalaMap;
    for (int index = 0; index < count; index++)
    {
        // Every key inserted during setup must be resolvable; a miss is a harness bug.
        boolean present = map.get(keys[index]).isDefined();
        if (!present)
        {
            throw new AssertionError(index);
        }
    }
}
}
代码示例来源:origin: twosigma/beakerx
@Override
public Object deserialize(JsonNode n, ObjectMapper mapper) {
    // Converts a JSON object node into an immutable Scala Map<String, Object>,
    // deserializing each field value through the parent deserializer.
    // Returns null when any field fails to deserialize (original contract kept).
    HashMap<String, Object> values = new HashMap<String, Object>();
    try {
        logger.debug("using custom map deserializer");
        Iterator<Map.Entry<String, JsonNode>> fields = n.fields();
        while (fields.hasNext()) {
            Map.Entry<String, JsonNode> field = fields.next();
            values.put(field.getKey(), parent.deserialize(field.getValue(), mapper));
        }
    } catch (Exception e) {
        // Log the throwable itself (not just getMessage()) so the stack trace
        // is preserved for diagnosis.
        logger.error("exception deserializing Map", e);
        return null;
    }
    return JavaConverters.mapAsScalaMapConverter(values).asScala().toMap(Predef.<Tuple2<String, Object>>conforms());
}
代码示例来源:origin: linkedin/cruise-control
// NOTE(review): torn excerpt — brokersByRack, newReplicaAssignment, pm,
// newAssignedReplica and replicationFactor are declared outside this view,
// and the loop/if bodies are cut off; not compilable standalone.
Map<Integer, String> rackByBroker = new HashMap<>();
// Walk broker metadata from ZooKeeper; Enforced rack-aware mode presumably
// requires every broker to carry rack info — TODO confirm in full source.
for (BrokerMetadata bm :
JavaConversions.seqAsJavaList(AdminUtils.getBrokerMetadatas(zkUtils, RackAwareMode.Enforced$.MODULE$, Option.empty()))) {
// Brokers without a rack fall back to their own broker id as a pseudo-rack.
String rack = bm.rack().isEmpty() ? String.valueOf(bm.id()) : bm.rack().get();
brokersByRack.putIfAbsent(rack, new ArrayList<>());
brokersByRack.get(rack).add(bm.id());
newReplicaAssignment.put(new TopicAndPartition(topic, pm.partition()),
JavaConverters.asScalaIteratorConverter(newAssignedReplica.iterator()).asScala().toSeq());
// Only write the reassignment back to ZK when there is something to apply.
if (newReplicaAssignment.nonEmpty()) {
zkUtils.updatePartitionReassignmentData(newReplicaAssignment);
LOG.info("The replication factor of Kafka topic " + topic + " has increased to " + replicationFactor + ".");
代码示例来源:origin: uber/chaperone
// NOTE(review): torn excerpt — the enclosing method and the catch clause for
// this try are outside this view.
try {
// Option.get() throws if the topic has no assignment; presumably the
// enclosing catch treats that as non-fatal — TODO confirm in full source.
scala.collection.Map<Object, Seq<Object>> partitionsMap =
partitionAssignmentForTopics.get(topic).get();
// Cache the topic's partition count under its name.
TopicPartition tp = new TopicPartition(topic, partitionsMap.size());
_topicPartitionInfoMap.put(topic, tp);
代码示例来源:origin: uber/uReplicator
/**
 * Refreshes the cached partition count for an existing topic from Kafka's
 * ZooKeeper metadata. No-op (with a log line) when the topic has no
 * partition assignment.
 *
 * @param topic the Kafka topic whose cached TopicPartition entry to update
 */
public void tryUpdateTopic(String topic) {
    scala.collection.mutable.Map<String, scala.collection.Map<Object, Seq<Object>>> partitionAssignmentForTopics =
        _zkUtils.getPartitionAssignmentForTopics(JavaConversions.asScalaBuffer(ImmutableList.of(topic)));
    // Look the topic up once. The original called get(topic) three separate
    // times, so the emptiness check and the later use could disagree.
    scala.Option<scala.collection.Map<Object, Seq<Object>>> assignment =
        partitionAssignmentForTopics.get(topic);
    if (assignment.isEmpty() || assignment.get().size() == 0) {
        LOGGER.info("try to update for topic {} but found no topic partition for it", topic);
        return;
    }
    synchronized (_lock) {
        LOGGER.info("starting to refresh for update topic {}", topic);
        try {
            _topicPartitionInfoMap.put(topic, new TopicPartition(topic, assignment.get().size()));
        } catch (Exception e) {
            // No placeholder for the throwable: SLF4J then logs the full stack
            // trace instead of formatting e into the message.
            LOGGER.warn("Failed to get topicPartition info for {} from kafka zk", topic, e);
        }
        LOGGER.info("finished refreshing for updating topic {}", topic);
    }
}
代码示例来源:origin: org.locationtech.geomesa/geomesa-accumulo-jobs
/**
 * Configures a Hadoop job for the GeoMesa Accumulo input format.
 *
 * @param job             the Hadoop job to configure
 * @param dataStoreParams data-store connection parameters
 * @param featureTypeName simple feature type name to read
 * @param filter          optional CQL filter (nullable)
 * @param transform       optional transform expressions (nullable)
 * @deprecated retained for source compatibility — see the current
 *             GeoMesaAccumuloInputFormat API for the replacement.
 */
@Deprecated
public static void configure(Job job,
                             Map<String, String> dataStoreParams,
                             String featureTypeName,
                             String filter,
                             String[] transform) {
    // asScala() already yields a scala.collection.mutable.Map<String, String>,
    // so the original Object round-trip and unchecked cast were unnecessary.
    scala.collection.immutable.Map<String, String> scalaParams =
        JavaConverters.mapAsScalaMapConverter(dataStoreParams).asScala()
            .toMap(Predef.<Tuple2<String, String>>conforms());
    // Option.apply maps null to None, so null filter/transform are accepted.
    Option<String> f = Option.apply(filter);
    Option<String[]> t = Option.apply(transform);
    GeoMesaAccumuloInputFormat$.MODULE$.configure(job, scalaParams, featureTypeName, f, t);
}
}
代码示例来源:origin: com.typesafe.play/play_2.10
/**
 * Converts a Java Map to Scala.
 *
 * @param javaMap the java map
 * @param <A> key type
 * @param <B> value type
 * @return the scala map.
 */
public static <A,B> scala.collection.immutable.Map<A,B> asScala(Map<A,B> javaMap) {
    // Go through a Seq of key/value tuples so Play's Conversions helper can
    // build its immutable map implementation.
    scala.collection.Seq<scala.Tuple2<A, B>> entries =
        scala.collection.JavaConverters.mapAsScalaMapConverter(javaMap).asScala().toSeq();
    return play.utils.Conversions.newMap(entries);
}
代码示例来源:origin: goldmansachs/gs-collections
@Setup
public void setUp()
{
    // Fixed seed keeps the generated key set identical across benchmark runs.
    Random random = new Random(123456789012345L);
    this.elements = new String[this.size];
    this.scalaAnyRefMap = new AnyRefMap<>(this.size);
    int i = 0;
    while (i < this.size)
    {
        // Random numeric-only strings of RANDOM_COUNT characters.
        String key = RandomStringUtils.random(RANDOM_COUNT, 0, 0, false, true, null, random);
        this.elements[i] = key;
        this.scalaAnyRefMap.put(key, "dummy");
        i++;
    }
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-10_2.11
// NOTE(review): torn excerpt — the offsets map declaration and the body of
// the AbstractFunction1 are outside this view.
offsets.put(tp1, 23L);
// Convert the Java offset map to a Scala Map, adapting each Long value
// through a Scala Function1 (presumably boxing to Object — TODO confirm).
final scala.collection.Map<TopicPartition, Object> sOffsets =
JavaConverters.mapAsScalaMapConverter(offsets).asScala().mapValues(
new scala.runtime.AbstractFunction1<Long, Object>() {
@Override
代码示例来源:origin: uber/chaperone
@Override
// ZooKeeper child-change callback: reconciles the cached topic/partition map
// with the current children of the watched path. Fast path is a full cache
// refresh; otherwise topics are added/removed incrementally under _lock.
public void handleChildChange(String parentPath, List<String> currentChilds)
throws Exception {
if (!tryToRefreshCache()) {
synchronized (_lock) {
// Topics present in ZK but not yet served locally.
Set<String> newAddedTopics = new HashSet<String>(currentChilds);
Set<String> currentServingTopics = getAllTopics();
newAddedTopics.removeAll(currentServingTopics);
// Evict topics that disappeared from ZK.
for (String existedTopic : currentServingTopics) {
if (!currentChilds.contains(existedTopic)) {
_topicPartitionInfoMap.remove(existedTopic);
}
}
// Bulk-fetch partition assignments for all newly added topics in one ZK call.
scala.collection.mutable.Map<String, scala.collection.Map<Object, Seq<Object>>> partitionAssignmentForTopics =
_zkUtils.getPartitionAssignmentForTopics(
JavaConversions.asScalaBuffer(ImmutableList.copyOf(newAddedTopics)));
for (String topic : newAddedTopics) {
try {
// Option.get() throws when the topic has no assignment; treated as
// non-fatal — the topic is simply skipped this round.
scala.collection.Map<Object, Seq<Object>> partitionsMap =
partitionAssignmentForTopics.get(topic).get();
TopicPartition tp = new TopicPartition(topic, partitionsMap.size());
_topicPartitionInfoMap.put(topic, tp);
} catch (Exception e) {
LOGGER.warn("Failed to get topicPartition info for {} from kafka zk: {}", topic, e);
}
}
// Counter is adjusted by delta so it ends up equal to the current topic count.
_kafkaTopicsCounter.inc(_topicPartitionInfoMap.size() - _kafkaTopicsCounter.getCount());
}
}
}
代码示例来源:origin: uber/uReplicator
/**
 * Adds a topic to the cached topic/partition map if it is not already served.
 * No-op (with a log line) when the topic has no partition assignment in ZK.
 *
 * @param topic the Kafka topic to start serving
 */
private void tryAddTopic(String topic) {
    scala.collection.mutable.Map<String, scala.collection.Map<Object, Seq<Object>>> partitionAssignmentForTopics =
        _zkUtils.getPartitionAssignmentForTopics(JavaConversions.asScalaBuffer(ImmutableList.of(topic)));
    // Look the topic up once. The original called get(topic) three separate
    // times, so the emptiness check and the later use could disagree.
    scala.Option<scala.collection.Map<Object, Seq<Object>>> assignment =
        partitionAssignmentForTopics.get(topic);
    if (assignment.isEmpty() || assignment.get().size() == 0) {
        LOGGER.info("try to refresh for topic {} but found no topic partition for it", topic);
        return;
    }
    synchronized (_lock) {
        LOGGER.info("starting to refresh for adding topic {}", topic);
        if (!getAllTopics().contains(topic)) {
            try {
                _topicPartitionInfoMap.put(topic, new TopicPartition(topic, assignment.get().size()));
            } catch (Exception e) {
                // No placeholder for the throwable: SLF4J then logs the full
                // stack trace instead of formatting e into the message.
                LOGGER.warn("Failed to get topicPartition info for {} from kafka zk", topic, e);
            }
        }
        LOGGER.info("finished refreshing for adding topic {}", topic);
    }
}
代码示例来源:origin: com.typesafe.play/play_2.11
/**
 * Converts a Java Map to Scala.
 *
 * @param javaMap the java map
 * @param <K> key type
 * @param <V> value type
 * @return the scala map.
 */
public static <K, V> scala.collection.immutable.Map<K, V> asScala(Map<K, V> javaMap) {
    // Go through a Seq of key/value tuples so Play's Conversions helper can
    // build its immutable map implementation.
    scala.collection.Seq<scala.Tuple2<K, V>> entries =
        scala.collection.JavaConverters.mapAsScalaMapConverter(javaMap).asScala().toSeq();
    return play.utils.Conversions.newMap(entries);
}
代码示例来源:origin: goldmansachs/gs-collections
@Setup
public void setUp()
{
    // Fixed seed keeps the generated key set identical across benchmark runs.
    Random random = new Random(123456789012345L);
    this.elements = new String[this.size];
    this.scalaMap = new PresizableHashMap<>(this.size);
    int i = 0;
    while (i < this.size)
    {
        // Random numeric-only strings of RANDOM_COUNT characters.
        String key = RandomStringUtils.random(RANDOM_COUNT, 0, 0, false, true, null, random);
        this.elements[i] = key;
        this.scalaMap.put(key, "dummy");
        i++;
    }
}
代码示例来源:origin: org.apache.spark/spark-streaming-kafka-0-10
// NOTE(review): torn excerpt — the offsets map declaration and the body of
// the AbstractFunction1 are outside this view.
offsets.put(tp1, 23L);
// Convert the Java offset map to a Scala Map, adapting each Long value
// through a Scala Function1 (presumably boxing to Object — TODO confirm).
final scala.collection.Map<TopicPartition, Object> sOffsets =
JavaConverters.mapAsScalaMapConverter(offsets).asScala().mapValues(
new scala.runtime.AbstractFunction1<Long, Object>() {
@Override
代码示例来源:origin: guru.nidi/text-transform
/**
 * Looks up an attribute on the segment.
 *
 * @param segment the segment whose attributes to inspect
 * @param name    the attribute key
 * @return the attribute's value, or null when the attribute is absent
 */
public static Object getAttribute(Segment segment, Attribute name) {
    Option<Object> value = segment.attributes().get(name);
    // An Option is defined exactly when it is a Some, so this matches the
    // original instanceof check.
    if (value.isDefined()) {
        return value.get();
    }
    return null;
}
代码示例来源:origin: uber/uReplicator
/**
 * Adds a topic to the cached topic/partition map if it is not already served.
 * No-op (with a log line) when the topic has no partition assignment in ZK.
 *
 * @param topic the Kafka topic to start serving
 */
private void tryAddTopic(String topic) {
    scala.collection.mutable.Map<String, scala.collection.Map<Object, Seq<Object>>> partitionAssignmentForTopics =
        _zkUtils.getPartitionAssignmentForTopics(JavaConversions.asScalaBuffer(ImmutableList.of(topic)));
    // Look the topic up once. The original called get(topic) three separate
    // times, so the emptiness check and the later use could disagree.
    scala.Option<scala.collection.Map<Object, Seq<Object>>> assignment =
        partitionAssignmentForTopics.get(topic);
    if (assignment.isEmpty() || assignment.get().size() == 0) {
        LOGGER.info("try to refresh for topic {} but found no topic partition for it", topic);
        return;
    }
    synchronized (_lock) {
        LOGGER.info("starting to refresh for adding topic {}", topic);
        if (!getAllTopics().contains(topic)) {
            try {
                _topicPartitionInfoMap.put(topic, new TopicPartition(topic, assignment.get().size()));
            } catch (Exception e) {
                // No placeholder for the throwable: SLF4J then logs the full
                // stack trace instead of formatting e into the message.
                LOGGER.warn("Failed to get topicPartition info for {} from kafka zk", topic, e);
            }
        }
        LOGGER.info("finished refreshing for adding topic {}", topic);
    }
}
代码示例来源:origin: goldmansachs/gs-collections
@Benchmark
public void get()
{
    // Copy hot fields into locals so the benchmark loop does not re-read `this`.
    int count = this.size;
    String[] keys = this.elements;
    Map<String, String> map = this.scalaAnyRefMap;
    for (int index = 0; index < count; index++)
    {
        // Every key inserted during setup must be resolvable; a miss is a harness bug.
        boolean present = map.get(keys[index]).isDefined();
        if (!present)
        {
            throw new AssertionError(index);
        }
    }
}
}
代码示例来源:origin: com.typesafe.play/play_2.12
/**
* Converts a Java Map to Scala.
*
* @param javaMap the java map
* @param <K> key type
* @param <V> value type
* @return the scala map.
*/
public static <K, V> scala.collection.immutable.Map<K, V> asScala(Map<K, V> javaMap) {
return play.utils.Conversions.newMap(
scala.collection.JavaConverters.mapAsScalaMapConverter(javaMap).asScala().toSeq()
);
}
代码示例来源:origin: twosigma/beakerx
@SuppressWarnings("unchecked")
@Override
public Object deserialize(JsonNode n, ObjectMapper mapper) {
    // Dispatch on the table-display subtype and convert the payload to the
    // matching Scala collection; unknown subtypes pass through unchanged.
    org.apache.commons.lang3.tuple.Pair<String, Object> deserializeObject =
        TableDisplayDeSerializer.getDeserializeObject(parent, n, mapper);
    String subtype = deserializeObject.getLeft();
    Object payload = deserializeObject.getRight();
    // Constant-first equals() subsumes the original null check on subtype.
    if (TableDisplay.DICTIONARY_SUBTYPE.equals(subtype)) {
        // A single dictionary becomes an immutable Scala Map.
        return JavaConverters.mapAsScalaMapConverter((Map<String, Object>) payload).asScala()
            .toMap(Predef.<Tuple2<String, Object>>conforms());
    }
    if (TableDisplay.LIST_OF_MAPS_SUBTYPE.equals(subtype)) {
        // Each row map becomes an immutable Scala Map; the rows are exposed
        // as a Scala Iterable.
        List<Map<String, Object>> rows = (List<Map<String, Object>>) payload;
        List<Object> converted = new ArrayList<Object>();
        for (Map<String, Object> row : rows) {
            converted.add(JavaConverters.mapAsScalaMapConverter(row).asScala()
                .toMap(Predef.<Tuple2<String, Object>>conforms()));
        }
        return scala.collection.JavaConversions.collectionAsScalaIterable(converted);
    }
    if (TableDisplay.MATRIX_SUBTYPE.equals(subtype)) {
        // A matrix becomes a Scala List of Scala Lists.
        List<List<?>> matrix = (List<List<?>>) payload;
        ArrayList<Object> scalaRows = new ArrayList<Object>();
        for (List<?> row : matrix) {
            scalaRows.add(scala.collection.JavaConversions.asScalaBuffer(row).toList());
        }
        return scala.collection.JavaConversions.asScalaBuffer(scalaRows).toList();
    }
    return payload;
}
代码示例来源:origin: goldmansachs/gs-collections
@Benchmark
public Map<String, String> scalaAnyRef()
{
    // Copy hot fields into locals so the benchmark loop does not re-read `this`.
    int count = this.size;
    String[] keys = this.elements;
    // Pre-size the map only when the benchmark parameter requests it.
    Map<String, String> map = this.isPresized ? new AnyRefMap<>(count) : new AnyRefMap<>();
    for (int index = 0; index < count; index++)
    {
        map.put(keys[index], "dummy");
    }
    return map;
}
}
内容来源于网络,如有侵权,请联系作者删除!