本文整理了Java中java.util.LinkedHashMap.get()
方法的一些代码示例,展示了LinkedHashMap.get()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。LinkedHashMap.get()
方法的具体详情如下:
包路径:java.util.LinkedHashMap
类名称:LinkedHashMap
方法名:get
[英]Returns the value of the mapping with the specified key.
[中]返回具有指定键的映射的值。
代码示例来源:origin: ch.qos.logback/logback-classic
/**
 * Returns how many times {@code msg} has been seen before and records this
 * occurrence: 0 on first sight, 1 on the second, and so on.
 */
int getMessageCountAndThenIncrement(String msg) {
    // Null messages are never tracked.
    if (msg == null) {
        return 0;
    }
    int count;
    // LinkedHashMap is not thread-safe; serialize access. See also LBCLASSIC-255.
    // (NOTE(review): the original comment here was garbled in this excerpt.)
    synchronized (this) {
        Integer previous = super.get(msg);
        count = (previous == null) ? 0 : previous + 1;
        super.put(msg, count);
    }
    return count;
}
代码示例来源:origin: lipangit/JiaoZiVideoPlayer
/**
 * Returns the value stored at the given insertion position of {@code urlsMap},
 * or {@code null} when the index is out of range.
 */
public Object getValueFromLinkedMap(int index) {
    int cursor = 0;
    // LinkedHashMap iterates keys in insertion order, so the index-th key
    // visited is the one we want.
    for (Object key : urlsMap.keySet()) {
        if (cursor++ == index) {
            return urlsMap.get(key);
        }
    }
    return null;
}
代码示例来源:origin: jeasonlzy/okhttp-OkGo
/**
 * Appends {@code value} under {@code key} in {@code urlParamsMap}; when
 * {@code isReplace} is true, any previous values for the key are discarded
 * first. Null keys or values are silently ignored.
 */
private void put(String key, String value, boolean isReplace) {
    if (key == null || value == null) {
        return;
    }
    List<String> values = urlParamsMap.get(key);
    if (values == null) {
        // First value for this key: create and register the bucket.
        values = new ArrayList<>();
        urlParamsMap.put(key, values);
    }
    if (isReplace) {
        values.clear();
    }
    values.add(value);
}
代码示例来源:origin: apache/kafka
// NOTE(review): truncated excerpt — braces do not balance and `removed`,
// `added`, `next` and the enclosing method are outside this view; presumably
// this diffs the fetch-session partition state against the next request
// (Kafka FetchSessionHandler) — TODO confirm against the full source.
List<TopicPartition> altered = new ArrayList<>();
// Walk current session partitions and compare each against `next`.
for (Iterator<Entry<TopicPartition, PartitionData>> iter =
sessionPartitions.entrySet().iterator(); iter.hasNext(); ) {
Entry<TopicPartition, PartitionData> entry = iter.next();
TopicPartition topicPartition = entry.getKey();
PartitionData prevData = entry.getValue();
// Requested data for this partition in the upcoming fetch, if any.
PartitionData nextData = next.get(topicPartition);
if (nextData != null) {
if (prevData.equals(nextData)) {
next.put(topicPartition, nextData);
entry.setValue(nextData);
altered.add(topicPartition);
removed.add(topicPartition);
// Second pass: entries present in `next` (fragment; loop body incomplete).
for (Entry<TopicPartition, PartitionData> entry : next.entrySet()) {
TopicPartition topicPartition = entry.getKey();
PartitionData nextData = entry.getValue();
if (sessionPartitions.containsKey(topicPartition)) {
sessionPartitions.put(topicPartition, nextData);
added.add(topicPartition);
partitionsToLogString(sessionPartitions.keySet()));
// Snapshot of `next` exposed to the caller as an unmodifiable map.
Map<TopicPartition, PartitionData> toSend =
Collections.unmodifiableMap(new LinkedHashMap<>(next));
代码示例来源:origin: zendesk/maxwell
/**
 * Lazily builds and memoizes this row's identity from its primary-key
 * columns' current values.
 */
public RowIdentity getRowIdentity() {
    if (rowIdentity == null) {
        // Pair each primary-key column with its value, preserving pk order.
        List<Pair<String, Object>> pkValues = new ArrayList<>(pkColumns.size());
        for (String column : pkColumns) {
            pkValues.add(Pair.of(column, data.get(column)));
        }
        rowIdentity = new RowIdentity(database, table, pkValues);
    }
    return rowIdentity;
}
代码示例来源:origin: sannies/mp4parser
/**
 * Rewrites every {@code backgroundImage} attribute in the TTML document to a
 * generated internal name ({@code urn:mp4parser:<n><ext>}) and returns the
 * bytes of each referenced image, in order of first appearance.
 *
 * @param ttml TTML document whose {@code documentURI} is used to resolve
 *             relative image URIs
 * @return one byte[] per distinct image URI, in first-appearance order
 * @throws XPathExpressionException if the attribute XPath fails to evaluate
 * @throws URISyntaxException       if the document URI is malformed
 * @throws IOException              if an image stream cannot be read
 */
protected static List<byte[]> extractImages(Document ttml) throws XPathExpressionException, URISyntaxException, IOException {
    XPathFactory xPathfactory = XPathFactory.newInstance();
    XPath xpath = xPathfactory.newXPath();
    XPathExpression expr = xpath.compile("//*/@backgroundImage");
    NodeList nl = (NodeList) expr.evaluate(ttml, XPathConstants.NODESET);
    // internal name -> original URI, in first-appearance order.
    LinkedHashMap<String, String> internalNames2Original = new LinkedHashMap<String, String>();
    // FIX: the original code looked up internalNames2Original.get(uri), but
    // that map is keyed by internal name, so the lookup always missed and a
    // duplicate URI received a fresh internal name (and was fetched twice).
    // Keep a reverse index for the dedup lookup.
    Map<String, String> original2Internal = new HashMap<String, String>();
    int p = 1;
    for (int i = 0; i < nl.getLength(); i++) {
        Node bgImageNode = nl.item(i);
        String uri = bgImageNode.getNodeValue();
        String ext = uri.substring(uri.lastIndexOf("."));
        String internalName = original2Internal.get(uri);
        if (internalName == null) {
            internalName = "urn:mp4parser:" + p++ + ext;
            internalNames2Original.put(internalName, uri);
            original2Internal.put(uri, internalName);
        }
        bgImageNode.setNodeValue(internalName);
    }
    List<byte[]> images = new ArrayList<byte[]>();
    if (!internalNames2Original.isEmpty()) {
        for (Map.Entry<String, String> internalName2Original : internalNames2Original.entrySet()) {
            // Resolve each (possibly relative) image URI against the document URI.
            URI pic = new URI(ttml.getDocumentURI()).resolve(internalName2Original.getValue());
            images.add(streamToByteArray(pic.toURL().openStream()));
        }
    }
    return images;
}
代码示例来源:origin: apache/hive
// NOTE(review): truncated excerpt — loop bodies are cut off and `pathToAdd`,
// `sourceAlias` and the enclosing method are not visible here; presumably this
// replaces one alias with another across path->aliases maps (Hive) — TODO
// confirm against the full source.
targetAliasToPartnInfo.remove(targetAlias);
// Collect target paths that become alias-less once targetAlias is dropped.
List<Path> pathsToRemove = new ArrayList<>();
for (Entry<Path, ArrayList<String>> entry: targetPathToAliases.entrySet()) {
ArrayList<String> aliases = entry.getValue();
aliases.remove(targetAlias);
if (aliases.isEmpty()) {
pathsToRemove.add(entry.getKey());
// Collect source paths that carry sourceAlias (fragment; body incomplete).
List<Path> pathsToAdd = new ArrayList<>();
for (Entry<Path, ArrayList<String>> entry: sourcePathToAliases.entrySet()) {
ArrayList<String> aliases = entry.getValue();
if (aliases.contains(sourceAlias)) {
pathsToAdd.add(entry.getKey());
// Register sourceAlias under the path, creating the bucket on first use.
if (!targetPathToAliases.containsKey(pathToAdd)) {
targetPathToAliases.put(pathToAdd, new ArrayList<String>());
targetPathToAliases.get(pathToAdd).add(sourceAlias);
代码示例来源:origin: robolectric/robolectric
@Implementation
@HiddenApi
/**
 * Returns every registered account handle whose account supports the given
 * URI scheme.
 */
public List<PhoneAccountHandle> getPhoneAccountsSupportingScheme(String uriScheme) {
    List<PhoneAccountHandle> matches = new ArrayList<>();
    // Iterate entries directly instead of keySet + get lookups.
    for (Map.Entry<PhoneAccountHandle, PhoneAccount> entry : accounts.entrySet()) {
        if (entry.getValue().getSupportedUriSchemes().contains(uriScheme)) {
            matches.add(entry.getKey());
        }
    }
    return matches;
}
代码示例来源:origin: BroadleafCommerce/BroadleafCommerce
/**
 * Expands parameters whose keys are registered named operations by recursing
 * into the operation's own parameter map (recording the operation name in
 * {@code utilizedNames} once); all other parameters are copied into
 * {@code derivedParameters} verbatim.
 */
protected void expandFulfilledMap(Map<String, String> originalParameters, Map<String, String> derivedParameters, List<String> utilizedNames) {
    for (Map.Entry<String, String> entry : originalParameters.entrySet()) {
        String name = entry.getKey();
        if (!namedOperations.containsKey(name)) {
            // Plain parameter: pass straight through.
            derivedParameters.put(name, entry.getValue());
            continue;
        }
        // Named operation: expand recursively and remember it was used.
        expandFulfilledMap(namedOperations.get(name), derivedParameters, utilizedNames);
        if (!utilizedNames.contains(name)) {
            utilizedNames.add(name);
        }
    }
}
代码示例来源:origin: pentaho/pentaho-kettle
// NOTE(review): truncated excerpt — `coll`, `i`, the surrounding loops and
// declarations are outside this view; presumably this maps actual field names
// to meta-field indexes (Pentaho Kettle) — TODO confirm against full source.
coll.add( i );
metaNameToIndex.put( metaFieldNames[i], coll );
// All meta-field indexes registered under this actual field's name, if any.
List<Integer> columnIndexes = metaNameToIndex.get( actualFieldNames[ i ] );
if ( columnIndexes == null || columnIndexes.isEmpty() ) {
// No meta field matches this actual field: mark it unmatched.
unmatchedMetaFields.add( i );
actualToMetaFieldMapping[ i ] = FIELD_DOES_NOT_EXIST;
continue;
代码示例来源:origin: apache/hive
// NOTE(review): truncated excerpt — `alias`, the inner loop and the enclosing
// method are not visible; presumably this wires child operators per
// path/alias during Hive MapOperator setup — TODO confirm against full source.
Map<TableDesc, StructObjectInspector> convertedOI = getConvertedOI(tableNameToConf);
for (Map.Entry<Path, ArrayList<String>> entry : conf.getPathToAliases().entrySet()) {
Path onefile = entry.getKey();
List<String> aliases = entry.getValue();
// Partition/table metadata configured for this input path.
PartitionDesc partDesc = conf.getPathToPartitionInfo().get(onefile);
TableDesc tableDesc = partDesc.getTableDesc();
Configuration newConf = tableNameToConf.get(tableDesc.getTableName());
// Operator tree registered for the alias (fragment; `alias` defined outside).
Operator<? extends OperatorDesc> op = conf.getAliasToWork().get(alias);
if (LOG.isDebugEnabled()) {
LOG.debug("Adding alias " + alias + " to work list for file "
// Link this operator as parent of the alias's operator.
op.getParentOperators().add(this);
children.add(op);
// NOTE(review): truncated excerpt — the enclosing loop/method and `e`, `res`,
// `pkIdxCol` are outside this view; presumably CREATE TABLE column parsing in
// Ignite's SQL layer — TODO confirm against the full source.
cols.put(col.getName(), parseColumn(col));
// Reject user-declared columns that collide with the reserved _KEY/_VAL names.
if (cols.containsKey(QueryUtils.KEY_FIELD_NAME.toUpperCase()) ||
cols.containsKey(QueryUtils.VAL_FIELD_NAME.toUpperCase()))
throw new IgniteSQLException("Direct specification of _KEY and _VAL columns is forbidden",
IgniteQueryErrorCode.PARSING);
// Resolve the primary-key index column against the parsed column map.
GridSqlColumn gridCol = cols.get(pkIdxCol.columnName);
processExtraParam(e.getKey(), e.getValue(), res);
代码示例来源:origin: gocd/gocd
/**
 * Folds one flat result row into the per-pipeline property history: the row's
 * (key, value) pair is appended to the bucket of its "pipelineid", creating
 * the bucket on first sight.
 */
private static void addToHistory(LinkedHashMap<String, Properties> propHistory, Map<String, Object> flatMap) {
    String pipelineId = String.valueOf(flatMap.get("pipelineid"));
    String propKey = (String) flatMap.get("key");
    String propValue = (String) flatMap.get("value");
    Properties bucket = propHistory.get(pipelineId);
    if (bucket == null) {
        bucket = new Properties();
        propHistory.put(pipelineId, bucket);
    }
    bucket.add(new Property(propKey, propValue));
}
代码示例来源:origin: alibaba/druid
// NOTE(review): truncated excerpt — `ids`, `arrayKey`, `a`, `b`, `j`,
// `arrayDataList`, `dataIndex` and the enclosing method are outside this view;
// presumably druid monitoring-UI row flattening — TODO confirm against source.
for (LinkedHashMap<String, Object> dataNow : data) {
for (Iterator<Entry<String, Object>> it = dataNow.entrySet().iterator(); it.hasNext();) {
Entry<String, Object> entry = it.next();
String key = entry.getKey();
Object value = entry.getValue();
// Collect row ids (null-safe stringification).
if (JSON_ID_NAME.equals(key)) {
ids.add(value != null ? value.toString() : null);
Object arrayData = dataNow.get(arrayKey);
// Expanded column name for one histogram bucket, e.g. "foo-0~10ms".
String newKey = arrayKey + "-" + a + "~" + b + "ms";
dataNow.put(newKey, arrayDataList.get(j));
if (dataIndex == 0) {
REAL_SHOW_LIST.add(newKey);
@Override
public boolean equals(Object other) {
    // Identity implies equality.
    if (this == other) {
        return true;
    }
    // Only comparable against another handler of the same kind.
    if (!(other instanceof AnnotationInvocationHandler)) {
        return false;
    }
    AnnotationInvocationHandler otherHandler = (AnnotationInvocationHandler) other;
    if (!annotationType.equals(otherHandler.annotationType)) {
        return false;
    }
    // Equal only if every recorded annotation value matches its counterpart.
    for (Map.Entry<Method, AnnotationValue.Loaded<?>> valueEntry : values.entrySet()) {
        AnnotationValue.Loaded<?> counterpart = otherHandler.values.get(valueEntry.getKey());
        if (!valueEntry.getValue().equals(counterpart)) {
            return false;
        }
    }
    return true;
}
代码示例来源:origin: xuxueli/xxl-job
/**
 * LRU route strategy: returns the least-recently-used address for the job,
 * which the access-ordered lookup then promotes to most-recently-used.
 *
 * @param jobId       job whose per-job LRU map is consulted
 * @param addressList currently live executor addresses to (re)register
 * @return the least-recently-used address, or {@code null} if none is known
 */
public String route(int jobId, List<String> addressList) {
    // Reset all per-job caches once every 24 hours.
    if (System.currentTimeMillis() > CACHE_VALID_TIME) {
        jobLRUMap.clear();
        CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
    }

    // Lazily create this job's LRU map. accessOrder=true makes iteration
    // follow access recency, so the head of the map is the LRU entry.
    // (removeEldestEntry could additionally cap the size; not used here.)
    LinkedHashMap<String, String> lruItem = jobLRUMap.get(jobId);
    if (lruItem == null) {
        lruItem = new LinkedHashMap<String, String>(16, 0.75f, true);
        // FIX: honor putIfAbsent's contract — if a concurrent caller won the
        // race, use the instance actually stored in the shared map instead of
        // updating a discarded local one.
        LinkedHashMap<String, String> existing = jobLRUMap.putIfAbsent(jobId, lruItem);
        if (existing != null) {
            lruItem = existing;
        }
    }

    // Register any not-yet-tracked addresses.
    for (String address: addressList) {
        if (!lruItem.containsKey(address)) {
            lruItem.put(address, address);
        }
    }

    // FIX: guard against an empty LRU map (e.g. empty addressList) instead of
    // letting entrySet().iterator().next() throw NoSuchElementException.
    if (lruItem.isEmpty()) {
        return null;
    }

    // Head of the access-ordered map = least recently used; the get() below
    // counts as an access and rotates it to the tail.
    String eldestKey = lruItem.entrySet().iterator().next().getKey();
    String eldestValue = lruItem.get(eldestKey);
    return eldestValue;
}
代码示例来源:origin: apache/hive
/**
 * Registers aggregation expressions for a destination clause: merged into the
 * existing entry when the clause is already known, otherwise the supplied map
 * is adopted directly.
 */
public void addAggregationExprsForClause(String clause,
    LinkedHashMap<String, ASTNode> aggregationTrees) {
    boolean known = destToAggregationExprs.containsKey(clause);
    if (known) {
        destToAggregationExprs.get(clause).putAll(aggregationTrees);
    } else {
        destToAggregationExprs.put(clause, aggregationTrees);
    }
}
代码示例来源:origin: apache/hive
/**
 * Builds a SELECT DISTINCT AST with one select-expression child per resolved
 * column of the row resolver.
 */
public static ASTNode genSelectDIAST(RowResolver rr) {
    LinkedHashMap<String, LinkedHashMap<String, ColumnInfo>> rslvMap = rr.getRslvMap();
    ASTNode selectDI = new ASTNode(SELECTDI_TOKEN);
    // Note: this will determine the order of columns in the result. For now,
    // the columns for each table will be together; the order of the tables, as
    // well as the columns within each table, is deterministic, but undefined -
    // RR stores them in the order of addition.
    for (Entry<String, LinkedHashMap<String, ColumnInfo>> tabEntry : rslvMap.entrySet()) {
        String tabAlias = tabEntry.getKey();
        for (Entry<String, ColumnInfo> colEntry : tabEntry.getValue().entrySet()) {
            selectDI.addChild(buildSelExprSubTree(tabAlias, colEntry.getKey()));
        }
    }
    return selectDI;
}
private static ASTNode buildSelExprSubTree(String tableAlias, String col) {
代码示例来源:origin: wildfly/wildfly
@Override
// Serializes whether `map` was constructed with accessOrder=true by probing:
// insert two sentinel keys, touch the first, and observe whether that access
// reorders it behind the second. Statement order here is load-bearing.
protected void writeContext(ObjectOutput output, LinkedHashMap<Object, Object> map) throws IOException {
// Fresh Object identities guarantee the sentinels collide with no real key.
Object insertOrder = new Object();
Object accessOrder = new Object();
map.put(insertOrder, null);
map.put(accessOrder, null);
// Access first inserted entry
// If map uses access order, this element will move to the tail of the map
map.get(insertOrder);
Iterator<Object> keys = map.keySet().iterator();
Object element = keys.next();
// Skip any pre-existing keys; stop at whichever sentinel appears first.
while ((element != insertOrder) && (element != accessOrder)) {
element = keys.next();
}
// Restore the map before writing the detected flag.
map.remove(insertOrder);
map.remove(accessOrder);
// Map uses access order if previous access changed iteration order
output.writeBoolean(element == accessOrder);
}
代码示例来源:origin: apache/hive
/**
 * Reports whether the given column reference was recorded as ambiguous —
 * either within the named table (case-insensitive) or, when no table alias is
 * supplied, under any table.
 */
private boolean isAmbiguousReference(String tableAlias, String colAlias) {
    // Ambiguity tracking may be switched off entirely.
    if (!getCheckForAmbiguity()) {
        return false;
    }
    if (ambiguousColumns == null || ambiguousColumns.isEmpty()) {
        return false;
    }
    if (tableAlias == null) {
        // Unqualified reference: ambiguous if the column appears under any table.
        for (Map.Entry<String, LinkedHashMap<String, String>> tableEntry : ambiguousColumns.entrySet()) {
            for (Map.Entry<String, String> colEntry : tableEntry.getValue().entrySet()) {
                if (colAlias.equalsIgnoreCase(colEntry.getKey())) {
                    return true;
                }
            }
        }
        return false;
    }
    // Qualified reference: consult only the named table's ambiguous columns.
    LinkedHashMap<String, String> colAliases = ambiguousColumns.get(tableAlias.toLowerCase());
    return colAliases != null && colAliases.containsKey(colAlias.toLowerCase());
}
内容来源于网络,如有侵权,请联系作者删除!