Usage of the org.antlr.runtime.tree.Tree.getChild() method, with code examples

This article collects a number of Java code examples for the org.antlr.runtime.tree.Tree.getChild() method and shows how Tree.getChild() is used in practice. The examples are taken from selected projects on platforms such as GitHub, Stack Overflow, and Maven, so they should serve as useful references. Details of the Tree.getChild() method are as follows:
Package: org.antlr.runtime.tree
Class: Tree
Method: getChild

Introduction to Tree.getChild

Returns the child node of this node at index i; valid indexes run from 0 to getChildCount() - 1.
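As a quick, self-contained illustration (not taken from the projects below), the following minimal sketch builds a small CommonTree by hand and walks it with getChildCount()/getChild(); the token type numbers used here are arbitrary placeholders.

import org.antlr.runtime.CommonToken;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;

public class GetChildDemo {

  // Recursively print a node and its subtree, visiting each child via getChild(i).
  static void print(Tree node, int depth) {
    StringBuilder line = new StringBuilder();
    for (int i = 0; i < depth; i++) {
      line.append("  ");
    }
    System.out.println(line.append(node.getText()));
    for (int i = 0; i < node.getChildCount(); i++) {
      print(node.getChild(i), depth + 1);
    }
  }

  public static void main(String[] args) {
    // Hand-built tree: (+ 1 2); the token type numbers (1, 2) are arbitrary here.
    CommonTree plus = new CommonTree(new CommonToken(1, "+"));
    plus.addChild(new CommonTree(new CommonToken(2, "1")));
    plus.addChild(new CommonTree(new CommonToken(2, "2")));

    System.out.println(plus.getChild(0).getText()); // prints "1"
    print(plus, 0);                                 // prints "+", then "1" and "2" indented
  }
}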

Code examples

Code example source: apache/nifi

private String getSelectedName(final Tree selectable) {
  if (selectable.getChildCount() == 0) {
    return selectable.getText();
  } else if (selectable.getType() == DOT) {
    return getSelectedName(selectable.getChild(0)) + "." + getSelectedName(selectable.getChild(1));
  } else {
    return selectable.getChild(selectable.getChildCount() - 1).getText();
  }
}

Code example source: apache/hive

public static Map<String,String> getPartKeyValuePairsFromAST(Table tbl, ASTNode tree,
   HiveConf hiveConf) throws SemanticException {
  ASTNode child = ((ASTNode) tree.getChild(0).getChild(1));
  Map<String,String> partSpec = new HashMap<String, String>();
  if (child != null) {
   partSpec = DDLSemanticAnalyzer.getValidatedPartSpec(tbl, child, hiveConf, false);
  } //otherwise, it is the case of analyze table T compute statistics for columns;
  return partSpec;
 }

Code example source: apache/nifi

private static RecordPathSegment[] getArgPaths(final Tree argumentListTree, final int minCount, final int maxCount, final String functionName, final boolean absolute) {
    final int numArgs = argumentListTree.getChildCount();
    if (numArgs < minCount || numArgs > maxCount) {
      throw new RecordPathException("Invalid number of arguments: " + functionName + " function takes at least " + minCount
          + " arguments, and at most " + maxCount + " arguments, but got " + numArgs);
    }

    final List<RecordPathSegment> argPaths = new ArrayList<>();
    for (int i=0; i < argumentListTree.getChildCount(); i++) {
      argPaths.add(buildPath(argumentListTree.getChild(i), null, absolute));
    }

    return argPaths.toArray(new RecordPathSegment[argPaths.size()]);
  }

Code example source: apache/hive

/**
 * Converts parsed key/value properties pairs into a map.
 *
 * @param prop ASTNode parent of the key/value pairs
 *
 * @param mapProp property map which receives the mappings
 */
public static void readProps(
 ASTNode prop, Map<String, String> mapProp) {
 for (int propChild = 0; propChild < prop.getChildCount(); propChild++) {
  String key = unescapeSQLString(prop.getChild(propChild).getChild(0)
    .getText());
  String value = null;
  if (prop.getChild(propChild).getChild(1) != null) {
   value = unescapeSQLString(prop.getChild(propChild).getChild(1).getText());
  }
  mapProp.put(key, value);
 }
}
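The readProps loop above expects each child of prop to be a key/value node whose first child is the key and whose optional second child is the value. A minimal sketch of the same access pattern, using hand-built plain Tree nodes instead of Hive's ASTNode, hypothetical token types, and no unescapeSQLString handling, might look like this:

import org.antlr.runtime.CommonToken;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import java.util.HashMap;
import java.util.Map;

public class ReadPropsSketch {

  // Same traversal pattern as readProps above, minus Hive's SQL unescaping.
  static Map<String, String> readProps(Tree prop) {
    final Map<String, String> mapProp = new HashMap<>();
    for (int i = 0; i < prop.getChildCount(); i++) {
      final String key = prop.getChild(i).getChild(0).getText();
      // BaseTree.getChild returns null when the index is out of range, so a missing value stays null.
      final Tree valueNode = prop.getChild(i).getChild(1);
      mapProp.put(key, valueNode == null ? null : valueNode.getText());
    }
    return mapProp;
  }

  public static void main(String[] args) {
    // Hand-built property list: (PROPERTIES (PROPERTY 'k1' 'v1') (PROPERTY 'k2')); token types are arbitrary.
    final CommonTree props = new CommonTree(new CommonToken(1, "PROPERTIES"));
    final CommonTree p1 = new CommonTree(new CommonToken(2, "PROPERTY"));
    p1.addChild(new CommonTree(new CommonToken(3, "'k1'")));
    p1.addChild(new CommonTree(new CommonToken(3, "'v1'")));
    final CommonTree p2 = new CommonTree(new CommonToken(2, "PROPERTY"));
    p2.addChild(new CommonTree(new CommonToken(3, "'k2'")));
    props.addChild(p1);
    props.addChild(p2);

    System.out.println(readProps(props)); // e.g. {'k1'='v1', 'k2'=null} (map order not guaranteed)
  }
}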

Code example source: apache/hive

private void addReference(StringBuilder b, ASTNode ref) {
 if ( ref.getType() == HiveParser.DOT ) {
  b.append(ref.getChild(0).getChild(0).getText()).
  append(".").
  append(ref.getChild(1).getText());
 } else {
  b.append(ref.getText());
 }
}

Code example source: apache/hive

public boolean isTopLevelSimpleSelectStarQuery() {
 if (alias != null || destToSelExpr.size() != 1 || !isSimpleSelectQuery()) {
  return false;
 }
 for (ASTNode selExprs : destToSelExpr.values()) {
  if (selExprs.getChildCount() != 1) {
   return false;
  }
  Tree sel = selExprs.getChild(0).getChild(0);
  if (sel == null || sel.getType() != HiveParser.TOK_ALLCOLREF) {
   return false;
  }
 }
 return true;
}

Code example source: apache/nifi

// Excerpt from the body of a switch on tree.getType() in buildBooleanEvaluator(Tree tree)
case EQUALS:
  return new EqualsEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1)));
case NOT_EQUALS:
  return new NotEqualsEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1)));
case GT:
  return new GreaterThanEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1)));
case LT:
  return new LessThanEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1)));
case GE:
  return new GreaterThanOrEqualEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1)));
case LE:
  return new LessThanOrEqualEvaluator(buildReferenceEvaluator(tree.getChild(0)), buildReferenceEvaluator(tree.getChild(1)));
case NOT:
  return new NotEvaluator(buildBooleanEvaluator(tree.getChild(0)));
case AND:
  return new AndEvaluator(buildBooleanEvaluator(tree.getChild(0)), buildBooleanEvaluator(tree.getChild(1)));
case OR:
  return new OrEvaluator(buildBooleanEvaluator(tree.getChild(0)), buildBooleanEvaluator(tree.getChild(1)));
case IS_NULL:
  return new IsNullEvaluator(buildReferenceEvaluator(tree.getChild(0)));
case NOT_NULL:
  return new NotNullEvaluator(buildReferenceEvaluator(tree.getChild(0)));
default:
  throw new HL7QueryParsingException("Cannot build boolean evaluator for '" + tree.getText() + "'");

Code example source: apache/drill

/**
 * Converts parsed key/value properties pairs into a map.
 *
 * @param prop ASTNode parent of the key/value pairs
 *
 * @param mapProp property map which receives the mappings
 */
public static void readProps(
 ASTNode prop, Map<String, String> mapProp) {
 for (int propChild = 0; propChild < prop.getChildCount(); propChild++) {
  String key = unescapeSQLString(prop.getChild(propChild).getChild(0)
    .getText());
  String value = null;
  if (prop.getChild(propChild).getChild(1) != null) {
   value = unescapeSQLString(prop.getChild(propChild).getChild(1).getText());
  }
  mapProp.put(key, value);
 }
}

Code example source: apache/nifi

private void toTreeString(final Tree tree, final StringBuilder sb, final int indentLevel) {
    final String nodeName = tree.getText();
    for (int i = 0; i < indentLevel; i++) {
      sb.append(" ");
    }
    sb.append(nodeName);
    sb.append("\n");

    for (int i = 0; i < tree.getChildCount(); i++) {
      final Tree child = tree.getChild(i);
      toTreeString(child, sb, indentLevel + 2);
    }
  }

Code example source: apache/drill

private void addReference(StringBuilder b, ASTNode ref) {
 if ( ref.getType() == HiveParser.DOT ) {
  b.append(ref.getChild(0).getChild(0).getText()).
  append(".").
  append(ref.getChild(1).getText());
 } else {
  b.append(ref.getText());
 }
}

Code example source: apache/nifi

private static RecordPathSegment[] getArgPaths(final Tree argumentListTree, final int expectedCount, final String functionName, final boolean absolute) {
  final int numArgs = argumentListTree.getChildCount();
  if (numArgs != expectedCount) {
    throw new RecordPathException("Invalid number of arguments: " + functionName + " function takes " + expectedCount + " arguments but got " + numArgs);
  }
  final RecordPathSegment[] argPaths = new RecordPathSegment[expectedCount];
  for (int i = 0; i < expectedCount; i++) {
    argPaths[i] = buildPath(argumentListTree.getChild(i), null, absolute);
  }
  return argPaths;
}

Code example source: apache/drill

public static Map<String,String> getPartKeyValuePairsFromAST(Table tbl, ASTNode tree,
   HiveConf hiveConf) throws SemanticException {
  ASTNode child = ((ASTNode) tree.getChild(0).getChild(1));
  Map<String,String> partSpec = new HashMap<String, String>();
  if (child != null) {
   partSpec = DDLSemanticAnalyzer.getValidatedPartSpec(tbl, child, hiveConf, false);
  } //otherwise, it is the case of analyze table T compute statistics for columns;
  return partSpec;
 }

Code example source: apache/drill

public boolean isTopLevelSimpleSelectStarQuery() {
 if (alias != null || destToSelExpr.size() != 1 || !isSimpleSelectQuery()) {
  return false;
 }
 for (ASTNode selExprs : destToSelExpr.values()) {
  if (selExprs.getChildCount() != 1) {
   return false;
  }
  Tree sel = selExprs.getChild(0).getChild(0);
  if (sel == null || sel.getType() != HiveParser.TOK_ALLCOLREF) {
   return false;
  }
 }
 return true;
}

Code example source: apache/nifi

private Evaluator<?> buildReferenceEvaluator(final Tree tree) {
  switch (tree.getType()) {
    case MESSAGE:
      return new MessageEvaluator();
    case SEGMENT_NAME:
      return new SegmentEvaluator(new StringLiteralEvaluator(tree.getText()));
    case IDENTIFIER:
      return new DeclaredReferenceEvaluator(new StringLiteralEvaluator(tree.getText()));
    case DOT:
      final Tree firstChild = tree.getChild(0);
      final Tree secondChild = tree.getChild(1);
      return new DotEvaluator(buildReferenceEvaluator(firstChild), buildIntegerEvaluator(secondChild));
    case STRING_LITERAL:
      return new StringLiteralEvaluator(tree.getText());
    case NUMBER:
      return new IntegerLiteralEvaluator(Integer.parseInt(tree.getText()));
    default:
      throw new HL7QueryParsingException("Failed to build evaluator for " + tree.getText());
  }
}

Code example source: apache/hive

/**
 * Get the constraint from the AST and populate the cstrInfos with the required
 * information.
 * @param child  The node with the constraint token
 * @param cstrInfos Constraint information
 * @throws SemanticException
 */
private static void generateConstraintInfos(ASTNode child,
  List<ConstraintInfo> cstrInfos) throws SemanticException {
 ImmutableList.Builder<String> columnNames = ImmutableList.builder();
 for (int j = 0; j < child.getChild(0).getChildCount(); j++) {
  Tree columnName = child.getChild(0).getChild(j);
  checkColumnName(columnName.getText());
  columnNames.add(unescapeIdentifier(columnName.getText().toLowerCase()));
 }
 generateConstraintInfos(child, columnNames.build(), cstrInfos, null, null);
}

Code example source: apache/hive

private String poolPath(Tree ast) {
 StringBuilder builder = new StringBuilder();
 builder.append(unescapeIdentifier(ast.getText()));
 for (int i = 0; i < ast.getChildCount(); ++i) {
  // DOT is not affected
  builder.append(unescapeIdentifier(ast.getChild(i).getText()));
 }
 return builder.toString();
}

Code example source: apache/hive

private void addAlternateGByKeyMappings(ASTNode gByExpr, ColumnInfo colInfo,
  RowResolver gByInputRR, RowResolver gByRR) {
 if (gByExpr.getType() == HiveParser.DOT
   && gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL) {
  String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr.getChild(0).getChild(0)
    .getText().toLowerCase());
  String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr.getChild(1).getText().toLowerCase());
  gByRR.put(tab_alias, col_alias, colInfo);
 } else if (gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL) {
  String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr.getChild(0).getText().toLowerCase());
  String tab_alias = null;
  /*
   * If the input to the GBy has a tab alias for the column, then add an
   * entry based on that tab_alias. For e.g. this query: select b.x,
   * count(*) from t1 b group by x needs (tab_alias=b, col_alias=x) in the
   * GBy RR. tab_alias=b comes from looking at the RowResolver that is the
   * ancestor before any GBy/ReduceSinks added for the GBY operation.
   */
  try {
   ColumnInfo pColInfo = gByInputRR.get(tab_alias, col_alias);
   tab_alias = pColInfo == null ? null : pColInfo.getTabAlias();
  } catch (SemanticException se) {
  }
  gByRR.put(tab_alias, col_alias, colInfo);
 }
}

Code example source: apache/nifi

private Evaluator<?> buildFunctionExpressionEvaluator(final Tree tree, final int offset) {
  if (tree.getChildCount() == 0) {
    throw new AttributeExpressionLanguageParsingException("EXPRESSION tree node has no children");
  }
  final int firstChildIndex = tree.getChildCount() - offset - 1;
  if (firstChildIndex == 0) {
    return buildEvaluator(tree.getChild(0));
  }
  final Tree functionTree = tree.getChild(firstChildIndex);
  final Evaluator<?> subjectEvaluator = buildFunctionExpressionEvaluator(tree, offset + 1);
  final Tree functionNameTree = functionTree.getChild(0);
  final List<Evaluator<?>> argEvaluators = new ArrayList<>();
  for (int i = 1; i < functionTree.getChildCount(); i++) {
    argEvaluators.add(buildEvaluator(functionTree.getChild(i)));
  }
  return buildFunctionEvaluator(functionNameTree, subjectEvaluator, argEvaluators);
}

Code example source: apache/hive

private ASTNode where(ASTNode qry) {
 return (ASTNode) qry.getChild(1).getChild(2);
}

Code example source: apache/hive

// Excerpt: collect select expressions and group-by expressions into result
if (selectExprs != null) {
 for (int i = 0; i < selectExprs.getChildCount(); ++i) {
  if (((ASTNode) selectExprs.getChild(i)).getToken().getType() == HiveParser.QUERY_HINT) {
   continue;
  }
  ASTNode grpbyExpr = (ASTNode) selectExprs.getChild(i).getChild(0);
  result.add(grpbyExpr);
 }
}
if (grpByExprs != null) {
 for (int i = 0; i < grpByExprs.getChildCount(); ++i) {
  ASTNode grpbyExpr = (ASTNode) grpByExprs.getChild(i);
  if (grpbyExpr.getType() != HiveParser.TOK_GROUPING_SETS_EXPRESSION) {
   result.add(grpbyExpr);
  }
 }
}
