Usage of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode.asText() with code examples


This article compiles a number of Java code examples for the org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode.asText() method, showing how JsonNode.asText() is used in practice. The examples are drawn mainly from selected projects on platforms such as GitHub, Stack Overflow, and Maven, and should make useful references. Details of the JsonNode.asText() method:
Package: org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind
Class: JsonNode
Method: asText

About JsonNode.asText

This class is Flink's shaded relocation of Jackson's com.fasterxml.jackson.databind.JsonNode, so its API matches standard Jackson. asText() returns the node's value as a String: the text of a textual node, a string representation for other value nodes (numbers, booleans, null), and an empty string for container or missing nodes. An asText(String defaultValue) overload lets callers supply a fallback for nodes without a usable text value (for example, a missing node).
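
To make these semantics concrete before the project examples, here is a minimal, self-contained sketch; it is not taken from any of the projects below, and the class name and sample JSON are invented for illustration:

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;

public class AsTextDemo {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    JsonNode root = mapper.readTree(
      "{\"name\": \"flink\", \"port\": 8081, \"tags\": [\"a\", \"b\"]}");

    System.out.println(root.get("name").asText());        // "flink" - textual node returns its text
    System.out.println(root.get("port").asText());        // "8081"  - numeric node coerced to text
    System.out.println(root.get("tags").asText());        // ""      - container nodes yield ""
    System.out.println(root.path("missing").asText("?")); // "?"     - fallback for a missing node
  }
}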

Code examples

Code example source: apache/flink

private static TypeInformation<?> convertStringFormat(String location, JsonNode node) {
  if (!node.isTextual()) {
    throw new IllegalArgumentException("Invalid '" + FORMAT + "' property in node: " + location);
  }
  switch (node.asText()) {
    case FORMAT_DATE:
      return Types.SQL_DATE;
    case FORMAT_TIME:
      return Types.SQL_TIME;
    case FORMAT_DATE_TIME:
      return Types.SQL_TIMESTAMP;
    default:
      return Types.STRING; // unlikely that we will support other formats in the future
  }
}
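
For instance, given the schema fragment {"type": "string", "format": "date"} (a hypothetical input, assuming the usual JSON Schema format names behind the FORMAT_* constants), the node passed in is the textual "format" property, node.asText() returns "date", and the method yields Types.SQL_DATE.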

Code example source: apache/flink

private static TypeInformation<?> convertStringEncoding(String location, JsonNode node) {
  if (!node.isTextual()) {
    throw new IllegalArgumentException("Invalid '" + CONTENT_ENCODING + "' property in node: " + location);
  }
  // "If the instance value is a string, this property defines that the string SHOULD
  // be interpreted as binary data and decoded using the encoding named by this property."
  switch (node.asText()) {
    case CONTENT_ENCODING_BASE64:
      return Types.PRIMITIVE_ARRAY(Types.BYTE);
    default:
      // we fail hard here:
      // this gives us the chance to support more encodings in the future without problems
      // of backwards compatibility
      throw new IllegalArgumentException("Invalid encoding '" + node.asText() + "' in node: " + location);
  }
}

Code example source: apache/flink

/**
   * Select the language from the incoming JSON text.
   */
  @Override
  public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
    if (jsonParser == null) {
      jsonParser = new ObjectMapper();
    }
    JsonNode jsonNode = jsonParser.readValue(value, JsonNode.class);
    boolean isEnglish = jsonNode.has("user") && jsonNode.get("user").has("lang") && jsonNode.get("user").get("lang").asText().equals("en");
    boolean hasText = jsonNode.has("text");
    if (isEnglish && hasText) {
      // message of tweet
      StringTokenizer tokenizer = new StringTokenizer(jsonNode.get("text").asText());
      // split the message
      while (tokenizer.hasMoreTokens()) {
        String result = tokenizer.nextToken().replaceAll("\\s*", "").toLowerCase();
        if (!result.equals("")) {
          out.collect(new Tuple2<>(result, 1));
        }
      }
    }
  }
}
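
A side note on this example: the has()/get() chain guards against missing fields before calling asText(). A common alternative Jackson idiom, shown below as a hypothetical rewrite rather than part of the Flink source above, is path(), which returns a missing-node placeholder instead of null:

// path() never returns null, and asText() on a missing node yields "",
// so the comparison is null-safe without explicit has() checks:
boolean isEnglish = "en".equals(jsonNode.path("user").path("lang").asText());
boolean hasText = jsonNode.has("text");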

Code example source: apache/flink

// Truncated excerpt from a type-dispatching conversion method; asText() backs
// all of the string-based conversions. Elided parts are marked with "...".
} else if (info == Types.BOOLEAN) {
  return node.asBoolean();
} else if (info == Types.STRING) {
  return node.asText();
} else if (info == Types.BIG_DEC) {
  return node.decimalValue();
} else if (info == Types.BIG_INT) {
  return node.bigIntegerValue();
} else if (info == Types.SQL_DATE) {
  return Date.valueOf(node.asText());
} else if (info == Types.SQL_TIME) {
  // only UTC times without fractional seconds are accepted
  final String time = node.asText();
  if (time.indexOf('Z') < 0 || time.indexOf('.') >= 0) {
    throw new IllegalStateException(/* ... message elided ... */);
  }
  // ...
} else if (info == Types.SQL_TIMESTAMP) {
  // only UTC timestamps are accepted
  final String timestamp = node.asText();
  if (timestamp.indexOf('Z') < 0) {
    throw new IllegalStateException(/* ... message elided ... */);
  }
  // ...
Code example source: apache/flink

// Disjoint excerpts from a JSON-schema-to-TypeInformation converter; asText()
// extracts '$ref' targets and 'type' names. Gaps are marked with "...".
if (node.has(REF) && node.get(REF).isTextual()) {
  ref = Optional.of(resolveReference(node.get(REF).asText(), node, root));
} else {
  ref = Optional.empty();
}
// ... a 'type' property may be a single name or an array of names
if (typeNode.isArray()) {
  final Iterator<JsonNode> elements = typeNode.elements();
  while (elements.hasNext()) {
    types.add(elements.next().asText());
  }
} else {
  types.add(typeNode.asText());
}
// ... unsupported type names fail hard
default:
  throw new IllegalArgumentException(
    "Unsupported type '" + node.get(TYPE).asText() + "' in node: " + location);
// ... '$ref' and 'oneOf' subschemas contribute to the candidate type set
ref.filter(r -> r.has(TYPE)).ifPresent(r -> typeSet.add(convertType(node.get(REF).asText(), r, root)));
final TypeInformation<?>[] types = convertTypes(node.get(REF).asText() + '/' + ONE_OF, ref.get().get(ONE_OF), root);
typeSet.addAll(Arrays.asList(types));

Code example source: apache/flink

JsonNode job = finishedJobs.get(0);
JobID jobId = JobID.fromHexString(job.get("jid").asText());
String name = job.get("name").asText();
JobStatus state = JobStatus.valueOf(job.get("state").asText());

Code example source: apache/flink

@Test
public void getTaskManagerLogAndStdoutFiles() {
  try {
    String json = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/");
    ObjectMapper mapper = new ObjectMapper();
    JsonNode parsed = mapper.readTree(json);
    ArrayNode taskManagers = (ArrayNode) parsed.get("taskmanagers");
    JsonNode taskManager = taskManagers.get(0);
    String id = taskManager.get("id").asText();
    WebMonitorUtils.LogFileLocation logFiles = WebMonitorUtils.LogFileLocation.find(CLUSTER_CONFIGURATION);
    //we check for job manager log files, since no separate taskmanager logs exist
    FileUtils.writeStringToFile(logFiles.logFile, "job manager log");
    String logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/" + id + "/log");
    assertTrue(logs.contains("job manager log"));
    FileUtils.writeStringToFile(logFiles.stdOutFile, "job manager out");
    logs = TestBaseUtils.getFromHTTP("http://localhost:" + getRestPort() + "/taskmanagers/" + id + "/stdout");
    assertTrue(logs.contains("job manager out"));
  } catch (Exception e) {
    e.printStackTrace();
    fail(e.getMessage());
  }
}

Code example source: apache/flink

// Disjoint excerpts from a test that verifies a generated JSON job plan.
// Gaps are marked with "...".
assertTrue(operatorField.isTextual());
// ...
if (contentsFields.asText().startsWith("Sync")) {
  assertEquals(1, parallelismField.asInt());
}
// ... vertices are indexed by their textual id ...
idToNode.put(vertexIdField.asText(), vertex);
// ... so that each input id can be checked against known vertices
assertTrue(inputIdField.isTextual());
String inputIdString = inputIdField.asText();
assertTrue(idToNode.containsKey(inputIdString));

Code example source: apache/flink

@Test
public void testDeserializeWithoutKey() throws IOException {
  ObjectMapper mapper = new ObjectMapper();
  byte[] serializedKey = null;
  ObjectNode initialValue = mapper.createObjectNode();
  initialValue.put("word", "world");
  byte[] serializedValue = mapper.writeValueAsBytes(initialValue);
  JSONKeyValueDeserializationSchema schema = new JSONKeyValueDeserializationSchema(false);
  ObjectNode deserializedValue = schema.deserialize(serializedKey, serializedValue, "", 0, 0);
  Assert.assertTrue(deserializedValue.get("metadata") == null);
  Assert.assertTrue(deserializedValue.get("key") == null);
  Assert.assertEquals("world", deserializedValue.get("value").get("word").asText());
}

Code example source: apache/flink

@Test
  public void testDeserializeWithMetadata() throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode initialKey = mapper.createObjectNode();
    initialKey.put("index", 4);
    byte[] serializedKey = mapper.writeValueAsBytes(initialKey);

    ObjectNode initialValue = mapper.createObjectNode();
    initialValue.put("word", "world");
    byte[] serializedValue = mapper.writeValueAsBytes(initialValue);

    JSONKeyValueDeserializationSchema schema = new JSONKeyValueDeserializationSchema(true);
    ObjectNode deserializedValue = schema.deserialize(serializedKey, serializedValue, "topic#1", 3, 4);

    Assert.assertEquals(4, deserializedValue.get("key").get("index").asInt());
    Assert.assertEquals("world", deserializedValue.get("value").get("word").asText());
    Assert.assertEquals("topic#1", deserializedValue.get("metadata").get("topic").asText());
    Assert.assertEquals(4, deserializedValue.get("metadata").get("offset").asInt());
    Assert.assertEquals(3, deserializedValue.get("metadata").get("partition").asInt());
  }
}

Code example source: apache/flink

@Test
public void testDeserializeWithoutMetadata() throws IOException {
  ObjectMapper mapper = new ObjectMapper();
  ObjectNode initialKey = mapper.createObjectNode();
  initialKey.put("index", 4);
  byte[] serializedKey = mapper.writeValueAsBytes(initialKey);
  ObjectNode initialValue = mapper.createObjectNode();
  initialValue.put("word", "world");
  byte[] serializedValue = mapper.writeValueAsBytes(initialValue);
  JSONKeyValueDeserializationSchema schema = new JSONKeyValueDeserializationSchema(false);
  ObjectNode deserializedValue = schema.deserialize(serializedKey, serializedValue, "", 0, 0);
  Assert.assertTrue(deserializedValue.get("metadata") == null);
  Assert.assertEquals(4, deserializedValue.get("key").get("index").asInt());
  Assert.assertEquals("world", deserializedValue.get("value").get("word").asText());
}

Code example source: org.apache.flink/flink-json and com.alibaba.blink/flink-json

These artifacts ship a convertStringFormat method identical to the apache/flink example shown above.

Code example source: org.apache.flink/flink-json and com.alibaba.blink/flink-json

These artifacts ship a convertStringEncoding method identical to the apache/flink example shown above.

Code example source: org.apache.flink/flink-runtime (com.alibaba.blink/flink-runtime ships an identical method)

/**
   * Reads the given archive file and returns a {@link Collection} of contained {@link ArchivedJson}.
   *
   * @param file archive to extract
   * @return collection of archived jsons
   * @throws IOException if the file can't be opened, read or doesn't contain valid json
   */
  public static Collection<ArchivedJson> getArchivedJsons(Path file) throws IOException {
    try (FSDataInputStream input = file.getFileSystem().open(file);
      ByteArrayOutputStream output = new ByteArrayOutputStream()) {
      IOUtils.copyBytes(input, output);

      JsonNode archive = mapper.readTree(output.toByteArray());

      Collection<ArchivedJson> archives = new ArrayList<>();
      for (JsonNode archivePart : archive.get(ARCHIVE)) {
        String path = archivePart.get(PATH).asText();
        String json = archivePart.get(JSON).asText();
        archives.add(new ArchivedJson(path, json));
      }
      return archives;
    }
  }
}

Code example source: com.alibaba.blink/flink-runtime (org.apache.flink/flink-runtime and org.apache.flink/flink-runtime_2.11 ship identical methods)

@Override
  public JobConfigInfo deserialize(
      JsonParser jsonParser,
      DeserializationContext deserializationContext) throws IOException {
    JsonNode rootNode = jsonParser.readValueAsTree();
    final JobID jobId = JobID.fromHexString(rootNode.get(FIELD_NAME_JOB_ID).asText());
    final String jobName = rootNode.get(FIELD_NAME_JOB_NAME).asText();
    final ExecutionConfigInfo executionConfigInfo;
    if (rootNode.has(FIELD_NAME_EXECUTION_CONFIG)) {
      executionConfigInfo = RestMapperUtils.getStrictObjectMapper().treeToValue(rootNode.get(FIELD_NAME_EXECUTION_CONFIG), ExecutionConfigInfo.class);
    } else {
      executionConfigInfo = null;
    }
    return new JobConfigInfo(jobId, jobName, executionConfigInfo);
  }
}
