This article collects a number of code examples for the Java method org.elasticsearch.hadoop.serialization.Generator.getParentPath() and shows how Generator.getParentPath() is used in practice. The examples were extracted from selected projects published on platforms such as GitHub, Stack Overflow, and Maven, and should serve as a useful reference. Details of the Generator.getParentPath() method:
Package path: org.elasticsearch.hadoop.serialization.Generator
Class name: Generator
Method name: getParentPath
Method description: none provided.
Code example source: origin: elastic/elasticsearch-hadoop
assertEquals("", generator.getParentPath());
assertEquals("", generator.getParentPath());
assertEquals("test", generator.getParentPath());
assertEquals("test", generator.getParentPath());
assertEquals("test.subfield", generator.getParentPath());
assertEquals("test.subfield", generator.getParentPath());
assertEquals("test.subfield", generator.getParentPath());
assertEquals("test", generator.getParentPath());
assertEquals("", generator.getParentPath());
assertEquals("", generator.getParentPath());
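Judging from these assertions, getParentPath() returns the dot-separated path of the object currently being written, and an empty string at the top level. Below is a minimal sketch of the kind of call sequence that produces the values above; using JacksonJsonGenerator(OutputStream) as the concrete Generator implementation is an assumption of the sketch, not something taken from the original snippet.

import java.io.ByteArrayOutputStream;

import org.elasticsearch.hadoop.serialization.Generator;
import org.elasticsearch.hadoop.serialization.json.JacksonJsonGenerator;

public class ParentPathDemo {
    public static void main(String[] args) {
        // Assumption: JacksonJsonGenerator wraps an OutputStream and implements Generator.
        Generator generator = new JacksonJsonGenerator(new ByteArrayOutputStream());

        generator.writeBeginObject();                    // top-level object
        System.out.println(generator.getParentPath());   // ""

        generator.writeFieldName("test");
        generator.writeBeginObject();                    // now inside "test"
        System.out.println(generator.getParentPath());   // "test"

        generator.writeFieldName("subfield");
        generator.writeBeginObject();                    // nested one level deeper
        System.out.println(generator.getParentPath());   // "test.subfield"

        generator.writeEndObject();                      // closing an object pops one path segment
        System.out.println(generator.getParentPath());   // "test"

        generator.writeEndObject();
        System.out.println(generator.getParentPath());   // ""

        generator.writeEndObject();
    }
}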
Code example source: origin: elastic/elasticsearch-hadoop
@SuppressWarnings("unchecked")
@Override
public Result write(SinkCall<Object[], ?> sinkCall, Generator generator) {
    Tuple tuple = CascadingUtils.coerceToString(sinkCall);
    // consider names (in case of aliases these are already applied)
    List<String> names = (List<String>) sinkCall.getContext()[SINK_CTX_ALIASES];
    generator.writeBeginObject();
    for (int i = 0; i < tuple.size(); i++) {
        String name = (i < names.size() ? names.get(i) : "tuple" + i);
        // filter out fields
        if (shouldKeep(generator.getParentPath(), name)) {
            generator.writeFieldName(name);
            Object object = tuple.getObject(i);
            Result result = jdkWriter.write(object, generator);
            if (!result.isSuccesful()) {
                if (object instanceof Writable) {
                    return writableWriter.write((Writable) object, generator);
                }
                return Result.FAILED(object);
            }
        }
    }
    generator.writeEndObject();
    return Result.SUCCESFUL();
}
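A detail worth noting in this Cascading writer: each tuple entry is first handed to the plain JDK object writer, and only when that fails and the value is a Hadoop Writable does it fall back to the Writable writer; any other unwritable value aborts the whole write with Result.FAILED(object). The getParentPath()/shouldKeep pair applies field filtering before a field name is ever emitted, which is exactly the pattern the shorter snippets below repeat.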
Code example source: origin: elastic/elasticsearch-hadoop
if (shouldKeep(generator.getParentPath(), fieldName)) {
    generator.writeFieldName(fieldName);
    Result result = write(entry.getValue(), generator);
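This snippet and the ones that follow all share one pattern: the parent path of the object being written is combined with a candidate field name and passed to shouldKeep(...), and the field is only serialized when that check passes. The helper below is a hypothetical stand-in for such a check (shouldKeepSketch and the excludes set are illustrative names, not elasticsearch-hadoop API); it only demonstrates the idea of matching the full dotted path against an exclude list.

import java.util.Set;

public class FieldFilterSketch {

    // Hypothetical stand-in for shouldKeep(parentPath, name): build the full dotted
    // path of the field about to be written and test it against an exclude list.
    static boolean shouldKeepSketch(String parentPath, String name, Set<String> excludes) {
        String path = parentPath.isEmpty() ? name : parentPath + "." + name;
        return !excludes.contains(path);
    }

    public static void main(String[] args) {
        Set<String> excludes = Set.of("test.subfield.secret");
        System.out.println(shouldKeepSketch("test.subfield", "secret", excludes));  // false -> skipped
        System.out.println(shouldKeepSketch("test.subfield", "visible", excludes)); // true  -> written
    }
}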
Code example source: origin: elastic/elasticsearch-hadoop
if (shouldKeep(generator.getParentPath(), actualFieldName)) {
    generator.writeFieldName(actualFieldName);
    Result result = write(entry.getValue(), moi.getMapValueObjectInspector(), generator);
for (StructField structField : refs) {
    String actualFieldName = alias.toES(structField.getFieldName());
    if (shouldKeep(generator.getParentPath(), actualFieldName)) {
        generator.writeFieldName(actualFieldName);
        Result result = write(soi.getStructFieldData(data, structField),
Code example source: origin: elastic/elasticsearch-hadoop
if (shouldKeep(generator.getParentPath(), fieldName)) {
    generator.writeFieldName(alias.toES(fieldName));
    Result result = write(entry.getValue(), valueType, generator);
Code example source: origin: elastic/elasticsearch-hadoop
if (shouldKeep(generator.getParentPath(), actualName)) {
    generator.writeFieldName(actualName);
    Result res = write(tuples.get(i), nestedFields[i], generator);
Code example source: origin: org.elasticsearch/elasticsearch-hadoop, org.elasticsearch/elasticsearch-spark, org.elasticsearch/elasticsearch-spark-13, org.elasticsearch/elasticsearch-hadoop-mr
The snippets published under these Maven coordinates are, line for line, identical to the elastic/elasticsearch-hadoop examples shown above and are therefore not repeated here.