This article collects a number of code examples of the Java method org.elasticsearch.hadoop.serialization.Generator.writeBeginObject() and shows how Generator.writeBeginObject() is used in practice. The examples are extracted from selected projects hosted on platforms such as GitHub, Stack Overflow and Maven, so they should serve as useful references. Details of the Generator.writeBeginObject() method:
Package: org.elasticsearch.hadoop.serialization.Generator
Class: Generator
Method: writeBeginObject
Description: none provided; judging from the examples below, writeBeginObject() emits the opening of a JSON object and is paired with a matching writeEndObject().
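Before the collected snippets, here is a minimal, self-contained sketch of the begin/end pairing. It is assembled only from calls that appear in the examples below (JacksonJsonGenerator, FastByteArrayOutputStream, writeFieldName, writeEndObject, close); the import paths and the class and field names are assumptions made for illustration, not taken from the original article.

import org.elasticsearch.hadoop.serialization.Generator;
import org.elasticsearch.hadoop.serialization.json.JacksonJsonGenerator;
import org.elasticsearch.hadoop.util.FastByteArrayOutputStream;

public class WriteBeginObjectSketch {
    public static void main(String[] args) {
        FastByteArrayOutputStream out = new FastByteArrayOutputStream(256);
        Generator generator = new JacksonJsonGenerator(out);

        generator.writeBeginObject();                 // opens the root object: {
        generator.writeFieldName("query");            // "query":
        generator.writeBeginObject();                 // opens a nested object
        generator.writeFieldName("match_all").writeBeginObject().writeEndObject(); // "match_all":{}
        generator.writeEndObject();                   // closes the nested object
        generator.writeEndObject();                   // closes the root object
        generator.close();

        System.out.println(out.toString());           // {"query":{"match_all":{}}}
    }
}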
Code example (origin: elastic/elasticsearch-hadoop)
    @Override
    public void toJson(Generator out) {
        out.writeFieldName("match_all")
                .writeBeginObject().writeEndObject();
    }
}
Code example (origin: elastic/elasticsearch-hadoop)
@Override
public void toJson(Generator out) {
    if (field == null) {
        throw new IllegalArgumentException("inner clause [field] cannot be null");
    }
    if (term == null) {
        throw new IllegalArgumentException("inner clause [term] cannot be null");
    }
    out.writeFieldName("term")
            .writeBeginObject()
            .writeFieldName(field)
            .writeString(term)
            .writeEndObject();
}
Code example (origin: elastic/elasticsearch-hadoop)
    @Override
    public void toJson(Generator out) {
        if (filter == null) {
            throw new IllegalArgumentException("inner clause [filter] cannot be null.");
        }
        out.writeFieldName("constant_score");
        out.writeBeginObject();
        out.writeFieldName("filter");
        out.writeBeginObject();
        filter.toJson(out);
        out.writeEndObject();
        out.writeFieldName("boost");
        out.writeNumber(boost);
        out.writeEndObject();
    }
}
Code example (origin: elastic/elasticsearch-hadoop)
@Override
public void toJson(Generator out) {
    if (query == null) {
        throw new IllegalArgumentException("inner clause [query] cannot be null.");
    }
    out.writeFieldName("filtered");
    out.writeBeginObject();
    out.writeFieldName("query");
    out.writeBeginObject();
    query.toJson(out);
    out.writeEndObject();
    if (filters.isEmpty() == false) {
        out.writeFieldName("filter");
        out.writeBeginObject();
        if (filters.size() == 1) {
            filters.get(0).toJson(out);
        } else {
            BoolQueryBuilder inner = new BoolQueryBuilder();
            for (QueryBuilder filter : filters) {
                inner.must(filter);
            }
            inner.toJson(out);
        }
        out.writeEndObject();
    }
    out.writeEndObject();
}
Code example (origin: elastic/elasticsearch-hadoop)
private static void traverseMap(Parser parser, Generator generator) {
    generator.writeBeginObject();
    parser.nextToken();
    for (; parser.currentToken() != Token.END_OBJECT;) {
        traverse(parser, generator);
    }
    generator.writeEndObject();
    parser.nextToken();
}
Code example (origin: elastic/elasticsearch-hadoop)
// Note: the original excerpt is truncated; the should/filter/must_not blocks below are
// restored to mirror the "must" block, following the clause-list variable names in the loops.
@Override
public void toJson(Generator out) {
    out.writeFieldName("bool");
    out.writeBeginObject();
    if (mustClauses.size() > 0) {
        out.writeFieldName("must");
        out.writeBeginArray();
        for (QueryBuilder innerQuery : mustClauses) {
            out.writeBeginObject();
            innerQuery.toJson(out);
            out.writeEndObject();
        }
        out.writeEndArray();
    }
    if (shouldClauses.size() > 0) {
        out.writeFieldName("should");
        out.writeBeginArray();
        for (QueryBuilder innerQuery : shouldClauses) {
            out.writeBeginObject();
            innerQuery.toJson(out);
            out.writeEndObject();
        }
        out.writeEndArray();
    }
    if (filterClauses.size() > 0) {
        out.writeFieldName("filter");
        out.writeBeginArray();
        for (QueryBuilder innerQuery : filterClauses) {
            out.writeBeginObject();
            innerQuery.toJson(out);
            out.writeEndObject();
        }
        out.writeEndArray();
    }
    if (mustNotClauses.size() > 0) {
        out.writeFieldName("must_not");
        out.writeBeginArray();
        for (QueryBuilder innerQuery : mustNotClauses) {
            out.writeBeginObject();
            innerQuery.toJson(out);
            out.writeEndObject();
        }
        out.writeEndArray();
    }
    out.writeEndObject();
}
Code example (origin: elastic/elasticsearch-hadoop)
generator.writeBeginObject();
for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
    String fieldName = entry.getKey().toString();
    // ...
Code example (origin: elastic/elasticsearch-hadoop)
@SuppressWarnings("unchecked")
@Override
public Result write(SinkCall<Object[], ?> sinkCall, Generator generator) {
    Tuple tuple = CascadingUtils.coerceToString(sinkCall);
    // consider names (in case of aliases these are already applied)
    List<String> names = (List<String>) sinkCall.getContext()[SINK_CTX_ALIASES];
    generator.writeBeginObject();
    for (int i = 0; i < tuple.size(); i++) {
        String name = (i < names.size() ? names.get(i) : "tuple" + i);
        // filter out fields
        if (shouldKeep(generator.getParentPath(), name)) {
            generator.writeFieldName(name);
            Object object = tuple.getObject(i);
            Result result = jdkWriter.write(object, generator);
            if (!result.isSuccesful()) {
                if (object instanceof Writable) {
                    return writableWriter.write((Writable) object, generator);
                }
                return Result.FAILED(object);
            }
        }
    }
    generator.writeEndObject();
    return Result.SUCCESFUL();
}
Code example (origin: elastic/elasticsearch-hadoop)
    public static String printQueryBuilder(QueryBuilder builder, boolean addQuery) {
        FastByteArrayOutputStream out = new FastByteArrayOutputStream(256);
        Generator generator = new JacksonJsonGenerator(out);
        if (addQuery) {
            generator.writeBeginObject().writeFieldName("query");
        }
        generator.writeBeginObject();
        builder.toJson(generator);
        generator.writeEndObject();
        if (addQuery) {
            generator.writeEndObject();
        }
        generator.close();
        return out.toString();
    }
}
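For a rough sense of the output (using the match_all builder from the first example above as a stand-in), printQueryBuilder(matchAllBuilder, true) would return {"query":{"match_all":{}}}, while passing false for addQuery skips the outer "query" wrapper and returns {"match_all":{}}. The variable name matchAllBuilder is only a placeholder for illustration.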
Code example (origin: elastic/elasticsearch-hadoop)
static QueryBuilder randomRawQueryStringQuery(Random rand, int level) {
    QueryBuilder query = randomQuery(rand);
    FastByteArrayOutputStream out = new FastByteArrayOutputStream(256);
    Generator generator = new JacksonJsonGenerator(out);
    generator.writeBeginObject();
    query.toJson(generator);
    generator.writeEndObject();
    generator.close();
    try {
        return new RawQueryBuilder(out.toString().trim(), false);
    } catch (IOException e) {
        throw new IllegalArgumentException("failed to parse raw query", e);
    }
}
Code example (origin: elastic/elasticsearch-hadoop)
    protected Result doWrite(Tuple tuple, Generator generator, String parentField) {
        Fields fields = tuple.getFields();
        generator.writeBeginObject();
        for (String field : fields) {
            if (shouldKeep(parentField, field)) {
                generator.writeFieldName(field);
                Object value = tuple.getValueByField(field);
                if (value instanceof Tuple) {
                    Result result = write((Tuple) value, generator);
                    if (!result.isSuccesful()) {
                        return result;
                    }
                }
                else {
                    Result result = jdkWriter.write(value, generator);
                    if (!result.isSuccesful()) {
                        return result;
                    }
                }
            }
        }
        generator.writeEndObject();
        return Result.SUCCESFUL();
    }
}
Code example (origin: elastic/elasticsearch-hadoop)
generator.writeBeginObject();
generator.writeEndObject();
generator.writeBeginObject();
Code example (origin: elastic/elasticsearch-hadoop)
Map<Writable, Writable> map = (Map<Writable, Writable>) writable;
generator.writeBeginObject();
Code example (origin: elastic/elasticsearch-hadoop)
// lines excerpted from the same serializer; surrounding code omitted
generator.writeFieldName(FieldNames.FIELD_LABELS).writeBeginObject();
for (Map.Entry<String, String> label : labels.entrySet()) {
    generator.writeFieldName(label.getKey()).writeString(label.getValue());
}
generator.writeFieldName(FieldNames.FIELD_HOST).writeBeginObject();
generator.writeFieldName(FieldNames.FIELD_HOST_OS).writeBeginObject();
generator.writeFieldName(FieldNames.FIELD_HOST_TIMEZONE).writeBeginObject();
generator.writeFieldName(FieldNames.FIELD_HOST_TIMEZONE_OFFSET).writeBeginObject();
generator.writeFieldName(FieldNames.FIELD_ERROR).writeBeginObject();
generator.writeFieldName(FieldNames.FIELD_EVENT).writeBeginObject();
Code example (origin: elastic/elasticsearch-hadoop)
// lines excerpted from a Hive ObjectInspector-based writer; surrounding code omitted
MapObjectInspector moi = (MapObjectInspector) oi;
generator.writeBeginObject();
for (Map.Entry<?, ?> entry : moi.getMap(data).entrySet()) {
    // ...
generator.writeBeginObject();
for (StructField structField : refs) {
    String actualFieldName = alias.toES(structField.getFieldName());
    // ...
Code example (origin: elastic/elasticsearch-hadoop)
generator.writeBeginObject();
Code example (origin: elastic/elasticsearch-hadoop)
out.writeBeginObject();
out.writeFieldName("query");
out.writeString(query);
Code example (origin: elastic/elasticsearch-hadoop)
// the writeFieldName(...) calls between these writeBeginObject() calls are omitted in the excerpt
generator.writeBeginObject();
assertEquals("", generator.getParentPath());
generator.writeBeginObject();
assertEquals("test", generator.getParentPath());
generator.writeBeginObject();
assertEquals("test.subfield", generator.getParentPath());
Code example (origin: org.elasticsearch/elasticsearch-hadoop-mr)
    @Override
    public void toJson(Generator out) {
        out.writeFieldName("match_all")
                .writeBeginObject().writeEndObject();
    }
}
Code example (origin: org.elasticsearch/elasticsearch-spark-13)
    @Override
    public void toJson(Generator out) {
        out.writeFieldName("match_all")
                .writeBeginObject().writeEndObject();
    }
}
The content above is collected from the Internet; if it infringes your rights, please contact the author to have it removed.