本文整理了Java中 org.apache.druid.query.Query.getIntervals 方法的一些代码示例，展示了 Query.getIntervals 的具体用法。这些代码示例主要来源于 Github、Stackoverflow、Maven 等平台，是从一些精选项目中提取出来的代码，具有较强的参考意义，能在一定程度上帮助到你。Query.getIntervals 方法的具体详情如下：
包路径：org.apache.druid.query.Query
类名称：Query
方法名：getIntervals
暂无
代码示例来源:origin: apache/incubator-druid
/**
 * Returns the interval list of the wrapped query, unchanged.
 *
 * @return the intervals of the delegate query
 */
@Override
public List<Interval> getIntervals()
{
  final List<Interval> delegated = query.getIntervals();
  return delegated;
}
代码示例来源:origin: apache/incubator-druid
/**
 * Resolves the intervals of the deepest nested query: unwraps
 * {@code QueryDataSource} layers recursively and returns the intervals of
 * the first query whose data source is not itself a query.
 */
default List<Interval> getIntervalsOfInnerMostQuery()
{
  if (!(getDataSource() instanceof QueryDataSource)) {
    // Base case: this query is already the innermost one.
    return getIntervals();
  }
  //noinspection unchecked
  return ((QueryDataSource) getDataSource()).getQuery().getIntervalsOfInnerMostQuery();
}
}
代码示例来源:origin: apache/incubator-druid
/**
 * Builds a runner for {@code query} by delegating to the segment walker
 * using the query's own intervals.
 */
@Override
public <T> QueryRunner<T> getQueryRunner(final Query<T> query)
{
// Calling getQueryRunnerForIntervals here works because there's only one segment per interval for RealtimePlumber.
return texasRanger.getQueryRunnerForIntervals(query, query.getIntervals());
}
代码示例来源:origin: apache/incubator-druid
/**
 * Records the query's intervals on the metric builder, one string per
 * interval, under the {@code DruidMetrics.INTERVAL} dimension.
 */
@Override
public void interval(QueryType query)
{
  checkModifiedFromOwnerThread();
  final String[] intervalStrings =
      query.getIntervals().stream().map(Interval::toString).toArray(String[]::new);
  builder.setDimension(DruidMetrics.INTERVAL, intervalStrings);
}
代码示例来源:origin: apache/incubator-druid
/**
 * Resolves candidate server locations for the data source and intervals of
 * the query read from the request body.
 *
 * <p>Errors during parsing or resolution are reported through the response
 * context rather than propagated to the caller.
 */
@POST
@Path("/candidates")
@Produces({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE})
@Consumes({MediaType.APPLICATION_JSON, SmileMediaTypes.APPLICATION_JACKSON_SMILE, APPLICATION_SMILE})
@ResourceFilters(StateResourceFilter.class)
public Response getQueryTargets(
    InputStream in,
    @QueryParam("pretty") String pretty,
    @QueryParam("numCandidates") @DefaultValue("-1") int numCandidates,
    @Context final HttpServletRequest req
) throws IOException
{
  final ResponseContext resContext = createContext(req.getContentType(), pretty != null);
  try {
    final Query<?> parsedQuery = resContext.getObjectMapper().readValue(in, Query.class);
    return resContext.ok(
        ServerViewUtil.getTargetLocations(
            brokerServerView,
            parsedQuery.getDataSource(),
            parsedQuery.getIntervals(),
            numCandidates
        )
    );
  }
  catch (Exception e) {
    return resContext.gotError(e);
  }
}
}
代码示例来源:origin: apache/incubator-druid
.create(queryPlus.getQuery().getIntervals())
.transformCat(
new Function<Interval, Iterable<Interval>>()
代码示例来源:origin: apache/incubator-druid
/**
 * Executes the native query produced by the planner, dispatching on the
 * concrete query type.
 *
 * @throws CannotBuildQueryException if a time condition is required but the
 *         innermost query spans eternity (i.e. has no __time filter)
 * @throws ISE if the query type is not one of the supported kinds
 */
public Sequence<Object[]> runQuery(final DruidQuery druidQuery)
{
  final Query theQuery = druidQuery.getQuery();
  final Query innermost = findInnerMostQuery(theQuery);
  final boolean requireTimeCondition = plannerContext.getPlannerConfig().isRequireTimeCondition();
  if (requireTimeCondition && innermost.getIntervals().equals(Intervals.ONLY_ETERNITY)) {
    throw new CannotBuildQueryException(
        "requireTimeCondition is enabled, all queries must include a filter condition on the __time column"
    );
  }
  // Dispatch on concrete query type; each branch returns directly.
  if (theQuery instanceof TimeseriesQuery) {
    return executeTimeseries(druidQuery, (TimeseriesQuery) theQuery);
  }
  if (theQuery instanceof TopNQuery) {
    return executeTopN(druidQuery, (TopNQuery) theQuery);
  }
  if (theQuery instanceof GroupByQuery) {
    return executeGroupBy(druidQuery, (GroupByQuery) theQuery);
  }
  if (theQuery instanceof ScanQuery) {
    return executeScan(druidQuery, (ScanQuery) theQuery);
  }
  if (theQuery instanceof SelectQuery) {
    return executeSelect(druidQuery, (SelectQuery) theQuery);
  }
  throw new ISE("Cannot run query of class[%s]", theQuery.getClass().getName());
}
代码示例来源:origin: apache/incubator-druid
/**
 * Runs {@code query} through the caching clustered client, using the
 * query's own intervals to pick the runners.
 *
 * @param client          the clustered client supplying runners
 * @param query           the query to execute
 * @param responseContext mutable per-request context passed to the runner
 * @return the result sequence produced by the selected runner
 */
private static <T> Sequence<T> runQuery(
CachingClusteredClient client,
final Query<T> query,
final Map<String, Object> responseContext
)
{
return client.getQueryRunnerForIntervals(query, query.getIntervals()).run(
QueryPlus.wrap(query),
responseContext
);
}
}
代码示例来源:origin: apache/incubator-druid
LoadRule baseRule = null;
for (Interval interval : query.getIntervals()) {
int currRulePosition = 0;
for (Rule rule : rules) {
"WTF?! No brokerServiceName found for datasource[%s], intervals[%s]. Using default[%s].",
query.getDataSource(),
query.getIntervals(),
tierConfig.getDefaultBrokerServiceName()
);
代码示例来源:origin: apache/incubator-druid
/**
 * Wraps the delegate runner's results in a single by-segment Result when
 * the query context requests by-segment output; otherwise passes results
 * through untouched.
 */
@Override
@SuppressWarnings("unchecked")
public Sequence<T> run(final QueryPlus<T> queryPlus, Map<String, Object> responseContext)
{
  if (!QueryContexts.isBySegment(queryPlus.getQuery())) {
    return base.run(queryPlus, responseContext);
  }
  // Materialize the delegate's results so they can be packaged per segment.
  final List<T> materialized = base.run(queryPlus, responseContext).toList();
  final T bySegmentResult = (T) new Result<>(
      timestamp,
      new BySegmentResultValueClass<>(
          materialized,
          segmentId.toString(),
          queryPlus.getQuery().getIntervals().get(0)
      )
  );
  return Sequences.simple(Collections.singletonList(bySegmentResult));
}
}
代码示例来源:origin: apache/incubator-druid
final long offset = computeOffset(now, tz);
final Interval interval = queryPlus.getQuery().getIntervals().get(0);
final Interval modifiedInterval = new Interval(
Math.min(interval.getStartMillis() + offset, now + offset),
代码示例来源:origin: apache/incubator-druid
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> responseContext
)
{
  final Query<Result<TimeseriesResultValue>> query = queryPlus.getQuery();
  // One synthetic result at each endpoint of the query's first interval.
  final Result<TimeseriesResultValue> atStart = new Result<>(
      query.getIntervals().get(0).getStart(),
      new TimeseriesResultValue(ImmutableMap.of("metric", 2))
  );
  final Result<TimeseriesResultValue> atEnd = new Result<>(
      query.getIntervals().get(0).getEnd(),
      new TimeseriesResultValue(ImmutableMap.of("metric", 3))
  );
  return Sequences.simple(ImmutableList.of(atStart, atEnd));
}
},
代码示例来源:origin: apache/incubator-druid
/**
 * Delegates execution to the client, selecting runners via the query's
 * own intervals.
 */
@Override
public Sequence run(final QueryPlus queryPlus, final Map responseContext)
{
// NOTE(review): raw Sequence/QueryPlus/Map types here — presumably test scaffolding; confirm before tightening generics.
return client.getQueryRunnerForIntervals(queryPlus.getQuery(), queryPlus.getQuery().getIntervals())
.run(queryPlus, responseContext);
}
};
代码示例来源:origin: apache/incubator-druid
/**
 * Verifies JSON deserialization of a SegmentMetadataQuery with explicit
 * intervals and analysis types, plus a full serialize/deserialize round trip.
 */
@Test
public void testSerde() throws Exception
{
  final String json = "{\n"
      + " \"queryType\":\"segmentMetadata\",\n"
      + " \"dataSource\":\"test_ds\",\n"
      + " \"intervals\":[\"2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z\"],\n"
      + " \"analysisTypes\":[\"cardinality\",\"size\"]\n"
      + "}";
  final Query parsed = MAPPER.readValue(json, Query.class);
  Assert.assertTrue(parsed instanceof SegmentMetadataQuery);
  Assert.assertEquals("test_ds", Iterables.getOnlyElement(parsed.getDataSource().getNames()));
  Assert.assertEquals(
      Intervals.of("2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z"),
      parsed.getIntervals().get(0)
  );
  final EnumSet<SegmentMetadataQuery.AnalysisType> expectedAnalysisTypes = EnumSet.of(
      SegmentMetadataQuery.AnalysisType.CARDINALITY,
      SegmentMetadataQuery.AnalysisType.SIZE
  );
  Assert.assertEquals(expectedAnalysisTypes, ((SegmentMetadataQuery) parsed).getAnalysisTypes());
  // Round trip: serialize then deserialize must yield an equal query.
  Assert.assertEquals(parsed, MAPPER.readValue(MAPPER.writeValueAsString(parsed), Query.class));
}
代码示例来源:origin: apache/incubator-druid
/**
 * Verifies that a SegmentMetadataQuery without explicit intervals defaults
 * to eternity, round-trips through JSON, and survives a builder copy.
 */
@Test
public void testSerdeWithDefaultInterval() throws Exception
{
  final String json = "{\n"
      + " \"queryType\":\"segmentMetadata\",\n"
      + " \"dataSource\":\"test_ds\"\n"
      + "}";
  final Query parsed = MAPPER.readValue(json, Query.class);
  Assert.assertTrue(parsed instanceof SegmentMetadataQuery);
  Assert.assertEquals("test_ds", Iterables.getOnlyElement(parsed.getDataSource().getNames()));
  Assert.assertEquals(Intervals.ETERNITY, parsed.getIntervals().get(0));
  Assert.assertTrue(((SegmentMetadataQuery) parsed).isUsingDefaultInterval());
  // Round trip: serialize then deserialize must yield an equal query.
  Assert.assertEquals(parsed, MAPPER.readValue(MAPPER.writeValueAsString(parsed), Query.class));
  // Builder copy must also produce an equal query.
  Assert.assertEquals(parsed, Druids.SegmentMetadataQueryBuilder.copy((SegmentMetadataQuery) parsed).build());
}
代码示例来源:origin: apache/incubator-druid
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> responseContext
)
{
  // Two fixed results plus one pinned to the end of the query's first interval.
  final Result<TimeseriesResultValue> first = new Result<>(
      DateTimes.of("2014-01-09T-07"),
      new TimeseriesResultValue(ImmutableMap.of("metric", 2))
  );
  final Result<TimeseriesResultValue> second = new Result<>(
      DateTimes.of("2014-01-11T-07"),
      new TimeseriesResultValue(ImmutableMap.of("metric", 3))
  );
  final Result<TimeseriesResultValue> last = new Result<>(
      queryPlus.getQuery().getIntervals().get(0).getEnd(),
      new TimeseriesResultValue(ImmutableMap.of("metric", 5))
  );
  return Sequences.simple(ImmutableList.of(first, second, last));
}
},
代码示例来源:origin: apache/incubator-druid
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> responseContext
)
{
  // Two fixed results plus one pinned to the end of the query's first interval.
  final Result<TimeseriesResultValue> first = new Result<>(
      DateTimes.of("2014-01-09"),
      new TimeseriesResultValue(ImmutableMap.of("metric", 2))
  );
  final Result<TimeseriesResultValue> second = new Result<>(
      DateTimes.of("2014-01-11"),
      new TimeseriesResultValue(ImmutableMap.of("metric", 3))
  );
  final Result<TimeseriesResultValue> last = new Result<>(
      queryPlus.getQuery().getIntervals().get(0).getEnd(),
      new TimeseriesResultValue(ImmutableMap.of("metric", 5))
  );
  return Sequences.simple(ImmutableList.of(first, second, last));
}
},
代码示例来源:origin: apache/incubator-druid
@Override
public Sequence<Result<TimeseriesResultValue>> run(
    QueryPlus<Result<TimeseriesResultValue>> queryPlus,
    Map<String, Object> responseContext
)
{
  // Two fixed results plus one pinned to the end of the query's first interval.
  final Result<TimeseriesResultValue> first = new Result<>(
      DateTimes.of("2014-01-09T-08"),
      new TimeseriesResultValue(ImmutableMap.of("metric", 2))
  );
  final Result<TimeseriesResultValue> second = new Result<>(
      DateTimes.of("2014-01-11T-08"),
      new TimeseriesResultValue(ImmutableMap.of("metric", 3))
  );
  final Result<TimeseriesResultValue> last = new Result<>(
      queryPlus.getQuery().getIntervals().get(0).getEnd(),
      new TimeseriesResultValue(ImmutableMap.of("metric", 5))
  );
  return Sequences.simple(ImmutableList.of(first, second, last));
}
},
代码示例来源:origin: apache/incubator-druid
Assert.assertEquals(1, query.getIntervals().size());
代码示例来源:origin: apache/incubator-druid
/**
 * Looks up every timeline holder overlapping the query's intervals, runs the
 * query against each segment that survives tool-chest filtering, and merges
 * the per-segment sequences using the query's result ordering.
 */
@Override
public Sequence<T> run(QueryPlus<T> queryPlus, Map<String, Object> responseContext)
{
  final Query<T> query = queryPlus.getQuery();
  // Gather candidate holders for all of the query's intervals.
  final List<TimelineObjectHolder> candidates = new ArrayList<>();
  for (Interval interval : query.getIntervals()) {
    candidates.addAll(timeline.lookup(interval));
  }
  // Run against each filtered segment, pinning the runner to that segment's descriptor.
  final List<Sequence<T>> perSegment = new ArrayList<>();
  for (TimelineObjectHolder<String, Segment> holder : toolChest.filterSegments(query, candidates)) {
    final Segment segment = holder.getObject().getChunk(0).getObject();
    final QueryPlus pinned = queryPlus.withQuerySegmentSpec(
        new SpecificSegmentSpec(
            new SegmentDescriptor(holder.getInterval(), holder.getVersion(), 0)
        )
    );
    perSegment.add(factory.createRunner(segment).run(pinned, responseContext));
  }
  return new MergeSequence<>(query.getResultOrdering(), Sequences.simple(perSegment));
}
}
内容来源于网络,如有侵权,请联系作者删除!