本文整理了Java中org.apache.hadoop.mapreduce.RecordReader.getCurrentValue
方法的一些代码示例,展示了RecordReader.getCurrentValue
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。RecordReader.getCurrentValue
方法的具体详情如下:
包路径:org.apache.hadoop.mapreduce.RecordReader
类名称:RecordReader
方法名:getCurrentValue
[英]Get the current value.
[中]获取当前值。
代码示例来源:origin: apache/ignite
/** {@inheritDoc} */
@Override public Object getCurrentValue() throws IOException, InterruptedException {
    // Pure pass-through: expose whatever the wrapped Hadoop RecordReader holds.
    final Object currentValue = this.reader.getCurrentValue();
    return currentValue;
}
代码示例来源:origin: apache/hive
/**
 * Returns the record at the reader's current position.
 *
 * @return the current {@code HCatRecord} from the underlying reader
 * @throws RuntimeException wrapping any {@code IOException} or
 *         {@code InterruptedException} from the underlying reader
 */
@Override
public HCatRecord next() {
  try {
    return curRecReader.getCurrentValue();
  } catch (InterruptedException e) {
    // Restore the interrupt flag so callers further up the stack can observe it.
    Thread.currentThread().interrupt();
    throw new RuntimeException(e);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
代码示例来源:origin: apache/hive
/**
 * Advances the wrapped mapreduce RecordReader and, when a record is available,
 * copies its row key and result into the supplied reusable holders.
 *
 * @param rowKey holder populated with the current record's row key
 * @param value  holder populated with the current record's result
 * @return true if a record was read, false at end of input
 * @throws IOException on a read failure, or wrapping an InterruptedException
 */
@Override public boolean next(ImmutableBytesWritable rowKey, ResultWritable value) throws IOException {
  boolean next = false;
  try {
    next = recordReader.nextKeyValue();
    if (next) {
      rowKey.set(recordReader.getCurrentValue().getRow());
      value.setResult(recordReader.getCurrentValue());
    }
  } catch (InterruptedException e) {
    // Preserve the interrupt status before translating to the old-API IOException.
    Thread.currentThread().interrupt();
    throw new IOException(e);
  }
  return next;
}
};
代码示例来源:origin: apache/flink
/**
 * Fetches the next key/value pair from the wrapped Hadoop RecordReader into
 * the reusable tuple.
 *
 * @param record reusable tuple; {@code f0} receives the key, {@code f1} the value
 * @return the populated tuple, or null when the input is exhausted
 * @throws IOException if the underlying reader fails or the thread is interrupted
 */
@Override
public Tuple2<K, V> nextRecord(Tuple2<K, V> record) throws IOException {
  // Make sure a record has actually been fetched before reading it.
  if (!this.fetched) {
    fetchNext();
  }
  if (!this.hasNext) {
    return null; // end of input
  }
  try {
    record.f0 = recordReader.getCurrentKey();
    record.f1 = recordReader.getCurrentValue();
  } catch (InterruptedException e) {
    // Preserve the interrupt status for callers before converting the exception.
    Thread.currentThread().interrupt();
    throw new IOException("Could not get KeyValue pair.", e);
  }
  // Force a fresh fetch on the next call.
  this.fetched = false;
  return record;
}
代码示例来源:origin: apache/flink
/**
 * Reads the next HCatRecord and returns it either converted to a Flink tuple
 * (when field names were configured) or as the raw HCatRecord.
 *
 * @param record reusable record instance used when building the Flink tuple
 * @return the next record, or null when the input is exhausted
 * @throws IOException if the underlying reader fails or the thread is interrupted
 */
@Override
public T nextRecord(T record) throws IOException {
  if (!this.fetched) {
    // first record
    fetchNext();
  }
  if (!this.hasNext) {
    return null;
  }
  try {
    // get next HCatRecord
    HCatRecord v = this.recordReader.getCurrentValue();
    this.fetched = false;
    if (this.fieldNames.length > 0) {
      // return as Flink tuple
      return this.buildFlinkTuple(record, v);
    } else {
      // return as HCatRecord
      return (T) v;
    }
  } catch (InterruptedException e) {
    // Preserve the interrupt status before translating to IOException.
    Thread.currentThread().interrupt();
    throw new IOException("Could not get next record.", e);
  }
}
代码示例来源:origin: apache/incubator-gobblin
/**
 * {@inheritDoc}
 *
 * <p>This method will throw a {@link ClassCastException} if type {@code D} is not
 * compatible with type {@code K} when keys are supposed to be read, or if it is not
 * compatible with type {@code V} when values are supposed to be read.
 */
@Override
@SuppressWarnings("unchecked")
public D readRecord(@Deprecated D reuse) throws DataRecordException, IOException {
  try {
    if (this.recordReader.nextKeyValue()) {
      // Depending on configuration, expose either the key or the value as the record.
      return this.readKeys ? (D) this.recordReader.getCurrentKey() : (D) this.recordReader.getCurrentValue();
    }
  } catch (InterruptedException ie) {
    // Preserve the interrupt status before translating to IOException.
    Thread.currentThread().interrupt();
    throw new IOException(ie);
  }
  // End of input.
  return null;
}
代码示例来源:origin: apache/hive
final ArrayWritable tmpCurValue = realReader.getCurrentValue();
if (value != tmpCurValue) {
final Writable[] arrValue = value.get();
代码示例来源:origin: apache/drill
final ArrayWritable tmpCurValue = realReader.getCurrentValue();
if (value != tmpCurValue) {
final Writable[] arrValue = value.get();
代码示例来源:origin: elastic/elasticsearch-hadoop
if (isJSON) {
dataMap = new HashMap(1);
dataMap.put("data", reader.getCurrentValue());
} else {
dataMap = reader.getCurrentValue();
代码示例来源:origin: thinkaurelius/titan
/**
 * Advances to the next key/value pair that deserializes to a non-null vertex;
 * pairs the deserializer rejects (returns null for) are skipped.
 *
 * @return true if a vertex was produced, false when the input is exhausted
 */
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
    // Keep pulling pairs until one yields a usable vertex or the input ends.
    // TODO titan05 integration -- the duplicate() call may be unnecessary
    for (;;) {
        if (!reader.nextKeyValue()) {
            return false;
        }
        final TinkerVertex candidate = deser.readHadoopVertex(reader.getCurrentKey(), reader.getCurrentValue());
        if (candidate == null) {
            continue; // deserializer filtered this pair out
        }
        vertex = new VertexWritable(candidate);
        //vertexQuery.filterRelationsOf(vertex); // TODO reimplement vertexquery filtering
        return true;
    }
}
代码示例来源:origin: apache/hive
/**
 * Reads the next HCatRecord from the underlying reader and converts it into a
 * Pig Tuple using the output schema. At end of input the converter receives null.
 *
 * @return the converted tuple
 * @throws IOException (as ExecException) on read or conversion failure
 */
@Override
public Tuple getNext() throws IOException {
  try {
    HCatRecord hr = null;
    if (reader.nextKeyValue()) {
      hr = (HCatRecord) reader.getCurrentValue();
    }
    // TODO : we were discussing an iter interface, and also a LazyTuple
    // change this when plans for that solidifies.
    return PigHCatUtil.transformToTuple(hr, outputSchema);
  } catch (ExecException e) {
    throw new ExecException("Error while reading input", 6018,
        PigException.REMOTE_ENVIRONMENT, e);
  } catch (Exception eOther) {
    throw new ExecException("Error converting read value to tuple", 6018,
        PigException.REMOTE_ENVIRONMENT, eOther);
  }
}
代码示例来源:origin: apache/incubator-gobblin
@Test
public void testRecordReader()
    throws Exception {
  // A split covering two work-unit paths.
  List<String> workUnitPaths = Lists.newArrayList("/path1", "/path2");
  GobblinWorkUnitsInputFormat.GobblinSplit gobblinSplit =
      new GobblinWorkUnitsInputFormat.GobblinSplit(workUnitPaths);
  GobblinWorkUnitsInputFormat format = new GobblinWorkUnitsInputFormat();
  RecordReader<LongWritable, Text> reader = format.createRecordReader(gobblinSplit,
      new TaskAttemptContextImpl(new Configuration(), new TaskAttemptID("a", 1, TaskType.MAP, 1, 1)));
  // Keys are positional indices; values are the split's paths, in order.
  reader.nextKeyValue();
  Assert.assertEquals(reader.getCurrentKey().get(), 0);
  Assert.assertEquals(reader.getCurrentValue().toString(), "/path1");
  reader.nextKeyValue();
  Assert.assertEquals(reader.getCurrentKey().get(), 1);
  Assert.assertEquals(reader.getCurrentValue().toString(), "/path2");
  // No third record.
  Assert.assertFalse(reader.nextKeyValue());
}
代码示例来源:origin: apache/hive
valueObj = realReader.getCurrentValue();
} else {
eof = true;
代码示例来源:origin: apache/tinkerpop
/**
 * {@inheritDoc}
 *
 * <p>Delegates directly to the wrapped RecordReader.
 */
@Override
public VertexWritable getCurrentValue() throws IOException, InterruptedException {
    final VertexWritable current = this.recordReader.getCurrentValue();
    return current;
}
代码示例来源:origin: apache/drill
valueObj = realReader.getCurrentValue();
} else {
eof = true;
代码示例来源:origin: apache/avro
value = recordReader.getCurrentValue();
key == recordReader.getCurrentKey());
assertTrue("getCurrentValue() returned different values for the same record",
value == recordReader.getCurrentValue());
value = recordReader.getCurrentValue();
代码示例来源:origin: apache/avro
value = recordReader.getCurrentValue();
key == recordReader.getCurrentKey());
assertTrue("getCurrentValue() returned different values for the same record",
value == recordReader.getCurrentValue());
value = recordReader.getCurrentValue();
代码示例来源:origin: apache/hbase
verifyRowFromMap(rr.getCurrentKey(), rr.getCurrentValue());
rowTracker.addRow(row);
代码示例来源:origin: apache/tinkerpop
/**
 * Returns the next OUT-edge, first draining the current vertex's edge iterator,
 * then advancing through the queue of underlying readers for the next vertex.
 *
 * <p>Order matters here: a reader is only removed from the queue (and closed)
 * once its nextKeyValue() reports exhaustion.
 *
 * @return the next edge wrapped as a HadoopEdge bound to this graph
 * @throws IllegalStateException wrapping any checked failure from the readers
 */
@Override
public Edge next() {
try {
while (true) {
// Serve edges from the current vertex while any remain.
if (this.edgeIterator.hasNext())
return new HadoopEdge(this.edgeIterator.next(), this.graph);
// All readers consumed: signal end of iteration (fast, stackless exception).
if (this.readers.isEmpty())
throw FastNoSuchElementException.instance();
if (this.readers.peek().nextKeyValue()) {
// Head reader produced a vertex; iterate its outgoing edges next.
this.edgeIterator = this.readers.peek().getCurrentValue().get().edges(Direction.OUT);
} else {
// Head reader exhausted: drop it from the queue and release its resources.
this.readers.remove().close();
}
}
} catch (final Exception e) {
throw new IllegalStateException(e.getMessage(), e);
}
}
代码示例来源:origin: apache/tinkerpop
/**
 * Advances to the next vertex, applying the configured graph filter when one
 * is present; vertices rejected by the filter are skipped transparently.
 *
 * @return true when a (possibly filtered) vertex is available, false at end of input
 */
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
    // Without a filter this is a plain pass-through.
    if (this.graphFilter == null) {
        return this.recordReader.nextKeyValue();
    }
    // With a filter: keep reading until a vertex survives filtering or input ends.
    while (this.recordReader.nextKeyValue()) {
        final VertexWritable writable = this.recordReader.getCurrentValue();
        final Optional<StarGraph.StarVertex> filtered = writable.get().applyGraphFilter(this.graphFilter);
        if (filtered.isPresent()) {
            // Swap the payload for the filtered view before reporting success.
            writable.set(filtered.get());
            return true;
        }
    }
    return false;
}
内容来源于网络,如有侵权,请联系作者删除!