本文整理了Java中org.apache.hadoop.mapred.Mapper.close()方法的一些代码示例,展示了Mapper.close()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度帮助到你。Mapper.close()方法的具体详情如下:
包路径:org.apache.hadoop.mapred.Mapper
类名称:Mapper
方法名:close
方法描述:暂无(原文未提供该方法的说明)。
代码示例来源:origin: apache/ignite
mapper.close();
代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core
/**
 * Shuts down the delegate mapper, if one was ever configured.
 *
 * @throws IOException if the delegate mapper fails to close
 */
public void close() throws IOException {
  if (mapper == null) {
    return;
  }
  mapper.close();
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core
/**
 * Shuts down the delegate mapper, if one was ever configured.
 *
 * @throws IOException if the delegate mapper fails to close
 */
public void close() throws IOException {
  if (mapper == null) {
    return;
  }
  mapper.close();
}
代码示例来源:origin: org.apache.hadoop/hadoop-mapred
/**
 * Shuts down the delegate mapper, if one was ever configured.
 *
 * @throws IOException if the delegate mapper fails to close
 */
public void close() throws IOException {
  if (mapper == null) {
    return;
  }
  mapper.close();
}
代码示例来源:origin: io.hops/hadoop-mapreduce-client-core
/**
 * Shuts down the delegate mapper, if one was ever configured.
 *
 * @throws IOException if the delegate mapper fails to close
 */
public void close() throws IOException {
  if (mapper == null) {
    return;
  }
  mapper.close();
}
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core
/**
 * Shuts down the delegate mapper, if one was ever configured.
 *
 * @throws IOException if the delegate mapper fails to close
 */
public void close() throws IOException {
  if (mapper == null) {
    return;
  }
  mapper.close();
}
代码示例来源:origin: com.facebook.hadoop/hadoop-core
/**
 * Shuts down the delegate mapper, if one was ever configured.
 *
 * @throws IOException if the delegate mapper fails to close
 */
public void close() throws IOException {
  if (mapper == null) {
    return;
  }
  mapper.close();
}
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
/**
 * Shuts down the delegate mapper, if one was ever configured.
 *
 * @throws IOException if the delegate mapper fails to close
 */
public void close() throws IOException {
  if (mapper == null) {
    return;
  }
  mapper.close();
}
代码示例来源:origin: org.apache.crunch/crunch-core
@Override
public void cleanup(Emitter<Pair<K2, V2>> emitter) {
  // Close the wrapped old-API mapper; its checked IOException is rethrown
  // unchecked because the Crunch cleanup contract does not declare it.
  try {
    instance.close();
  } catch (IOException ioe) {
    throw new CrunchRuntimeException("Error closing mapper = " + mapperClass, ioe);
  }
}
代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core
/**
 * Closes every element of the chain: each mapper in order, then the
 * trailing reducer when one is configured.
 *
 * @throws IOException if any chain element fails to close
 */
public void close() throws IOException {
  for (Mapper element : mappers) {
    element.close();
  }
  if (reducer == null) {
    return;
  }
  reducer.close();
}
代码示例来源:origin: io.hops/hadoop-mapreduce-client-core
/**
 * Closes every element of the chain: each mapper in order, then the
 * trailing reducer when one is configured.
 *
 * @throws IOException if any chain element fails to close
 */
public void close() throws IOException {
  for (Mapper element : mappers) {
    element.close();
  }
  if (reducer == null) {
    return;
  }
  reducer.close();
}
代码示例来源:origin: ch.cern.hadoop/hadoop-mapreduce-client-core
/**
 * Closes every element of the chain: each mapper in order, then the
 * trailing reducer when one is configured.
 *
 * @throws IOException if any chain element fails to close
 */
public void close() throws IOException {
  for (Mapper element : mappers) {
    element.close();
  }
  if (reducer == null) {
    return;
  }
  reducer.close();
}
代码示例来源:origin: org.apache.hadoop/hadoop-mapred
/**
 * Closes every element of the chain: each mapper in order, then the
 * trailing reducer when one is configured.
 *
 * @throws IOException if any chain element fails to close
 */
public void close() throws IOException {
  for (Mapper element : mappers) {
    element.close();
  }
  if (reducer == null) {
    return;
  }
  reducer.close();
}
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
/**
 * Closes every element of the chain: each mapper in order, then the
 * trailing reducer when one is configured.
 *
 * @throws IOException if any chain element fails to close
 */
public void close() throws IOException {
  for (Mapper element : mappers) {
    element.close();
  }
  if (reducer == null) {
    return;
  }
  reducer.close();
}
代码示例来源:origin: com.facebook.hadoop/hadoop-core
/**
 * Closes every element of the chain: each mapper in order, then the
 * trailing reducer when one is configured.
 *
 * @throws IOException if any chain element fails to close
 */
public void close() throws IOException {
  for (Mapper element : mappers) {
    element.close();
  }
  if (reducer == null) {
    return;
  }
  reducer.close();
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core
/**
 * Closes every element of the chain: each mapper in order, then the
 * trailing reducer when one is configured.
 *
 * @throws IOException if any chain element fails to close
 */
public void close() throws IOException {
  for (Mapper element : mappers) {
    element.close();
  }
  if (reducer == null) {
    return;
  }
  reducer.close();
}
代码示例来源:origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core
/**
 * Drives the wrapped mapper over every record in the input, guaranteeing the
 * mapper is closed once the input is exhausted or an error occurs.
 *
 * @param input    source of key/value records
 * @param output   collector that receives the mapper's emitted pairs
 * @param reporter progress/counter reporter passed through to the mapper
 * @throws IOException if reading, mapping, or closing fails
 */
public void run(RecordReader<K1, V1> input, OutputCollector<K2, V2> output,
    Reporter reporter)
    throws IOException {
  try {
    // Reusable holders; the RecordReader refills them on every next() call.
    K1 k = input.createKey();
    V1 v = input.createValue();
    while (input.next(k, v)) {
      mapper.map(k, v, output, reporter);
      if (incrProcCount) {
        // Track processed records so the skip-bad-records machinery can work.
        reporter.incrCounter(SkipBadRecords.COUNTER_GROUP,
            SkipBadRecords.COUNTER_MAP_PROCESSED_RECORDS, 1);
      }
    }
  } finally {
    mapper.close();
  }
}
代码示例来源:origin: io.hops/hadoop-mapreduce-client-core
/**
 * Drives the wrapped mapper over every record in the input, guaranteeing the
 * mapper is closed once the input is exhausted or an error occurs.
 *
 * @param input    source of key/value records
 * @param output   collector that receives the mapper's emitted pairs
 * @param reporter progress/counter reporter passed through to the mapper
 * @throws IOException if reading, mapping, or closing fails
 */
public void run(RecordReader<K1, V1> input, OutputCollector<K2, V2> output,
    Reporter reporter)
    throws IOException {
  try {
    // Reusable holders; the RecordReader refills them on every next() call.
    K1 k = input.createKey();
    V1 v = input.createValue();
    while (input.next(k, v)) {
      mapper.map(k, v, output, reporter);
      if (incrProcCount) {
        // Track processed records so the skip-bad-records machinery can work.
        reporter.incrCounter(SkipBadRecords.COUNTER_GROUP,
            SkipBadRecords.COUNTER_MAP_PROCESSED_RECORDS, 1);
      }
    }
  } finally {
    mapper.close();
  }
}
代码示例来源:origin: org.apache.hadoop/hadoop-mapred
/**
 * Drives the wrapped mapper over every record in the input, guaranteeing the
 * mapper is closed once the input is exhausted or an error occurs.
 *
 * @param input    source of key/value records
 * @param output   collector that receives the mapper's emitted pairs
 * @param reporter progress/counter reporter passed through to the mapper
 * @throws IOException if reading, mapping, or closing fails
 */
public void run(RecordReader<K1, V1> input, OutputCollector<K2, V2> output,
    Reporter reporter)
    throws IOException {
  try {
    // Reusable holders; the RecordReader refills them on every next() call.
    K1 k = input.createKey();
    V1 v = input.createValue();
    while (input.next(k, v)) {
      mapper.map(k, v, output, reporter);
      if (incrProcCount) {
        // Track processed records so the skip-bad-records machinery can work.
        reporter.incrCounter(SkipBadRecords.COUNTER_GROUP,
            SkipBadRecords.COUNTER_MAP_PROCESSED_RECORDS, 1);
      }
    }
  } finally {
    mapper.close();
  }
}
代码示例来源:origin: io.prestosql.hadoop/hadoop-apache
/**
 * Drives the wrapped mapper over every record in the input, guaranteeing the
 * mapper is closed once the input is exhausted or an error occurs.
 *
 * @param input    source of key/value records
 * @param output   collector that receives the mapper's emitted pairs
 * @param reporter progress/counter reporter passed through to the mapper
 * @throws IOException if reading, mapping, or closing fails
 */
public void run(RecordReader<K1, V1> input, OutputCollector<K2, V2> output,
    Reporter reporter)
    throws IOException {
  try {
    // Reusable holders; the RecordReader refills them on every next() call.
    K1 k = input.createKey();
    V1 v = input.createValue();
    while (input.next(k, v)) {
      mapper.map(k, v, output, reporter);
      if (incrProcCount) {
        // Track processed records so the skip-bad-records machinery can work.
        reporter.incrCounter(SkipBadRecords.COUNTER_GROUP,
            SkipBadRecords.COUNTER_MAP_PROCESSED_RECORDS, 1);
      }
    }
  } finally {
    mapper.close();
  }
}
内容来源于网络,如有侵权,请联系作者删除!