org.apache.flume.Channel.setName()方法的使用及代码示例

x33g5p2x  于2022-01-18 转载在 其他  
字(8.4k)|赞(0)|评价(0)|浏览(211)

本文整理了Java中org.apache.flume.Channel.setName()方法的一些代码示例,展示了Channel.setName()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Channel.setName()方法的具体详情如下:
包路径:org.apache.flume.Channel
类名称:Channel
方法名:setName

Channel.setName介绍

暂无

代码示例

代码示例来源:origin: apache/flume

private Channel getOrCreateChannel(
    ListMultimap<Class<? extends Channel>, String> channelsNotReused,
    String name, String type)
    throws FlumeException {
  Class<? extends Channel> channelClass = channelFactory.getClass(type);
  // A @Disposable channel must be rebuilt on every re-configuration,
  // so it is never served from (or stored in) the cache.
  if (channelClass.isAnnotationPresent(Disposable.class)) {
    Channel disposable = channelFactory.create(name, type);
    disposable.setName(name);
    return disposable;
  }
  // Lazily create the per-class name->channel cache bucket.
  Map<String, Channel> byName = channelCache.get(channelClass);
  if (byName == null) {
    byName = new HashMap<String, Channel>();
    channelCache.put(channelClass, byName);
  }
  // Reuse a previously built channel with this name, creating it on demand.
  Channel cached = byName.get(name);
  if (cached == null) {
    cached = channelFactory.create(name, type);
    cached.setName(name);
    byName.put(name, cached);
  }
  // This channel is in use again, so remove it from the not-reused set.
  channelsNotReused.get(channelClass).remove(name);
  return cached;
}

代码示例来源:origin: apache/phoenix

private Channel initChannel() {
  // Build an in-memory channel sized for the test workload.
  Context ctx = new Context();
  ctx.put("capacity", "10000");
  ctx.put("transactionCapacity", "200");
  Channel memoryChannel = new MemoryChannel();
  memoryChannel.setName("memorychannel");
  Configurables.configure(memoryChannel, ctx);
  return memoryChannel;
}

代码示例来源:origin: apache/phoenix

private Channel initChannel() {
  // Configure and return a memory-backed channel for the test.
  Context channelConfig = new Context();
  channelConfig.put("capacity", "10000");
  channelConfig.put("transactionCapacity", "200");
  Channel memChannel = new MemoryChannel();
  memChannel.setName("memorychannel");
  Configurables.configure(memChannel, channelConfig);
  return memChannel;
}

代码示例来源:origin: apache/phoenix

private Channel initChannel() {
  // Create a MemoryChannel and apply the capacity settings.
  Context cfg = new Context();
  cfg.put("capacity", "10000");
  cfg.put("transactionCapacity", "200");
  Channel result = new MemoryChannel();
  result.setName("memorychannel");
  Configurables.configure(result, cfg);
  return result;
}

代码示例来源:origin: forcedotcom/phoenix

private Channel initChannel() {
  // Channel configuration: in-memory channel with fixed capacities.
  Context context = new Context();
  context.put("capacity", "10000");
  context.put("transactionCapacity", "200");
  Channel ch = new MemoryChannel();
  ch.setName("memorychannel");
  Configurables.configure(ch, context);
  return ch;
}

代码示例来源:origin: apache/phoenix

private Channel initChannel() {
  // Set up the in-memory channel used by the test pipeline.
  Context channelSettings = new Context();
  channelSettings.put("capacity", "10000");
  channelSettings.put("transactionCapacity", "200");
  Channel chan = new MemoryChannel();
  chan.setName("memorychannel");
  Configurables.configure(chan, channelSettings);
  return chan;
}

代码示例来源:origin: forcedotcom/phoenix

private Channel initChannel() {
  // Prepare a named MemoryChannel with the required capacity settings.
  Context props = new Context();
  props.put("capacity", "10000");
  props.put("transactionCapacity", "200");
  Channel c = new MemoryChannel();
  c.setName("memorychannel");
  Configurables.configure(c, props);
  return c;
}

代码示例来源:origin: org.apache.flume/flume-ng-node

private Channel getOrCreateChannel(
    ListMultimap<Class<? extends Channel>, String> channelsNotReused,
    String name, String type)
    throws FlumeException {
  Class<? extends Channel> clazz = channelFactory.getClass(type);
  // Channels annotated @Disposable want a fresh instance on each
  // re-configuration; never cache those.
  if (clazz.isAnnotationPresent(Disposable.class)) {
    Channel fresh = channelFactory.create(name, type);
    fresh.setName(name);
    return fresh;
  }
  // Get (or lazily create) the name->channel map for this channel class.
  Map<String, Channel> instances = channelCache.get(clazz);
  if (instances == null) {
    instances = new HashMap<String, Channel>();
    channelCache.put(clazz, instances);
  }
  // Return the cached channel, building and caching it if absent.
  Channel ch = instances.get(name);
  if (ch == null) {
    ch = channelFactory.create(name, type);
    ch.setName(name);
    instances.put(name, ch);
  }
  // Mark this channel as reused for this configuration cycle.
  channelsNotReused.get(clazz).remove(name);
  return ch;
}

代码示例来源:origin: jaibeermalik/searchanalytics-bigdata

private void createSink() {
  // Build an HDFS sink wired to a fresh in-memory channel, then start both.
  sink = new HDFSEventSink();
  sink.setName("HDFSEventSink-" + UUID.randomUUID());

  channel = new MemoryChannel();
  Map<String, String> channelProperties = new HashMap<>();
  channelProperties.put("capacity", "100000");
  channelProperties.put("transactionCapacity", "1000");
  Configurables.configure(channel, new Context(channelProperties));
  channel.setName("HDFSEventSinkChannel-" + UUID.randomUUID());

  // Sink settings: roll only on idle timeout, stream events as-is.
  Map<String, String> sinkProperties = new HashMap<>();
  sinkProperties.put("hdfs.type", "hdfs");
  String hdfsBasePath = hadoopClusterService.getHDFSUri()
      + "/searchevents";
  sinkProperties.put("hdfs.path", hdfsBasePath + "/%Y/%m/%d/%H");
  sinkProperties.put("hdfs.filePrefix", "searchevents");
  sinkProperties.put("hdfs.fileType", "DataStream");
  sinkProperties.put("hdfs.rollInterval", "0");
  sinkProperties.put("hdfs.rollSize", "0");
  sinkProperties.put("hdfs.idleTimeout", "1");
  sinkProperties.put("hdfs.rollCount", "0");
  sinkProperties.put("hdfs.batchSize", "1000");
  sinkProperties.put("hdfs.useLocalTimeStamp", "true");
  sink.configure(new Context(sinkProperties));

  sink.setChannel(channel);
  sink.start();
  channel.start();
}

代码示例来源:origin: jaibeermalik/searchanalytics-bigdata

private void createSink() {
  // Build an Elasticsearch sink backed by an in-memory channel and start both.
  sink = new ElasticSearchSink();
  sink.setName("ElasticSearchSink-" + UUID.randomUUID());

  channel = new MemoryChannel();
  Map<String, String> channelProperties = new HashMap<>();
  channelProperties.put("capacity", "100000");
  channelProperties.put("transactionCapacity", "1000");
  Configurables.configure(channel, new Context(channelProperties));
  channel.setName("ElasticSearchSinkChannel-" + UUID.randomUUID());

  // Sink settings: target local ES node, small batches, JSON serializer.
  Map<String, String> sinkProperties = new HashMap<>();
  sinkProperties.put(ElasticSearchSinkConstants.HOSTNAMES, "127.0.0.1:9310");
  String indexNamePrefix = "recentlyviewed";
  sinkProperties.put(ElasticSearchSinkConstants.INDEX_NAME, indexNamePrefix);
  sinkProperties.put(ElasticSearchSinkConstants.INDEX_TYPE, "clickevent");
  sinkProperties.put(ElasticSearchSinkConstants.CLUSTER_NAME,
      "jai-testclusterName");
  sinkProperties.put(ElasticSearchSinkConstants.BATCH_SIZE, "10");
  sinkProperties.put(ElasticSearchSinkConstants.SERIALIZER,
      ElasticSearchJsonBodyEventSerializer.class.getName());
  sink.configure(new Context(sinkProperties));

  sink.setChannel(channel);
  sink.start();
  channel.start();
}

代码示例来源:origin: jaibeermalik/searchanalytics-bigdata

Context channelContext = new Context(channelParamters);
Configurables.configure(channel, channelContext);
channel.setName("HbaseSinkChannel-" + UUID.randomUUID());

代码示例来源:origin: jaibeermalik/searchanalytics-bigdata

private void createSparkAvroSink() {
  // Build an Avro sink (pointing at a local Spark receiver) backed by a
  // fresh in-memory channel, then start channel and sink.
  sparkAvroChannel = new MemoryChannel();
  Map<String, String> channelParameters = new HashMap<>();
  channelParameters.put("capacity", "100000");
  channelParameters.put("transactionCapacity", "1000");
  Context channelContext = new Context(channelParameters);
  Configurables.configure(sparkAvroChannel, channelContext);
  String channelName = "SparkAvroMemoryChannel-" + UUID.randomUUID();
  sparkAvroChannel.setName(channelName);

  sparkAvroSink = new AvroSink();
  sparkAvroSink.setName("SparkAvroSink-" + UUID.randomUUID());
  Map<String, String> parameters = new HashMap<>();
  parameters.put("type", "avro");
  parameters.put("hostname", "localhost");
  parameters.put("port", "41111");
  parameters.put("batch-size", "100");
  Context sinkContext = new Context(parameters);
  // Configurables.configure() invokes configure(sinkContext) on the sink,
  // so the original explicit sparkAvroSink.configure(sinkContext) call was
  // redundant (the sink was configured twice) and has been removed.
  Configurables.configure(sparkAvroSink, sinkContext);
  sparkAvroSink.setChannel(sparkAvroChannel);

  // Start the channel before the sink so the sink never polls a stopped channel.
  sparkAvroChannel.start();
  sparkAvroSink.start();
}

代码示例来源:origin: jaibeermalik/searchanalytics-bigdata

channel = new MemoryChannel();
String channelName = "AvroSourceMemoryChannel-" + UUID.randomUUID();
channel.setName(channelName);

相关文章