org.apache.hadoop.ipc.RemoteException.<init>()方法的使用及代码示例

x33g5p2x  于2022-01-28 转载在 其他  
字(7.3k)|赞(0)|评价(0)|浏览(154)

本文整理了Java中org.apache.hadoop.ipc.RemoteException.<init>方法的一些代码示例,展示了RemoteException.<init>的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。RemoteException.<init>方法的具体详情如下:
包路径:org.apache.hadoop.ipc.RemoteException
类名称:RemoteException
方法名:<init>

RemoteException.<init>介绍

暂无

代码示例

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Builds a {@link RemoteException} from SAX attributes.
 *
 * @param attrs may not be null; expected to carry "class" and "message" values
 * @return a RemoteException wrapping the attribute-supplied class name and message
 */
public static RemoteException valueOf(Attributes attrs) {
  final String exceptionClass = attrs.getValue("class");
  final String exceptionMessage = attrs.getValue("message");
  return new RemoteException(exceptionClass, exceptionMessage);
}

代码示例来源:origin: apache/hbase

/**
 * Reads a SASL negotiation status int from the stream. Any status other
 * than SUCCESS is followed on the wire by an exception class name and a
 * message, which are read here and rethrown as a RemoteException.
 */
private static void readStatus(DataInputStream inStream) throws IOException {
  final int status = inStream.readInt(); // read status
  if (status == SaslStatus.SUCCESS.state) {
    return;
  }
  final String exceptionClass = WritableUtils.readString(inStream);
  final String exceptionMessage = WritableUtils.readString(inStream);
  throw new RemoteException(exceptionClass, exceptionMessage);
}

代码示例来源:origin: apache/hbase

@Override
public FSDataInputStream open(Path f, final int bufferSize)
    throws IOException {
  // Always fails: wraps FileNotFoundException's class name in a
  // RemoteException with an empty message, regardless of the path.
  final String missingFileClass = FileNotFoundException.class.getName();
  throw new RemoteException(missingFileClass, "");
}
@Override

代码示例来源:origin: org.apache.hadoop/hadoop-common

/**
 * Reads one length-prefixed RPC response frame into a ByteBuffer.
 *
 * The very first response may instead be a pre-RPCv9 error frame
 * (length == -1): its status int is discarded and the embedded exception
 * class/message are rethrown as a RemoteException. Otherwise the length
 * is validated (positive, within maxResponseLength if configured) and
 * exactly that many bytes are read.
 *
 * @return a buffer holding the raw response payload
 * @throws IOException on stream errors, RemoteException for a pre-v9
 *         server error, RpcException for an invalid or oversized length
 */
public ByteBuffer readResponse() throws IOException {
  final int length = in.readInt();
  if (firstResponse) {
    firstResponse = false;
    // pre-rpcv9 exception, almost certainly a version mismatch.
    if (length == -1) {
      in.readInt(); // ignore fatal/error status, it's fatal for us.
      final String errClass = WritableUtils.readString(in);
      final String errMsg = WritableUtils.readString(in);
      throw new RemoteException(errClass, errMsg);
    }
  }
  if (length <= 0) {
    throw new RpcException("RPC response has invalid length");
  }
  if (maxResponseLength > 0 && length > maxResponseLength) {
    throw new RpcException("RPC response exceeds maximum data length");
  }
  final ByteBuffer buffer = ByteBuffer.allocate(length);
  in.readFully(buffer.array());
  return buffer;
}

代码示例来源:origin: apache/hbase

// Fragment from inside a response decoder: extract the UTF-8 error message
// that follows the class name in the buffer (the 8-byte skip presumably
// covers two int length fields — TODO confirm wire layout), advance the
// reader index past the whole record, and surface it as a RemoteException.
String msg = in.toString(offset + classLen + 8, msgLen, HConstants.UTF8_CHARSET);
in.readerIndex(offset + totalLen);
throw new RemoteException(className, msg);

代码示例来源:origin: apache/hbase

/**
 * Fault-injecting override: on each call, rolls a uniform value in [0,5)
 * and throws one of three failure types for rolls 0-2; rolls 3-4 delegate
 * to the real sendRequest.
 */
@Override
public ExecuteProceduresResponse sendRequest(ServerName server, ExecuteProceduresRequest req)
    throws IOException {
  final int roll = rand.nextInt(5);
  if (roll == 0) {
    throw new ServerNotRunningYetException("wait on server startup");
  } else if (roll == 1) {
    throw new SocketTimeoutException("simulate socket timeout");
  } else if (roll == 2) {
    throw new RemoteException("java.io.IOException", "unexpected exception");
  }
  // rolls 3 and 4: no injected fault
  return super.sendRequest(server, req);
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

// Fragment (cut mid-if): the server omitted the detailed error code, so
// warn and build the RemoteException from the class name, message and
// whatever erCode currently holds.
LOG.warn("Detailed error code not set by server on rpc error");
RemoteException re = new RemoteException(exceptionClassName, errorMsg, erCode);
if (status == RpcStatusProto.ERROR) {
 final Call call = calls.remove(callId);

代码示例来源:origin: org.apache.hbase/hbase-client

// Reads the SASL negotiation status; a non-SUCCESS status is followed on
// the wire by an exception class name and message, rethrown here as a
// RemoteException.
private static void readStatus(DataInputStream inStream) throws IOException {
 int status = inStream.readInt(); // read status
 if (status != SaslStatus.SUCCESS.state) {
  throw new RemoteException(WritableUtils.readString(inStream),
    WritableUtils.readString(inStream));
 }
}

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs

/**
 * Disallow snapshot on a directory.
 * Usage: hdfs dfsadmin -disallowSnapshot snapshotDir
 * @param argv List of command line parameters; argv[1] is the target path.
 * @exception IOException on failure; a SnapshotException is rewrapped
 *            as a RemoteException (original cause/stack is dropped).
 */
public void disallowSnapshot(String[] argv) throws IOException {
 Path p = new Path(argv[1]);
 final DistributedFileSystem dfs = AdminHelper.getDFS(p.toUri(), getConf());
 try {
  dfs.disallowSnapshot(p);
 } catch (SnapshotException e) {
  throw new RemoteException(e.getClass().getName(), e.getMessage());
 }
 System.out.println("Disallowing snapshot on " + argv[1] + " succeeded");
}

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs

/**
 * Allow snapshot on a directory.
 * Usage: hdfs dfsadmin -allowSnapshot snapshotDir
 * @param argv List of command line parameters; argv[1] is the target path.
 * @exception IOException on failure; a SnapshotException is rewrapped
 *            as a RemoteException (original cause/stack is dropped).
 */
public void allowSnapshot(String[] argv) throws IOException {
 Path p = new Path(argv[1]);
 final DistributedFileSystem dfs = AdminHelper.getDFS(p.toUri(), getConf());
 try {
  dfs.allowSnapshot(p);
 } catch (SnapshotException e) {
  throw new RemoteException(e.getClass().getName(), e.getMessage());
 }
 System.out.println("Allowing snapshot on " + argv[1] + " succeeded");
}

代码示例来源:origin: org.apache.hadoop/hadoop-common

// Fragment from a status switch: both ERROR and FATAL responses carry an
// exception class name and message in the header; surface them as a
// RemoteException. Other statuses fall through to default (no-op).
case ERROR: // might get a RPC error during 
case FATAL:
 throw new RemoteException(header.getExceptionClassName(),
              header.getErrorMsg());
default: break;

代码示例来源:origin: org.apache.hbase/hbase-client

// Fragment from inside a response decoder: read the UTF-8 message located
// after the class name (the 8-byte offset presumably skips two int length
// fields — TODO confirm wire layout), skip past the record, and rethrow.
String msg = in.toString(offset + classLen + 8, msgLen, HConstants.UTF8_CHARSET);
in.readerIndex(offset + totalLen);
throw new RemoteException(className, msg);

代码示例来源:origin: org.jvnet.hudson.hadoop/hadoop-core

/** Create RemoteException from the "class" and "message" SAX attribute values. */
 public static RemoteException valueOf(Attributes attrs) {
  return new RemoteException(attrs.getValue("class"),
    attrs.getValue("message")); 
 }
}

代码示例来源:origin: ch.cern.hadoop/hadoop-hdfs

/**
 * Convert a Json map to a RemoteException.
 * Expects the map to contain a nested map keyed by "RemoteException"
 * with "message" and "javaClassName" entries.
 */
public static RemoteException toRemoteException(final Map<?, ?> json) {
  final Object wrapped = json.get(RemoteException.class.getSimpleName());
  final Map<?, ?> fields = (Map<?, ?>) wrapped;
  final String javaClassName = (String) fields.get("javaClassName");
  final String message = (String) fields.get("message");
  return new RemoteException(javaClassName, message);
}

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs-client

/** Convert a Json map to a RemoteException. */
static RemoteException toRemoteException(final Map<?, ?> json) {
 final Map<?, ?> m = (Map<?, ?>)json.get(
   RemoteException.class.getSimpleName());
 final String message = (String)m.get("message");
 final String javaClassName = (String)m.get("javaClassName");
 if (UNSUPPPORTED_EXCEPTION_STR.equals(javaClassName)) {
  throw new UnsupportedOperationException(message);
 }
 return new RemoteException(javaClassName, message);
}

代码示例来源:origin: com.aliyun.hbase/alihbase-client

// Reads the SASL negotiation status int; any non-SUCCESS status is
// followed by an exception class name and message on the stream,
// rethrown here as a RemoteException.
private static void readStatus(DataInputStream inStream) throws IOException {
 int status = inStream.readInt(); // read status
 if (status != SaslStatus.SUCCESS.state) {
  throw new RemoteException(WritableUtils.readString(inStream),
    WritableUtils.readString(inStream));
 }
}

代码示例来源:origin: org.apache.hbase/hbase-server

// Test override: every open() fails with a RemoteException wrapping
// FileNotFoundException's class name and an empty message.
@Override
public FSDataInputStream open(Path f, final int bufferSize)
  throws IOException {
 throw new RemoteException(FileNotFoundException.class.getName(), "");
}
@Override

代码示例来源:origin: harbby/presto-connectors

// Reads the SASL status; on anything other than SUCCESS the stream next
// carries an exception class name and message, rethrown as a
// RemoteException.
private static void readStatus(DataInputStream inStream) throws IOException {
 int status = inStream.readInt(); // read status
 if (status != SaslStatus.SUCCESS.state) {
  throw new RemoteException(WritableUtils.readString(inStream),
    WritableUtils.readString(inStream));
 }
}

代码示例来源:origin: org.apache.hbase/hbase-server

// Fault-injecting override: rolls rand.nextInt(5); rolls 0-2 each throw a
// distinct failure type, rolls 3-4 delegate to the real sendRequest.
@Override
public ExecuteProceduresResponse sendRequest(ServerName server, ExecuteProceduresRequest req)
  throws IOException {
 switch (rand.nextInt(5)) {
  case 0: throw new ServerNotRunningYetException("wait on server startup");
  case 1: throw new SocketTimeoutException("simulate socket timeout");
  case 2: throw new RemoteException("java.io.IOException", "unexpected exception");
  default:
   // fall out
 }
 return super.sendRequest(server, req);
}

代码示例来源:origin: org.apache.beam/beam-sdks-java-io-hadoop-format

// Verifies that tryAcquireJobLock does not propagate a RemoteException
// thrown by FileSystem.createNewFile: the mocked filesystem always throws,
// and the lock acquisition is expected to report failure (false) instead.
@Test
public void testCatchingRemoteException() throws IOException {
 FileSystem mockedFileSystem = Mockito.mock(FileSystem.class);
 RemoteException thrownException =
   new RemoteException(AlreadyBeingCreatedException.class.getName(), "Failed to CREATE_FILE");
 Mockito.when(mockedFileSystem.createNewFile(Mockito.any())).thenThrow(thrownException);
 HDFSSynchronization synchronization =
   new HDFSSynchronization("someDir", (conf) -> mockedFileSystem);
 assertFalse(synchronization.tryAcquireJobLock(configuration));
}

相关文章