org.pentaho.di.repository.Repository.loadDatabaseMetaFromJobEntryAttribute()方法的使用及代码示例

x33g5p2x  于2022-01-28 转载在 其他  
字(13.4k)|赞(0)|评价(0)|浏览(76)

本文整理了Java中org.pentaho.di.repository.Repository.loadDatabaseMetaFromJobEntryAttribute方法的一些代码示例,展示了Repository.loadDatabaseMetaFromJobEntryAttribute的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Repository.loadDatabaseMetaFromJobEntryAttribute方法的具体详情如下:
包路径:org.pentaho.di.repository.Repository
类名称:Repository
方法名:loadDatabaseMetaFromJobEntryAttribute

Repository.loadDatabaseMetaFromJobEntryAttribute介绍

[英]This method is introduced to avoid having to go over an integer/string/whatever in the interface and the job entry code.
[中]引入此方法是为了避免在接口与作业项（job entry）代码之间传递整数/字符串等形式的标识。

代码示例

代码示例来源:origin: pentaho/pentaho-kettle

@Override
public DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId objectId, String s, String s1,
    List<DatabaseMeta> list ) throws KettleException {
  // Thin forwarding override: the actual lookup of the DatabaseMeta referenced by this
  // job-entry attribute is performed by the wrapped delegate repository.
  return getDelegate().loadDatabaseMetaFromJobEntryAttribute( objectId, s, s1, list );
}

代码示例来源:origin: pentaho/pentaho-kettle

@Override
public DatabaseMeta loadDatabaseMetaFromJobEntryAttribute( ObjectId objectId, String s, int i, String s1,
    List<DatabaseMeta> list ) throws KettleException {
  // Thin forwarding override for the indexed variant (attribute row i); the wrapped
  // delegate repository does the real work.
  return getDelegate().loadDatabaseMetaFromJobEntryAttribute( objectId, s, i, s1, list );
}

代码示例来源:origin: pentaho/pentaho-kettle

// Restores this job entry's configuration (database connection and cube name) from the
// repository row identified by id_jobentry. metaStore and slaveServers are unused here
// but required by the loadRep() contract.
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
 try {
  this.databaseMeta =
    rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
  // NOTE(review): "cubeName" is read with getStepAttributeString() although id_jobentry is a
  // job-entry id; sibling loadRep() implementations use getJobEntryAttributeString(). This only
  // works if the matching saveRep() stores it as a step attribute — confirm before changing.
  this.setCubeName( rep.getStepAttributeString( id_jobentry, "cubeName" ) );
 } catch ( KettleException dbe ) {
  // NOTE(review): the message says "type file exists" but this entry loads cube settings —
  // looks like a copy-paste from another job entry's loadRep().
  throw new KettleException( "Unable to load job entry for type file exists from the repository for id_jobentry="
    + id_jobentry, dbe );
 }
}

代码示例来源:origin: pentaho/pentaho-kettle

public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  // Restores the table-exists entry from the repository: target table, schema and the
  // database connection resolved against the supplied list of known connections.
  try {
    final String loadedTable = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    final String loadedSchema = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    tablename = loadedTable;
    schemaname = loadedSchema;
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
  } catch ( KettleDatabaseException dbe ) {
    // Wrap with a localized message, keeping the original exception as the cause.
    throw new KettleException(
      BaseMessages.getString( PKG, "TableExists.Meta.UnableLoadRep", "" + id_jobentry ), dbe );
  }
}

代码示例来源:origin: pentaho/pentaho-kettle

public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  // Restores the SQL job entry from the repository: statement text, three "T"-encoded
  // boolean flags, an optional script file name and the database connection.
  try {
    sql = rep.getJobEntryAttributeString( id_jobentry, "sql" );
    // Flags are persisted as "T" when enabled; anything else (including null) leaves the
    // field at its current value. "T".equalsIgnoreCase(null) is false, so no null check needed.
    if ( "T".equalsIgnoreCase( rep.getJobEntryAttributeString( id_jobentry, "useVariableSubstitution" ) ) ) {
      useVariableSubstitution = true;
    }
    if ( "T".equalsIgnoreCase( rep.getJobEntryAttributeString( id_jobentry, "sqlfromfile" ) ) ) {
      sqlfromfile = true;
    }
    if ( "T".equalsIgnoreCase( rep.getJobEntryAttributeString( id_jobentry, "sendOneStatement" ) ) ) {
      sendOneStatement = true;
    }
    sqlfilename = rep.getJobEntryAttributeString( id_jobentry, "sqlfilename" );
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to load job entry of type 'sql' from the repository with id_jobentry="
      + id_jobentry, dbe );
  }
}

代码示例来源:origin: pentaho/pentaho-kettle

// Restores this job entry's configuration from the repository: the database connection,
// the cube name, and the list of dimension names (one indexed attribute row each).
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
 try {
  this.databaseMeta =
    rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
  // NOTE(review): "cubeName" and the "dimensionname" rows are read through the *step*
  // attribute APIs (getStepAttributeString / countNrStepAttributes) although id_jobentry is a
  // job-entry id; sibling loadRep() implementations use the job-entry attribute APIs. This
  // only works if the matching saveRep() stores them as step attributes — confirm before changing.
  this.setCubeName( rep.getStepAttributeString( id_jobentry, "cubeName" ) );
  int nrFields = rep.countNrStepAttributes( id_jobentry, "dimensionname" );
  for ( int i = 0; i < nrFields; i++ ) {
   String dimensionName = rep.getStepAttributeString( id_jobentry, i, "dimensionname" );
   this.dimensionNames.add( dimensionName );
  }
 } catch ( KettleException dbe ) {
  // NOTE(review): the message says "type file exists" but this entry loads cube/dimension
  // settings — looks like a copy-paste from another job entry's loadRep().
  throw new KettleException( "Unable to load job entry for type file exists from the repository for id_jobentry="
    + id_jobentry, dbe );
 }
}

代码示例来源:origin: pentaho/pentaho-kettle

public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  // Restores the columns-exist entry from the repository: target table/schema, the database
  // connection, and the indexed list of column-name arguments.
  try {
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
    // The argument count is derived from how many indexed "name" attribute rows exist.
    int nrArguments = rep.countNrJobEntryAttributes( id_jobentry, "name" );
    arguments = new String[nrArguments];
    for ( int idx = 0; idx < nrArguments; idx++ ) {
      arguments[idx] = rep.getJobEntryAttributeString( id_jobentry, idx, "name" );
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryColumnsExist.Meta.UnableLoadRep", ""
      + id_jobentry ), dbe );
  }
}

代码示例来源:origin: pentaho/pentaho-kettle

public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  // Restores the truncate-tables entry from the repository: the database connection, the
  // arg-from-previous flag, and parallel indexed lists of table names and schema names.
  try {
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
    this.argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    // One "name"/"schemaname" pair per indexed attribute row; allocate() sizes both arrays.
    int nrRows = rep.countNrJobEntryAttributes( id_jobentry, "name" );
    allocate( nrRows );
    for ( int idx = 0; idx < nrRows; idx++ ) {
      this.arguments[idx] = rep.getJobEntryAttributeString( id_jobentry, idx, "name" );
      this.schemaname[idx] = rep.getJobEntryAttributeString( id_jobentry, idx, "schemaname" );
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryTruncateTables.UnableLoadRep", ""
      + id_jobentry ), dbe );
  }
}

代码示例来源:origin: pentaho/pentaho-kettle

public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  // Restores the check-DB-connections entry from the repository: one (connection, waitfor,
  // waittime) triple per indexed attribute row. The row count is taken from the number of
  // "id_database" attributes.
  try {
    int nrConnections = rep.countNrJobEntryAttributes( id_jobentry, "id_database" );
    connections = new DatabaseMeta[nrConnections];
    waitfors = new String[nrConnections];
    waittimes = new int[nrConnections];
    for ( int i = 0; i < nrConnections; i++ ) {
      connections[i] =
        rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", i, "id_database", databases );
      waitfors[i] = rep.getJobEntryAttributeString( id_jobentry, i, "waitfor" );
      // Missing wait-time codes are normalized to "" before decoding.
      waittimes[i] =
        getWaitByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, i, "waittime" ), "" ) );
    }
  } catch ( KettleException dbe ) {
    // FIX: chain dbe as the cause so the original stack trace is preserved; previously only
    // dbe.getMessage() was embedded in the localized text and the cause chain was lost.
    throw new KettleException( BaseMessages.getString(
      PKG, "JobEntryCheckDbConnections.ERROR_0002_Cannot_Load_Job_From_Repository", "" + id_jobentry,
      dbe.getMessage() ), dbe );
  }
}

代码示例来源:origin: pentaho/pentaho-kettle

public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  // Restores the evaluate-table-content entry from the repository: connection, target
  // table/schema, the success condition, the row limit, and the custom-SQL options.
  try {
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    // A missing condition code is normalized to "" before decoding.
    String conditionCode = Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "success_condition" ), "" );
    successCondition = getSuccessConditionByCode( conditionCode );
    limit = rep.getJobEntryAttributeString( id_jobentry, "limit" );
    useCustomSQL = rep.getJobEntryAttributeBoolean( id_jobentry, "is_custom_sql" );
    useVars = rep.getJobEntryAttributeBoolean( id_jobentry, "is_usevars" );
    addRowsResult = rep.getJobEntryAttributeBoolean( id_jobentry, "add_rows_result" );
    clearResultList = rep.getJobEntryAttributeBoolean( id_jobentry, "clear_result_rows" );
    customSQL = rep.getJobEntryAttributeString( id_jobentry, "custom_sql" );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryEvalTableContent.UnableLoadRep", ""
      + id_jobentry ), dbe );
  }
}

代码示例来源:origin: pentaho/pentaho-kettle

public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  // Restores the MySQL bulk-file (table dump) entry from the repository: source table,
  // output file layout, dump options and the database connection.
  try {
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    filename = rep.getJobEntryAttributeString( id_jobentry, "filename" );
    separator = rep.getJobEntryAttributeString( id_jobentry, "separator" );
    enclosed = rep.getJobEntryAttributeString( id_jobentry, "enclosed" );
    lineterminated = rep.getJobEntryAttributeString( id_jobentry, "lineterminated" );
    limitlines = rep.getJobEntryAttributeString( id_jobentry, "limitlines" );
    listcolumn = rep.getJobEntryAttributeString( id_jobentry, "listcolumn" );
    highpriority = rep.getJobEntryAttributeBoolean( id_jobentry, "highpriority" );
    optionenclosed = rep.getJobEntryAttributeBoolean( id_jobentry, "optionenclosed" );
    outdumpvalue = (int) rep.getJobEntryAttributeInteger( id_jobentry, "outdumpvalue" );
    iffileexists = (int) rep.getJobEntryAttributeInteger( id_jobentry, "iffileexists" );
    addfiletoresult = rep.getJobEntryAttributeBoolean( id_jobentry, "addfiletoresult" );
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
  } catch ( KettleDatabaseException dbe ) {
    // FIX: the previous message said type 'table exists' — a copy-paste error; this entry
    // dumps a MySQL table to a file, not a table-exists check.
    throw new KettleException(
      "Unable to load job entry of type 'Mysql bulk file' from the repository for id_jobentry=" + id_jobentry,
      dbe );
  }
}

代码示例来源:origin: pentaho/pentaho-kettle

public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  // Restores the MySQL bulk-load entry from the repository: target table, input file layout,
  // load options and the database connection.
  try {
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    filename = rep.getJobEntryAttributeString( id_jobentry, "filename" );
    // Field/line formatting of the input file.
    separator = rep.getJobEntryAttributeString( id_jobentry, "separator" );
    enclosed = rep.getJobEntryAttributeString( id_jobentry, "enclosed" );
    escaped = rep.getJobEntryAttributeString( id_jobentry, "escaped" );
    linestarted = rep.getJobEntryAttributeString( id_jobentry, "linestarted" );
    lineterminated = rep.getJobEntryAttributeString( id_jobentry, "lineterminated" );
    // Load behavior options. Note: "prorityvalue" is the key actually persisted (historical
    // typo); it must stay spelled exactly like this to read existing repositories.
    replacedata = rep.getJobEntryAttributeBoolean( id_jobentry, "replacedata" );
    ignorelines = rep.getJobEntryAttributeString( id_jobentry, "ignorelines" );
    listattribut = rep.getJobEntryAttributeString( id_jobentry, "listattribut" );
    localinfile = rep.getJobEntryAttributeBoolean( id_jobentry, "localinfile" );
    prorityvalue = (int) rep.getJobEntryAttributeInteger( id_jobentry, "prorityvalue" );
    addfiletoresult = rep.getJobEntryAttributeBoolean( id_jobentry, "addfiletoresult" );
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      "Unable to load job entry of type 'Mysql bulk load' from the repository for id_jobentry=" + id_jobentry,
      dbe );
  }
}

代码示例来源:origin: pentaho/pentaho-kettle

@Override
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  // Restores the wait-for-SQL entry from the repository: connection, target table/schema,
  // success condition, custom-SQL options and the timeout/polling settings.
  try {
    connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
    schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" );
    tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" );
    // A missing condition code is normalized to "" before decoding.
    String conditionCode = Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "success_condition" ), "" );
    successCondition = getSuccessConditionByCode( conditionCode );
    rowsCountValue = rep.getJobEntryAttributeString( id_jobentry, "rows_count_value" );
    iscustomSQL = rep.getJobEntryAttributeBoolean( id_jobentry, "is_custom_sql" );
    isUseVars = rep.getJobEntryAttributeBoolean( id_jobentry, "is_usevars" );
    isAddRowsResult = rep.getJobEntryAttributeBoolean( id_jobentry, "add_rows_result" );
    customSQL = rep.getJobEntryAttributeString( id_jobentry, "custom_sql" );
    // Polling/timeout configuration.
    maximumTimeout = rep.getJobEntryAttributeString( id_jobentry, "maximum_timeout" );
    checkCycleTime = rep.getJobEntryAttributeString( id_jobentry, "check_cycle_time" );
    successOnTimeout = rep.getJobEntryAttributeBoolean( id_jobentry, "success_on_timeout" );
    isClearResultList = rep.getJobEntryAttributeBoolean( id_jobentry, "clear_result_rows" );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages
      .getString( PKG, "JobEntryWaitForSQL.UnableLoadRep", "" + id_jobentry ), dbe );
  }
}

代码示例来源:origin: pentaho/pentaho-kettle

truncate = rep.getJobEntryAttributeBoolean( id_jobentry, "truncate" );
 connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases );
} catch ( KettleDatabaseException dbe ) {
 throw new KettleException(

相关文章

Repository类方法