Calling Kettle from Java

package org.jsmsa.etl.kettle;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.Logger;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.util.EnvUtil;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.LongObjectId;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.RepositoryDirectory;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.kdr.KettleDatabaseRepository;
import org.pentaho.di.repository.kdr.KettleDatabaseRepositoryMeta;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;

public class KettleExecutor {

    Logger log = Logger.getLogger(getClass());
    KettleDatabaseRepository rep;
    RepositoryDirectoryInterface dir;

    /**
     * Connection XML for the Kettle database repository (an Oracle instance in this example).
     */
    public String getDatabaseRepositoryXMl() {
        String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
                "<connection>" +
                    "<name>jsmsadc_etl</name>" +
                    "<type>ORACLE</type>" +
                    "<server>127.0.0.1</server>" +
                    "<access>Native</access>" +
                    "<database>orcl</database>" +
                    "<port>1521</port>" +
                    "<username>jsmsadc</username>" +
                    "<password>password</password>" +
                    "<servername/>" +
                    "<data_tablespace/>" +
                    "<index_tablespace/>" +
                "</connection>";
        return xml;
    }

    /**
     * Connect to the database repository and position on its root directory.
     */
    public void connectRepository(String username, String password) {
        try {
            EnvUtil.environmentInit();
            KettleEnvironment.init();
            DatabaseMeta dataMeta = new DatabaseMeta(getDatabaseRepositoryXMl());
            KettleDatabaseRepositoryMeta repInfo = new KettleDatabaseRepositoryMeta();
            repInfo.setConnection(dataMeta);

            rep = new KettleDatabaseRepository();
            rep.init(repInfo);
            rep.connect(username, password);

            ObjectId rootId = Long.parseLong(rep.getRootDirectoryID().getId()) > 0 ? rep.getRootDirectoryID() : new LongObjectId(0);
            dir = new RepositoryDirectory();
            dir.setObjectId(rootId);
        } catch (KettleException e) {
            e.printStackTrace();
        }
    }

    /**
     * Run a query against the repository database and collect the first column as object IDs.
     */
    public List<ObjectId> getObjectIds(String sql) throws KettleDatabaseException {
        List<ObjectId> list = new ArrayList<ObjectId>();
        Statement stmt = null;
        ResultSet rs = null;
        try {
            stmt = rep.getDatabase().getConnection().createStatement();
            rs = stmt.executeQuery(sql);
            while (rs.next())
                list.add(new LongObjectId(rs.getLong(1)));
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            try {
                if (rs != null)
                    rs.close();
                if (stmt != null)
                    stmt.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        return list;
    }

    /**
     * Run every active job stored in the repository, one after another.
     */
    public void runAllJobs() {
        try {
            List<ObjectId> jobIds = getObjectIds("select id_job from r_job where job_status >= 0 order by id_job");
            for (final ObjectId oid : jobIds) {
//              new JobsExecutor(rep, oid).run();
                // Runs synchronously in the current thread (Runnable.run(), not a new Thread)
                new Runnable() {
                    @Override
                    public void run() {
                        try {
                            JobMeta jobMeta = rep.loadJob(oid, null);
                            Job job = new Job(rep, jobMeta);
                            log.info("*********************************** starting job: " + job.getJobname());
                            job.start();
                            job.waitUntilFinished();
                            if (job.getErrors() > 0) {
                                log.info("*********************************** job failed: " + job.getJobname());
                            }
                        } catch (KettleException e) {
                            e.printStackTrace();
                        }
                    }
                }.run();
            }
        } catch (KettleException e) {
            e.printStackTrace();
        }
    }

    /**
     * Run a single repository job by object ID and optional version label.
     */
    public void runJob(ObjectId jobId, String versionLabel) {
        try {
            JobMeta jobMeta = rep.loadJob(jobId, versionLabel);
            Job job = new Job(rep, jobMeta);
            job.start();
            job.waitUntilFinished();
            if (job.getErrors() > 0) {
                System.out.println("job failed!");
            }
        } catch (KettleException e) {
            e.printStackTrace();
        }
    }

    /**
     * Run a job from a file (.kjb).
     * @param fileName path and name of the job script
     */
    public void runJob(String fileName) {
        try {
            KettleEnvironment.init();
            JobMeta jobMeta = new JobMeta(fileName, null);
            Job job = new Job(null, jobMeta);
            // Pass parameters to the job script; read them inside the script as ${paramName}
            // job.setVariable(paramName, paramValue);
            job.start();
            job.waitUntilFinished();
            if (job.getErrors() > 0) {
                System.out.println("job failed!");
            }
        } catch (KettleException e) {
            System.out.println(e);
        }
    }

    /**
     * Run a transformation from a file (.ktr).
     * @param filename path and name of the transformation script
     */
    public void runTran(String filename) {
        try {
            KettleEnvironment.init();
            TransMeta transMeta = new TransMeta(filename);
            Trans trans = new Trans(transMeta);
            trans.prepareExecution(null);
            trans.startThreads();
            trans.waitUntilFinished();

            if (trans.getErrors() != 0) {
                System.out.println("Error");
            }
        } catch (KettleException e) {
            e.printStackTrace();
        }
    }

}
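A minimal usage sketch of the class above. The repository credentials ("admin"/"admin") and the .kjb/.ktr paths are placeholders I made up for illustration; only the KettleExecutor methods themselves come from the listing.

package org.jsmsa.etl.kettle;

public class KettleExecutorDemo {

    public static void main(String[] args) {
        KettleExecutor executor = new KettleExecutor();

        // Repository-based execution: connect with repository credentials
        // (placeholder values), then run every active job in the repository.
        executor.connectRepository("admin", "admin");
        executor.runAllJobs();

        // File-based execution: run a job (.kjb) and a transformation (.ktr)
        // directly from disk, no repository connection needed (placeholder paths).
        executor.runJob("/opt/etl/jobs/daily_load.kjb");
        executor.runTran("/opt/etl/trans/clean_data.ktr");
    }
}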