kettle获取资源库连接+调用job

package org.flhy.ext;

import java.util.ArrayList;

import javax.sql.DataSource;

import org.apache.commons.dbcp.BasicDataSource;
import org.flhy.ext.core.PropsUI;
import org.pentaho.di.core.DBCache;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.logging.DefaultLogLevel;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.logging.LogLevel;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.job.JobExecutionConfiguration;
import org.pentaho.di.repository.LongObjectId;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.kdr.KettleDatabaseRepository;
import org.pentaho.di.repository.kdr.KettleDatabaseRepositoryMeta;
import org.pentaho.di.trans.TransExecutionConfiguration;
import org.pentaho.metastore.stores.delegate.DelegatingMetaStore;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;

public class App implements ApplicationContextAware {

	private static App app;
	public static KettleDatabaseRepositoryMeta meta;

	private LogChannelInterface log;
	private TransExecutionConfiguration transExecutionConfiguration;
	private TransExecutionConfiguration transPreviewExecutionConfiguration;
	private TransExecutionConfiguration transDebugExecutionConfiguration;
	private JobExecutionConfiguration jobExecutionConfiguration;
	public PropsUI props;

	private App() {
		props = PropsUI.getInstance();
		log = new LogChannel( PropsUI.getAppName());
		loadSettings();
		
		transExecutionConfiguration = new TransExecutionConfiguration();
	    transExecutionConfiguration.setGatheringMetrics( true );
	    transPreviewExecutionConfiguration = new TransExecutionConfiguration();
	    transPreviewExecutionConfiguration.setGatheringMetrics( true );
	    transDebugExecutionConfiguration = new TransExecutionConfiguration();
	    transDebugExecutionConfiguration.setGatheringMetrics( true );

	    jobExecutionConfiguration = new JobExecutionConfiguration();
	    
	    variables = new RowMetaAndData( new RowMeta() );
	}
	
	public void loadSettings() {
		LogLevel logLevel = LogLevel.getLogLevelForCode(props.getLogLevel());
		DefaultLogLevel.setLogLevel(logLevel);
		log.setLogLevel(logLevel);
		KettleLogStore.getAppender().setMaxNrLines(props.getMaxNrLinesInLog());

		// transMeta.setMaxUndo(props.getMaxUndo());
		DBCache.getInstance().setActive(props.useDBCache());
	}

	public static App getInstance() {
		if (app == null) {
			app = new App();
		}
		return app;
	}

	private Repository repository;

	public Repository getRepository() {
		return repository;
	}
//	public Repository getRepository() {
//		KettleDatabaseRepository repository = new KettleDatabaseRepository();
		ApplicationContext context = new AnnotationConfigApplicationContext(App.class);
//		try {
			BasicDataSource dataSource = (BasicDataSource) context.getBean(DataSource.class);
//			DatabaseMeta dbMeta = new DatabaseMeta();
			Connection conn = dataSource.getConnection();
			DatabaseMetaData dm = conn.getMetaData();
			System.out.println(conn.getCatalog());
			System.out.println(dm.getUserName());
			System.out.println(dm.getURL());
			System.out.println(dm.getDriverName());
			System.out.println(dm.getDatabaseProductName());
			System.out.println(dm.get);
//
			String url = dataSource.getUrl();
//			String url = "jdbc:mysql://172.22.3.135:3306/kettle_web";
//			String hostname = url.substring(url.indexOf("//") + 2, url.lastIndexOf(":"));
//			String port = url.substring(url.lastIndexOf(":") + 1, url.lastIndexOf("/"));
//			String dbName = url.substring(url.lastIndexOf("/") + 1);
//
//			dbMeta.setName(hostname+"_KettleRepository");
//			dbMeta.setDBName(dbName);
//			dbMeta.setDatabaseType("MYSQL");
//			dbMeta.setAccessType(0);
//			dbMeta.setHostname(hostname);
//			dbMeta.setServername(hostname);
//			dbMeta.setDBPort(port);
			dbMeta.setUsername(dataSource.getUsername());
			dbMeta.setPassword(dataSource.getPassword());
//			dbMeta.setUsername("root");
//			dbMeta.setPassword("Bmw745li*");
//			ObjectId objectId = new LongObjectId(100);
//			dbMeta.setObjectId(objectId);
//			dbMeta.setShared(true);
//			dbMeta.addExtraOption(dbMeta.getPluginId(), "characterEncoding", "utf8");
//			dbMeta.addExtraOption(dbMeta.getPluginId(), "useUnicode", "true");
//			dbMeta.addExtraOption(dbMeta.getPluginId(), "autoReconnect", "true");
//			meta = new KettleDatabaseRepositoryMeta();
//			dbMeta.setForcingIdentifiersToLowerCase(true);
//			meta.setName(hostname+"_KettleRepository");
//			meta.setId("KettleDatabaseRepository");
//			meta.setConnection(dbMeta);
//			meta.setDescription(hostname+"_KettleRepository");
//
//			repository.init(meta);
//			repository.connect("admin", "admin");
//			this.repository = repository;
//		} catch (Exception e) {
//			e.printStackTrace();
//		}
//		return repository;
//	}
	
	private Repository defaultRepository;
	
//	public void initDefault(Repository defaultRepo) {
//		if(this.defaultRepository == null)
//			this.defaultRepository = defaultRepo;
//		this.repository = defaultRepo;
//	}
	
	public Repository getDefaultRepository() {
		return this.defaultRepository;
	}
	
	public void selectRepository(Repository repo) {
		if(repository != null) {
			repository.disconnect();
		}
		repository = repo;
	}

	private DelegatingMetaStore metaStore;

	public DelegatingMetaStore getMetaStore() {
		return metaStore;
	}
	
	public LogChannelInterface getLog() {
		return log;
	}
	
	private RowMetaAndData variables = null;
	private ArrayList<String> arguments = new ArrayList<String>();
	
	public String[] getArguments() {
		return arguments.toArray(new String[arguments.size()]);
	}
	
	public JobExecutionConfiguration getJobExecutionConfiguration() {
		return jobExecutionConfiguration;
	}

	public TransExecutionConfiguration getTransDebugExecutionConfiguration() {
		return transDebugExecutionConfiguration;
	}

	public TransExecutionConfiguration getTransPreviewExecutionConfiguration() {
		return transPreviewExecutionConfiguration;
	}

	public TransExecutionConfiguration getTransExecutionConfiguration() {
		return transExecutionConfiguration;
	}
	
	public RowMetaAndData getVariables() {
		return variables;
	}

	private static ApplicationContext context = null;

	@Override
	public void  setApplicationContext(ApplicationContext context) throws BeansException {
		KettleDatabaseRepository repository = new KettleDatabaseRepository();
		this.context=context;
		try {
			BasicDataSource dataSource = (BasicDataSource) context.getBean(DataSource.class);
			DatabaseMeta dbMeta = new DatabaseMeta();
//			Connection conn = dataSource.getConnection();
//			DatabaseMetaData dm = conn.getMetaData();
//			System.out.println(conn.getCatalog());
//			System.out.println(dm.getUserName());
//			System.out.println(dm.getURL());
//			System.out.println(dm.getDriverName());
//			System.out.println(dm.getDatabaseProductName());
//			System.out.println(dm.get);
			
			String url = dataSource.getUrl();
			String hostname = url.substring(url.indexOf("//") + 2, url.lastIndexOf(":"));
			String port = url.substring(url.lastIndexOf(":") + 1, url.lastIndexOf("/"));
			String dbName = url.substring(url.lastIndexOf("/") + 1);
			
			dbMeta.setName(hostname+"_KettleRepository");
			dbMeta.setDBName(dbName);
			dbMeta.setDatabaseType("MYSQL");
			dbMeta.setAccessType(0);
			dbMeta.setHostname(hostname);
			dbMeta.setServername(hostname);
			dbMeta.setDBPort(port);
			dbMeta.setUsername(dataSource.getUsername());
			dbMeta.setPassword(dataSource.getPassword());
			ObjectId objectId = new LongObjectId(100);
			dbMeta.setObjectId(objectId);
			dbMeta.setShared(true);
			dbMeta.addExtraOption(dbMeta.getPluginId(), "characterEncoding", "utf8");
			dbMeta.addExtraOption(dbMeta.getPluginId(), "useUnicode", "true");
			dbMeta.addExtraOption(dbMeta.getPluginId(), "autoReconnect", "true");
			meta = new KettleDatabaseRepositoryMeta();
			dbMeta.setForcingIdentifiersToLowerCase(true);
			meta.setName(hostname+"_KettleRepository");
			meta.setId("KettleDatabaseRepository");
			meta.setConnection(dbMeta);
			meta.setDescription(hostname+"_KettleRepository");

			repository.init(meta);
			repository.connect("admin", "admin");
			this.repository = repository;
		} catch (Exception e) {
			e.printStackTrace();
		}
		
	}

//	public JSONArray encodeVariables() {
//		Object[] data = variables.getData();
//		String[] fields = variables.getRowMeta().getFieldNames();
//		JSONArray jsonArray = new JSONArray();
//		for (int i = 0; i < fields.length; i++) {
//			JSONObject jsonObject = new JSONObject();
//			jsonObject.put("name", fields[i]);
//			jsonObject.put("value", data[i].toString());
//			jsonArray.add(jsonObject);
//		}
//		return jsonArray;
//	}
	
}

 

package org.flhy.ext;

import java.text.SimpleDateFormat;
import java.util.*;

import com.mxgraph.util.mxUtils;
import org.apache.ibatis.session.SqlSession;
import org.flhy.ext.Task.ExecutionTraceEntity;
import org.flhy.ext.Task.MybatisDaoSuppo;
import org.flhy.ext.utils.ExceptionUtils;
import org.flhy.ext.utils.JSONArray;
import org.flhy.ext.utils.JSONObject;
import org.flhy.ext.utils.StringEscapeHelper;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.gui.JobTracker;
import org.pentaho.di.core.logging.KettleLogLayout;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.logging.KettleLoggingEvent;
import org.pentaho.di.core.logging.LoggingObjectType;
import org.pentaho.di.core.logging.LoggingRegistry;
import org.pentaho.di.core.logging.SimpleLoggingObject;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobEntryResult;
import org.pentaho.di.job.JobExecutionConfiguration;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryCopy;
import org.pentaho.di.ui.spoon.job.JobEntryCopyResult;
import org.pentaho.di.www.SlaveServerJobStatus;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

public class JobExecutor implements Runnable {
	private String username;
	private boolean isClickStop=false;
	private String executionId;
	private JobExecutionConfiguration executionConfiguration;
	private JobMeta jobMeta = null;
	private String carteObjectId = null;
	private Job job = null;
	private static final Class PKG = JobEntryCopyResult.class;
	private boolean finished = false;
	private long errCount = 0;
	private static Hashtable<String, JobExecutor> executors = new Hashtable<String, JobExecutor>();
//	private Map<StepMeta, String> stepLogMap = new HashMap<StepMeta, String>();
	public boolean isFinished() {
		return finished;
	}
	public Job getJob() {
		return job;
	}
	public String getExecutionId() {
		return executionId;
	}
	public static JobExecutor getExecutor(String executionId) {
		return executors.get(executionId);
	}
	public void setJob(Job job) {
		this.job = job;
	}
	public JobExecutionConfiguration getExecutionConfiguration() {
		return executionConfiguration;
	}
	public void setExecutionConfiguration(JobExecutionConfiguration executionConfiguration) {
		this.executionConfiguration = executionConfiguration;
	}

	public String getUsername() {
		return username;
	}

	public void setUsername(String username) {
		this.username = username;
	}

	public boolean isClickStop() {
		return isClickStop;
	}

	public void setIsClickStop(boolean isClickStop) {
		this.isClickStop = isClickStop;
	}

	public String getCarteObjectId() {
		return carteObjectId;
	}
	public JobMeta getJobMeta() {
		return jobMeta;
	}
	public void setJobMeta(JobMeta jobMeta) {
		this.jobMeta = jobMeta;
	}
	public void setExecutionId(String executionId) {
		this.executionId = executionId;
	}

	public long getErrCount() {
		return errCount;
	}
	public static Hashtable<String, JobExecutor> getExecutors(){
		return executors;
	}
	public static void remove(String executionId) {
		executors.remove(executionId);
	}
	public JobExecutor(JobExecutionConfiguration executionConfiguration, JobMeta jobMeta) {
		this.executionId = UUID.randomUUID().toString().replaceAll("-", "");
		this.executionConfiguration = executionConfiguration;
		this.jobMeta = jobMeta;
	}


	public static synchronized JobExecutor initExecutor(JobExecutionConfiguration executionConfiguration, JobMeta jobMeta) {
		JobExecutor jobExecutor = new JobExecutor(executionConfiguration, jobMeta);
		executors.put(jobExecutor.getExecutionId(), jobExecutor);
		return jobExecutor;
	}

	@Override
	public void run() {
		ExecutionTraceEntity trace=new ExecutionTraceEntity();
		try {
			for (String varName : executionConfiguration.getVariables().keySet()) {
				String varValue = executionConfiguration.getVariables().get(varName);
				jobMeta.setVariable(varName, varValue);
			}

			for (String paramName : executionConfiguration.getParams().keySet()) {
				String paramValue = executionConfiguration.getParams().get(paramName);
				jobMeta.setParameterValue(paramName, paramValue);
			}
			trace.setStartTime(new Date());
			if (executionConfiguration.isExecutingLocally()) {
				 SimpleLoggingObject spoonLoggingObject = new SimpleLoggingObject( "SPOON", LoggingObjectType.SPOON, null );
			     spoonLoggingObject.setContainerObjectId( executionId );
			     spoonLoggingObject.setLogLevel( executionConfiguration.getLogLevel() );
				job = new Job( App.getInstance().getRepository(), jobMeta, spoonLoggingObject );
				job.setLogLevel(executionConfiguration.getLogLevel());
				job.shareVariablesWith(jobMeta);
				job.setInteractive(true);
				job.setGatheringMetrics(executionConfiguration.isGatheringMetrics());
				job.setArguments(executionConfiguration.getArgumentStrings());

				job.getExtensionDataMap().putAll(executionConfiguration.getExtensionOptions());

				// If there is an alternative start job entry, pass it to the job
	            //
	            if ( !Const.isEmpty( executionConfiguration.getStartCopyName() ) ) {
	            	JobEntryCopy startJobEntryCopy = jobMeta.findJobEntry( executionConfiguration.getStartCopyName(), executionConfiguration.getStartCopyNr(), false );
	            	job.setStartJobEntryCopy( startJobEntryCopy );
	            }
	            // Set the named parameters
	            Map<String, String> paramMap = executionConfiguration.getParams();
	            Set<String> keys = paramMap.keySet();
				for (String key : keys) {
					job.getJobMeta().setParameterValue(key, Const.NVL(paramMap.get(key), ""));
				}
	            job.getJobMeta().activateParameters();
	            job.start();
				while(!job.isFinished()){
					Thread.sleep(500);
				}
				errCount = job.getErrors();
			} else if (executionConfiguration.isExecutingRemotely()) {
				carteObjectId = Job.sendToSlaveServer( jobMeta, executionConfiguration, App.getInstance().getRepository(), App.getInstance().getMetaStore() );

				SlaveServer remoteSlaveServer = executionConfiguration.getRemoteServer();
				boolean running = true;
				while(running) {
					SlaveServerJobStatus jobStatus = remoteSlaveServer.getJobStatus(jobMeta.getName(), carteObjectId, 0);
					running = jobStatus.isRunning();
					if(!running)
						errCount = jobStatus.getResult().getNrErrors();
					Thread.sleep(500);
				}
			}
			//记录日志
			trace.setEndTime(new Date());
			trace.setJobName(jobMeta.getName());
			//运行结果
			String status="成功";
			if(errCount>0){
				status="失败";
			}
			trace.setStatus(status);
			//任务类型
			trace.setType("job");
			//日志信息
			net.sf.json.JSONObject logJSON=new net.sf.json.JSONObject();
			logJSON.put("jobMeasure",this.getJobMeasure());
			logJSON.put("finished",true);
			logJSON.put("log", StringEscapeHelper.decode(this.getExecutionLog()));
			trace.setExecutionLog(logJSON.toString());
			//运行方式
			String execMethod="";
			if(executionConfiguration.isExecutingLocally()){
				execMethod="本地";
			}else{
				execMethod="远程:"+executionConfiguration.getRemoteServer().getHostname();
			}
			trace.setExecMethod(execMethod);
			//executionConfigration
			net.sf.json.JSONObject json=new net.sf.json.JSONObject();
			String xml=executionConfiguration.getXML();
			//解析xml
			Document doc = mxUtils.parseXml(xml);
			Element root = doc.getDocumentElement();
			NodeList items1=root.getChildNodes();
			for(int i=0;i<items1.getLength();i++){
				Node node1=items1.item(i);
				if(node1.getNodeType()!=3){
					//判断一级节点下是否还存在子节点
					NodeList items2=node1.getChildNodes();
					if(items2.getLength()>1){
						net.sf.json.JSONArray array=new net.sf.json.JSONArray();
						for(int j=0;j<items2.getLength();j++){
							Node node2=items2.item(j);
							if(node2.getNodeType()!=3){
								net.sf.json.JSONObject obj=new net.sf.json.JSONObject();
								obj.put(node2.getNodeName(),node2.getTextContent().replaceAll("\n","").trim());
								array.add(obj);
							}
						}
						json.put(node1.getNodeName(),array);
					}else{
						json.put(node1.getNodeName(),node1.getTextContent().replaceAll("\n","").trim());
					}
				}
			}
			trace.setExecutionConfiguration(json.toString());
		} catch(Exception e) {
			try {
				trace.setEndTime(new Date());
				trace.setJobName(jobMeta.getName());
				trace.setStatus("系统调度失败");
				trace.setExecutionLog(ExceptionUtils.toString(e));
				//任务类型
				trace.setType("trans");
				String execMethod="";
				if(executionConfiguration.isExecutingLocally()){
					execMethod="本地";
				}else{
					execMethod="远程:"+executionConfiguration.getRemoteServer().getHostname();
				}
				trace.setExecMethod(execMethod);
				//executionConfigration
				net.sf.json.JSONObject json=new net.sf.json.JSONObject();
				String xml=executionConfiguration.getXML();
				//解析xml
				Document doc = mxUtils.parseXml(xml);
				Element root = doc.getDocumentElement();
				NodeList items1=root.getChildNodes();
				for(int i=0;i<items1.getLength();i++){
					Node node1=items1.item(i);
					if(node1.getNodeType()!=3){
						//判断一级节点下是否还存在子节点
						NodeList items2=node1.getChildNodes();
						if(items2.getLength()>1){
							net.sf.json.JSONArray array=new net.sf.json.JSONArray();
							for(int j=0;j<items2.getLength();j++){
								Node node2=items2.item(j);
								if(node2.getNodeType()!=3){
									net.sf.json.JSONObject obj=new net.sf.json.JSONObject();
									obj.put(node2.getNodeName(),node2.getTextContent().replaceAll("\n","").trim());
									array.add(obj);
								}
							}
							json.put(node1.getNodeName(),array);
						}else{
							json.put(node1.getNodeName(),node1.getTextContent().replaceAll("\n","").trim());
						}
					}
				}
				trace.setExecutionConfiguration(json.toString());
			} catch (Exception e1) {
				e1.printStackTrace();
			}
		} finally {
			finished = true;
			SqlSession session=MybatisDaoSuppo.sessionFactory.openSession();
			session.insert("org.sxdata.jingwei.dao.ExecutionTraceDao.addExecutionTrace",trace);
			session.commit();
			session.close();
		}
	}
	public void stop(){
		if(null!=job){
			job.stopAll();
		}
	}
	public int previousNrItems;
	public JSONArray getJobMeasure() throws Exception {
    	JSONArray jsonArray = new JSONArray();
    	if(executionConfiguration.isExecutingLocally()) {
    		JobTracker jobTracker = job.getJobTracker();
        	int nrItems = jobTracker.getTotalNumberOfItems();
        	if ( nrItems != previousNrItems ) {
                // Re-populate this...
                String jobName = jobTracker.getJobName();

    			if (Const.isEmpty(jobName)) {
    				if (!Const.isEmpty(jobTracker.getJobFilename())) {
    					jobName = jobTracker.getJobFilename();
    				} else {
    					jobName = BaseMessages.getString(PKG, "JobLog.Tree.StringToDisplayWhenJobHasNoName");
    				}
    			}
    			
    			JSONObject jsonObject = new JSONObject();
    			jsonObject.put("name", jobName);
    			jsonObject.put("expanded", true);

    			JSONArray children = new JSONArray();
                for ( int i = 0; i < jobTracker.nrJobTrackers(); i++ ) {
                	JSONObject jsonObject2 = addTrackerToTree(jobTracker.getJobTracker(i));
                	if(jsonObject2 != null)
                		children.add(jsonObject2);
                }
                jsonObject.put("children", children);
                jsonArray.add(jsonObject);
                
                previousNrItems = nrItems;
        	}
    	}
    	return jsonArray;
	}
	
	private JSONObject addTrackerToTree( JobTracker jobTracker ) {
		JSONObject jsonObject = new JSONObject();
		if ( jobTracker != null ) {
			if ( jobTracker.nrJobTrackers() > 0 ) {
	    		  // This is a sub-job: display the name at the top of the list...
	    		  jsonObject.put("name", BaseMessages.getString( PKG, "JobLog.Tree.JobPrefix" ) + jobTracker.getJobName() );
	    		  jsonObject.put("expanded", true);
	    		  JSONArray children = new JSONArray();
	    		  // then populate the sub-job entries ...
	    		  for ( int i = 0; i < jobTracker.nrJobTrackers(); i++ ) {
	    			  JSONObject jsonObject2 = addTrackerToTree( jobTracker.getJobTracker( i ) );
	    			  if(jsonObject2 != null)
	    				  children.add(jsonObject2);
	    		  }
	    		  jsonObject.put("children", children);
			} else {
	        	JobEntryResult result = jobTracker.getJobEntryResult();
	        	if ( result != null ) {
	        		String jobEntryName = result.getJobEntryName();
					if (!Const.isEmpty(jobEntryName)) {
						jsonObject.put("name", jobEntryName);
						jsonObject.put("fileName", Const.NVL(result.getJobEntryFilename(), ""));
					} else {
						jsonObject.put("name", BaseMessages.getString(PKG, "JobLog.Tree.JobPrefix2") + jobTracker.getJobName());
					}
					String comment = result.getComment();
					if (comment != null) {
						jsonObject.put("comment", comment);
					}
					Result res = result.getResult();
					if ( res != null ) {
						jsonObject.put("result",  res.getResult() ? BaseMessages.getString( PKG, "JobLog.Tree.Success" ) : BaseMessages.getString(PKG, "JobLog.Tree.Failure" ));
	              		jsonObject.put("number", Long.toString( res.getEntryNr()));
					}
					String reason = result.getReason();
					if (reason != null) {
						jsonObject.put("reason", reason);
					}
					Date logDate = result.getLogDate();
					if (logDate != null) {
						jsonObject.put("logDate", new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(logDate));
					}
					jsonObject.put("leaf", true);
	          } else 
	        	  return null;
	        }
	      } else 
	    	  return null;
		return jsonObject;
	}

	public String getExecutionLog() throws Exception {
		if(executionConfiguration.isExecutingLocally()) {
			StringBuffer sb = new StringBuffer();
			KettleLogLayout logLayout = new KettleLogLayout( true );
			List<String> childIds = LoggingRegistry.getInstance().getLogChannelChildren( job.getLogChannelId() );
			List<KettleLoggingEvent> logLines = KettleLogStore.getLogBufferFromTo( childIds, true, -1, KettleLogStore.getLastBufferLineNr() );
			 for ( int i = 0; i < logLines.size(); i++ ) {
	             KettleLoggingEvent event = logLines.get( i );
	             String line = logLayout.format( event ).trim();
	             sb.append(line).append("\n");
			 }
			 return sb.toString();
    	} else {
    		SlaveServer remoteSlaveServer = executionConfiguration.getRemoteServer();
			SlaveServerJobStatus jobStatus = remoteSlaveServer.getJobStatus(jobMeta.getName(), carteObjectId, 0);
			return jobStatus.getLoggingString();
    	}
		
	}


}

 

 

// Obtain the Job instance here; it is started later in the execution flow
job = new Job( App.getInstance().getRepository(), jobMeta, spoonLoggingObject );
/**
 * Opens a connection to a Kettle database repository described by the given
 * {@code KRepository} settings and caches it by repository id.
 *
 * @param kRepository repository connection settings; may be {@code null}
 * @return a connected {@link KettleDatabaseRepository}, or {@code null} when
 *         {@code kRepository} is {@code null}
 * @throws KettleException if the repository cannot be initialised or connected
 */
	public static KettleDatabaseRepository connectionRepository(KRepository kRepository) throws KettleException{
		if (kRepository == null) {
			return null;
		}
		// Database connection metadata for the repository schema.
		DatabaseMeta dbMeta = new DatabaseMeta(null, kRepository.getRepositoryType(),
				kRepository.getDatabaseAccess(), kRepository.getDatabaseHost(), kRepository.getDatabaseName(),
				kRepository.getDatabasePort(), kRepository.getDatabaseUsername(), kRepository.getDatabasePassword());
		dbMeta.setForcingIdentifiersToUpperCase(true);
		// Repository metadata wrapping that connection.
		KettleDatabaseRepositoryMeta repoMeta = new KettleDatabaseRepositoryMeta();
		repoMeta.setConnection(dbMeta);
		// Initialise the repository and log in with the repository credentials.
		KettleDatabaseRepository repo = new KettleDatabaseRepository();
		repo.init(repoMeta);
		repo.connect(kRepository.getRepositoryUsername(), kRepository.getRepositoryPassword());
		// Cache the live connection keyed by repository id for later reuse.
		if (kRepository.getRepositoryId() != null) {
			KettleDatabaseRepositoryCatch.put(kRepository.getRepositoryId(), repo);
		}
		return repo;
	}

 

评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值