Java Code Examples for org.pentaho.di.core.logging.LogChannel#GENERAL
The following examples show how to use org.pentaho.di.core.logging.LogChannel#GENERAL; each example names the project and source file it was taken from.
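LogChannel.GENERAL is a shared, statically created LogChannelInterface that Kettle code falls back to whenever no more specific channel (per transformation, job, or step) is available, which is exactly the pattern the examples below show. As a quick orientation, here is a minimal usage sketch; it assumes a standard Kettle client setup where KettleEnvironment.init() is called once at startup:

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterface;

public class GeneralLogSketch {
  public static void main( String[] args ) throws Exception {
    // Initialize the Kettle environment (plugin registry, logging back end).
    KettleEnvironment.init();

    // GENERAL is shared; no per-object channel needs to be constructed.
    LogChannelInterface log = LogChannel.GENERAL;
    log.logBasic( "A message logged at the basic level" );
    if ( log.isDebug() ) {
      log.logDebug( "Only emitted when the log level includes debug" );
    }
    log.logError( "An error with its cause", new RuntimeException( "example" ) );
  }
}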
Example 1
Source File: EditRowsDialog.java From pentaho-pdi-dataset with Apache License 2.0
public EditRowsDialog( Shell parent, int style, String title, String message,
    RowMetaInterface rowMeta, List<Object[]> rowBuffer ) {
  this.title = title;
  this.message = message;
  this.rowBuffer = rowBuffer;
  this.rowMeta = rowMeta;
  this.parentShell = parent;
  this.style = ( style != SWT.NONE ) ? style : this.style;

  props = PropsUI.getInstance();
  bounds = null;
  hscroll = -1;
  vscroll = -1;
  title = null;
  message = null;

  this.log = LogChannel.GENERAL;
}
Example 2
Source File: EditRowsDialog.java From pentaho-kettle with Apache License 2.0
public EditRowsDialog( Shell parent, int style, String title, String message,
    RowMetaInterface rowMeta, List<Object[]> rowBuffer ) {
  this.title = title;
  this.message = message;
  this.rowBuffer = rowBuffer;
  this.rowMeta = rowMeta;
  this.parentShell = parent;
  this.style = ( style != SWT.NONE ) ? style : this.style;

  props = PropsUI.getInstance();
  bounds = null;
  hscroll = -1;
  vscroll = -1;
  title = null;
  message = null;

  this.log = LogChannel.GENERAL;
}
Example 3
Source File: TransMetaPipelineConverter.java From kettle-beam with Apache License 2.0
public Pipeline createPipeline( PipelineOptions pipelineOptions ) throws Exception {

  LogChannelInterface log = LogChannel.GENERAL;

  // Create a new Pipeline
  //
  RunnerType runnerType = RunnerType.getRunnerTypeByName( beamJobConfig.getRunnerTypeName() );
  Class<? extends PipelineRunner<?>> runnerClass = getPipelineRunnerClass( runnerType );

  pipelineOptions.setRunner( runnerClass );
  Pipeline pipeline = Pipeline.create( pipelineOptions );

  pipeline.getCoderRegistry().registerCoderForClass( KettleRow.class, new KettleRowCoder() );

  log.logBasic( "Created pipeline job with name '" + pipelineOptions.getJobName() + "'" );

  // Keep track of which step outputs which Collection
  //
  Map<String, PCollection<KettleRow>> stepCollectionMap = new HashMap<>();

  // Handle io
  //
  handleBeamInputSteps( log, stepCollectionMap, pipeline );

  // Transform all the other steps...
  //
  handleGenericStep( stepCollectionMap, pipeline );

  // Output handling
  //
  handleBeamOutputSteps( log, stepCollectionMap, pipeline );

  return pipeline;
}
Example 4
Source File: JobMeta.java From pentaho-kettle with Apache License 2.0
/**
 * Clears or reinitializes many of the JobMeta properties.
 */
@Override
public void clear() {
  jobcopies = new ArrayList<JobEntryCopy>();
  jobhops = new ArrayList<JobHopMeta>();

  jobLogTable = JobLogTable.getDefault( this, this );
  jobEntryLogTable = JobEntryLogTable.getDefault( this, this );
  extraLogTables = new ArrayList<LogTableInterface>();

  List<PluginInterface> plugins = PluginRegistry.getInstance().getPlugins( LogTablePluginType.class );
  for ( PluginInterface plugin : plugins ) {
    try {
      LogTablePluginInterface logTablePluginInterface =
        (LogTablePluginInterface) PluginRegistry.getInstance().loadClass( plugin );
      if ( logTablePluginInterface.getType() == TableType.JOB ) {
        logTablePluginInterface.setContext( this, this );
        extraLogTables.add( logTablePluginInterface );
      }
    } catch ( Exception e ) {
      LogChannel.GENERAL.logError( "Error loading log table plugin with ID " + plugin.getIds()[0], e );
    }
  }

  arguments = null;

  super.clear();
  loopCache = new HashMap<String, Boolean>();
  addDefaults();
  jobStatus = -1;
  jobVersion = null;

  // setInternalKettleVariables(); Don't clear the internal variables for
  // ad-hoc jobs, it ruins the previews etc.

  log = LogChannel.GENERAL;
}
Example 5
Source File: JobMeta.java From pentaho-kettle with Apache License 2.0
@Override
public NamedClusterEmbedManager getNamedClusterEmbedManager() {
  if ( namedClusterEmbedManager == null ) {
    namedClusterEmbedManager = new NamedClusterEmbedManager( this, LogChannel.GENERAL );
  }
  return namedClusterEmbedManager;
}
Example 6
Source File: Trans.java From pentaho-kettle with Apache License 2.0
/**
 * Instantiates a new transformation using any of the provided parameters including the variable bindings, a
 * repository, a name, a repository directory name, and a filename. This is a multi-purpose method that supports
 * loading a transformation from a file (if the filename is provided but not a repository object) or from a
 * repository (if the repository object, repository directory name, and transformation name are specified).
 *
 * @param parent   the parent variable space and named params
 * @param rep      the repository
 * @param name     the name of the transformation
 * @param dirname  the repository directory name
 * @param filename the filename containing the transformation definition
 * @throws KettleException if any error occurs during loading, parsing, or creation of the transformation
 */
public <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name,
    String dirname, String filename ) throws KettleException {
  this();
  try {
    if ( rep != null ) {
      RepositoryDirectoryInterface repdir = rep.findDirectory( dirname );
      if ( repdir != null ) {
        this.transMeta = rep.loadTransformation( name, repdir, null, false, null ); // reads last version
      } else {
        throw new KettleException( BaseMessages.getString( PKG, "Trans.Exception.UnableToLoadTransformation",
          name, dirname ) );
      }
    } else {
      transMeta = new TransMeta( filename, false );
    }

    this.log = LogChannel.GENERAL;

    transMeta.initializeVariablesFrom( parent );
    initializeVariablesFrom( parent );

    // PDI-3064 do not erase parameters from meta!
    // Instead, copy parameters to the actual transformation.
    this.copyParametersFrom( parent );
    this.activateParameters();
    this.setDefaultLogCommitSize();

    // Get a valid transactionId in case we run database transactional.
    transactionId = calculateTransactionId();
    threadName = transactionId; // backward compatibility, but deprecated!
  } catch ( KettleException e ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "Trans.Exception.UnableToOpenTransformation", name ), e );
  }
}
Example 7
Source File: MainBeam.java From kettle-beam with Apache License 2.0
public static final int mainMethod( final String[] args, final String environment ) {
  try {
    System.out.println( "Starting clustered transformation execution on environment: '" + environment + "'" );
    System.out.println( "Transformation ktr / args[0] : " + args[ 0 ] );
    System.out.println( "MetaStore JSON / args[1] : " + args[ 1 ] );
    System.out.println( "Beam Job Config / args[2] : " + args[ 2 ] );

    // Read the transformation XML and MetaStore from Hadoop FS
    //
    Configuration hadoopConfiguration = new Configuration();
    String transMetaXml = readFileIntoString( args[ 0 ], hadoopConfiguration, "UTF-8" );
    String metaStoreJson = readFileIntoString( args[ 1 ], hadoopConfiguration, "UTF-8" );

    // Third argument: the beam job config
    //
    String jobConfigName = args[ 2 ];

    // Inflate the metaStore...
    //
    IMetaStore metaStore = new SerializableMetaStore( metaStoreJson );

    System.out.println( ">>>>>> Loading Kettle Beam Job Config '" + jobConfigName + "'" );
    MetaStoreFactory<BeamJobConfig> configFactory =
      new MetaStoreFactory<>( BeamJobConfig.class, metaStore, PentahoDefaults.NAMESPACE );
    BeamJobConfig jobConfig = configFactory.loadElement( jobConfigName );

    List<String> stepPluginsList = new ArrayList<>( Arrays.asList( Const.NVL( jobConfig.getStepPluginClasses(), "" ).split( "," ) ) );
    List<String> xpPluginsList = new ArrayList<>( Arrays.asList( Const.NVL( jobConfig.getXpPluginClasses(), "" ).split( "," ) ) );

    System.out.println( ">>>>>> Initializing Kettle runtime (" + stepPluginsList.size() + " step classes, "
      + xpPluginsList.size() + " XP classes)" );
    BeamKettle.init( stepPluginsList, xpPluginsList );

    System.out.println( ">>>>>> Loading transformation metadata" );
    TransMeta transMeta = new TransMeta( XMLHandler.loadXMLString( transMetaXml, TransMeta.XML_TAG ), null );
    transMeta.setMetaStore( metaStore );

    String hadoopConfDir = System.getenv( "HADOOP_CONF_DIR" );
    System.out.println( ">>>>>> HADOOP_CONF_DIR='" + hadoopConfDir + "'" );

    System.out.println( ">>>>>> Building Apache Beam Kettle Pipeline..." );
    PluginRegistry registry = PluginRegistry.getInstance();
    PluginInterface beamInputPlugin = registry.getPlugin( StepPluginType.class, BeamConst.STRING_BEAM_INPUT_PLUGIN_ID );
    if ( beamInputPlugin != null ) {
      System.out.println( ">>>>>> Found Beam Input step plugin" );
    } else {
      throw new KettleException( "Unable to find Beam Input step plugin, bailing out!" );
    }
    ClassLoader pluginClassLoader = PluginRegistry.getInstance().getClassLoader( beamInputPlugin );
    if ( pluginClassLoader != null ) {
      System.out.println( ">>>>>> Found Beam Input step plugin class loader" );
    } else {
      System.out.println( ">>>>>> NOT found Beam Input step plugin class loader, using system classloader" );
      pluginClassLoader = ClassLoader.getSystemClassLoader();
    }

    KettleBeamPipelineExecutor executor = new KettleBeamPipelineExecutor( LogChannel.GENERAL, transMeta, jobConfig,
      metaStore, pluginClassLoader, stepPluginsList, xpPluginsList );

    System.out.println( ">>>>>> Pipeline execution starting..." );
    executor.setLoggingMetrics( true );
    executor.execute( true );
    System.out.println( ">>>>>> Execution finished..." );
    return 0;
  } catch ( Exception e ) {
    System.err.println( "Error running Beam pipeline on '" + environment + "': " + e.getMessage() );
    e.printStackTrace();
    return 1;
  }
}
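This mainMethod is written to be wrapped by thin, environment-specific entry points that pass the runtime's command-line arguments straight through. A hypothetical wrapper for a Spark deployment might look like the following (the class name and environment string are illustrative, not taken from the project):

// Illustrative entry point; delegates argument handling and execution to MainBeam.
public class MainSparkSketch {
  public static void main( String[] args ) {
    System.exit( MainBeam.mainMethod( args, "Apache Spark" ) );
  }
}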
Example 8
Source File: DatabaseMeta.java From pentaho-kettle with Apache License 2.0
/**
 * For testing
 */
protected LogChannelInterface getGeneralLogger() {
  return LogChannel.GENERAL;
}
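The point of routing the static field through this protected accessor is that unit tests can override it and capture log output instead of writing to the shared GENERAL channel. A hypothetical sketch of that seam, assuming Mockito and a test-only subclass (TestableDatabaseMeta is not Pentaho code):

import static org.mockito.Mockito.mock;

import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.logging.LogChannelInterface;

// Test-only subclass: code under test now logs to a mock instead of LogChannel.GENERAL.
public class TestableDatabaseMeta extends DatabaseMeta {
  private final LogChannelInterface testLog = mock( LogChannelInterface.class );

  @Override
  protected LogChannelInterface getGeneralLogger() {
    return testLog;
  }
}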
Example 9
Source File: AbstractDelegate.java From pentaho-kettle with Apache License 2.0
public AbstractDelegate() {
  log = LogChannel.GENERAL;
}