( );
+
+ public Log4JRepositoryImportLogger( ) {
+ }
- public Log4JRepositoryImportLogger() {
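+  /**
+   * Starts an import job whose log output is rendered with the supplied layout (for example a
+   * RepositoryTextLayout) rather than the logger's default layout.
+   */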
+ public void startJob( OutputStream outputStream, String importRootPath, Level logLevel, StringLayout layout ) {
+ repositoryImportLog.set( new Log4JRepositoryImportLog( outputStream, importRootPath, logLevel, layout ) );
+ getLog4JRepositoryImportLog( ).setCurrentFilePath( getLog4JRepositoryImportLog( ).getImportRootPath( ) );
+ getLogger().info( "Start Import Job" );
}
public void startJob( OutputStream outputStream, String importRootPath, Level logLevel ) {
repositoryImportLog.set( new Log4JRepositoryImportLog( outputStream, importRootPath, logLevel ) );
- getLog4JRepositoryImportLog().setCurrentFilePath( getLog4JRepositoryImportLog().getImportRootPath() );
+ getLog4JRepositoryImportLog( ).setCurrentFilePath( getLog4JRepositoryImportLog( ).getImportRootPath( ) );
getLogger().info( "Start Import Job" );
}
- public void endJob() {
- getLog4JRepositoryImportLog().setCurrentFilePath( getLog4JRepositoryImportLog().getImportRootPath() );
+ public void endJob( ) {
+ getLog4JRepositoryImportLog( ).setCurrentFilePath( getLog4JRepositoryImportLog( ).getImportRootPath( ) );
+ getLog4JRepositoryImportLog( ).endJob( );
getLogger().info( "End Import Job" );
- getLog4JRepositoryImportLog().endJob();
}
public void setCurrentFilePath( String currentFilePath ) {
- getLog4JRepositoryImportLog().setCurrentFilePath( currentFilePath );
- getLogger().info( "Start File Import" );
+ getLog4JRepositoryImportLog( ).setCurrentFilePath( currentFilePath );
}
public void info( String s ) {
- getLogger().info( s );
+ getLogger( ).info( s );
}
public void error( String s ) {
- getLogger().error( s );
+ getLogger( ).error( s );
}
public void debug( String s ) {
- getLogger().debug( s );
+ getLogger( ).debug( s );
}
public void warn( String s ) {
- getLogger().debug( s );
+ getLogger( ).warn( s );
}
@Override
public void error( Exception e ) {
- getLogger().error( e.getMessage(), e );
+ getLogger( ).error( e.getMessage( ), e );
}
- private Log4JRepositoryImportLog getLog4JRepositoryImportLog() {
- Log4JRepositoryImportLog currentLog = repositoryImportLog.get();
+ private Log4JRepositoryImportLog getLog4JRepositoryImportLog( ) {
+ Log4JRepositoryImportLog currentLog = repositoryImportLog.get( );
if ( currentLog == null ) {
throw new IllegalStateException( "No job started for current Thread" );
}
return currentLog;
}
- private Logger getLogger() {
- return getLog4JRepositoryImportLog().getLogger();
+ private Logger getLogger( ) {
+ return getLog4JRepositoryImportLog( ).getLogger( );
}
- public boolean hasLogger() {
- return ( repositoryImportLog.get() == null ) ? false : true;
+ public boolean hasLogger( ) {
+ return repositoryImportLog.get( ) != null;
}
@Override
public void debug( Object arg0 ) {
- getLogger().debug( arg0 );
+ getLogger( ).debug( arg0 );
}
@Override
public void debug( Object arg0, Throwable arg1 ) {
- getLogger().debug( arg0, arg1 );
+ getLogger( ).debug( arg0, arg1 );
}
@Override
public void error( Object arg0 ) {
- getLogger().error( arg0 );
+ getLogger( ).error( arg0 );
}
@Override
public void error( Object arg0, Throwable arg1 ) {
- getLogger().error( arg0, arg1 );
+ getLogger( ).error( arg0, arg1 );
}
@Override
public void fatal( Object arg0 ) {
- getLogger().fatal( arg0 );
+ getLogger( ).fatal( arg0 );
}
@Override
public void fatal( Object arg0, Throwable arg1 ) {
- getLogger().fatal( arg0, arg1 );
+ getLogger( ).fatal( arg0, arg1 );
}
@Override
public void info( Object arg0 ) {
- getLogger().info( arg0 );
+ getLogger( ).info( arg0 );
}
@Override
public void info( Object arg0, Throwable arg1 ) {
- getLogger().info( arg0, arg1 );
+ getLogger( ).info( arg0, arg1 );
}
@Override
- public boolean isDebugEnabled() {
- return getLogger().isDebugEnabled();
+ public boolean isDebugEnabled( ) {
+ return getLogger( ).isDebugEnabled( );
}
@Override
- public boolean isErrorEnabled() {
- return Level.ERROR.isMoreSpecificThan( getLogger().getLevel() );
+ public boolean isErrorEnabled( ) {
+ return Level.ERROR.isMoreSpecificThan( getLogger( ).getLevel( ) );
}
@Override
- public boolean isFatalEnabled() {
- return Level.FATAL.isMoreSpecificThan( getLogger().getLevel() );
+ public boolean isFatalEnabled( ) {
+ return Level.FATAL.isMoreSpecificThan( getLogger( ).getLevel( ) );
}
@Override
- public boolean isInfoEnabled() {
- return getLogger().isInfoEnabled();
+ public boolean isInfoEnabled( ) {
+ return getLogger( ).isInfoEnabled( );
}
@Override
- public boolean isTraceEnabled() {
- return getLogger().isTraceEnabled();
+ public boolean isTraceEnabled( ) {
+ return getLogger( ).isTraceEnabled( );
}
@Override
- public boolean isWarnEnabled() {
- return Level.WARN.isMoreSpecificThan( getLogger().getLevel() );
+ public boolean isWarnEnabled( ) {
+ return Level.WARN.isMoreSpecificThan( getLogger( ).getLevel( ) );
}
@Override
public void trace( Object arg0 ) {
- getLogger().trace( arg0 );
+ getLogger( ).trace( arg0 );
}
@Override
public void trace( Object arg0, Throwable arg1 ) {
- getLogger().trace( arg0, arg1 );
+ getLogger( ).trace( arg0, arg1 );
}
@Override
public void warn( Object arg0 ) {
- getLogger().warn( arg0 );
+ getLogger( ).warn( arg0 );
}
@Override
public void warn( Object arg0, Throwable arg1 ) {
- getLogger().warn( arg0, arg1 );
+ getLogger( ).warn( arg0, arg1 );
}
}
diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/RepositoryTextLayout.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/RepositoryTextLayout.java
new file mode 100644
index 00000000000..683104ae63d
--- /dev/null
+++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/RepositoryTextLayout.java
@@ -0,0 +1,182 @@
+/*! ******************************************************************************
+ *
+ * Pentaho
+ *
+ * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com
+ *
+ * Use of this software is governed by the Business Source License included
+ * in the LICENSE.TXT file.
+ *
+ * Change Date: 2029-07-20
+ ******************************************************************************/
+
+package org.pentaho.platform.plugin.services.importexport;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.ThreadContext;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.StringLayout;
+import org.apache.logging.log4j.core.layout.ByteBufferDestination;
+import org.apache.logging.log4j.status.StatusLogger;
+import org.apache.logging.log4j.util.Strings;
+
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Map;
+
+/**
+ * This class was derived from the Log4j HTML layout.
+ *
+ * Appenders using this layout should have their encoding set to UTF-8 or UTF-16, otherwise events containing
+ * non-ASCII characters could result in corrupted log files.
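+ *
+ * A minimal usage sketch (mirroring how FileService wires the export log in this change): create the layout with
+ * the desired log level and pass it to the logger's startJob together with the stream the log is written to:
+ * <pre>
+ *   RepositoryTextLayout layout = new RepositoryTextLayout( Level.INFO );
+ *   exportLogger.startJob( outputStream, Level.INFO, layout );
+ * </pre>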
+ *
+ * @author tkafalas
+ */
+public class RepositoryTextLayout implements StringLayout {
+
+ protected static final int BUF_SIZE = 256;
+ protected static final int MAX_CAPACITY = 1024;
+ public static final String LINE_SEP = System.getProperty( "line.separator" );
+ private static final String REGEXP = Strings.LINE_SEPARATOR.equals( "\n" ) ? "\n" : Strings.LINE_SEPARATOR + "|\n";
+
+ private Level loggerLogLevel = Level.DEBUG;
+
+ // output buffer appended to when format() is invoked
+ private StringBuffer sbuf = new StringBuffer( BUF_SIZE );
+
+ String title = "Log4J Log Messages";
+
+ public RepositoryTextLayout( Level loggerLogLevel ) {
+ super( );
+ this.loggerLogLevel = loggerLogLevel;
+ }
+
+ /**
+ * The Title option takes a String value. This option sets the title written at the top of the generated log output.
+ *
+ *
+ * Defaults to 'Log4J Log Messages'.
+ */
+ public void setTitle( String title ) {
+ this.title = title;
+ }
+
+ /**
+ * Returns the current value of the Title option.
+ */
+ public String getTitle( ) {
+ return title;
+ }
+
+ /**
+ * Returns the content type output by this layout, i.e. "text/plain".
+ */
+ public String getContentType( ) {
+ return "text/plain";
+ }
+
+ @Override
+ public Map getContentFormat( ) {
+ return null;
+ }
+
+ /**
+ * No options to activate.
+ */
+ public void activateOptions( ) {
+ }
+
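+ /**
+  * Renders a single event as one tab-separated line: timestamp (MM/dd/yyyy HH:mm:ss), the current file path
+  * taken from the "currentFile" ThreadContext entry when present, the level when the level column is enabled,
+  * and the message.
+  */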
+ public String format( LogEvent event ) {
+
+ Level logLevel = event.getLevel( );
+ if ( sbuf.capacity( ) > MAX_CAPACITY ) {
+ sbuf = new StringBuffer( BUF_SIZE );
+ } else {
+ sbuf.setLength( 0 );
+ }
+
+ sbuf.append( LINE_SEP );
+
+ DateFormat df = new SimpleDateFormat( "MM/dd/yyyy HH:mm:ss" );
+ Date date = new Date( );
+ date.setTime( event.getTimeMillis( ) );
+ String time = null;
+ try {
+ time = df.format( date );
+ } catch ( Exception ex ) {
+ StatusLogger.getLogger( ).error( "Error occurred while converting date.", ex );
+ }
+
+ sbuf.append( time );
+
+ // File/Folder
+ String currentFile = ThreadContext.get( "currentFile" );
+ if ( currentFile != null && currentFile.length( ) > 0 ) {
+ sbuf.append( "\t" );
+ sbuf.append( currentFile );
+ }
+ // debug level
+ if ( showLevelColumn( ) ) {
+ sbuf.append( "\t" );
+ sbuf.append( String.valueOf( event.getLevel( ) ) );
+ }
+
+ // Message
+ sbuf.append( "\t" );
+ sbuf.append( event.getMessage( ) );
+
+ return sbuf.toString( );
+ }
+
+ /**
+ * Returns appropriate headers.
+ */
+ public byte[] getHeader( ) {
+ StringBuffer sbuf = new StringBuffer( );
+ sbuf.append( title );
+ return sbuf.toString( ).getBytes( StandardCharsets.UTF_8 );
+ }
+
+ @Override
+ public byte[] toByteArray( LogEvent event ) {
+ return format( event ).getBytes( StandardCharsets.UTF_8 );
+ }
+
+ @Override
+ public String toSerializable( LogEvent event ) {
+ return format( event );
+ }
+
+ /**
+ * Returns the appropriate footers.
+ */
+ public byte[] getFooter( ) {
+ StringBuffer sbuf = new StringBuffer( );
+ sbuf.append( "\n\nEnd of Log\n\n" );
+ return sbuf.toString( ).getBytes( StandardCharsets.UTF_8 );
+ }
+
+ /**
+ * The layout does not handle the throwable contained in logging events. Hence, this method returns {@code true}.
+ */
+ public boolean ignoresThrowable( ) {
+ return true;
+ }
+
+ private boolean showLevelColumn( ) {
+ return true;
+ }
+
+ @Override
+ public Charset getCharset( ) {
+ return StandardCharsets.UTF_8;
+ }
+
+ @Override
+ public void encode( LogEvent source, ByteBufferDestination destination ) {
+
+ }
+}
diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/SimpleExportProcessor.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/SimpleExportProcessor.java
index 9598a5951c1..5ec80685927 100644
--- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/SimpleExportProcessor.java
+++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/SimpleExportProcessor.java
@@ -19,6 +19,7 @@
import org.apache.commons.logging.LogFactory;
import org.pentaho.platform.api.repository2.unified.IUnifiedRepository;
import org.pentaho.platform.api.repository2.unified.RepositoryFile;
+import org.pentaho.platform.api.importexport.ExportException;
import java.io.File;
import java.io.FileNotFoundException;
@@ -53,9 +54,8 @@ public SimpleExportProcessor( String path, IUnifiedRepository repository ) {
/**
* Performs the export process, returns a File object
- *
- * @throws ExportException
- * indicates an error in import processing
+ *
+ * @throws ExportException indicates an error in export processing
*/
public File performExport( RepositoryFile exportRepositoryFile ) throws ExportException, IOException {
OutputStream os;
@@ -63,10 +63,10 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx
// create temp file
exportFile = File.createTempFile( EXPORT_TEMP_FILENAME_PREFIX, EXPORT_TEMP_FILENAME_EXT );
- exportFile.deleteOnExit();
+ exportFile.deleteOnExit( );
// get the file path
- String filePath = new File( this.path ).getParent();
+ String filePath = new File( this.path ).getParent( );
// send a response right away if not found
if ( exportRepositoryFile == null ) {
@@ -79,11 +79,11 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx
try {
exportFile( exportRepositoryFile, os, filePath );
} catch ( Exception e ) {
- log.error( e.getMessage() );
- throw ( new ExportException() );
+ log.error( e.getMessage( ) );
+ throw ( new ExportException( ) );
} finally {
// make sure to close output stream
- os.close();
+ os.close( );
}
// clean up
@@ -98,18 +98,17 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx
*/
@Override
public void exportDirectory( RepositoryFile repositoryDir, OutputStream outputStream, String filePath )
- throws ExportException, IOException {
- throw new UnsupportedOperationException();
+ throws ExportException, IOException {
+ throw new UnsupportedOperationException( );
}
/**
- *
* @param repositoryFile
* @param outputStream
*/
@Override
public void exportFile( RepositoryFile repositoryFile, OutputStream outputStream, String filePath )
- throws ExportException, IOException {
+ throws ExportException, IOException {
// iterate through handlers to perform export
for ( ExportHandler exportHandler : exportHandlerList ) {
@@ -120,7 +119,7 @@ public void exportFile( RepositoryFile repositoryFile, OutputStream outputStream
IOUtils.copy( is, outputStream );
- is.close();
+ is.close( );
}
}
}
diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessor.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessor.java
index 0b0b5a1964a..a29ed233aa1 100644
--- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessor.java
+++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessor.java
@@ -23,6 +23,7 @@
import org.pentaho.platform.api.repository2.unified.RepositoryFile;
import org.pentaho.platform.api.repository2.unified.RepositoryFileAcl;
import org.pentaho.platform.api.repository2.unified.RepositoryRequest;
+import org.pentaho.platform.api.importexport.ExportException;
import org.pentaho.platform.engine.core.system.PentahoSessionHolder;
import org.pentaho.platform.engine.core.system.PentahoSystem;
import org.pentaho.platform.plugin.services.importexport.exportManifest.ExportManifest;
@@ -76,29 +77,29 @@ public ZipExportProcessor( String path, IUnifiedRepository repository, boolean w
setUnifiedRepository( repository );
- this.exportHandlerList = new ArrayList<>();
+ this.exportHandlerList = new ArrayList<>( );
- initManifest();
+ initManifest( );
}
- protected void initManifest() {
- this.exportManifest = new ExportManifest();
+ protected void initManifest( ) {
+ this.exportManifest = new ExportManifest( );
// set created by and create date in manifest information
- IPentahoSession session = getSession();
+ IPentahoSession session = getSession( );
- Date todaysDate = new Date();
+ Date todaysDate = new Date( );
SimpleDateFormat dateFormat = new SimpleDateFormat( EXPORT_INFO_DATE_FORMAT );
SimpleDateFormat timeFormat = new SimpleDateFormat( EXPORT_INFO_TIME_FORMAT );
- exportManifest.getManifestInformation().setExportBy( session.getName() );
- exportManifest.getManifestInformation().setExportDate(
- dateFormat.format( todaysDate ) + " " + timeFormat.format( todaysDate ) );
- exportManifest.getManifestInformation().setManifestVersion( "2" );
+ exportManifest.getManifestInformation( ).setExportBy( session.getName( ) );
+ exportManifest.getManifestInformation( ).setExportDate(
+ dateFormat.format( todaysDate ) + " " + timeFormat.format( todaysDate ) );
+ exportManifest.getManifestInformation( ).setManifestVersion( "2" );
}
- protected IPentahoSession getSession() {
- return PentahoSessionHolder.getSession();
+ protected IPentahoSession getSession( ) {
+ return PentahoSessionHolder.getSession( );
}
/**
@@ -111,10 +112,10 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx
// create temp file
exportFile = File.createTempFile( EXPORT_TEMP_FILENAME_PREFIX, EXPORT_TEMP_FILENAME_EXT );
- exportFile.deleteOnExit();
+ exportFile.deleteOnExit( );
// get the file path
- String filePath = new File( this.path ).getParent();
+ String filePath = new File( this.path ).getParent( );
if ( filePath == null ) {
filePath = "/";
}
@@ -126,17 +127,17 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx
}
try ( ZipOutputStream zos = new ZipOutputStream( new FileOutputStream( exportFile ) ) ) {
- if ( exportRepositoryFile.isFolder() ) { // Handle recursive export
- exportManifest.getManifestInformation().setRootFolder( path.substring( 0, path.lastIndexOf( "/" ) + 1 ) );
+ if ( exportRepositoryFile.isFolder( ) ) { // Handle recursive export
+ exportManifest.getManifestInformation( ).setRootFolder( path.substring( 0, path.lastIndexOf( "/" ) + 1 ) );
// don't zip root folder without name
- if ( !ClientRepositoryPaths.getRootFolderPath().equals( exportRepositoryFile.getPath() ) ) {
+ if ( !ClientRepositoryPaths.getRootFolderPath( ).equals( exportRepositoryFile.getPath( ) ) ) {
zos.putNextEntry( new ZipEntry( getFixedZipEntryName( exportRepositoryFile, filePath ) ) );
}
exportDirectory( exportRepositoryFile, zos, filePath );
} else {
- exportManifest.getManifestInformation().setRootFolder( path.substring( 0, path.lastIndexOf( "/" ) + 1 ) );
+ exportManifest.getManifestInformation( ).setRootFolder( path.substring( 0, path.lastIndexOf( "/" ) + 1 ) );
exportFile( exportRepositoryFile, zos, filePath );
}
@@ -153,7 +154,7 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx
log.error( "Error generating export XML" );
}
- zos.closeEntry();
+ zos.closeEntry( );
}
}
@@ -180,13 +181,18 @@ public void exportFile( RepositoryFile repositoryFile, OutputStream outputStream
try ( InputStream is = exportHandler.doExport( repositoryFile, filePath ) ) {
// if we don't get a valid input stream back, skip it
if ( is != null ) {
+ getRepositoryExportLogger( ).debug( "Adding repository object [ " + repositoryFile.getName( ) + " ] to the manifest" );
addToManifest( repositoryFile );
+ getRepositoryExportLogger( ).debug( "Starting to add repository object [ " + repositoryFile.getName( ) + " ] to the export bundle" );
String zipEntryName = getFixedZipEntryName( repositoryFile, filePath );
ZipEntry entry = new ZipEntry( zipEntryName );
zos.putNextEntry( entry );
IOUtils.copy( is, outputStream );
- zos.closeEntry();
- createLocales( repositoryFile, filePath, repositoryFile.isFolder(), outputStream );
+ zos.closeEntry( );
+ getRepositoryExportLogger( ).debug( "Successfully added repository object [ " + repositoryFile.getName( ) + " ] to the export bundle" );
+ getRepositoryExportLogger( ).trace( "Starting to create locale entry for repository object [ " + ( ( repositoryFile != null ) ? repositoryFile.getName( ) : "" ) + " ] " );
+ createLocales( repositoryFile, filePath, repositoryFile.isFolder( ), outputStream );
+ getRepositoryExportLogger( ).trace( "Finished creating locale entry for repository object [ " + ( ( repositoryFile != null ) ? repositoryFile.getName( ) : "" ) + " ] " );
}
}
}
@@ -201,11 +207,11 @@ public void exportFile( RepositoryFile repositoryFile, OutputStream outputStream
protected void addToManifest( RepositoryFile repositoryFile ) throws ExportException {
if ( this.withManifest ) {
// add this entity to the manifest
- RepositoryFileAcl fileAcl = getUnifiedRepository().getAcl( repositoryFile.getId() );
+ RepositoryFileAcl fileAcl = getUnifiedRepository( ).getAcl( repositoryFile.getId( ) );
try {
- getExportManifest().add( repositoryFile, fileAcl );
+ getExportManifest( ).add( repositoryFile, fileAcl );
} catch ( ExportManifestFormatException e ) {
- throw new ExportException( e.getMessage() );
+ throw new ExportException( e.getMessage( ) );
}
}
}
@@ -217,35 +223,45 @@ protected void addToManifest( RepositoryFile repositoryFile ) throws ExportExcep
@Override
public void exportDirectory( RepositoryFile repositoryDir, OutputStream outputStream, String filePath ) throws
ExportException, IOException {
+ getRepositoryExportLogger( ).debug( "Adding repository object [ " + repositoryDir.getName( ) + " ] to the manifest" );
addToManifest( repositoryDir );
- List children = getUnifiedRepository().getChildren( new RepositoryRequest(
- String.valueOf( repositoryDir.getId() ), true, 1, null ) );
+ List children = getUnifiedRepository( ).getChildren( new RepositoryRequest(
+ String.valueOf( repositoryDir.getId( ) ), true, 1, null ) );
+ getRepositoryExportLogger( ).debug( "Found [ " + children.size( ) + " ] children in folder [ " + repositoryDir.getName( ) + " ]" );
for ( RepositoryFile repositoryFile : children ) {
// exclude 'etc' folder - datasources and etc.
- if ( isExportCandidate( repositoryFile.getPath() ) ) {
- if ( repositoryFile.isFolder() ) {
- if ( outputStream.getClass().isAssignableFrom( ZipOutputStream.class ) ) {
+ if ( isExportCandidate( repositoryFile.getPath( ) ) ) {
+ getRepositoryExportLogger( ).trace( "Repository object is a candidate for export [ " + repositoryFile.getName( ) + " ]" );
+ if ( repositoryFile.isFolder( ) ) {
+ getRepositoryExportLogger( ).debug( "Repository Object [ " + repositoryFile.getName( ) + " ] is a folder. Adding it to the export bundle" );
+ if ( outputStream.getClass( ).isAssignableFrom( ZipOutputStream.class ) ) {
ZipOutputStream zos = (ZipOutputStream) outputStream;
String zipEntryName = getFixedZipEntryName( repositoryFile, filePath );
ZipEntry entry = new ZipEntry( zipEntryName );
zos.putNextEntry( entry );
+ getRepositoryExportLogger( ).debug( "Successfully added repository Object [ " + repositoryFile.getName( ) + " ] to the export bundle" );
}
exportDirectory( repositoryFile, outputStream, filePath );
} else {
try {
+ getRepositoryExportLogger( ).debug( "Repository Object [ " + repositoryFile.getName( ) + " ] is a file. Adding it to the export bundle" );
exportFile( repositoryFile, outputStream, filePath );
} catch ( ZipException e ) {
// possible duplicate entry, log it and continue on with the other files in the directory
- log.debug( e.getMessage(), e );
+ log.debug( e.getMessage( ), e );
}
}
+ } else {
+ getRepositoryExportLogger( ).trace( "Repository object is not a candidate for export [ " + repositoryFile.getName( ) + " ], skipping it" );
}
}
- createLocales( repositoryDir, filePath, repositoryDir.isFolder(), outputStream );
+ getRepositoryExportLogger( ).trace( "Starting to create locale entry for repository object [ " + repositoryDir.getName( ) + " ] " );
+ createLocales( repositoryDir, filePath, repositoryDir.isFolder( ), outputStream );
+ getRepositoryExportLogger( ).trace( "Finished creating locale entry for repository object [ " + repositoryDir.getName( ) + " ] " );
}
protected boolean isExportCandidate( String path ) {
- return !ClientRepositoryPaths.getEtcFolderPath().equals( path );
+ return !ClientRepositoryPaths.getEtcFolderPath( ).equals( path );
}
/**
@@ -270,15 +286,15 @@ protected String getZipEntryName( RepositoryFile repositoryFile, String filePath
int filePathLength = 0;
if ( filePath.equals( "/" ) || filePath.equals( "\\" ) ) {
- filePathLength = filePath.length();
+ filePathLength = filePath.length( );
} else {
- filePathLength = filePath.length() + 1;
+ filePathLength = filePath.length( ) + 1;
}
- result = repositoryFile.getPath().substring( filePathLength );
+ result = repositoryFile.getPath( ).substring( filePathLength );
// add trailing slash for folders
- if ( repositoryFile.isFolder() ) {
+ if ( repositoryFile.isFolder( ) ) {
result += "/";
}
return FilenameUtils.normalize( result, true );
@@ -294,7 +310,7 @@ protected String getZipEntryName( RepositoryFile repositoryFile, String filePath
* @throws IOException
*/
protected void createLocales( RepositoryFile repositoryFile, String filePath, boolean isFolder,
- OutputStream outputStream ) throws IOException {
+ OutputStream outputStream ) throws IOException {
ZipEntry entry;
String zipEntryName;
String name;
@@ -303,26 +319,26 @@ protected void createLocales( RepositoryFile repositoryFile, String filePath, bo
ZipOutputStream zos = (ZipOutputStream) outputStream;
// only process files and folders that we know will have locale settings
if ( supportedLocaleFileExt( repositoryFile ) ) {
- List locales = getAvailableLocales( repositoryFile.getId() );
+ List locales = getAvailableLocales( repositoryFile.getId( ) );
zipEntryName = getFixedZipEntryName( repositoryFile, filePath );
- name = repositoryFile.getName();
+ name = repositoryFile.getName( );
for ( LocaleMapDto locale : locales ) {
- localeName = locale.getLocale().equalsIgnoreCase( "default" ) ? "" : "_" + locale.getLocale();
+ localeName = locale.getLocale( ).equalsIgnoreCase( "default" ) ? "" : "_" + locale.getLocale( );
if ( isFolder ) {
zipEntryName = getFixedZipEntryName( repositoryFile, filePath ) + "index";
name = "index";
}
- properties = getUnifiedRepository().getLocalePropertiesForFileById( repositoryFile.getId(), locale.getLocale() );
+ properties = getUnifiedRepository( ).getLocalePropertiesForFileById( repositoryFile.getId( ), locale.getLocale( ) );
if ( properties != null ) {
properties.remove( "jcr:primaryType" ); // Pentaho Type
- try ( InputStream is = createLocaleFile( name + localeName, properties, locale.getLocale() ) ) {
+ try ( InputStream is = createLocaleFile( name + localeName, properties, locale.getLocale( ) ) ) {
if ( is != null ) {
entry = new ZipEntry( zipEntryName + localeName + LOCALE_EXT );
zos.putNextEntry( entry );
IOUtils.copy( is, outputStream );
- zos.closeEntry();
+ zos.closeEntry( );
}
}
}
@@ -339,13 +355,13 @@ protected void createLocales( RepositoryFile repositoryFile, String filePath, bo
*/
private boolean supportedLocaleFileExt( RepositoryFile repositoryFile ) {
boolean ans = true;
- String ext = repositoryFile.getName();
- if ( !repositoryFile.isFolder() ) {
+ String ext = repositoryFile.getName( );
+ if ( !repositoryFile.isFolder( ) ) {
int idx = ext.lastIndexOf( "." );
if ( idx > 0 ) {
- ext = ext.substring( idx, ext.length() );
+ ext = ext.substring( idx, ext.length( ) );
}
- List exportList = getLocaleExportList();
+ List exportList = getLocaleExportList( );
if ( exportList != null ) {
ans = exportList.contains( ext );
}
@@ -361,11 +377,11 @@ private boolean supportedLocaleFileExt( RepositoryFile repositoryFile ) {
* @return
*/
private List getAvailableLocales( Serializable fileId ) {
- List availableLocales = new ArrayList();
- List locales = getUnifiedRepository().getAvailableLocalesForFileById( fileId );
- if ( locales != null && !locales.isEmpty() ) {
+ List availableLocales = new ArrayList( );
+ List locales = getUnifiedRepository( ).getAvailableLocalesForFileById( fileId );
+ if ( locales != null && !locales.isEmpty( ) ) {
for ( Locale locale : locales ) {
- availableLocales.add( new LocaleMapDto( locale.toString(), null ) );
+ availableLocales.add( new LocaleMapDto( locale.toString( ), null ) );
}
}
return availableLocales;
@@ -386,7 +402,7 @@ private InputStream createLocaleFile( String name, Properties properties, String
try {
localeFile = PentahoSystem
- .getApplicationContext().createTempFile( getSession(), ExportFileNameEncoder.encodeZipFileName( name ), LOCALE_EXT, true );
+ .getApplicationContext( ).createTempFile( getSession( ), ExportFileNameEncoder.encodeZipFileName( name ), LOCALE_EXT, true );
} catch ( IOException e ) {
// BISERVER-14140 - Retry when temp file name exceeds the limit of OS
// Retry inside a catch because there isn't an accurate mechanism to determine the effective temp file max length
@@ -394,10 +410,10 @@ private InputStream createLocaleFile( String name, Properties properties, String
String smallerName = ExportFileNameEncoder.encodeZipFileName( name ).substring( 0, SAFETY_TMP_FILE_SIZE );
log.debug( "Error with original name file. Retrying with a smaller temp file name - " + smallerName );
localeFile = PentahoSystem
- .getApplicationContext().createTempFile( getSession(), smallerName, LOCALE_EXT, true );
+ .getApplicationContext( ).createTempFile( getSession( ), smallerName, LOCALE_EXT, true );
} finally {
if ( localeFile != null ) {
- localeFile.deleteOnExit();
+ localeFile.deleteOnExit( );
}
}
@@ -414,10 +430,10 @@ private InputStream createLocaleFile( String name, Properties properties, String
*
* @return
*/
- public List getLocaleExportList() {
- if ( this.localeExportList == null || this.localeExportList.isEmpty() ) {
+ public List getLocaleExportList( ) {
+ if ( this.localeExportList == null || this.localeExportList.isEmpty( ) ) {
for ( ExportHandler exportHandler : exportHandlerList ) {
- this.localeExportList = ( (DefaultExportHandler) exportHandler ).getLocaleExportList();
+ this.localeExportList = ( (DefaultExportHandler) exportHandler ).getLocaleExportList( );
break;
}
}
@@ -428,7 +444,7 @@ public void setLocaleExportList( List localeExportList ) {
this.localeExportList = localeExportList;
}
- public ExportManifest getExportManifest() {
+ public ExportManifest getExportManifest( ) {
return exportManifest;
}
diff --git a/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/FileResource.java b/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/FileResource.java
index c5f1f69f1ad..c2c74ba8ff0 100755
--- a/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/FileResource.java
+++ b/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/FileResource.java
@@ -45,7 +45,7 @@
import org.pentaho.platform.engine.core.system.PentahoSessionHolder;
import org.pentaho.platform.engine.core.system.PentahoSystem;
import org.pentaho.platform.plugin.services.importer.PlatformImportException;
-import org.pentaho.platform.plugin.services.importexport.ExportException;
+import org.pentaho.platform.api.importexport.ExportException;
import org.pentaho.platform.plugin.services.importexport.Exporter;
import org.pentaho.platform.repository.RepositoryDownloadWhitelist;
import org.pentaho.platform.repository.RepositoryFilenameUtils;
@@ -83,6 +83,7 @@
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;
+import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.ContextResolver;
import javax.ws.rs.ext.Providers;
import javax.xml.bind.JAXBContext;
@@ -178,17 +179,22 @@ public static String idToPath( String pathId ) {
* Encrypted file stream
*
*/
- @GET
+ @POST
@Path( "/backup" )
+ @Consumes( MediaType.APPLICATION_FORM_URLENCODED )
@StatusCodes( {
@ResponseCode( code = 200, condition = "Successfully exported the existing Pentaho System" ),
+ @ResponseCode( code = 400, condition = "User has provided an invalid file path" ),
@ResponseCode( code = 403, condition = "User does not have administrative permissions" ),
@ResponseCode( code = 500, condition = "Failure to complete the export." ) } )
- public Response systemBackup( @HeaderParam ( "user-agent" ) String userAgent ) {
+ public Response systemBackup( @HeaderParam ( "user-agent" ) String userAgent, final MultivaluedMap<String, String> formParams ) {
FileService.DownloadFileWrapper wrapper;
try {
- wrapper = fileService.systemBackup( userAgent );
+ wrapper = fileService.systemBackup( userAgent, formParams.getFirst( "logFile" ), formParams
+ .getFirst( "logLevel" ), formParams.getFirst( "outputFile" ) );
return buildZipOkResponse( wrapper );
+ } catch ( IllegalArgumentException iae ) {
+ throw new WebApplicationException( iae, Response.Status.BAD_REQUEST );
} catch ( IOException e ) {
throw new WebApplicationException( e, Response.Status.INTERNAL_SERVER_ERROR );
} catch ( ExportException e ) {
@@ -212,13 +218,17 @@ public Response systemBackup( @HeaderParam ( "user-agent" ) String userAgent ) {
@Consumes( MediaType.MULTIPART_FORM_DATA )
@StatusCodes( {
@ResponseCode( code = 200, condition = "Successfully imported the Pentaho System" ),
+ @ResponseCode( code = 400, condition = "User has provided an invalid file path" ),
@ResponseCode( code = 403, condition = "User does not have administrative permissions" ),
@ResponseCode( code = 500, condition = "Failure to complete the import." ) } )
public Response systemRestore( @FormDataParam( "fileUpload" ) InputStream fileUpload, @FormDataParam ( "overwriteFile" ) String overwriteFile,
- @FormDataParam ( "applyAclSettings" ) String applyAclSettings, @FormDataParam ( "overwriteAclSettings" ) String overwriteAclSettings ) {
+ @FormDataParam ( "applyAclSettings" ) String applyAclSettings, @FormDataParam ( "overwriteAclSettings" ) String overwriteAclSettings,
+ @FormDataParam ( "logFile" ) String logFile, @FormDataParam ( "logLevel" ) String logLevel ) {
try {
- fileService.systemRestore( fileUpload, overwriteFile, applyAclSettings, overwriteAclSettings );
+ fileService.systemRestore( fileUpload, overwriteFile, applyAclSettings, overwriteAclSettings, logFile, logLevel );
return Response.ok().build();
+ } catch ( IllegalArgumentException iae ) {
+ throw new WebApplicationException( iae, Response.Status.BAD_REQUEST );
} catch ( PlatformImportException e ) {
throw new WebApplicationException( e, Response.Status.INTERNAL_SERVER_ERROR );
} catch ( SecurityException e ) {
diff --git a/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/services/FileService.java b/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/services/FileService.java
index 9860af02bfc..087feb6b85a 100644
--- a/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/services/FileService.java
+++ b/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/services/FileService.java
@@ -40,7 +40,9 @@
import org.pentaho.platform.api.repository2.unified.webservices.RepositoryFileDto;
import org.pentaho.platform.api.repository2.unified.webservices.RepositoryFileTreeDto;
import org.pentaho.platform.api.repository2.unified.webservices.StringKeyStringValueDto;
+import org.pentaho.platform.api.importexport.ExportException;
import org.pentaho.platform.api.util.IPentahoPlatformExporter;
+import org.pentaho.platform.api.util.IRepositoryExportLogger;
import org.pentaho.platform.engine.core.system.PentahoSessionHolder;
import org.pentaho.platform.engine.core.system.PentahoSystem;
import org.pentaho.platform.plugin.services.exporter.PentahoPlatformExporter;
@@ -49,7 +51,7 @@
import org.pentaho.platform.plugin.services.importer.RepositoryFileImportBundle;
import org.pentaho.platform.plugin.services.importexport.BaseExportProcessor;
import org.pentaho.platform.plugin.services.importexport.DefaultExportHandler;
-import org.pentaho.platform.plugin.services.importexport.ExportException;
+import org.pentaho.platform.plugin.services.importexport.RepositoryTextLayout;
import org.pentaho.platform.plugin.services.importexport.ExportHandler;
import org.pentaho.platform.plugin.services.importexport.IRepositoryImportLogger;
import org.pentaho.platform.plugin.services.importexport.ImportSession;
@@ -82,6 +84,7 @@
import javax.ws.rs.core.StreamingOutput;
import java.io.ByteArrayOutputStream;
import java.io.File;
+import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -95,6 +98,8 @@
import java.security.GeneralSecurityException;
import java.security.InvalidParameterException;
import java.text.Collator;
+import java.text.SimpleDateFormat;
+import java.util.Date;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -131,29 +136,76 @@ public class FileService {
private PentahoPlatformExporter backupExporter;
- public DownloadFileWrapper systemBackup( String userAgent ) throws IOException, ExportException {
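+ // Basic sanity check on a caller-supplied log file path: reject traversal sequences and doubled separators,
+ // and require a .txt or .log extension.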
+ private void validateFilePath( String logFile ) throws IllegalArgumentException {
+ if ( logFile.contains( ".." ) || logFile.contains( "//" ) || logFile.contains( "\\\\" ) || ( !logFile.endsWith( ".txt" ) && !logFile.endsWith( ".log" ) ) ) {
+ throw new IllegalArgumentException( Messages.getInstance().getString( "FileService.ERROR_INVALID_LOG_FILENAME", logFile ) );
+ }
+ }
+ public DownloadFileWrapper systemBackup( String userAgent, String logFile, String logLevel, String outputFile ) throws IllegalArgumentException, IOException, ExportException {
if ( doCanAdminister() ) {
- String originalFileName;
String encodedFileName;
- originalFileName = "SystemBackup.zip";
- encodedFileName = makeEncodedFileName( originalFileName );
- StreamingOutput streamingOutput = getBackupStream();
- final String attachment = HttpMimeTypeListener.buildContentDispositionValue( originalFileName, true );
-
+ encodedFileName = makeEncodedFileName( outputFile );
+ IRepositoryExportLogger exportLogger;
+ Level level = Level.valueOf( logLevel );
+ FileOutputStream fileOutputStream = null;
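+ // Try to open the caller-supplied log file; an invalid path fails fast with an IllegalArgumentException,
+ // while a file that cannot be created falls back to a timestamped log in the temp directory.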
+ try {
+ validateFilePath( logFile );
+ fileOutputStream = new FileOutputStream( logFile );
+ } catch ( FileNotFoundException e ) {
+ try {
+ fileOutputStream = retrieveFallbackLogFileLocation( "backup" );
+ } catch ( FileNotFoundException fileNotFoundException ) {
+ throw new ExportException( fileNotFoundException );
+ }
+ }
+ ByteArrayOutputStream exportLoggerStream = new ByteArrayOutputStream();
+ IPentahoPlatformExporter exporter = PentahoSystem.get( IPentahoPlatformExporter.class );
+ exportLogger = exporter.getRepositoryExportLogger();
+ RepositoryTextLayout stringLayout = new RepositoryTextLayout( level );
+ exportLogger.startJob( exportLoggerStream, level, stringLayout );
+ StreamingOutput streamingOutput = getBackupStream( );
+ exportLogger.endJob( );
+ try {
+ exportLoggerStream.writeTo( fileOutputStream );
+ } catch ( IOException e ) {
+ e.printStackTrace();
+ }
+ final String attachment = HttpMimeTypeListener.buildContentDispositionValue( outputFile, true );
return new DownloadFileWrapper( streamingOutput, attachment, encodedFileName );
} else {
throw new SecurityException();
}
}
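+ /**
+  * Builds a timestamped fallback log file under the java.io.tmpdir directory, used when the caller-supplied
+  * log file cannot be opened.
+  */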
+ public FileOutputStream retrieveFallbackLogFileLocation( String filePrefix ) throws FileNotFoundException {
+ String defaultBaseDir = System.getProperty( "java.io.tmpdir" );
+ // Get the current timestamp
+ SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyyMMdd_HHmmss" );
+ String timestamp = dateFormat.format( new Date() );
+ String fallbackLogFilePath = defaultBaseDir + File.separator + filePrefix + "_" + timestamp + ".log";
+ return new FileOutputStream( fallbackLogFilePath );
+ }
+
public void systemRestore( final InputStream fileUpload, String overwriteFile,
- String applyAclSettings, String overwriteAclSettings ) throws PlatformImportException, SecurityException {
+ String applyAclSettings, String overwriteAclSettings, String logFile, String logLevel ) throws IllegalArgumentException, PlatformImportException, SecurityException {
if ( doCanAdminister() ) {
boolean overwriteFileFlag = !"false".equals( overwriteFile );
boolean applyAclSettingsFlag = !"false".equals( applyAclSettings );
boolean overwriteAclSettingsFlag = "true".equals( overwriteAclSettings );
IRepositoryImportLogger importLogger;
- Level level = Level.ERROR;
+ Level level = Level.valueOf( logLevel );
+
+ FileOutputStream fileOutputStream = null;
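+ // Same fallback strategy as systemBackup: use the supplied log file if it can be opened, otherwise fall
+ // back to a timestamped log file in the temp directory.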
+ try {
+ validateFilePath( logFile );
+ fileOutputStream = new FileOutputStream( logFile );
+ } catch ( FileNotFoundException e ) {
+ try {
+ fileOutputStream = retrieveFallbackLogFileLocation( "restore" );
+ } catch ( FileNotFoundException fileNotFoundException ) {
+ throw new PlatformImportException( fileNotFoundException.getLocalizedMessage() );
+ }
+ }
ByteArrayOutputStream importLoggerStream = new ByteArrayOutputStream();
String importDirectory = "/";
RepositoryFileImportBundle.Builder bundleBuilder = new RepositoryFileImportBundle.Builder();
@@ -173,19 +225,25 @@ public void systemRestore( final InputStream fileUpload, String overwriteFile,
IPlatformImporter importer = PentahoSystem.get( IPlatformImporter.class );
importLogger = importer.getRepositoryImportLogger();
- importLogger.startJob( importLoggerStream, importDirectory, level );
+ RepositoryTextLayout stringLayout = new RepositoryTextLayout( level );
+ importLogger.startJob( importLoggerStream, importDirectory, level, stringLayout );
try {
importer.importFile( bundleBuilder.build() );
} finally {
importLogger.endJob();
+ try {
+ importLoggerStream.writeTo( fileOutputStream );
+ } catch ( IOException e ) {
+ e.printStackTrace();
+ }
}
} else {
throw new SecurityException();
}
}
- private StreamingOutput getBackupStream() throws IOException, ExportException {
- File zipFile = getBackupExporter().performExport();
+ private StreamingOutput getBackupStream( ) throws IOException, ExportException {
+ File zipFile = getBackupExporter().performExport( );
final FileInputStream inputStream = new FileInputStream( zipFile );
return new StreamingOutput() {
@@ -994,7 +1052,7 @@ public RepositoryFileDto doGetProperties( String pathId ) throws FileNotFoundExc
*/
public String doGetCanEdit() {
String editPermission = PentahoSystem.getSystemSetting( "edit-permission", "" );
- if( editPermission != null && editPermission.length() > 0 ) {
+ if ( editPermission != null && editPermission.length() > 0 ) {
return getPolicy().isAllowed( editPermission ) ? "true" : "false";
} else {
return "true";
diff --git a/extensions/src/main/resources/org/pentaho/platform/plugin/services/messages/messages.properties b/extensions/src/main/resources/org/pentaho/platform/plugin/services/messages/messages.properties
index db771ce9548..836b9ebe7f9 100644
--- a/extensions/src/main/resources/org/pentaho/platform/plugin/services/messages/messages.properties
+++ b/extensions/src/main/resources/org/pentaho/platform/plugin/services/messages/messages.properties
@@ -204,6 +204,7 @@ CommandLineProcessor.ERROR_0005_INVALID_FILE_PATH=Invalid file-path: {0}
CommandLineProcessor.ERROR_0006_NON_ADMIN_CREDENTIALS=Non admin credentials entered
CommandLineProcessor.ERROR_0007_FORBIDDEN=User is not allowed to perform this operation: {0}
CommandLineProcessor.ERROR_0008_INVALID_PARAMETER=Invalid parameter syntax: "{0}"
+CommandLineProcessor.ERROR_0009_INVALID_LOG_FILE_PATH=Invalid log file path: "{0}"
CommandLineProcessor.INFO_OPTION_HELP_DESCRIPTION=print this message
CommandLineProcessor.INFO_OPTION_IMPORT_DESCRIPTION=import
@@ -216,6 +217,7 @@ CommandLineProcessor.INFO_OPTION_URL_DESCRIPTION=url of repository (e.g. http://
CommandLineProcessor.INFO_OPTION_FILEPATH_DESCRIPTION=Path to directory of files for import, or path to .zip file for export
CommandLineProcessor.INFO_OPTION_CHARSET_DESCRIPTION=charset to use for the repository (characters from external systems converted to this charset)
CommandLineProcessor.INFO_OPTION_LOGFILE_DESCRIPTION=full path and filename of logfile messages
+CommandLineProcessor.INFO_OPTION_LOGLEVEL_DESCRIPTION=log level for the log file (e.g. INFO, DEBUG)
CommandLineProcessor.INFO_OPTION_PATH_DESCRIPTION=repository path to which to add imported files, or to export from (e.g. /public)
CommandLineProcessor.INFO_OPTION_OVERWRITE_DESCRIPTION=overwrite files (import only)
CommandLineProcessor.INFO_OPTION_PERMISSION_DESCRIPTION=apply ACL manifest permissions to files and folders (import only)
@@ -302,10 +304,85 @@ SolutionImportHandler.SkipLocaleFile=Skipping [{0}], it is a locale property fil
SolutionImportHandler.ConnectionWithoutDatabaseType=Can't import connection [{0}] because it doesn't have a databaseType.
SolutionImportHandler.SchedulesWithSpaces=Could not import schedule, attempting to replace spaces with underscores and retrying: {0}
+
+SolutionImportHandler.INFO_START_IMPORT_PROCESS=Starting the import process
+SolutionImportHandler.INFO_START_IMPORT_FILEFOLDER=*********************** [ Start: Import File/Folder(s) ] **************************************
+SolutionImportHandler.INFO_COUNT_FILEFOLDER=Found [ {0} ] files to import
+SolutionImportHandler.ERROR_IMPORTING_REPOSITORY_OBJECT=Error importing repository object with path [ {0} ] from the cache. Cause [ {1} ]
+SolutionImportHandler.INFO_SUCCESSFUL_REPOSITORY_IMPORT_COUNT=Successfully imported [ {0} ] out of [ {1} ]
+SolutionImportHandler.INFO_START_IMPORT_LOCALEFILE=****************************[ Start: Import Locale File(s) ] **********************************
+SolutionImportHandler.ERROR_IMPORTING_LOCALE_FILE=Error importing locale files. Cause [ {0} ]
+SolutionImportHandler.INFO_END_IMPORT_LOCALEFILE=****************************[ End: Import Locale File(s) ] **********************************
+SolutionImportHandler.INFO_END_IMPORT_FILEFOLDER=*********************** [ End: Import File/Folder(s) ] ***********************************
+SolutionImportHandler.INFO_START_IMPORT_DATASOURCE=****************************[ Start: Import DataSource(s) ] **********************************
+SolutionImportHandler.INFO_COUNT_DATASOURCE=Found [ {0} ] DataSource(s) to import
+SolutionImportHandler.ERROR_IMPORTING_JDBC_DATASOURCE=Error importing JDBC DataSource [ {0} ]. Cause [ {1} ]
+SolutionImportHandler.INFO_END_IMPORT_DATASOURCE=****************************[ End: Import DataSource(s) ] **********************************
+SolutionImportHandler.INFO_SUCCESSFUL_DATASOURCE_IMPORT_COUNT=Successfully imported [ {0} ] out of [ {1} ]
+SolutionImportHandler.INFO_START_IMPORT_SCHEDULE=*********************** [ Start: Import Schedule(s) ] **************************************
+SolutionImportHandler.INFO_END_IMPORT_SCHEDULE=*********************** [ End: Import Schedule(s) ] **************************************
+SolutionImportHandler.INFO_COUNT_SCHEDULUE=Found {0} schedules in the manifest
+SolutionImportHandler.ERROR_IMPORTING_SCHEDULE=Unable to import schedule [ {0} ] cause [ {1} ]
+SolutionImportHandler.INFO_SUCCESSFUL_SCHEDULE_IMPORT_COUNT=Successfully imported [ {0} ] out of [ {1} ]
+SolutionImportHandler.INFO_START_IMPORT_METASTORE=********************** [ Start: Import MetaStore ] ******************************************
+SolutionImportHandler.INFO_END_IMPORT_METASTORE=********************** [ End: Import MetaStore ] ******************************************
+SolutionImportHandler.INFO_SUCCESSFUL_IMPORT_METASTORE=Successfully imported metastore
+SolutionImportHandler.INFO_START_IMPORT_USER=******************************* [Start Import User(s)] ***************************
+SolutionImportHandler.INFO_COUNT_USER=Found [ {0} ] users to import
+SolutionImportHandler.INFO_SUCCESSFUL_USER_COUNT=Successfully imported [ {0} ] out of [ {1} ] user(s)
+SolutionImportHandler.INFO_END_IMPORT_USER=****************************** [End Import User(s)] ***************************
+SolutionImportHandler.INFO_START_IMPORT_USER_SETTING=************************[ Start: Import user-specific settings ] *************************
+SolutionImportHandler.INFO_COUNT_USER_SETTING=Found [ {0} ] user-specific settings for user [ {1} ]
+SolutionImportHandler.INFO_SUCCESSFUL_USER_SETTING_IMPORT_COUNT=Successfully imported [ {0} ] out of [ {1} ] user-specific settings
+SolutionImportHandler.INFO_SUCCESSFUL_ROLE_COUNT=Successfully imported [ {0} ] out of [ {1} ] roles
+SolutionImportHandler.INFO_START_IMPORT_ROLE=*********************** [ Start: Import Role(s) ] ***************************************
+SolutionImportHandler.INFO_END_IMPORT_ROLE=*********************** [ End: Import Role(s) ] ***************************************
+SolutionImportHandler.INFO_START_IMPORT_METADATA_DATASOURCE=*********************** [ Start: Import Metadata DataSource(s) ] *****************************
+SolutionImportHandler.INFO_COUNT_METADATA_DATASOURCE=Found [ {0} ] metadata models to import
+SolutionImportHandler.INFO_SUCCESSFUL_METDATA_DATASOURCE_COUNT=Successfully imported [ {0} ] out of [ {1} ] metadata models
+SolutionImportHandler.INFO_END_IMPORT_METADATA_DATASOURCE=*********************** [ End: Import Metadata DataSource(s) ] *****************************
+SolutionImportHandler.INFO_START_IMPORT_MONDRIAN_DATASOURCE=*********************** [ Start: Import Mondrian DataSource(s) ] *****************************
+SolutionImportHandler.INFO_COUNT_MONDRIAN_DATASOURCE=Found [ {0} ] mondrian schemas to import
+SolutionImportHandler.INFO_SUCCESSFUL_MONDRIAN_DATASOURCE_IMPORT_COUNT=Successfully imported [ {0} ] out of [ {1} ] DataSource(s)
+SolutionImportHandler.INFO_END_IMPORT_MONDRIAN_DATASOURCE=*********************** [ End: Import Mondrian DataSource(s) ] *****************************
+SolutionImportHandler.INFO_START_IMPORT_REPOSITORY_OBJECT=****************************** [ Start: Import Repository File/Folder(s) ] **********************************
+SolutionImportHandler.INFO_END_IMPORT_REPOSITORY_OBJECT=****************************** [ End: Import Repository File/Folder(s) ] **********************************
+SolutionImportHandler.ERROR_NOT_=This is not a valid file name. Failing the import
PentahoPlatformExporter.UNSUPPORTED_JobTrigger=Unsupported JobTrigger encountered during export, skipping it: {0}
PentahoPlatformExporter.ERROR_EXPORTING_JOBS=There was an error while exporting scheduled jobs
ScheduleExportUtil.JOB_MUST_NOT_BE_NULL=Job can not be null
-
+PentahoPlatformExporter.INFO_START_EXPORT_JDBC_DATASOURCE=*************************** [ Start: Export JDBC Datasource(s) ] *******************************
+PentahoPlatformExporter.INFO_COUNT_JDBC_DATASOURCE_TO_EXPORT=Found [ {0} ] JDBC DataSource(s) to export
+PentahoPlatformExporter.INFO_SUCCESSFUL_JDBC_DATASOURCE_EXPORT_COUNT=Successfully exported [ {0} ] out of [ {1} ] JDBC DataSource(s)
+PentahoPlatformExporter.INFO_END_EXPORT_JDBC_DATASOURCE=*************************** [ End: Export JDBC Datasource(s) ] *******************************
+PentahoPlatformExporter.INFO_START_EXPORT_REPOSITORY_OBJECT=********************************* [ Start: Export repository File(s)/Folder(s) ] *******************************
+PentahoPlatformExporter.INFO_END_EXPORT_REPOSITORY_OBJECT=********************************* [ End: Export repository File(s)/Folder(s) ] *******************************
+PentahoPlatformExporter.ERROR_EXPORT_REPOSITORY_OBJECT=Error while exporting of a file [ {0} ]
+PentahoPlatformExporter.INFO_START_EXPORT_USER=********************************* [ Start: Export User(s) ] *******************************
+PentahoPlatformExporter.INFO_COUNT_USER_TO_EXPORT=Found [ {0} ] User(s) to export
+PentahoPlatformExporter.INFO_SUCCESSFUL_USER_EXPORT_COUNT=Successfully exported [ {0} ] out of [ {1} ] User(s)
+PentahoPlatformExporter.INFO_END_EXPORT_USER=********************************* [ End: Export User(s) ] *******************************
+PentahoPlatformExporter.INFO_START_EXPORT_ROLE=********************************* [ Start: Export Role(s) ] *******************************
+PentahoPlatformExporter.INFO_COUNT_ROLE_TO_EXPORT=Found [ {0} ] Role(s) to export
+PentahoPlatformExporter.INFO_SUCCESSFUL_ROLE_EXPORT_COUNT=Successfully exported [ {0} ] out of [ {1} ] Role(s)
+PentahoPlatformExporter.INFO_END_EXPORT_ROLE=********************************* [ End: Export Role(s) ] *******************************
+PentahoPlatformExporter.INFO_START_EXPORT_METASTORE=********************************* [ Start: Export Metastore ] *******************************
+PentahoPlatformExporter.INFO_END_EXPORT_METASTORE=********************************* [ End: Export Metastore ] *******************************
+PentahoPlatformExporter.INFO_SUCCESSFUL_EXPORT_METASTORE=Finished adding the metastore to the export manifest
+PentahoPlatformExporter.INFO_START_EXPORT_MONDRIAN_DATASOURCE=********************************* [ Start: Export Mondrian datasource(s) ] *******************************
+PentahoPlatformExporter.INFO_COUNT_MONDRIAN_DATASOURCE_TO_EXPORT=Found [ {0} ] Mondrian DataSource(s) to export
+PentahoPlatformExporter.INFO_SUCCESSFUL_MONDRIAN_DATASOURCE_EXPORT_COUNT=Successfully exported [ {0} ] out of [ {1} ] Mondrian DataSource(s)
+PentahoPlatformExporter.ERROR_MONDRIAN_DATASOURCE_EXPORT=Error exporting Mondrian DataSource. Cause [ {0} ]
+PentahoPlatformExporter.INFO_END_EXPORT_MONDRIAN_DATASOURCE=********************************* [ End: Export Mondrian datasource(s) ] *******************************
+PentahoPlatformExporter.INFO_START_EXPORT_METADATA=********************************* [ Start: Export Metadata datasource(s) ] *******************************
+PentahoPlatformExporter.INFO_COUNT_METADATA_DATASOURCE_TO_EXPORT=Found [ {0} ] Metadata DataSource(s) to export
+PentahoPlatformExporter.INFO_SUCCESSFUL_METADATA_DATASOURCE_EXPORT_COUNT=Successfully exported [ {0} ] out of [ {1} ] Metadata DataSource(s)
+PentahoPlatformExporter.ERROR_METADATA_DATASOURCE_EXPORT=Error exporting Metadata DataSource. Cause [ {0} ]
+PentahoPlatformExporter.INFO_END_EXPORT_METADATA=*********************** [ End: Export Metadata datasource(s) ] *************************
+PentahoPlatformExporter.ERROR_EXPORT_FILE_CONTENT=Error while exporting file content. Cause {0}
+PentahoPlatformExporter.ERROR_GENERATING_EXPORT_XML=Error generating export XML
+PentahoPlatformExporter.INFO_END_EXPORT_PROCESS=End: Export process
+PentahoPlatformExporter.INFO_START_EXPORT_PROCESS=Start: Export process
ERROR.Encrypting_Password=Could not encrypt password for user {0}
ERROR.Restoring_Password=Could restore password for user {0}. Setting a temporary password.
ERROR.CreatingUser=Could not create user {0}.
diff --git a/extensions/src/main/resources/org/pentaho/platform/web/http/messages/messages.properties b/extensions/src/main/resources/org/pentaho/platform/web/http/messages/messages.properties
index e14d2b9bc4d..372fc237ffc 100644
--- a/extensions/src/main/resources/org/pentaho/platform/web/http/messages/messages.properties
+++ b/extensions/src/main/resources/org/pentaho/platform/web/http/messages/messages.properties
@@ -152,4 +152,5 @@ FileResource.INCORRECT_EXTENSION={0} has incorrect extension.
FileResource.HOME_FOLDER_DISPLAY_TITLE=Home
FileResource.PUBLIC_FOLDER_DISPLAY_TITLE=Public
-RepositoryResource.USER_NOT_AUTHORIZED_TO_EDIT=User is not authorized to edit the content. Please contact your system administrator.
\ No newline at end of file
+RepositoryResource.USER_NOT_AUTHORIZED_TO_EDIT=User is not authorized to edit the content. Please contact your system administrator.
+FileService.ERROR_INVALID_LOG_FILENAME=Invalid log file name {0}
\ No newline at end of file
diff --git a/extensions/src/test/java/org/pentaho/platform/plugin/services/exporter/PentahoPlatformExporterTest.java b/extensions/src/test/java/org/pentaho/platform/plugin/services/exporter/PentahoPlatformExporterTest.java
index 65ca89a6fbd..dc8c88f3e1b 100644
--- a/extensions/src/test/java/org/pentaho/platform/plugin/services/exporter/PentahoPlatformExporterTest.java
+++ b/extensions/src/test/java/org/pentaho/platform/plugin/services/exporter/PentahoPlatformExporterTest.java
@@ -13,6 +13,7 @@
package org.pentaho.platform.plugin.services.exporter;
+import org.apache.logging.log4j.Level;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -29,13 +30,12 @@
import org.pentaho.platform.api.scheduler2.IScheduler;
import org.pentaho.platform.api.usersettings.IAnyUserSettingService;
import org.pentaho.platform.api.usersettings.pojo.IUserSetting;
+import org.pentaho.platform.api.util.IRepositoryExportLogger;
import org.pentaho.platform.engine.core.system.PentahoSessionHolder;
import org.pentaho.platform.engine.core.system.PentahoSystem;
import org.pentaho.platform.plugin.action.mondrian.catalog.IMondrianCatalogService;
import org.pentaho.platform.plugin.action.mondrian.catalog.MondrianCatalog;
-import org.pentaho.platform.plugin.services.importexport.ExportManifestUserSetting;
-import org.pentaho.platform.plugin.services.importexport.RoleExport;
-import org.pentaho.platform.plugin.services.importexport.UserExport;
+import org.pentaho.platform.plugin.services.importexport.*;
import org.pentaho.platform.plugin.services.importexport.exportManifest.ExportManifest;
import org.pentaho.platform.plugin.services.importexport.exportManifest.bindings.DatabaseConnection;
import org.pentaho.platform.plugin.services.importexport.exportManifest.bindings.ExportManifestMetaStore;
@@ -48,6 +48,7 @@
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
+import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.util.ArrayList;
@@ -98,6 +99,9 @@ public void setUp() throws Exception {
doReturn( "session name" ).when( session ).getName();
exporter = new PentahoPlatformExporter( repo );
+ // mock logger to prevent npe
+ IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+ exporter.setRepositoryExportLogger( exportLogger );
}
@After
@@ -154,7 +158,13 @@ public void testExportUsersAndRoles() {
UserDetails userDetails = new User( "testUser", "testPassword", true, true, true, true, authList );
when( userDetailsService.loadUserByUsername( nullable( String.class ) ) ).thenReturn( userDetails );
+ // mock logger to prevent npe
+ IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+ exporter.setRepositoryExportLogger( exportLogger );
exporter.exportUsersAndRoles();
+ exportLogger.endJob();
verify( manifest ).addUserExport( userCaptor.capture() );
verify( manifest ).addRoleExport( roleCaptor.capture() );
@@ -172,8 +182,13 @@ public void testExportUsersAndRoles() {
public void testExportMetadata_noModels() throws Exception {
IMetadataDomainRepository mdr = mock( IMetadataDomainRepository.class );
exporter.setMetadataDomainRepository( mdr );
-
+ // provide an export logger to prevent an NPE
+ IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+ exporter.setRepositoryExportLogger( exportLogger );
exporter.exportMetadataModels();
+ exportLogger.endJob();
assertEquals( 0, exporter.getExportManifest().getMetadataList().size() );
}
@@ -194,8 +209,13 @@ public void testExportMetadata() throws Exception {
inputMap.put( "test1", is );
doReturn( inputMap ).when( exporterSpy ).getDomainFilesData( "test1" );
-
+ // provide an export logger to prevent an NPE
+ IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+ exporterSpy.setRepositoryExportLogger( exportLogger );
exporterSpy.exportMetadataModels();
+ exportLogger.endJob();
assertEquals( 1, exporterSpy.getExportManifest().getMetadataList().size() );
assertEquals( "test1", exporterSpy.getExportManifest().getMetadataList().get( 0 ).getDomainId() );
@@ -217,8 +237,13 @@ public void testExportDatasources() throws Exception {
datasources.add( icon );
when( svc.getDatasources() ).thenReturn( datasources );
-
+ // provide an export logger to prevent an NPE
+ IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+ exporterSpy.setRepositoryExportLogger( exportLogger );
exporterSpy.exportDatasources();
+ exportLogger.endJob();
assertEquals( 1, exporterSpy.getExportManifest().getDatasourceList().size() );
DatabaseConnection exportedDatabaseConnection = exporterSpy.getExportManifest().getDatasourceList().get( 0 );
@@ -244,9 +269,13 @@ public void testParseXmlaEnabled() throws Exception {
public void testExportMondrianSchemas_noCatalogs() throws Exception {
PentahoSystem.registerObject( mondrianCatalogService );
exporterSpy.setMondrianCatalogRepositoryHelper( mondrianCatalogRepositoryHelper );
-
+ // provide an export logger to prevent an NPE
+ IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+ exporterSpy.setRepositoryExportLogger( exportLogger );
exporterSpy.exportMondrianSchemas();
-
+ exportLogger.endJob();
verify( exportManifest, never() ).addMondrian( ArgumentMatchers.any( ExportManifestMondrian.class ) );
verify( mondrianCatalogRepositoryHelper, never() ).getModrianSchemaFiles( nullable( String.class ) );
}
@@ -316,8 +345,13 @@ private void executeExportMondrianSchemasForDataSourceInfo( String catalogName,
inputMap.put( catalogName, is );
when( mondrianCatalogRepositoryHelper.getModrianSchemaFiles( catalogName ) ).thenReturn( inputMap );
exporterSpy.zos = mock( ZipOutputStream.class );
-
+ // provide an export logger to prevent an NPE
+ IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+ exporterSpy.setRepositoryExportLogger( exportLogger );
exporterSpy.exportMondrianSchemas();
+ exportLogger.endJob();
}
@Test
@@ -327,8 +361,13 @@ public void testExportMetaStore() throws Exception {
exporterSpy.setRepoMetaStore( metastore );
ExportManifest manifest = mock( ExportManifest.class );
exporterSpy.setExportManifest( manifest );
-
+ // provide an export logger to prevent an NPE
+ IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+ exporterSpy.setRepositoryExportLogger( exportLogger );
exporterSpy.exportMetastore();
+ exportLogger.endJob();
verify( exporterSpy.zos ).putNextEntry( ArgumentMatchers.any( ZipEntry.class ) );
verify( manifest ).setMetaStore( ArgumentMatchers.any( ExportManifestMetaStore.class ) );
}
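Every test above repeats the same bootstrap (new Log4JRepositoryExportLogger, a ByteArrayOutputStream, startJob, setRepositoryExportLogger) and a trailing endJob. A sketch of how that boilerplate could be hoisted into a small test helper, reusing only the constructor and the startJob/endJob calls already exercised in this diff; the helper class and method names are hypothetical, and the package of the logger classes is taken from the wildcard import above:

import java.io.ByteArrayOutputStream;
import org.apache.logging.log4j.Level;
import org.pentaho.platform.api.util.IRepositoryExportLogger;
import org.pentaho.platform.plugin.services.importexport.Log4JRepositoryExportLogger;
import org.pentaho.platform.plugin.services.importexport.RepositoryTextLayout;

// Hypothetical test utility, not part of this patch.
final class ExportLoggerTestSupport {

  private ExportLoggerTestSupport() {
  }

  // Returns a logger that writes to an in-memory buffer and already has an export job started,
  // mirroring the inline setup in the tests above; callers still invoke endJob() after the export.
  static IRepositoryExportLogger startInMemoryExportLogger() {
    IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
    exportLogger.startJob( new ByteArrayOutputStream(), Level.INFO, new RepositoryTextLayout( Level.INFO ) );
    return exportLogger;
  }
}

A test body would then shrink to exportLogger = ExportLoggerTestSupport.startInMemoryExportLogger(); exporter.setRepositoryExportLogger( exportLogger ); ... exportLogger.endJob();.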
diff --git a/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/BaseExportProcessorTest.java b/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/BaseExportProcessorTest.java
index 896ae97029b..78310edc0dc 100644
--- a/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/BaseExportProcessorTest.java
+++ b/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/BaseExportProcessorTest.java
@@ -33,6 +33,7 @@
import org.junit.Before;
import org.junit.Test;
import org.pentaho.platform.api.repository2.unified.RepositoryFile;
+import org.pentaho.platform.api.importexport.ExportException;
import java.io.File;
import java.io.IOException;
diff --git a/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessorTest.java b/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessorTest.java
index ceeb44abef3..4f90a02fb79 100644
--- a/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessorTest.java
+++ b/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessorTest.java
@@ -23,11 +23,7 @@
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.Serializable;
+import java.io.*;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@@ -40,6 +36,7 @@
import java.util.zip.ZipInputStream;
import org.apache.commons.io.FileUtils;
+import org.apache.logging.log4j.Level;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -63,6 +60,7 @@
import org.pentaho.platform.api.repository2.unified.RepositoryFileSid.Type;
import org.pentaho.platform.api.repository2.unified.RepositoryRequest;
import org.pentaho.platform.api.repository2.unified.data.simple.SimpleRepositoryFileData;
+import org.pentaho.platform.api.util.IRepositoryExportLogger;
import org.pentaho.platform.core.mimetype.MimeType;
import org.pentaho.platform.engine.core.system.PentahoSessionHolder;
import org.pentaho.platform.engine.core.system.PentahoSystem;
@@ -300,8 +298,13 @@ public void testPerformExport_withoutManifest() throws Exception {
RepositoryFile expFolder = repo.getFile( expFolderPath );
assertNotNull( expFolder );
+ // provide an export logger to prevent an NPE
+ IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+ zipNoMF.setRepositoryExportLogger( exportLogger );
File result = zipNoMF.performExport( repo.getFile( expFolderPath ) );
-
+ exportLogger.endJob();
Set zipEntriesFiles = extractZipEntries( result );
final String[] expectedEntries =
new String[] { "two words/eval (+)%.prpt", "two words/eval (+)%.prpt_en.locale", "two words/index_en.locale" };
@@ -321,7 +324,12 @@ public void testPerformExport_withManifest() throws Exception {
RepositoryFile expFolder = repo.getFile( expFolderPath );
assertNotNull( expFolder );
-
+ // provide an export logger to prevent an NPE
+ IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+ zipMF.setRepositoryExportLogger( exportLogger );
File result = zipMF.performExport( repo.getFile( expFolderPath ) );
+ exportLogger.endJob();
Set zipEntriesFiles = extractZipEntries( result );