diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ExportException.java b/api/src/main/java/org/pentaho/platform/api/importexport/ExportException.java similarity index 94% rename from extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ExportException.java rename to api/src/main/java/org/pentaho/platform/api/importexport/ExportException.java index f822c06b1b8..caf9575044c 100644 --- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ExportException.java +++ b/api/src/main/java/org/pentaho/platform/api/importexport/ExportException.java @@ -11,7 +11,7 @@ ******************************************************************************/ -package org.pentaho.platform.plugin.services.importexport; +package org.pentaho.platform.api.importexport; /* * This program is free software; you can redistribute it and/or modify it under the @@ -34,7 +34,7 @@ * Exception that indicates an error during import processing User: dkincade */ public class ExportException extends Exception { - public ExportException() { + public ExportException( ) { } public ExportException( final String message ) { diff --git a/api/src/main/java/org/pentaho/platform/api/util/IExportHelper.java b/api/src/main/java/org/pentaho/platform/api/importexport/IExportHelper.java similarity index 74% rename from api/src/main/java/org/pentaho/platform/api/util/IExportHelper.java rename to api/src/main/java/org/pentaho/platform/api/importexport/IExportHelper.java index 192edcf2ab7..3424ca8d6d8 100644 --- a/api/src/main/java/org/pentaho/platform/api/util/IExportHelper.java +++ b/api/src/main/java/org/pentaho/platform/api/importexport/IExportHelper.java @@ -11,8 +11,11 @@ ******************************************************************************/ -package org.pentaho.platform.api.util; +package org.pentaho.platform.api.importexport; + + public interface IExportHelper { - public void doExport( Object exportArg ); + public void doExport( Object exportArg ) throws ExportException; + public String getName(); } diff --git a/api/src/main/java/org/pentaho/platform/api/util/IPentahoPlatformExporter.java b/api/src/main/java/org/pentaho/platform/api/util/IPentahoPlatformExporter.java index a03dc838056..2aa854e2c2f 100644 --- a/api/src/main/java/org/pentaho/platform/api/util/IPentahoPlatformExporter.java +++ b/api/src/main/java/org/pentaho/platform/api/util/IPentahoPlatformExporter.java @@ -13,6 +13,10 @@ package org.pentaho.platform.api.util; +import org.pentaho.platform.api.importexport.IExportHelper; + public interface IPentahoPlatformExporter { void addExportHelper( IExportHelper helper ); + IRepositoryExportLogger getRepositoryExportLogger(); + } diff --git a/api/src/main/java/org/pentaho/platform/api/util/IRepositoryExportLogger.java b/api/src/main/java/org/pentaho/platform/api/util/IRepositoryExportLogger.java new file mode 100644 index 00000000000..92a0c18d014 --- /dev/null +++ b/api/src/main/java/org/pentaho/platform/api/util/IRepositoryExportLogger.java @@ -0,0 +1,90 @@ +/*! ****************************************************************************** + * + * Pentaho + * + * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com + * + * Use of this software is governed by the Business Source License included + * in the LICENSE.TXT file. 
+ *
+ * Change Date: 2029-07-20
+ ******************************************************************************/
+
+package org.pentaho.platform.api.util;
+
+import org.apache.commons.logging.Log;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.core.StringLayout;
+
+import java.io.OutputStream;
+
+public interface IRepositoryExportLogger extends Log {
+
+  /**
+   * Initiates an export job. Each call creates a new log associated with the current thread.
+   *
+   * @param outputStream
+   *          Will receive the HTML content of the log.
+   * @param logLevel
+   *          The log level to be logged.
+   * @param layout
+   *          The layout to be used.
+   */
+  void startJob( OutputStream outputStream, Level logLevel, StringLayout layout );
+
+  /**
+   * Initiates an export job. Each call creates a new log associated with the current thread.
+   *
+   * @param outputStream
+   *          Will receive the HTML content of the log.
+   * @param logLevel
+   *          The log level to be logged.
+   */
+  void startJob( OutputStream outputStream, Level logLevel );
+
+  /**
+   * Makes an "End Export Job" log entry and releases memory associated with this log.
+   */
+  void endJob();
+
+  /**
+   * Log informational data. Should be called when starting a new file and when finishing that file.
+   *
+   * @param s
+   *          The information message to be logged.
+   */
+  void info( String s );
+
+  /**
+   * Log an error.
+   *
+   * @param s
+   *          The error message to be logged.
+   */
+  void error( String s );
+
+  /**
+   * Log debug information.
+   *
+   * @param s
+   *          The debug message to be logged.
+   */
+  void debug( String s );
+
+  /**
+   * Log an exception as an error.
+   *
+   * @param e
+   *          The exception to be logged.
+   */
+  void error( Exception e );
+
+  /**
+   * Allows a class to check if an export logger has been instantiated for the current thread.
+   *
+   * @return true if the logger is present.
+ */ + boolean hasLogger(); +} diff --git a/assemblies/pentaho-solutions/src/main/resources/pentaho-solutions/system/importExport.xml b/assemblies/pentaho-solutions/src/main/resources/pentaho-solutions/system/importExport.xml index 0a3195e3ff8..eca37e5f509 100644 --- a/assemblies/pentaho-solutions/src/main/resources/pentaho-solutions/system/importExport.xml +++ b/assemblies/pentaho-solutions/src/main/resources/pentaho-solutions/system/importExport.xml @@ -209,6 +209,9 @@ + + + diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/exporter/PentahoPlatformExporter.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/exporter/PentahoPlatformExporter.java index 476e8f3d6da..3bfd0ba83b0 100644 --- a/extensions/src/main/java/org/pentaho/platform/plugin/services/exporter/PentahoPlatformExporter.java +++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/exporter/PentahoPlatformExporter.java @@ -27,14 +27,12 @@ import org.pentaho.platform.api.repository.datasource.IDatasourceMgmtService; import org.pentaho.platform.api.repository2.unified.IUnifiedRepository; import org.pentaho.platform.api.repository2.unified.RepositoryFile; -import org.pentaho.platform.api.scheduler2.IJobScheduleRequest; import org.pentaho.platform.api.scheduler2.IScheduler; -import org.pentaho.platform.api.scheduler2.IJob; -import org.pentaho.platform.api.scheduler2.SchedulerException; import org.pentaho.platform.api.usersettings.IAnyUserSettingService; import org.pentaho.platform.api.usersettings.IUserSettingService; import org.pentaho.platform.api.usersettings.pojo.IUserSetting; -import org.pentaho.platform.api.util.IExportHelper; +import org.pentaho.platform.api.importexport.ExportException; +import org.pentaho.platform.api.importexport.IExportHelper; import org.pentaho.platform.api.util.IPentahoPlatformExporter; import org.pentaho.platform.engine.core.system.PentahoSystem; import org.pentaho.platform.engine.core.system.TenantUtils; @@ -42,7 +40,6 @@ import org.pentaho.platform.plugin.action.mondrian.catalog.MondrianCatalog; import org.pentaho.platform.plugin.services.importexport.DatabaseConnectionConverter; import org.pentaho.platform.plugin.services.importexport.DefaultExportHandler; -import org.pentaho.platform.plugin.services.importexport.ExportException; import org.pentaho.platform.plugin.services.importexport.ExportFileNameEncoder; import org.pentaho.platform.plugin.services.importexport.ExportManifestUserSetting; import org.pentaho.platform.plugin.services.importexport.RoleExport; @@ -100,15 +97,15 @@ public class PentahoPlatformExporter extends ZipExportProcessor implements IPent private IMetaStore metastore; private IUserSettingService userSettingService; - private List exportHelpers = new ArrayList<>(); + private List exportHelpers = new ArrayList<>( ); public PentahoPlatformExporter( IUnifiedRepository repository ) { super( ROOT, repository, true ); setUnifiedRepository( repository ); - addExportHandler( new DefaultExportHandler() ); + addExportHandler( new DefaultExportHandler( ) ); } - public File performExport() throws ExportException, IOException { + public File performExport( ) throws ExportException, IOException { return this.performExport( null ); } @@ -116,9 +113,13 @@ public void addExportHelper( IExportHelper helper ) { exportHelpers.add( helper ); } - public void runExportHelpers() { + public void runExportHelpers( ) { for ( IExportHelper helper : exportHelpers ) { - helper.doExport( this ); + try { + helper.doExport( this ); + } catch ( ExportException 
exportException ) {
+        getRepositoryExportLogger( ).error( "Error exporting component [ " + helper.getName( ) + " ] Cause [ " + exportException.getLocalizedMessage( ) + " ]" );
+      }
     }
   }

@@ -130,22 +131,28 @@ public void runExportHelpers() {
   @Override
   public File performExport( RepositoryFile exportRepositoryFile ) throws ExportException, IOException {
+    getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_START_EXPORT_PROCESS" ) );
     // always export root
-    exportRepositoryFile = getUnifiedRepository().getFile( ROOT );
+    exportRepositoryFile = getUnifiedRepository( ).getFile( ROOT );

     // create temp file
     File exportFile = File.createTempFile( EXPORT_TEMP_FILENAME_PREFIX, EXPORT_TEMP_FILENAME_EXT );
-    exportFile.deleteOnExit();
+    exportFile.deleteOnExit( );

     zos = new ZipOutputStream( new FileOutputStream( exportFile ) );

-    exportFileContent( exportRepositoryFile );
-    exportDatasources();
-    exportMondrianSchemas();
-    exportMetadataModels();
-    runExportHelpers();
-    exportUsersAndRoles();
-    exportMetastore();
+    try {
+      exportFileContent( exportRepositoryFile );
+    } catch ( ExportException | IOException exception ) {
+      getRepositoryExportLogger( ).error( Messages.getInstance( ).getString( "PentahoPlatformExporter.ERROR_EXPORT_FILE_CONTENT", exception.getLocalizedMessage( ) ) );
+    }
+
+    exportDatasources( );
+    exportMondrianSchemas( );
+    exportMetadataModels( );
+    runExportHelpers( );
+    exportUsersAndRoles( );
+    exportMetastore( );

     if ( this.withManifest ) {
       // write manifest to zip output stream
@@ -154,49 +161,71 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx
       // pass output stream to manifest class for writing
       try {
-        getExportManifest().toXml( zos );
+        getExportManifest( ).toXml( zos );
       } catch ( Exception e ) {
         // todo: add to messages.properties
-        log.error( "Error generating export XML" );
+        getRepositoryExportLogger( ).error( Messages.getInstance( ).getString( "PentahoPlatformExporter.ERROR_GENERATING_EXPORT_XML" ) );
       }
-      zos.closeEntry();
+      zos.closeEntry( );
     }
-    zos.close();
+    zos.close( );

     // clean up
-    initManifest();
+    initManifest( );
     zos = null;
+    getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_END_EXPORT_PROCESS" ) );
+
     return exportFile;
   }

-  protected void exportDatasources() {
-    log.debug( "export datasources" );
+  protected void exportDatasources( ) {
+    getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_START_EXPORT_JDBC_DATASOURCE" ) );
     // get all connection to export
+    int successfulExportJDBCDSCount = 0;
+    int databaseConnectionsSize = 0;
     try {
-      for ( IDatabaseConnection datasource : getDatasourceMgmtService().getDatasources() ) {
+      List<IDatabaseConnection> databaseConnections = getDatasourceMgmtService( ).getDatasources( );
+      if ( databaseConnections != null ) {
+        databaseConnectionsSize = databaseConnections.size( );
+        getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_COUNT_JDBC_DATASOURCE_TO_EXPORT", databaseConnectionsSize ) );
+      }
+      for ( IDatabaseConnection datasource : databaseConnections ) {
         if ( datasource instanceof org.pentaho.database.model.DatabaseConnection ) {
-          getExportManifest().addDatasource( DatabaseConnectionConverter.model2export( datasource ) );
+          getRepositoryExportLogger( ).debug( "Starting to export datasource [ " + datasource.getName( ) + " ]" );
+          getExportManifest( ).addDatasource( DatabaseConnectionConverter.model2export( datasource ) );
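+          // Only concrete org.pentaho.database.model.DatabaseConnection instances are written to the manifest;
+          // any other IDatabaseConnection implementation fails the instanceof check above and is neither exported nor counted.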
+ getRepositoryExportLogger( ).debug( "Finished exporting datasource [ " + datasource.getName( ) + " ]" ); + successfulExportJDBCDSCount++; } } } catch ( DatasourceMgmtServiceException e ) { - log.warn( e.getMessage(), e ); + getRepositoryExportLogger( ).warn( "Unable to retrieve JDBC datasource(s). Cause [" + e.getMessage( ) + " ]" ); + getRepositoryExportLogger( ).debug( "Unable to retrieve JDBC datasource(s). Cause [" + e.getMessage( ) + " ]", e ); } - } + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_SUCCESSFUL_JDBC_DATASOURCE_EXPORT_COUNT", successfulExportJDBCDSCount, databaseConnectionsSize ) ); - protected void exportMetadataModels() { - log.debug( "export metadata models" ); + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_END_EXPORT_JDBC_DATASOURCE" ) ); + } + protected void exportMetadataModels( ) { + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_START_EXPORT_METADATA" ) ); + int successfulExportMetadataDSCount = 0; + int metadataDSSize = 0; // get all of the metadata models - Set domainIds = getMetadataDomainRepository().getDomainIds(); + Set domainIds = getMetadataDomainRepository( ).getDomainIds( ); + if ( domainIds != null ) { + metadataDSSize = domainIds.size( ); + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_COUNT_METADATA_DATASOURCE_TO_EXPORT", metadataDSSize ) ); + } for ( String domainId : domainIds ) { // get all of the files for this model Map domainFilesData = getDomainFilesData( domainId ); - - for ( String fileName : domainFilesData.keySet() ) { + getRepositoryExportLogger( ).debug( "Starting to export metadata model [ " + domainId + " ]" ); + for ( String fileName : domainFilesData.keySet( ) ) { + getRepositoryExportLogger( ).trace( "Adding metadata file [ " + fileName + " ]" ); // write the file to the zip String metadataFilePath = METADATA_PATH_IN_ZIP + fileName; if ( !metadataFilePath.endsWith( ".xmi" ) ) { @@ -214,40 +243,51 @@ protected void exportMetadataModels() { IOUtils.copy( inputStream, zos ); // add the info to the exportManifest - ExportManifestMetadata metadata = new ExportManifestMetadata(); + ExportManifestMetadata metadata = new ExportManifestMetadata( ); metadata.setDomainId( domainId ); metadata.setFile( metadataFilePath ); - getExportManifest().addMetadata( metadata ); - + getExportManifest( ).addMetadata( metadata ); + successfulExportMetadataDSCount++; } catch ( IOException e ) { - log.warn( e.getMessage(), e ); + getRepositoryExportLogger( ).warn( Messages.getInstance( ).getString( "PentahoPlatformExporter.ERROR_METADATA_DATASOURCE_EXPORT", e.getMessage( ) ), e ); } finally { IOUtils.closeQuietly( inputStream ); try { - zos.closeEntry(); + zos.closeEntry( ); } catch ( IOException e ) { // can't close the entry of input stream } } + getRepositoryExportLogger( ).trace( "Successfully added metadata file [ " + fileName + " ] to the manifest" ); } + getRepositoryExportLogger( ).debug( "Successfully exported metadata model [ " + domainId + " ]" ); } - } + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_SUCCESSFUL_METADATA_DATASOURCE_EXPORT_COUNT", successfulExportMetadataDSCount, metadataDSSize ) ); - protected void exportMondrianSchemas() { - log.debug( "export mondrian schemas" ); + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( 
"PentahoPlatformExporter.INFO_END_EXPORT_METADATA" ) ); + } + protected void exportMondrianSchemas( ) { + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_START_EXPORT_MONDRIAN_DATASOURCE" ) ); // Get the mondrian catalogs available in the repo - List catalogs = getMondrianCatalogService().listCatalogs( getSession(), false ); + int successfulExportMondrianDSCount = 0; + int mondrianDSSize = 0; + List catalogs = getMondrianCatalogService( ).listCatalogs( getSession( ), false ); + if ( catalogs != null ) { + mondrianDSSize = catalogs.size( ); + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_COUNT_MONDRIAN_DATASOURCE_TO_EXPORT", mondrianDSSize ) ); + } for ( MondrianCatalog catalog : catalogs ) { - + getRepositoryExportLogger( ).debug( "Starting to export mondrian datasource [ " + catalog.getName( ) + " ]" ); // get the files for this catalog - Map files = getMondrianCatalogRepositoryHelper().getModrianSchemaFiles( catalog.getName() ); + Map files = getMondrianCatalogRepositoryHelper( ).getModrianSchemaFiles( catalog.getName( ) ); - ExportManifestMondrian mondrian = new ExportManifestMondrian(); - for ( String fileName : files.keySet() ) { + ExportManifestMondrian mondrian = new ExportManifestMondrian( ); + for ( String fileName : files.keySet( ) ) { + getRepositoryExportLogger( ).trace( "Starting to add filename [ " + fileName + " ] with datasource [" + catalog.getName( ) + " ] to the export bundle" ); // write the file to the zip - String path = ANALYSIS_PATH_IN_ZIP + catalog.getName() + "/" + fileName; + String path = ANALYSIS_PATH_IN_ZIP + catalog.getName( ) + "/" + fileName; ZipEntry zipEntry = new ZipEntry( new ZipEntry( ExportFileNameEncoder.encodeZipPathName( path ) ) ); InputStream inputStream = files.get( fileName ); @@ -263,21 +303,21 @@ protected void exportMondrianSchemas() { mondrian.setAnnotationsFile( path ); } else { // must be a true mondrian model - mondrian.setCatalogName( catalog.getName() ); - boolean xmlaEnabled = parseXmlaEnabled( catalog.getDataSourceInfo() ); + mondrian.setCatalogName( catalog.getName( ) ); + boolean xmlaEnabled = parseXmlaEnabled( catalog.getDataSourceInfo( ) ); mondrian.setXmlaEnabled( xmlaEnabled ); mondrian.setFile( path ); - Parameters mondrianParameters = new Parameters(); + Parameters mondrianParameters = new Parameters( ); mondrianParameters.put( "Provider", "mondrian" ); //DataSource can be escaped - mondrianParameters.put( "DataSource", StringEscapeUtils.unescapeXml( catalog.getJndi() ) ); + mondrianParameters.put( "DataSource", StringEscapeUtils.unescapeXml( catalog.getJndi( ) ) ); mondrianParameters.put( "EnableXmla", Boolean.toString( xmlaEnabled ) ); - StreamSupport.stream( catalog.getConnectProperties().spliterator(), false ) - .filter( p -> !mondrianParameters.containsKey( p.getKey() ) ) - //if value is escaped it should be unescaped to avoid double escape after export in xml file, because - //marshaller executes escaping as well - .forEach( p -> mondrianParameters.put( p.getKey(), StringEscapeUtils.unescapeXml( p.getValue() ) ) ); + StreamSupport.stream( catalog.getConnectProperties( ).spliterator( ), false ) + .filter( p -> !mondrianParameters.containsKey( p.getKey( ) ) ) + //if value is escaped it should be unescaped to avoid double escape after export in xml file, because + //marshaller executes escaping as well + .forEach( p -> mondrianParameters.put( p.getKey( ), StringEscapeUtils.unescapeXml( p.getValue( ) ) ) ); 
mondrian.setParameters( mondrianParameters ); } @@ -286,20 +326,25 @@ protected void exportMondrianSchemas() { zos.putNextEntry( zipEntry ); IOUtils.copy( inputStream, zos ); } catch ( IOException e ) { - log.warn( e.getMessage(), e ); + getRepositoryExportLogger( ).error( Messages.getInstance( ).getString( "PentahoPlatformExporter.ERROR_MONDRIAN_DATASOURCE_EXPORT" ) ); } finally { IOUtils.closeQuietly( inputStream ); try { - zos.closeEntry(); + zos.closeEntry( ); } catch ( IOException e ) { // can't close the entry of input stream } } } - if ( mondrian.getCatalogName() != null && mondrian.getFile() != null ) { - getExportManifest().addMondrian( mondrian ); + if ( mondrian.getCatalogName( ) != null && mondrian.getFile( ) != null ) { + getExportManifest( ).addMondrian( mondrian ); + getRepositoryExportLogger( ).debug( "Successfully added filename [ " + mondrian.getFile( ) + " ] with catalog [" + mondrian.getCatalogName( ) + " ] to the export bundle" ); + successfulExportMondrianDSCount++; } } + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_SUCCESSFUL_MONDRIAN_DATASOURCE_EXPORT_COUNT", successfulExportMondrianDSCount, mondrianDSSize ) ); + + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_END_EXPORT_MONDRIAN_DATASOURCE" ) ); } protected boolean parseXmlaEnabled( String dataSourceInfo ) { @@ -309,117 +354,158 @@ protected boolean parseXmlaEnabled( String dataSourceInfo ) { // if not specified, assume false return false; } - int end = dataSourceInfo.indexOf( ";", pos ) > -1 ? dataSourceInfo.indexOf( ";", pos ) : dataSourceInfo.length(); - String xmlaEnabled = dataSourceInfo.substring( pos + key.length(), end ); + int end = dataSourceInfo.indexOf( ";", pos ) > -1 ? dataSourceInfo.indexOf( ";", pos ) : dataSourceInfo.length( ); + String xmlaEnabled = dataSourceInfo.substring( pos + key.length( ), end ); return xmlaEnabled == null ? 
false : Boolean.parseBoolean( xmlaEnabled.replace( "\"", "" ) ); } - protected void exportUsersAndRoles() { - log.debug( "export users & roles" ); + protected void exportUsersAndRoles( ) { + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_START_EXPORT_USER" ) ); + int successfulExportUsers = 0; + int usersSize = 0; IUserRoleListService userRoleListService = PentahoSystem.get( IUserRoleListService.class ); UserDetailsService userDetailsService = PentahoSystem.get( UserDetailsService.class ); IRoleAuthorizationPolicyRoleBindingDao roleBindingDao = PentahoSystem.get( - IRoleAuthorizationPolicyRoleBindingDao.class ); - ITenant tenant = TenantUtils.getCurrentTenant(); + IRoleAuthorizationPolicyRoleBindingDao.class ); + ITenant tenant = TenantUtils.getCurrentTenant( ); // get the user settings for this user - IUserSettingService service = getUserSettingService(); + IUserSettingService service = getUserSettingService( ); //User Export List userList = userRoleListService.getAllUsers( tenant ); + if ( userList != null ) { + usersSize = userList.size( ); + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_COUNT_USER_TO_EXPORT", usersSize ) ); + } for ( String user : userList ) { - UserExport userExport = new UserExport(); + getRepositoryExportLogger( ).debug( "Starting export of user [ " + user + " ] " ); + UserExport userExport = new UserExport( ); userExport.setUsername( user ); - userExport.setPassword( userDetailsService.loadUserByUsername( user ).getPassword() ); + userExport.setPassword( userDetailsService.loadUserByUsername( user ).getPassword( ) ); for ( String role : userRoleListService.getRolesForUser( tenant, user ) ) { + getRepositoryExportLogger( ).trace( "user [ " + user + " ] has an associated role [ " + role + " ]" ); userExport.setRole( role ); } if ( service != null && service instanceof IAnyUserSettingService ) { + getRepositoryExportLogger( ).debug( "Starting export of user specific settings for user [ " + user + " ] " ); IAnyUserSettingService userSettings = (IAnyUserSettingService) service; List settings = userSettings.getUserSettings( user ); if ( settings != null ) { for ( IUserSetting setting : settings ) { + getRepositoryExportLogger( ).debug( "Adding user specific setting [ " + + setting.getSettingName( ) + " ] with value [ " + setting.getSettingValue( ) + " ] to backup" ); userExport.addUserSetting( new ExportManifestUserSetting( setting ) ); + getRepositoryExportLogger( ).debug( "Successfully added user specific setting [ " + + setting.getSettingName( ) + " ] with value [ " + setting.getSettingValue( ) + " ] to backup" ); } } + getRepositoryExportLogger( ).debug( "Finished export of user specific settings for user [ " + user + " ] " ); } - this.getExportManifest().addUserExport( userExport ); + this.getExportManifest( ).addUserExport( userExport ); + successfulExportUsers++; + getRepositoryExportLogger( ).debug( "Successful export of user [ " + user + " ] " ); } // export the global user settings if ( service != null ) { - List globalUserSettings = service.getGlobalUserSettings(); + getRepositoryExportLogger( ).debug( "Starting export of global user settings" ); + List globalUserSettings = service.getGlobalUserSettings( ); if ( globalUserSettings != null ) { for ( IUserSetting setting : globalUserSettings ) { - getExportManifest().addGlobalUserSetting( new ExportManifestUserSetting( setting ) ); + getExportManifest( ).addGlobalUserSetting( new ExportManifestUserSetting( 
setting ) );
        }
      }
+      getRepositoryExportLogger( ).debug( "Finished export of global user settings" );
    }
+    getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_SUCCESSFUL_USER_EXPORT_COUNT", successfulExportUsers, usersSize ) );
+
+    getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_END_EXPORT_USER" ) );
+
+    getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_START_EXPORT_ROLE" ) );
+    int successfulExportRoles = 0;
+    int rolesSize = 0;
     //RoleExport
-    List roles = userRoleListService.getAllRoles();
+    List<String> roles = userRoleListService.getAllRoles( );
+    if ( roles != null ) {
+      rolesSize = roles.size( );
+      getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_COUNT_ROLE_TO_EXPORT", rolesSize ) );
+    }
     for ( String role : roles ) {
-      RoleExport roleExport = new RoleExport();
+      getRepositoryExportLogger( ).debug( "Starting export of role [ " + role + " ] " );
+      RoleExport roleExport = new RoleExport( );
       roleExport.setRolename( role );
       roleExport.setPermission( roleBindingDao.getRoleBindingStruct( null ).bindingMap.get( role ) );
       exportManifest.addRoleExport( roleExport );
+      successfulExportRoles++;
+      getRepositoryExportLogger( ).debug( "Finished export of role [ " + role + " ] " );
     }
+    getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_SUCCESSFUL_ROLE_EXPORT_COUNT", successfulExportRoles, rolesSize ) );
+
+    getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_END_EXPORT_ROLE" ) );
  }

-  protected void exportMetastore() throws IOException {
-    log.debug( "export the metastore" );
+  protected void exportMetastore( ) throws IOException {
+    getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_START_EXPORT_METASTORE" ) );
     try {
+      getRepositoryExportLogger( ).debug( "Starting to copy the metastore to a temp location" );
       Path tempDirectory = Files.createTempDirectory( METASTORE );
-      IMetaStore xmlMetaStore = new XmlMetaStore( tempDirectory.toString() );
-      MetaStoreUtil.copy( getRepoMetaStore(), xmlMetaStore );
-
-      File zippedMetastore = Files.createTempFile( METASTORE, EXPORT_TEMP_FILENAME_EXT ).toFile();
+      IMetaStore xmlMetaStore = new XmlMetaStore( tempDirectory.toString( ) );
+      MetaStoreUtil.copy( getRepoMetaStore( ), xmlMetaStore );
+      getRepositoryExportLogger( ).debug( "Finished copying the metastore to a temp location" );
+      getRepositoryExportLogger( ).debug( "Starting to zip the metastore" );
+      File zippedMetastore = Files.createTempFile( METASTORE, EXPORT_TEMP_FILENAME_EXT ).toFile( );
       ZipOutputStream zipOutputStream = new ZipOutputStream( new FileOutputStream( zippedMetastore ) );
-      zipFolder( tempDirectory.toFile(), zipOutputStream, tempDirectory.toString() );
-      zipOutputStream.close();
-
+      zipFolder( tempDirectory.toFile( ), zipOutputStream, tempDirectory.toString( ) );
+      zipOutputStream.close( );
+      getRepositoryExportLogger( ).debug( "Finished zipping the metastore" );
       // now that we have the zipped content of an xml metastore, we need to write that to the export bundle
       FileInputStream zis = new FileInputStream( zippedMetastore );
       String zipFileLocation = METASTORE + METASTORE_BACKUP_EXT;
       ZipEntry metastoreZipFileZipEntry = new ZipEntry( zipFileLocation );
+      getRepositoryExportLogger( ).debug( "Starting to add the metastore zip to the export bundle" );
       zos.putNextEntry( 
metastoreZipFileZipEntry ); try { IOUtils.copy( zis, zos ); + getRepositoryExportLogger( ).debug( "Finished adding the metastore zip to the export bundle" ); } catch ( IOException e ) { throw e; } finally { - zis.close(); - zos.closeEntry(); + zis.close( ); + zos.closeEntry( ); } - + getRepositoryExportLogger( ).debug( "Starting to add the metastore to the export manifest" ); // add an ExportManifest entry for the metastore. ExportManifestMetaStore exportManifestMetaStore = new ExportManifestMetaStore( zipFileLocation, - getRepoMetaStore().getName(), - getRepoMetaStore().getDescription() ); - - getExportManifest().setMetaStore( exportManifestMetaStore ); + getRepoMetaStore( ).getName( ), + getRepoMetaStore( ).getDescription( ) ); - zippedMetastore.deleteOnExit(); - tempDirectory.toFile().deleteOnExit(); + getExportManifest( ).setMetaStore( exportManifestMetaStore ); + zippedMetastore.deleteOnExit( ); + tempDirectory.toFile( ).deleteOnExit( ); + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_SUCCESSFUL_EXPORT_METASTORE" ) ); } catch ( Exception e ) { - log.error( Messages.getInstance().getString( "PentahoPlatformExporter.ERROR.ExportingMetaStore" ) ); - log.debug( Messages.getInstance().getString( "PentahoPlatformExporter.ERROR.ExportingMetaStore" ), e ); + getRepositoryExportLogger( ).error( Messages.getInstance( ).getString( "PentahoPlatformExporter.ERROR.ExportingMetaStore" ) ); + getRepositoryExportLogger( ).debug( Messages.getInstance( ).getString( "PentahoPlatformExporter.ERROR.ExportingMetaStore" ), e ); } + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_END_EXPORT_METASTORE" ) ); } - protected IMetaStore getRepoMetaStore() { + protected IMetaStore getRepoMetaStore( ) { if ( metastore == null ) { try { - metastore = MetaStoreExportUtil.connectToRepository( null ).getRepositoryMetaStore(); + metastore = MetaStoreExportUtil.connectToRepository( null ).getRepositoryMetaStore( ); } catch ( KettleException e ) { // can't get the metastore to import into - log.debug( "Can't get the metastore to import into" ); + getRepositoryExportLogger( ).debug( "Can't get the metastore to import into" ); + } } return metastore; @@ -430,16 +516,16 @@ protected void setRepoMetaStore( IMetaStore metastore ) { } protected void zipFolder( File file, ZipOutputStream zos, String pathPrefixToRemove ) { - if ( file.isDirectory() ) { - File[] listFiles = file.listFiles(); + if ( file.isDirectory( ) ) { + File[] listFiles = file.listFiles( ); for ( File listFile : listFiles ) { - if ( listFile.isDirectory() ) { + if ( listFile.isDirectory( ) ) { zipFolder( listFile, zos, pathPrefixToRemove ); } else { if ( !pathPrefixToRemove.endsWith( File.separator ) ) { pathPrefixToRemove += File.separator; } - String path = listFile.getPath().replace( pathPrefixToRemove, "" ); + String path = listFile.getPath( ).replace( pathPrefixToRemove, "" ); ZipEntry entry = new ZipEntry( path ); FileInputStream fis = null; try { @@ -447,12 +533,12 @@ protected void zipFolder( File file, ZipOutputStream zos, String pathPrefixToRem fis = new FileInputStream( listFile ); IOUtils.copy( fis, zos ); } catch ( IOException e ) { - e.printStackTrace(); + e.printStackTrace( ); } finally { try { - zos.closeEntry(); + zos.closeEntry( ); } catch ( IOException e ) { - e.printStackTrace(); + e.printStackTrace( ); } IOUtils.closeQuietly( fis ); } @@ -462,8 +548,9 @@ protected void zipFolder( File file, ZipOutputStream zos, String pathPrefixToRem } 
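+  // Note: exportFileContent logs the start and end of the repository-object phase; a failure on a single file is
+  // logged and swallowed so the remaining export phases can still run (see performExport above).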
protected void exportFileContent( RepositoryFile exportRepositoryFile ) throws IOException, ExportException { + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_START_EXPORT_REPOSITORY_OBJECT" ) ); // get the file path - String filePath = new File( this.path ).getParent(); + String filePath = new File( this.path ).getParent( ); if ( filePath == null ) { filePath = "/"; } @@ -474,26 +561,40 @@ protected void exportFileContent( RepositoryFile exportRepositoryFile ) throws I throw new FileNotFoundException( "JCR file not found: " + this.path ); } - if ( exportRepositoryFile.isFolder() ) { // Handle recursive export - getExportManifest().getManifestInformation().setRootFolder( path.substring( 0, path.lastIndexOf( "/" ) + 1 ) ); + + if ( exportRepositoryFile.isFolder( ) ) { // Handle recursive export + getRepositoryExportLogger( ).trace( "Repository object [ " + exportRepositoryFile.getName( ) + "] is a folder" ); + getExportManifest( ).getManifestInformation( ).setRootFolder( path.substring( 0, path.lastIndexOf( "/" ) + 1 ) ); // don't zip root folder without name - if ( !ClientRepositoryPaths.getRootFolderPath().equals( exportRepositoryFile.getPath() ) ) { + if ( !ClientRepositoryPaths.getRootFolderPath( ).equals( exportRepositoryFile.getPath( ) ) ) { + getRepositoryExportLogger( ).trace( "Adding a name to the root folder" ); zos.putNextEntry( new ZipEntry( getFixedZipEntryName( exportRepositoryFile, filePath ) ) ); } + getRepositoryExportLogger( ).debug( "Starting recursive export of a folder [ " + exportRepositoryFile.getName( ) + " ]" ); exportDirectory( exportRepositoryFile, zos, filePath ); } else { - getExportManifest().getManifestInformation().setRootFolder( path.substring( 0, path.lastIndexOf( "/" ) + 1 ) ); - exportFile( exportRepositoryFile, zos, filePath ); + getRepositoryExportLogger( ).trace( "Repository object [ " + exportRepositoryFile.getName( ) + "] is a file" ); + getExportManifest( ).getManifestInformation( ).setRootFolder( path.substring( 0, path.lastIndexOf( "/" ) + 1 ) ); + + try { + getRepositoryExportLogger( ).debug( "Starting export of a file [ " + exportRepositoryFile.getName( ) + " ]" ); + exportFile( exportRepositoryFile, zos, filePath ); + } catch ( ExportException | IOException exception ) { + getRepositoryExportLogger( ).error( Messages.getInstance( ).getString( "PentahoPlatformExporter.ERROR_EXPORT_REPOSITORY_OBJECT", exportRepositoryFile.getName( ) ) ); + } finally { + getRepositoryExportLogger( ).debug( "Finished the export of a file [ " + exportRepositoryFile.getName( ) + " ]" ); + } } + getRepositoryExportLogger( ).info( Messages.getInstance( ).getString( "PentahoPlatformExporter.INFO_END_EXPORT_REPOSITORY_OBJECT" ) ); } protected Map getDomainFilesData( String domainId ) { return ( (IPentahoMetadataDomainRepositoryExporter) metadataDomainRepository ).getDomainFilesData( domainId ); } - public IScheduler getScheduler() { + public IScheduler getScheduler( ) { if ( scheduler == null ) { scheduler = PentahoSystem.get( IScheduler.class, "IScheduler2", null ); //$NON-NLS-1$ } @@ -504,9 +605,9 @@ public void setScheduler( IScheduler scheduler ) { this.scheduler = scheduler; } - public IMetadataDomainRepository getMetadataDomainRepository() { + public IMetadataDomainRepository getMetadataDomainRepository( ) { if ( metadataDomainRepository == null ) { - metadataDomainRepository = PentahoSystem.get( IMetadataDomainRepository.class, getSession() ); + metadataDomainRepository = PentahoSystem.get( 
IMetadataDomainRepository.class, getSession( ) ); } return metadataDomainRepository; } @@ -515,9 +616,9 @@ public void setMetadataDomainRepository( IMetadataDomainRepository metadataDomai this.metadataDomainRepository = metadataDomainRepository; } - public IDatasourceMgmtService getDatasourceMgmtService() { + public IDatasourceMgmtService getDatasourceMgmtService( ) { if ( datasourceMgmtService == null ) { - datasourceMgmtService = PentahoSystem.get( IDatasourceMgmtService.class, getSession() ); + datasourceMgmtService = PentahoSystem.get( IDatasourceMgmtService.class, getSession( ) ); } return datasourceMgmtService; } @@ -526,33 +627,33 @@ public void setDatasourceMgmtService( IDatasourceMgmtService datasourceMgmtServi this.datasourceMgmtService = datasourceMgmtService; } - public MondrianCatalogRepositoryHelper getMondrianCatalogRepositoryHelper() { + public MondrianCatalogRepositoryHelper getMondrianCatalogRepositoryHelper( ) { if ( this.mondrianCatalogRepositoryHelper == null ) { - mondrianCatalogRepositoryHelper = new MondrianCatalogRepositoryHelper( getUnifiedRepository() ); + mondrianCatalogRepositoryHelper = new MondrianCatalogRepositoryHelper( getUnifiedRepository( ) ); } return mondrianCatalogRepositoryHelper; } public void setMondrianCatalogRepositoryHelper( - MondrianCatalogRepositoryHelper mondrianCatalogRepositoryHelper ) { + MondrianCatalogRepositoryHelper mondrianCatalogRepositoryHelper ) { this.mondrianCatalogRepositoryHelper = mondrianCatalogRepositoryHelper; } - public IMondrianCatalogService getMondrianCatalogService() { + public IMondrianCatalogService getMondrianCatalogService( ) { if ( mondrianCatalogService == null ) { - mondrianCatalogService = PentahoSystem.get( IMondrianCatalogService.class, getSession() ); + mondrianCatalogService = PentahoSystem.get( IMondrianCatalogService.class, getSession( ) ); } return mondrianCatalogService; } public void setMondrianCatalogService( - IMondrianCatalogService mondrianCatalogService ) { + IMondrianCatalogService mondrianCatalogService ) { this.mondrianCatalogService = mondrianCatalogService; } - public IUserSettingService getUserSettingService() { + public IUserSettingService getUserSettingService( ) { if ( userSettingService == null ) { - userSettingService = PentahoSystem.get( IUserSettingService.class, getSession() ); + userSettingService = PentahoSystem.get( IUserSettingService.class, getSession( ) ); } return userSettingService; } @@ -567,7 +668,7 @@ protected boolean isExportCandidate( String path ) { return false; } - String etc = ClientRepositoryPaths.getEtcFolderPath(); + String etc = ClientRepositoryPaths.getEtcFolderPath( ); // we need to include the etc/operation_mart folder and sub folders // but NOT any other folders in /etc @@ -586,7 +687,7 @@ protected boolean isExportCandidate( String path ) { return true; } - public ZipOutputStream getZipStream() { + public ZipOutputStream getZipStream( ) { return zos; } } diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importer/PentahoPlatformImporter.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importer/PentahoPlatformImporter.java index 4fc79b802e2..497727f7420 100644 --- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importer/PentahoPlatformImporter.java +++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importer/PentahoPlatformImporter.java @@ -179,7 +179,7 @@ private void logImportFile( IPlatformImportBundle file ) { // If doing a mondrian publish then there will be no active logger if 
( repositoryImportLogger.hasLogger() && repositoryFilePath != null && repositoryFilePath.length() > 0 ) {
       repositoryImportLogger.setCurrentFilePath( repositoryFilePath );
-      repositoryImportLogger.warn( file.getName() );
+      repositoryImportLogger.debug( "Starting import of file " + file.getName() );
     }
   }

diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importer/SolutionImportHandler.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importer/SolutionImportHandler.java
index 6958969d2ce..5a95052ed7b 100644
--- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importer/SolutionImportHandler.java
+++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importer/SolutionImportHandler.java
@@ -88,7 +88,7 @@ public class SolutionImportHandler implements IPlatformImportHandler {
   private static final String EXPORT_MANIFEST_XML_FILE = "exportManifest.xml";
   private static final String DOMAIN_ID = "domain-id";

-  private static final String UTF_8 = StandardCharsets.UTF_8.name();
+  private static final String UTF_8 = StandardCharsets.UTF_8.name( );

   private IUnifiedRepository repository; // TODO inject via Spring
   protected Map<String, RepositoryFileImportBundle.Builder> cachedImports;
@@ -99,169 +99,166 @@ public class SolutionImportHandler implements IPlatformImportHandler {

   public SolutionImportHandler( List mimeTypes ) {
     this.mimeTypes = mimeTypes;
-    this.solutionHelper = new SolutionFileImportHelper();
+    this.solutionHelper = new SolutionFileImportHelper( );
     repository = PentahoSystem.get( IUnifiedRepository.class );
   }

-  public ImportSession getImportSession() {
-    return ImportSession.getSession();
+  public ImportSession getImportSession( ) {
+    return ImportSession.getSession( );
   }

-  public Log getLogger() {
-    return getImportSession().getLogger();
+  public Log getLogger( ) {
+    return getImportSession( ).getLogger( );
   }

   @Override
   public void importFile( IPlatformImportBundle bundle ) throws PlatformImportException, DomainIdNullException,
-    DomainAlreadyExistsException, DomainStorageException, IOException {
-
+      DomainAlreadyExistsException, DomainStorageException, IOException {
+    getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_START_IMPORT_PROCESS" ) );
     RepositoryFileImportBundle importBundle = (RepositoryFileImportBundle) bundle;
-    if ( !processZip( bundle.getInputStream() ) ) {
+    // Pre-process the files and folders contained in the bundle
+    getLogger( ).debug( "Start pre-processing files and folders from the import bundle" );
+    if ( !processZip( bundle.getInputStream( ) ) ) {
       // Something went wrong, do not proceed!
      return;
    }
+    getLogger( ).debug( "End pre-processing files and folders from the import bundle" );

-    LocaleFilesProcessor localeFilesProcessor = new LocaleFilesProcessor();
-    setOverwriteFile( bundle.overwriteInRepository() );
-
-    IPlatformImporter importer = PentahoSystem.get( IPlatformImporter.class );
-
-    cachedImports = new HashMap<>();
+    setOverwriteFile( bundle.overwriteInRepository( ) );
+    cachedImports = new HashMap<>( );

     //Process Manifest Settings
-    ExportManifest manifest = getImportSession().getManifest();
-    String manifestVersion = null;
+    ExportManifest manifest = getImportSession( ).getManifest( );
     // Process Metadata
     if ( manifest != null ) {
-      manifestVersion = manifest.getManifestInformation().getManifestVersion();
-
       // import the users
-      Map<String, List<String>> roleToUserMap = importUsers( manifest.getUserExports() );
+      Map<String, List<String>> roleToUserMap = importUsers( manifest.getUserExports( ) );

       // import the roles
-      importRoles( manifest.getRoleExports(), roleToUserMap );
+      importRoles( manifest.getRoleExports( ), roleToUserMap );

       // import the metadata
-      importMetadata( manifest.getMetadataList(), bundle.isPreserveDsw() );
+      importMetadata( manifest.getMetadataList( ), bundle.isPreserveDsw( ) );

       // Process Mondrian
-      importMondrian( manifest.getMondrianList() );
+      importMondrian( manifest.getMondrianList( ) );

       // import the metastore
-      importMetaStore( manifest.getMetaStore(), bundle.overwriteInRepository() );
-
-      // Add DB Connections
-      List datasourceList = manifest.getDatasourceList();
-      if ( datasourceList != null ) {
-        IDatasourceMgmtService datasourceMgmtSvc = PentahoSystem.get( IDatasourceMgmtService.class );
-        for ( org.pentaho.platform.plugin.services.importexport.exportManifest.bindings.DatabaseConnection databaseConnection : datasourceList ) {
-          if ( databaseConnection.getDatabaseType() == null ) {
-            // don't try to import the connection if there is no type it will cause an error
-            // However, if this is the DI Server, and the connection is defined in a ktr, it will import automatically
-            getLogger().warn( Messages.getInstance()
-              .getString( "SolutionImportHandler.ConnectionWithoutDatabaseType", databaseConnection.getName() ) );
-            continue;
-          }
-          try {
-            IDatabaseConnection existingDBConnection =
-              datasourceMgmtSvc.getDatasourceByName( databaseConnection.getName() );
-            if ( existingDBConnection != null && existingDBConnection.getName() != null ) {
-              if ( isOverwriteFile() ) {
-                databaseConnection.setId( existingDBConnection.getId() );
-                datasourceMgmtSvc.updateDatasourceByName( databaseConnection.getName(),
-                  DatabaseConnectionConverter.export2model( databaseConnection ) );
-              }
-            } else {
-              datasourceMgmtSvc.createDatasource( DatabaseConnectionConverter.export2model( databaseConnection ) );
-            }
-          } catch ( Exception e ) {
-            e.printStackTrace();
-          }
-        }
-      }
+      importMetaStore( manifest.getMetaStore( ), bundle.overwriteInRepository( ) );
+
+      // import jdbc datasource
+      importJDBCDataSource( manifest );
+    }
+    // import files and folders
+    importRepositoryFilesAndFolders( manifest, bundle );
+
+    // import schedules
+    if ( manifest != null ) {
+      importSchedules( manifest.getScheduleList( ) );
    }
  }

+  protected void importRepositoryFilesAndFolders( ExportManifest manifest, IPlatformImportBundle bundle ) throws IOException {
+    getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_START_IMPORT_FILEFOLDER" ) );
+    getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_COUNT_FILEFOLDER", files.size( ) ) );
+    int successfulFilesImportCount = 0;
+
String manifestVersion = null; + if ( manifest != null ) { + manifestVersion = manifest.getManifestInformation( ).getManifestVersion( ); + } + RepositoryFileImportBundle importBundle = (RepositoryFileImportBundle) bundle; + + LocaleFilesProcessor localeFilesProcessor = new LocaleFilesProcessor( ); + IPlatformImporter importer = PentahoSystem.get( IPlatformImporter.class ); for ( IRepositoryFileBundle fileBundle : files ) { - String fileName = fileBundle.getFile().getName(); - String actualFilePath = fileBundle.getPath(); + String fileName = fileBundle.getFile( ).getName( ); + String actualFilePath = fileBundle.getPath( ); if ( manifestVersion != null ) { fileName = ExportFileNameEncoder.decodeZipFileName( fileName ); actualFilePath = ExportFileNameEncoder.decodeZipFileName( actualFilePath ); } String repositoryFilePath = - RepositoryFilenameUtils.concat( PentahoPlatformImporter.computeBundlePath( actualFilePath ), fileName ); + RepositoryFilenameUtils.concat( PentahoPlatformImporter.computeBundlePath( actualFilePath ), fileName ); if ( cachedImports.containsKey( repositoryFilePath ) ) { - byte[] bytes = IOUtils.toByteArray( fileBundle.getInputStream() ); + getLogger( ).debug( "Repository object with path [ " + repositoryFilePath + " ] found in the cache" ); + byte[] bytes = IOUtils.toByteArray( fileBundle.getInputStream( ) ); RepositoryFileImportBundle.Builder builder = cachedImports.get( repositoryFilePath ); builder.input( new ByteArrayInputStream( bytes ) ); - importer.importFile( build( builder ) ); - continue; + try { + importer.importFile( build( builder ) ); + getLogger( ).debug( "Successfully imported repository object with path [ " + repositoryFilePath + " ] from the cache" ); + successfulFilesImportCount++; + continue; + } catch ( PlatformImportException e ) { + getLogger( ).error( Messages.getInstance( ).getString( "SolutionImportHandler.ERROR_IMPORTING_REPOSITORY_OBJECT", repositoryFilePath, e.getLocalizedMessage( ) ) ); + } } - RepositoryFileImportBundle.Builder bundleBuilder = new RepositoryFileImportBundle.Builder(); + RepositoryFileImportBundle.Builder bundleBuilder = new RepositoryFileImportBundle.Builder( ); InputStream bundleInputStream = null; - String decodedFilePath = fileBundle.getPath(); - RepositoryFile decodedFile = fileBundle.getFile(); + String decodedFilePath = fileBundle.getPath( ); + RepositoryFile decodedFile = fileBundle.getFile( ); if ( manifestVersion != null ) { - decodedFile = new RepositoryFile.Builder( decodedFile ).path( decodedFilePath ).name( fileName ).title( fileName ).build(); - decodedFilePath = ExportFileNameEncoder.decodeZipFileName( fileBundle.getPath() ); + decodedFile = new RepositoryFile.Builder( decodedFile ).path( decodedFilePath ).name( fileName ).title( fileName ).build( ); + decodedFilePath = ExportFileNameEncoder.decodeZipFileName( fileBundle.getPath( ) ); } - if ( fileBundle.getFile().isFolder() ) { + if ( fileBundle.getFile( ).isFolder( ) ) { bundleBuilder.mime( "text/directory" ); bundleBuilder.file( decodedFile ); fileName = repositoryFilePath; - repositoryFilePath = importBundle.getPath(); + repositoryFilePath = importBundle.getPath( ); } else { - byte[] bytes = IOUtils.toByteArray( fileBundle.getInputStream() ); + byte[] bytes = IOUtils.toByteArray( fileBundle.getInputStream( ) ); bundleInputStream = new ByteArrayInputStream( bytes ); // If is locale file store it for later processing. 
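+          // Locale files are only detected and stored here; they are imported in a separate pass by
+          // localeFilesProcessor.processLocaleFiles( importer ) once all content files have been handled.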
- if ( localeFilesProcessor.isLocaleFile( fileBundle, importBundle.getPath(), bytes ) ) { - getLogger().trace( Messages.getInstance() - .getString( "SolutionImportHandler.SkipLocaleFile", repositoryFilePath ) ); + if ( localeFilesProcessor.isLocaleFile( fileBundle, importBundle.getPath( ), bytes ) ) { + getLogger( ).trace( Messages.getInstance( ) + .getString( "SolutionImportHandler.SkipLocaleFile", repositoryFilePath ) ); continue; } bundleBuilder.input( bundleInputStream ); bundleBuilder.mime( solutionHelper.getMime( fileName ) ); String filePath = - ( decodedFilePath.equals( "/" ) || decodedFilePath.equals( "\\" ) ) ? "" : decodedFilePath; - repositoryFilePath = RepositoryFilenameUtils.concat( importBundle.getPath(), filePath ); + ( decodedFilePath.equals( "/" ) || decodedFilePath.equals( "\\" ) ) ? "" : decodedFilePath; + repositoryFilePath = RepositoryFilenameUtils.concat( importBundle.getPath( ), filePath ); } bundleBuilder.name( fileName ); bundleBuilder.path( repositoryFilePath ); String sourcePath; - if ( fileBundle.getFile().isFolder() ) { + if ( fileBundle.getFile( ).isFolder( ) ) { sourcePath = fileName; } else { sourcePath = - RepositoryFilenameUtils.concat( PentahoPlatformImporter.computeBundlePath( actualFilePath ), fileName ); + RepositoryFilenameUtils.concat( PentahoPlatformImporter.computeBundlePath( actualFilePath ), fileName ); } //This clause was added for processing ivb files so that it would not try process acls on folders that the user //may not have rights to such as /home or /public - if ( manifest != null && manifest.getExportManifestEntity( sourcePath ) == null && fileBundle.getFile() - .isFolder() ) { + if ( manifest != null && manifest.getExportManifestEntity( sourcePath ) == null && fileBundle.getFile( ) + .isFolder( ) ) { continue; } - getImportSession().setCurrentManifestKey( sourcePath ); + getImportSession( ).setCurrentManifestKey( sourcePath ); - bundleBuilder.charSet( bundle.getCharSet() ); - bundleBuilder.overwriteFile( bundle.overwriteInRepository() ); - bundleBuilder.applyAclSettings( bundle.isApplyAclSettings() ); - bundleBuilder.retainOwnership( bundle.isRetainOwnership() ); - bundleBuilder.overwriteAclSettings( bundle.isOverwriteAclSettings() ); - bundleBuilder.acl( getImportSession().processAclForFile( sourcePath ) ); - bundleBuilder.extraMetaData( getImportSession().processExtraMetaDataForFile( sourcePath ) ); + bundleBuilder.charSet( bundle.getCharSet( ) ); + bundleBuilder.overwriteFile( bundle.overwriteInRepository( ) ); + bundleBuilder.applyAclSettings( bundle.isApplyAclSettings( ) ); + bundleBuilder.retainOwnership( bundle.isRetainOwnership( ) ); + bundleBuilder.overwriteAclSettings( bundle.isOverwriteAclSettings( ) ); + bundleBuilder.acl( getImportSession( ).processAclForFile( sourcePath ) ); + bundleBuilder.extraMetaData( getImportSession( ).processExtraMetaDataForFile( sourcePath ) ); RepositoryFile file = getFile( importBundle, fileBundle ); - ManifestFile manifestFile = getImportSession().getManifestFile( sourcePath, file != null ); + ManifestFile manifestFile = getImportSession( ).getManifestFile( sourcePath, file != null ); bundleBuilder.hidden( isFileHidden( file, manifestFile, sourcePath ) ); boolean isSchedulable = isSchedulable( file, manifestFile ); @@ -273,61 +270,118 @@ public void importFile( IPlatformImportBundle bundle ) throws PlatformImportExce } IPlatformImportBundle platformImportBundle = build( bundleBuilder ); - importer.importFile( platformImportBundle ); + try { + importer.importFile( platformImportBundle ); + 
successfulFilesImportCount++; + getLogger( ).debug( "Successfully imported repository object with path [ " + repositoryFilePath + " ]" ); + } catch ( PlatformImportException e ) { + getLogger( ).error( Messages.getInstance( ).getString( "SolutionImportHandler.ERROR_IMPORTING_REPOSITORY_OBJECT", repositoryFilePath, e.getLocalizedMessage( ) ) ); + } if ( bundleInputStream != null ) { - bundleInputStream.close(); + bundleInputStream.close( ); bundleInputStream = null; } } + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_SUCCESSFUL_REPOSITORY_IMPORT_COUNT", successfulFilesImportCount, files.size( ) ) ); - if ( manifest != null ) { - importSchedules( manifest.getScheduleList() ); + // Process locale files. + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_START_IMPORT_LOCALEFILE" ) ); + + try { + localeFilesProcessor.processLocaleFiles( importer ); + } catch ( PlatformImportException e ) { + getLogger( ).error( Messages.getInstance( ).getString( "SolutionImportHandler.ERROR_IMPORTING_LOCALE_FILE", e.getLocalizedMessage( ) ) ); + } finally { + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_END_IMPORT_LOCALEFILE" ) ); } + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_END_IMPORT_FILEFOLDER" ) ); + } - // Process locale files. - localeFilesProcessor.processLocaleFiles( importer ); + protected void importJDBCDataSource( ExportManifest manifest ) { + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_START_IMPORT_DATASOURCE" ) ); + // Add DB Connections + List datasourceList = manifest.getDatasourceList( ); + if ( datasourceList != null ) { + int successfulDatasourceImportCount = 0; + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_COUNT_DATASOURCE", datasourceList.size( ) ) ); + IDatasourceMgmtService datasourceMgmtSvc = PentahoSystem.get( IDatasourceMgmtService.class ); + for ( org.pentaho.platform.plugin.services.importexport.exportManifest.bindings.DatabaseConnection databaseConnection : datasourceList ) { + if ( databaseConnection.getDatabaseType( ) == null ) { + // don't try to import the connection if there is no type it will cause an error + // However, if this is the DI Server, and the connection is defined in a ktr, it will import automatically + getLogger( ).warn( Messages.getInstance( ) + .getString( "SolutionImportHandler.ConnectionWithoutDatabaseType", databaseConnection.getName( ) ) ); + continue; + } + try { + IDatabaseConnection existingDBConnection = + datasourceMgmtSvc.getDatasourceByName( databaseConnection.getName( ) ); + if ( existingDBConnection != null && existingDBConnection.getName( ) != null ) { + if ( isOverwriteFile( ) ) { + databaseConnection.setId( existingDBConnection.getId( ) ); + datasourceMgmtSvc.updateDatasourceByName( databaseConnection.getName( ), + DatabaseConnectionConverter.export2model( databaseConnection ) ); + } + } else { + datasourceMgmtSvc.createDatasource( DatabaseConnectionConverter.export2model( databaseConnection ) ); + } + successfulDatasourceImportCount++; + } catch ( Exception e ) { + getLogger( ).error( Messages.getInstance( ).getString( "SolutionImportHandler.ERROR_IMPORTING_JDBC_DATASOURCE", databaseConnection.getName( ), e.getLocalizedMessage( ) ) ); + } + } + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_SUCCESSFUL_DATASOURCE_IMPORT_COUNT", successfulDatasourceImportCount, datasourceList.size( ) ) ); + } + 
getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_END_IMPORT_DATASOURCE" ) ); } List getAllJobs( ISchedulerResource schedulerResource ) { - return schedulerResource.getJobsList(); + return schedulerResource.getJobsList( ); } private RepositoryFile getFile( IPlatformImportBundle importBundle, IRepositoryFileBundle fileBundle ) { String repositoryFilePath = - repositoryPathConcat( importBundle.getPath(), fileBundle.getPath(), fileBundle.getFile().getName() ); + repositoryPathConcat( importBundle.getPath( ), fileBundle.getPath( ), fileBundle.getFile( ).getName( ) ); return repository.getFile( repositoryFilePath ); } protected void importSchedules( List scheduleList ) throws PlatformImportException { + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_START_IMPORT_SCHEDULE" ) ); if ( CollectionUtils.isNotEmpty( scheduleList ) ) { + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_COUNT_SCHEDULUE", scheduleList.size( ) ) ); + int successfulScheduleImportCount = 0; IScheduler scheduler = PentahoSystem.get( IScheduler.class, "IScheduler2", null ); //$NON-NLS-1$ - ISchedulerResource schedulerResource = scheduler.createSchedulerResource(); - schedulerResource.pause(); + ISchedulerResource schedulerResource = scheduler.createSchedulerResource( ); + getLogger( ).debug( "Pausing the scheduler before the start of the import process" ); + schedulerResource.pause( ); + getLogger( ).debug( "Successfully paused the scheduler" ); for ( IJobScheduleRequest jobScheduleRequest : scheduleList ) { - + getLogger( ).debug( "Importing schedule name [ " + jobScheduleRequest.getJobName( ) + "] inputFile [ " + jobScheduleRequest.getInputFile( ) + " ] outputFile [ " + jobScheduleRequest.getOutputFile( ) + "]" ); boolean jobExists = false; List jobs = getAllJobs( schedulerResource ); if ( jobs != null ) { //paramRequest to map - Map mapParamsRequest = new HashMap<>(); - for ( IJobScheduleParam paramRequest : jobScheduleRequest.getJobParameters() ) { - mapParamsRequest.put( paramRequest.getName(), paramRequest.getValue() ); + Map mapParamsRequest = new HashMap<>( ); + for ( IJobScheduleParam paramRequest : jobScheduleRequest.getJobParameters( ) ) { + mapParamsRequest.put( paramRequest.getName( ), paramRequest.getValue( ) ); } + // We will check the existing job in the repository. If the job being imported exists, we will remove it from the repository for ( IJob job : jobs ) { if ( ( mapParamsRequest.get( RESERVEDMAPKEY_LINEAGE_ID ) != null ) - && ( mapParamsRequest.get( RESERVEDMAPKEY_LINEAGE_ID ) - .equals( job.getJobParams().get( RESERVEDMAPKEY_LINEAGE_ID ) ) ) ) { + && ( mapParamsRequest.get( RESERVEDMAPKEY_LINEAGE_ID ) + .equals( job.getJobParams( ).get( RESERVEDMAPKEY_LINEAGE_ID ) ) ) ) { jobExists = true; } if ( overwriteFile && jobExists ) { - IJobRequest jobRequest = scheduler.createJobRequest(); - jobRequest.setJobId( job.getJobId() ); + getLogger( ).debug( "Schedule [ " + jobScheduleRequest.getJobName( ) + "] already exists and overwrite flag is set to true. 
Removing the job so we can add it again" ); + IJobRequest jobRequest = scheduler.createJobRequest( ); + jobRequest.setJobId( job.getJobId( ) ); schedulerResource.removeJob( jobRequest ); jobExists = false; break; @@ -338,11 +392,16 @@ protected void importSchedules( List scheduleList ) throws if ( !jobExists ) { try { Response response = createSchedulerJob( schedulerResource, jobScheduleRequest ); - if ( response.getStatus() == Response.Status.OK.getStatusCode() ) { - if ( response.getEntity() != null ) { + if ( response.getStatus( ) == Response.Status.OK.getStatusCode( ) ) { + if ( response.getEntity( ) != null ) { // get the schedule job id from the response and add it to the import session - ImportSession.getSession().addImportedScheduleJobId( response.getEntity().toString() ); + ImportSession.getSession( ).addImportedScheduleJobId( response.getEntity( ).toString( ) ); + getLogger( ).debug( "Successfully imported schedule [ " + jobScheduleRequest.getJobName( ) + " ] " ); + successfulScheduleImportCount++; } + } else { + getLogger( ).error( Messages.getInstance( ).getString( "SolutionImportHandler.ERROR_IMPORTING_SCHEDULE", jobScheduleRequest.getJobName( ), response.getEntity( ) != null + ? response.getEntity( ).toString( ) : "" ) ); } } catch ( Exception e ) { // there is a scenario where if the file scheduled has a space in the file name, that it won't work. the @@ -352,16 +411,16 @@ protected void importSchedules( List scheduleList ) throws // or not // so, if we failed to import and there is a space in the path, try again but this time with replacing // the space(s) - if ( jobScheduleRequest.getInputFile().contains( " " ) || jobScheduleRequest.getOutputFile() - .contains( " " ) ) { - getLogger().info( Messages.getInstance() - .getString( "SolutionImportHandler.SchedulesWithSpaces", jobScheduleRequest.getInputFile() ) ); - File inFile = new File( jobScheduleRequest.getInputFile() ); - File outFile = new File( jobScheduleRequest.getOutputFile() ); - String inputFileName = inFile.getParent() + RepositoryFile.SEPARATOR - + inFile.getName().replace( " ", "_" ); - String outputFileName = outFile.getParent() + RepositoryFile.SEPARATOR - + outFile.getName().replace( " ", "_" ); + if ( jobScheduleRequest.getInputFile( ).contains( " " ) || jobScheduleRequest.getOutputFile( ) + .contains( " " ) ) { + getLogger( ).debug( Messages.getInstance( ) + .getString( "SolutionImportHandler.SchedulesWithSpaces", jobScheduleRequest.getInputFile( ) ) ); + File inFile = new File( jobScheduleRequest.getInputFile( ) ); + File outFile = new File( jobScheduleRequest.getOutputFile( ) ); + String inputFileName = inFile.getParent( ) + RepositoryFile.SEPARATOR + + inFile.getName( ).replace( " ", "_" ); + String outputFileName = outFile.getParent( ) + RepositoryFile.SEPARATOR + + outFile.getName( ).replace( " ", "_" ); jobScheduleRequest.setInputFile( inputFileName ); jobScheduleRequest.setOutputFile( outputFileName ); try { @@ -369,49 +428,57 @@ protected void importSchedules( List scheduleList ) throws // on windows systems, the backslashes will result in the file not being found in the repository jobScheduleRequest.setInputFile( inputFileName.replace( File.separator, RepositoryFile.SEPARATOR ) ); jobScheduleRequest - .setOutputFile( outputFileName.replace( File.separator, RepositoryFile.SEPARATOR ) ); + .setOutputFile( outputFileName.replace( File.separator, RepositoryFile.SEPARATOR ) ); } Response response = createSchedulerJob( schedulerResource, jobScheduleRequest ); - if ( response.getStatus() == 
Response.Status.OK.getStatusCode() ) { - if ( response.getEntity() != null ) { + if ( response.getStatus( ) == Response.Status.OK.getStatusCode( ) ) { + if ( response.getEntity( ) != null ) { // get the schedule job id from the response and add it to the import session - ImportSession.getSession().addImportedScheduleJobId( response.getEntity().toString() ); + ImportSession.getSession( ).addImportedScheduleJobId( response.getEntity( ).toString( ) ); + successfulScheduleImportCount++; } } } catch ( Exception ex ) { - // log it and keep going. we should stop processing all schedules just because one fails. - getLogger().error( Messages.getInstance() - .getString( "SolutionImportHandler.ERROR_0001_ERROR_CREATING_SCHEDULE", e.getMessage() ), ex ); + // log it and keep going. we shouldn't stop processing all schedules just because one fails. + getLogger( ).error( Messages.getInstance( ) + .getString( "SolutionImportHandler.ERROR_0001_ERROR_CREATING_SCHEDULE", "[ " + jobScheduleRequest.getJobName( ) + " ] cause [ " + ex.getMessage( ) + " ]" ), ex ); } } else { - // log it and keep going. we should stop processing all schedules just because one fails. - getLogger().error( Messages.getInstance() - .getString( "SolutionImportHandler.ERROR_0001_ERROR_CREATING_SCHEDULE", e.getMessage() ) ); + // log it and keep going. we shouldn't stop processing all schedules just because one fails. + getLogger( ).error( Messages.getInstance( ) + .getString( "SolutionImportHandler.ERROR_0001_ERROR_CREATING_SCHEDULE", "[ " + jobScheduleRequest.getJobName( ) + " ]" ) ); } } } else { - getLogger().info( Messages.getInstance() - .getString( "DefaultImportHandler.ERROR_0009_OVERWRITE_CONTENT", jobScheduleRequest.toString() ) ); + getLogger( ).info( Messages.getInstance( ) + .getString( "DefaultImportHandler.ERROR_0009_OVERWRITE_CONTENT", jobScheduleRequest.toString( ) ) ); } } - schedulerResource.start(); + getLogger( ).info( Messages.getInstance( ) + .getString( "SolutionImportHandler.INFO_SUCCESSFUL_SCHEDULE_IMPORT_COUNT", successfulScheduleImportCount, scheduleList.size( ) ) ); + schedulerResource.start( ); + getLogger( ).debug( "Successfully started the scheduler" ); } + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_END_IMPORT_SCHEDULE" ) ); } protected void importMetaStore( ExportManifestMetaStore manifestMetaStore, boolean overwrite ) { + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_START_IMPORT_METASTORE" ) ); if ( manifestMetaStore != null ) { // get the zipped metastore from the export bundle RepositoryFileImportBundle.Builder bundleBuilder = - new RepositoryFileImportBundle.Builder() - .path( manifestMetaStore.getFile() ) - .name( manifestMetaStore.getName() ) - .withParam( "description", manifestMetaStore.getDescription() ) - .charSet( UTF_8 ) - .overwriteFile( overwrite ) - .mime( "application/vnd.pentaho.metastore" ); - - cachedImports.put( manifestMetaStore.getFile(), bundleBuilder ); + new RepositoryFileImportBundle.Builder( ) + .path( manifestMetaStore.getFile( ) ) + .name( manifestMetaStore.getName( ) ) + .withParam( "description", manifestMetaStore.getDescription( ) ) + .charSet( UTF_8 ) + .overwriteFile( overwrite ) + .mime( "application/vnd.pentaho.metastore" ); + + cachedImports.put( manifestMetaStore.getFile( ), bundleBuilder ); + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_SUCCESSFUL_IMPORT_METASTORE" ) ); } + getLogger( ).info( Messages.getInstance( ).getString( 
"SolutionImportHandler.INFO_END_IMPORT_METASTORE" ) ); } /** @@ -421,142 +488,187 @@ protected void importMetaStore( ExportManifestMetaStore manifestMetaStore, boole * @return A map of role names to list of users in that role */ protected Map> importUsers( List users ) { - Map> roleToUserMap = new HashMap<>(); + Map> roleToUserMap = new HashMap<>( ); IUserRoleDao roleDao = PentahoSystem.get( IUserRoleDao.class ); - ITenant tenant = new Tenant( "/pentaho/" + TenantUtils.getDefaultTenant(), true ); - + ITenant tenant = new Tenant( "/pentaho/" + TenantUtils.getDefaultTenant( ), true ); + int successFullUserImportCount = 0; + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_START_IMPORT_USER" ) ); if ( users != null && roleDao != null ) { + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_COUNT_USER", users.size( ) ) ); for ( UserExport user : users ) { - String password = user.getPassword(); - getLogger().debug( Messages.getInstance().getString( "USER.importing", user.getUsername() ) ); + String password = user.getPassword( ); + getLogger( ).debug( Messages.getInstance( ).getString( "USER.importing", user.getUsername( ) ) ); // map the user to the roles he/she is in - for ( String role : user.getRoles() ) { + for ( String role : user.getRoles( ) ) { List userList; if ( !roleToUserMap.containsKey( role ) ) { - userList = new ArrayList<>(); + userList = new ArrayList<>( ); roleToUserMap.put( role, userList ); } else { userList = roleToUserMap.get( role ); } - userList.add( user.getUsername() ); + userList.add( user.getUsername( ) ); } - String[] userRoles = user.getRoles().toArray( new String[] {} ); + String[] userRoles = user.getRoles( ).toArray( new String[] {} ); try { - roleDao.createUser( tenant, user.getUsername(), password, null, userRoles ); + getLogger( ).debug( "Importing user [ " + user.getUsername( ) + " ] " ); + roleDao.createUser( tenant, user.getUsername( ), password, null, userRoles ); + getLogger( ).debug( "Successfully imported user [ " + user.getUsername( ) + " ]" ); + successFullUserImportCount++; } catch ( AlreadyExistsException e ) { // it's ok if the user already exists, it is probably a default user - getLogger().info( Messages.getInstance().getString( "USER.Already.Exists", user.getUsername() ) ); + getLogger( ).debug( Messages.getInstance( ).getString( "USER.Already.Exists", user.getUsername( ) ) ); try { - if ( isOverwriteFile() ) { + if ( isOverwriteFile( ) ) { + getLogger( ).debug( "Overwrite is set to true. 
So reImporting user [ " + user.getUsername( ) + " ]" ); // set the roles, maybe they changed - roleDao.setUserRoles( tenant, user.getUsername(), userRoles ); + roleDao.setUserRoles( tenant, user.getUsername( ), userRoles ); // set the password just in case it changed - roleDao.setPassword( tenant, user.getUsername(), password ); + roleDao.setPassword( tenant, user.getUsername( ), password ); + successFullUserImportCount++; } } catch ( Exception ex ) { // couldn't set the roles or password either - getLogger().debug( Messages.getInstance() - .getString( "ERROR.OverridingExistingUser", user.getUsername() ), ex ); + getLogger( ).warn( Messages.getInstance( ) + .getString( "ERROR.OverridingExistingUser", user.getUsername( ) ) ); + getLogger( ).debug( Messages.getInstance( ) + .getString( "ERROR.OverridingExistingUser", user.getUsername( ) ), ex ); } } catch ( Exception e ) { - getLogger().error( Messages.getInstance() - .getString( "ERROR.OverridingExistingUser", user.getUsername() ), e ); + getLogger( ).debug( Messages.getInstance( ) + .getString( "ERROR.OverridingExistingUser", user.getUsername( ) ), e ); + getLogger( ).error( Messages.getInstance( ) + .getString( "ERROR.OverridingExistingUser", user.getUsername( ) ) ); } + getLogger( ).debug( "Importing user [ " + user.getUsername( ) + " ] specific settings" ); importUserSettings( user ); + getLogger( ).debug( "Successfully imported user [ " + user.getUsername( ) + " ] specific settings" ); } } + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_SUCCESSFUL_USER_COUNT", successFullUserImportCount, users.size( ) ) ); + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_END_IMPORT_USER" ) ); return roleToUserMap; } protected void importGlobalUserSettings( List globalSettings ) { + getLogger( ).debug( "************************[ Start: Import global user settings] *************************" ); IUserSettingService settingService = PentahoSystem.get( IUserSettingService.class ); if ( settingService != null ) { for ( ExportManifestUserSetting globalSetting : globalSettings ) { - if ( isOverwriteFile() ) { - settingService.setGlobalUserSetting( globalSetting.getName(), globalSetting.getValue() ); + if ( isOverwriteFile( ) ) { + getLogger( ).trace( "Overwrite flag is set to true." ); + settingService.setGlobalUserSetting( globalSetting.getName( ), globalSetting.getValue( ) ); + getLogger( ).debug( "Finished import of global user setting with name [ " + globalSetting.getName( ) + " ]" ); } else { - IUserSetting userSetting = settingService.getGlobalUserSetting( globalSetting.getName(), null ); + getLogger( ).trace( "Overwrite flag is set to false." 
); + IUserSetting userSetting = settingService.getGlobalUserSetting( globalSetting.getName( ), null ); if ( userSetting == null ) { - settingService.setGlobalUserSetting( globalSetting.getName(), globalSetting.getValue() ); + settingService.setGlobalUserSetting( globalSetting.getName( ), globalSetting.getValue( ) ); + getLogger( ).debug( "Finished import of global user setting with name [ " + globalSetting.getName( ) + " ]" ); } } } } + getLogger( ).debug( "************************[ End: Import global user settings] *************************" ); } protected void importUserSettings( UserExport user ) { IUserSettingService settingService = PentahoSystem.get( IUserSettingService.class ); IAnyUserSettingService userSettingService = null; + int userSettingsListSize = 0; + int successfulUserSettingsImportCount = 0; if ( settingService != null && settingService instanceof IAnyUserSettingService ) { userSettingService = (IAnyUserSettingService) settingService; } + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_START_IMPORT_USER_SETTING" ) ); if ( userSettingService != null ) { - List exportedSettings = user.getUserSettings(); + List exportedSettings = user.getUserSettings( ); + userSettingsListSize = user.getUserSettings( ).size( ); + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_COUNT_USER_SETTING", userSettingsListSize, user.getUsername( ) ) ); try { for ( ExportManifestUserSetting exportedSetting : exportedSettings ) { - if ( isOverwriteFile() ) { - userSettingService.setUserSetting( user.getUsername(), - exportedSetting.getName(), exportedSetting.getValue() ); + getLogger( ).debug( "Importing user specific setting [ " + exportedSetting.getName( ) + " ]" ); + if ( isOverwriteFile( ) ) { + getLogger( ).debug( "Overwrite is set to true. So re-importing setting [ " + exportedSetting.getName( ) + " ]" ); + userSettingService.setUserSetting( user.getUsername( ), + exportedSetting.getName( ), exportedSetting.getValue( ) ); + getLogger( ).debug( "Finished import of user specific setting with name [ " + exportedSetting.getName( ) + " ]" ); } else { // see if it's there first before we set this setting + getLogger( ).debug( "Overwrite is set to false. Only import setting [ " + exportedSetting.getName( ) + " ] if it does not exist" ); IUserSetting userSetting = - userSettingService.getUserSetting( user.getUsername(), exportedSetting.getName(), null ); + userSettingService.getUserSetting( user.getUsername( ), exportedSetting.getName( ), null ); if ( userSetting == null ) { // only set it if we didn't find that it exists already - userSettingService.setUserSetting( user.getUsername(), - exportedSetting.getName(), exportedSetting.getValue() ); + userSettingService.setUserSetting( user.getUsername( ), exportedSetting.getName( ), exportedSetting.getValue( ) ); + getLogger( ).debug( "Finished import of user specific setting with name [ " + exportedSetting.getName( ) + " ]" ); } } + successfulUserSettingsImportCount++; + getLogger( ).debug( "Successfully imported setting [ " + exportedSetting.getName( ) + " ]" ); } } catch ( SecurityException e ) { - String errorMsg = Messages.getInstance().getString( "ERROR.ImportingUserSetting", user.getUsername() ); - getLogger().error( errorMsg ); - getLogger().debug( errorMsg, e ); + String errorMsg = Messages.getInstance( ).getString( "ERROR.ImportingUserSetting", user.getUsername( ) ); + getLogger( ).error( errorMsg ); + getLogger( ).debug( errorMsg, e ); + } finally { + getLogger( ).info( Messages.getInstance( ) .getString( "SolutionImportHandler.INFO_SUCCESSFUL_USER_SETTING_IMPORT_COUNT", successfulUserSettingsImportCount, userSettingsListSize ) ); + getLogger( ).info( Messages.getInstance( ) .getString( "SolutionImportHandler.INFO_END_IMPORT_USER_SETTING" ) ); } } } protected void importRoles( List roles, Map> roleToUserMap ) { + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_START_IMPORT_ROLE" ) ); if ( roles != null ) { IUserRoleDao roleDao = PentahoSystem.get( IUserRoleDao.class ); - ITenant tenant = new Tenant( "/pentaho/" + TenantUtils.getDefaultTenant(), true ); + ITenant tenant = new Tenant( "/pentaho/" + TenantUtils.getDefaultTenant( ), true ); IRoleAuthorizationPolicyRoleBindingDao roleBindingDao = PentahoSystem.get( - IRoleAuthorizationPolicyRoleBindingDao.class ); - - Set existingRoles = new HashSet<>(); + IRoleAuthorizationPolicyRoleBindingDao.class ); + Set existingRoles = new HashSet<>( ); + getLogger( ).info( "Found [ " + roles.size( ) + " ] roles to import" ); + int successFullRoleImportCount = 0; for ( RoleExport role : roles ) { - getLogger().debug( Messages.getInstance().getString( "ROLE.importing", role.getRolename() ) ); + getLogger( ).debug( Messages.getInstance( ).getString( "ROLE.importing", role.getRolename( ) ) ); try { - List users = roleToUserMap.get( role.getRolename() ); + List users = roleToUserMap.get( role.getRolename( ) ); String[] userarray = users == null ?
new String[] {} : users.toArray( new String[] {} ); - IPentahoRole role1 = roleDao.createRole( tenant, role.getRolename(), null, userarray ); + IPentahoRole role1 = roleDao.createRole( tenant, role.getRolename( ), null, userarray ); + successFullRoleImportCount++; } catch ( AlreadyExistsException e ) { - existingRoles.add( role.getRolename() ); + existingRoles.add( role.getRolename( ) ); // it's ok if the role already exists, it is probably a default role - getLogger().info( Messages.getInstance().getString( "ROLE.Already.Exists", role.getRolename() ) ); + getLogger( ).debug( Messages.getInstance( ).getString( "ROLE.Already.Exists", role.getRolename( ) ) ); } try { - if ( existingRoles.contains( role.getRolename() ) ) { + if ( existingRoles.contains( role.getRolename( ) ) ) { //Only update an existing role if the overwrite flag is set - if ( isOverwriteFile() ) { - roleBindingDao.setRoleBindings( tenant, role.getRolename(), role.getPermissions() ); + if ( isOverwriteFile( ) ) { + getLogger( ).debug( "Overwrite is set to true so re-importing role [ " + role.getRolename( ) + " ]" ); + roleBindingDao.setRoleBindings( tenant, role.getRolename( ), role.getPermissions( ) ); } } else { + getLogger( ).debug( "Updating the role mapping from runtime roles to logical roles for [ " + role.getRolename( ) + " ]" ); //Always write a roles permissions that were not previously existing - roleBindingDao.setRoleBindings( tenant, role.getRolename(), role.getPermissions() ); + roleBindingDao.setRoleBindings( tenant, role.getRolename( ), role.getPermissions( ) ); } + successFullRoleImportCount++; } catch ( Exception e ) { - getLogger().info( Messages.getInstance() .getString( "ERROR.SettingRolePermissions", role.getRolename() ), e ); + getLogger( ).error( Messages.getInstance( ) .getString( "ERROR.SettingRolePermissions", role.getRolename( ) ), e ); } } + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_SUCCESSFUL_ROLE_COUNT", successFullRoleImportCount, roles.size( ) ) ); } + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_END_IMPORT_ROLE" ) ); } /** @@ -566,60 +678,75 @@ protected void importRoles( List roles, Map> ro * @param preserveDsw whether or not to preserve DSW settings */ protected void importMetadata( List metadataList, boolean preserveDsw ) { + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_START_IMPORT_METADATA_DATASOURCE" ) ); if ( null != metadataList ) { + int successfulMetadataModelImport = 0; + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_COUNT_METADATA_DATASOURCE", metadataList.size( ) ) ); for ( ExportManifestMetadata exportManifestMetadata : metadataList ) { - String domainId = exportManifestMetadata.getDomainId(); + getLogger( ).debug( "Importing [ " + exportManifestMetadata.getDomainId( ) + " ] model" ); + String domainId = exportManifestMetadata.getDomainId( ); if ( domainId != null && !domainId.endsWith( XMI_EXTENSION ) ) { domainId = domainId + XMI_EXTENSION; } RepositoryFileImportBundle.Builder bundleBuilder = - new RepositoryFileImportBundle.Builder().charSet( UTF_8 ) - .hidden( RepositoryFile.HIDDEN_BY_DEFAULT ).schedulable( RepositoryFile.SCHEDULABLE_BY_DEFAULT ) - // let the parent bundle control whether or not to preserve DSW settings - .preserveDsw( preserveDsw ) - .overwriteFile( isOverwriteFile() ) - .mime( "text/xmi+xml" ) - .withParam( DOMAIN_ID, domainId ); - - cachedImports.put( exportManifestMetadata.getFile(), bundleBuilder ); + new
RepositoryFileImportBundle.Builder( ).charSet( UTF_8 ) + .hidden( RepositoryFile.HIDDEN_BY_DEFAULT ).schedulable( RepositoryFile.SCHEDULABLE_BY_DEFAULT ) + // let the parent bundle control whether or not to preserve DSW settings + .preserveDsw( preserveDsw ) + .overwriteFile( isOverwriteFile( ) ) + .mime( "text/xmi+xml" ) + .withParam( DOMAIN_ID, domainId ); + + cachedImports.put( exportManifestMetadata.getFile( ), bundleBuilder ); + getLogger( ).debug( "Successfully imported [ " + exportManifestMetadata.getDomainId( ) + " ] model" ); + successfulMetadataModelImport++; } + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_SUCCESSFUL_METDATA_DATASOURCE_COUNT", successfulMetadataModelImport, metadataList.size( ) ) ); } + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_END_IMPORT_METADATA_DATASOURCE" ) ); } protected void importMondrian( List mondrianList ) { + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_START_IMPORT_MONDRIAN_DATASOURCE" ) ); if ( null != mondrianList ) { + int successfulMondrianSchemaImport = 0; + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_COUNT_MONDRIAN_DATASOURCE", mondrianList.size( ) ) ); for ( ExportManifestMondrian exportManifestMondrian : mondrianList ) { - - String catName = exportManifestMondrian.getCatalogName(); - Parameters parametersMap = exportManifestMondrian.getParameters(); - StringBuilder parametersStr = new StringBuilder(); - for ( Map.Entry e : parametersMap.entrySet() ) { - parametersStr.append( e.getKey() ).append( '=' ).append( e.getValue() ).append( ';' ); + getLogger( ).debug( "Importing [ " + exportManifestMondrian.getCatalogName( ) + " ] OLAP model" ); + String catName = exportManifestMondrian.getCatalogName( ); + Parameters parametersMap = exportManifestMondrian.getParameters( ); + StringBuilder parametersStr = new StringBuilder( ); + for ( Map.Entry e : parametersMap.entrySet( ) ) { + parametersStr.append( e.getKey( ) ).append( '=' ).append( e.getValue( ) ).append( ';' ); } RepositoryFileImportBundle.Builder bundleBuilder = - new RepositoryFileImportBundle.Builder().charSet( UTF_8 ).hidden( RepositoryFile.HIDDEN_BY_DEFAULT ) - .schedulable( RepositoryFile.SCHEDULABLE_BY_DEFAULT ).name( catName ).overwriteFile( - isOverwriteFile() ).mime( "application/vnd.pentaho.mondrian+xml" ) - .withParam( "parameters", parametersStr.toString() ) - .withParam( DOMAIN_ID, catName ); // TODO: this is definitely named wrong at the very least. + new RepositoryFileImportBundle.Builder( ).charSet( UTF_8 ).hidden( RepositoryFile.HIDDEN_BY_DEFAULT ) + .schedulable( RepositoryFile.SCHEDULABLE_BY_DEFAULT ).name( catName ).overwriteFile( + isOverwriteFile( ) ).mime( "application/vnd.pentaho.mondrian+xml" ) + .withParam( "parameters", parametersStr.toString( ) ) + .withParam( DOMAIN_ID, catName ); // TODO: this is definitely named wrong at the very least.
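One thing the two methods above make easy to miss: nothing is imported at this point. importMetadata and importMondrian only park configured builders in cachedImports, keyed by the manifest file path, and the zip processing replays them when the matching entry is reached. A hedged sketch of that replay step follows; the replay loop itself is outside this section, so the input(...) setter and the local names are assumptions:

    // Hypothetical replay: complete a parked builder with the entry's bytes and import it.
    RepositoryFileImportBundle.Builder cached = cachedImports.get( repositoryFilePath );
    if ( cached != null ) {
      cached.input( bundleInputStream );      // attach the zip entry's content stream (assumed setter)
      importer.importFile( build( cached ) ); // build() is the overridable hook defined later in this class
    }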
// pass as param if not in parameters string - String xmlaEnabled = "" + exportManifestMondrian.isXmlaEnabled(); + String xmlaEnabled = "" + exportManifestMondrian.isXmlaEnabled( ); bundleBuilder.withParam( "EnableXmla", xmlaEnabled ); - cachedImports.put( exportManifestMondrian.getFile(), bundleBuilder ); + cachedImports.put( exportManifestMondrian.getFile( ), bundleBuilder ); - String annotationsFile = exportManifestMondrian.getAnnotationsFile(); + String annotationsFile = exportManifestMondrian.getAnnotationsFile( ); if ( annotationsFile != null ) { RepositoryFileImportBundle.Builder annotationsBundle = - new RepositoryFileImportBundle.Builder().path( MondrianCatalogRepositoryHelper.ETC_MONDRIAN_JCR_FOLDER - + RepositoryFile.SEPARATOR + catName ).name( "annotations.xml" ).charSet( UTF_8 ).overwriteFile( - isOverwriteFile() ).mime( "text/xml" ).hidden( RepositoryFile.HIDDEN_BY_DEFAULT ).schedulable( - RepositoryFile.SCHEDULABLE_BY_DEFAULT ).withParam( DOMAIN_ID, catName ); + new RepositoryFileImportBundle.Builder( ).path( MondrianCatalogRepositoryHelper.ETC_MONDRIAN_JCR_FOLDER + + RepositoryFile.SEPARATOR + catName ).name( "annotations.xml" ).charSet( UTF_8 ).overwriteFile( + isOverwriteFile( ) ).mime( "text/xml" ).hidden( RepositoryFile.HIDDEN_BY_DEFAULT ).schedulable( + RepositoryFile.SCHEDULABLE_BY_DEFAULT ).withParam( DOMAIN_ID, catName ); cachedImports.put( annotationsFile, annotationsBundle ); } + successfulMondrianSchemaImport++; + getLogger( ).debug( " Successfully Imported [ " + exportManifestMondrian.getCatalogName( ) + " ] mondrian schema" ); } + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_SUCCESSFUL_MONDRIAN_DATASOURCE_IMPORT_COUNT", successfulMondrianSchemaImport, mondrianList.size( ) ) ); } + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_END_IMPORT_MONDRIAN_DATASOURCE" ) ); } /** @@ -630,16 +757,16 @@ protected void importMondrian( List mondrianList ) { boolean fileIsScheduleInputSource( ExportManifest manifest, String sourcePath ) { boolean isSchedulable = false; if ( sourcePath != null && manifest != null - && manifest.getScheduleList() != null ) { + && manifest.getScheduleList( ) != null ) { String path = sourcePath.startsWith( "/" ) ? 
sourcePath : "/" + sourcePath; - isSchedulable = manifest.getScheduleList().stream() - .anyMatch( schedule -> path.equals( schedule.getInputFile() ) ); + isSchedulable = manifest.getScheduleList( ).stream( ) + .anyMatch( schedule -> path.equals( schedule.getInputFile( ) ) ); } if ( isSchedulable ) { - getLogger().warn( Messages.getInstance() - .getString( "ERROR.ScheduledWithoutPermission", sourcePath ) ); - getLogger().warn( Messages.getInstance().getString( "SCHEDULE.AssigningPermission", sourcePath ) ); + getLogger( ).warn( Messages.getInstance( ) + .getString( "ERROR.ScheduledWithoutPermission", sourcePath ) ); + getLogger( ).warn( Messages.getInstance( ).getString( "SCHEDULE.AssigningPermission", sourcePath ) ); } return isSchedulable; @@ -647,12 +774,12 @@ boolean fileIsScheduleInputSource( ExportManifest manifest, String sourcePath ) @VisibleForTesting protected boolean isFileHidden( RepositoryFile file, ManifestFile manifestFile, String sourcePath ) { - Boolean result = manifestFile.isFileHidden(); + Boolean result = manifestFile.isFileHidden( ); if ( result != null ) { return result; // file absent or must receive a new setting and the setting is exist } if ( file != null ) { - return file.isHidden(); // old setting + return file.isHidden( ); // old setting } if ( solutionHelper.isInHiddenList( sourcePath ) ) { return true; @@ -662,12 +789,12 @@ protected boolean isFileHidden( RepositoryFile file, ManifestFile manifestFile, @VisibleForTesting protected boolean isSchedulable( RepositoryFile file, ManifestFile manifestFile ) { - Boolean result = manifestFile.isFileSchedulable(); + Boolean result = manifestFile.isFileSchedulable( ); if ( result != null ) { return result; // file absent or must receive a new setting and the setting is exist } if ( file != null ) { - return file.isSchedulable(); // old setting + return file.isSchedulable( ); // old setting } return RepositoryFile.SCHEDULABLE_BY_DEFAULT; // default setting of type } @@ -680,102 +807,106 @@ private String repositoryPathConcat( String path, String... 
subPaths ) { } private boolean processZip( InputStream inputStream ) { - this.files = new ArrayList<>(); + this.files = new ArrayList<>( ); + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_START_IMPORT_REPOSITORY_OBJECT" ) ); try ( ZipInputStream zipInputStream = new ZipInputStream( inputStream ) ) { - FileService fileService = new FileService(); - ZipEntry entry = zipInputStream.getNextEntry(); + FileService fileService = new FileService( ); + ZipEntry entry = zipInputStream.getNextEntry( ); while ( entry != null ) { - final String entryName = RepositoryFilenameUtils.separatorsToRepository( entry.getName() ); - getLogger().trace( Messages.getInstance().getString( "ZIPFILE.ProcessingEntry", entryName ) ); + final String entryName = RepositoryFilenameUtils.separatorsToRepository( entry.getName( ) ); + getLogger( ).debug( Messages.getInstance( ).getString( "ZIPFILE.ProcessingEntry", entryName ) ); final String decodedEntryName = ExportFileNameEncoder.decodeZipFileName( entryName ); File tempFile = null; - boolean isDir = entry.isDirectory(); + boolean isDir = entry.isDirectory( ); if ( !isDir ) { if ( !solutionHelper.isInApprovedExtensionList( entryName ) ) { - zipInputStream.closeEntry(); - entry = zipInputStream.getNextEntry(); + zipInputStream.closeEntry( ); + entry = zipInputStream.getNextEntry( ); continue; } if ( !fileService.isValidFileName( decodedEntryName ) ) { + getLogger( ).error( Messages.getInstance( ).getString( "DefaultImportHandler.ERROR_0011_INVALID_FILE_NAME", decodedEntryName ) ); throw new PlatformImportException( - Messages.getInstance().getString( "DefaultImportHandler.ERROR_0011_INVALID_FILE_NAME", - entryName ), PlatformImportException.PUBLISH_PROHIBITED_SYMBOLS_ERROR ); + Messages.getInstance( ).getString( "DefaultImportHandler.ERROR_0011_INVALID_FILE_NAME", + entryName ), PlatformImportException.PUBLISH_PROHIBITED_SYMBOLS_ERROR ); } tempFile = File.createTempFile( "zip", null ); - tempFile.deleteOnExit(); + tempFile.deleteOnExit( ); try ( FileOutputStream fos = new FileOutputStream( tempFile ) ) { IOUtils.copy( zipInputStream, fos ); } } else { if ( !fileService.isValidFileName( decodedEntryName ) ) { + getLogger( ).error( Messages.getInstance( ).getString( "DefaultImportHandler.ERROR_0011_INVALID_FILE_NAME", decodedEntryName ) ); throw new PlatformImportException( - Messages.getInstance().getString( "DefaultImportHandler.ERROR_0012_INVALID_FOLDER_NAME", - entryName ), PlatformImportException.PUBLISH_PROHIBITED_SYMBOLS_ERROR ); + Messages.getInstance( ).getString( "DefaultImportHandler.ERROR_0012_INVALID_FOLDER_NAME", + entryName ), PlatformImportException.PUBLISH_PROHIBITED_SYMBOLS_ERROR ); } } File file = new File( entryName ); RepositoryFile repoFile = - new RepositoryFile.Builder( file.getName() ).folder( isDir ).hidden( false ).build(); + new RepositoryFile.Builder( file.getName( ) ).folder( isDir ).hidden( false ).build( ); String parentDir = - file.getParent() == null ? RepositoryFile.SEPARATOR : file.getParent() - + RepositoryFile.SEPARATOR; + file.getParent( ) == null ? 
RepositoryFile.SEPARATOR : file.getParent( ) + + RepositoryFile.SEPARATOR; IRepositoryFileBundle repoFileBundle = - new RepositoryFileBundle( repoFile, null, parentDir, tempFile, UTF_8, null ); + new RepositoryFileBundle( repoFile, null, parentDir, tempFile, UTF_8, null ); - if ( EXPORT_MANIFEST_XML_FILE.equals( file.getName() ) ) { + if ( EXPORT_MANIFEST_XML_FILE.equals( file.getName( ) ) ) { initializeAclManifest( repoFileBundle ); } else { + getLogger( ).debug( "Adding file " + repoFile.getName( ) + " to list for later processing " ); files.add( repoFileBundle ); } - zipInputStream.closeEntry(); - entry = zipInputStream.getNextEntry(); + zipInputStream.closeEntry( ); + entry = zipInputStream.getNextEntry( ); } } catch ( IOException | PlatformImportException e ) { - getLogger().error( Messages.getInstance() - .getErrorString( "ZIPFILE.ExceptionOccurred", e.getLocalizedMessage() ), e ); + getLogger( ).error( Messages.getInstance( ) + .getErrorString( "ZIPFILE.ExceptionOccurred", e.getLocalizedMessage( ) ), e ); return false; } - + getLogger( ).info( Messages.getInstance( ).getString( "SolutionImportHandler.INFO_END_IMPORT_REPOSITORY_OBJECT" ) ); return true; } private void initializeAclManifest( IRepositoryFileBundle file ) { try { - byte[] bytes = IOUtils.toByteArray( file.getInputStream() ); + byte[] bytes = IOUtils.toByteArray( file.getInputStream( ) ); ByteArrayInputStream in = new ByteArrayInputStream( bytes ); - getImportSession().setManifest( ExportManifest.fromXml( in ) ); + getImportSession( ).setManifest( ExportManifest.fromXml( in ) ); } catch ( Exception e ) { - getLogger().trace( e ); + getLogger( ).trace( e ); } } @Override - public List getMimeTypes() { + public List getMimeTypes( ) { return mimeTypes; } // handlers that extend this class may override this method and perform operations // over the bundle prior to entering its designated importer.importFile() public IPlatformImportBundle build( RepositoryFileImportBundle.Builder builder ) { - return builder != null ? builder.build() : null; + return builder != null ? builder.build( ) : null; } // handlers that extend this class may override this method and perform operations // over the job prior to its creation at scheduler.createJob() public Response createSchedulerJob( ISchedulerResource scheduler, IJobScheduleRequest jobScheduleRequest ) - throws IOException { + throws IOException { Response rs = scheduler != null ? 
(Response) scheduler.createJob( jobScheduleRequest ) : null; - if ( jobScheduleRequest.getJobState() != JobState.NORMAL ) { - IJobRequest jobRequest = PentahoSystem.get( IScheduler.class, "IScheduler2", null ).createJobRequest(); - jobRequest.setJobId( rs.getEntity().toString() ); + if ( jobScheduleRequest.getJobState( ) != JobState.NORMAL ) { + IJobRequest jobRequest = PentahoSystem.get( IScheduler.class, "IScheduler2", null ).createJobRequest( ); + jobRequest.setJobId( rs.getEntity( ).toString( ) ); scheduler.pauseJob( jobRequest ); } return rs; } - public boolean isOverwriteFile() { + public boolean isOverwriteFile( ) { return overwriteFile; } diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/BaseExportProcessor.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/BaseExportProcessor.java index bfde564ba87..c1169dd53ba 100644 --- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/BaseExportProcessor.java +++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/BaseExportProcessor.java @@ -35,6 +35,8 @@ import org.pentaho.platform.api.repository2.unified.IUnifiedRepository; import org.pentaho.platform.api.repository2.unified.RepositoryFile; +import org.pentaho.platform.api.importexport.ExportException; +import org.pentaho.platform.api.util.IRepositoryExportLogger; import java.io.File; import java.io.IOException; @@ -55,6 +57,9 @@ public abstract class BaseExportProcessor { IUnifiedRepository unifiedRepository; + IRepositoryExportLogger repositoryExportLogger; + + protected static final String EXPORT_MANIFEST_FILENAME = "exportManifest.xml"; protected static final String EXPORT_INFO_DATE_FORMAT = "dd-MM-yyyy"; protected static final String EXPORT_INFO_TIME_FORMAT = "hh:mm:ss z"; @@ -106,6 +111,14 @@ public void setUnifiedRepository( IUnifiedRepository unifiedRepository ) { this.unifiedRepository = unifiedRepository; } + public IRepositoryExportLogger getRepositoryExportLogger() { + return repositoryExportLogger; + } + + public void setRepositoryExportLogger( IRepositoryExportLogger repositoryExportLogger ) { + this.repositoryExportLogger = repositoryExportLogger; + } + /** * Performs the export process, returns a zip File object * diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/CommandLineProcessor.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/CommandLineProcessor.java index 33b5e38d3e5..00b8a5dbb1c 100644 --- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/CommandLineProcessor.java +++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/CommandLineProcessor.java @@ -25,7 +25,9 @@ import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter; import com.sun.jersey.api.json.JSONConfiguration; import com.sun.jersey.core.header.FormDataContentDisposition; +import com.sun.jersey.core.util.MultivaluedMapImpl; import com.sun.jersey.multipart.FormDataMultiPart; +import com.sun.mail.iap.Response; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Options; @@ -43,6 +45,7 @@ import org.pentaho.platform.util.RepositoryPathEncoder; import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.MultivaluedMap; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; @@ -91,6 +94,9 @@ public class CommandLineProcessor { private static final String 
MULTIPART_FIELD_OVERWRITE_FILE = "overwriteFile"; private static final String MULTIPART_FIELD_APPLY_ACL_SETTINGS = "applyAclSettings"; private static final String MULTIPART_FIELD_OVERWRITE_ACL_SETTINGS = "overwriteAclSettings"; + private static final String MULTIVALUE_FIELD_LOG_FILE = "logFile"; + private static final String MULTIVALUE_FIELD_LOG_LEVEL = "logLevel"; + private static final String MULTIVALUE_FIELD_OUTPUT_FILE_NAME_LEVEL = "outputFile"; private static final String METADATA_DATASOURCE_EXT = "xmi"; @@ -107,7 +113,7 @@ public class CommandLineProcessor { private final CommandLine commandLine; private final RequestType requestType; - + private static final String DEFAULT_LOG_LEVEL = "INFO"; private static final String INFO_OPTION_HELP_KEY = "h"; private static final String INFO_OPTION_HELP_NAME = "help"; private static final String INFO_OPTION_IMPORT_KEY = "i"; @@ -130,6 +136,8 @@ public class CommandLineProcessor { private static final String INFO_OPTION_CHARSET_NAME = "charset"; private static final String INFO_OPTION_LOGFILE_KEY = "l"; private static final String INFO_OPTION_LOGFILE_NAME = "logfile"; + private static final String INFO_OPTION_LOGLEVEL_NAME = "logLevel"; + private static final String INFO_OPTION_LOGLEVEL_KEY = "lL"; private static final String INFO_OPTION_PATH_KEY = "f"; private static final String INFO_OPTION_PATH_NAME = "path"; private static final String INFO_OPTION_OVERWRITE_KEY = "o"; @@ -212,6 +220,9 @@ public enum ResourceType { options.addOption( INFO_OPTION_LOGFILE_KEY, INFO_OPTION_LOGFILE_NAME, true, Messages.getInstance() .getString( "CommandLineProcessor.INFO_OPTION_LOGFILE_DESCRIPTION" ) ); + options.addOption( INFO_OPTION_LOGLEVEL_KEY, INFO_OPTION_LOGLEVEL_NAME, true, Messages.getInstance() + .getString( "CommandLineProcessor.INFO_OPTION_LOGLEVEL_DESCRIPTION" ) ); + options.addOption( INFO_OPTION_PATH_KEY, INFO_OPTION_PATH_NAME, true, Messages.getInstance() .getString( "CommandLineProcessor.INFO_OPTION_PATH_DESCRIPTION" ) ); @@ -692,31 +703,46 @@ private void performImport() throws ParseException, IOException { writeToFile( e.getMessage(), logFile ); } finally { // close input stream and cleanup the jersey resources - client.destroy(); - part.cleanup(); - in.close(); + if ( client != null ) { + client.destroy(); + } + if ( part != null ) { + part.cleanup(); + } + if ( in != null ) { + in.close(); + } } } } private void logResponseMessage( String logFile, String path, ClientResponse response, RequestType requestType ) { + boolean badLogFilePath = false; if ( response.getStatus() == ClientResponse.Status.OK.getStatusCode() ) { errorMessage = Messages.getInstance().getString( "CommandLineProcessor.INFO_" + requestType.toString() + "_SUCCESSFUL" ); } else if ( response.getStatus() == ClientResponse.Status.FORBIDDEN.getStatusCode() ) { errorMessage = Messages.getInstance().getErrorString( "CommandLineProcessor.ERROR_0007_FORBIDDEN", path ); } else if ( response.getStatus() == ClientResponse.Status.NOT_FOUND.getStatusCode() ) { errorMessage = - Messages.getInstance().getErrorString( "CommandLineProcessor.ERROR_0004_UNKNOWN_SOURCE", path ); + Messages.getInstance().getErrorString( "CommandLineProcessor.ERROR_0004_UNKNOWN_SOURCE", path ); + } else if ( response.getStatus() == ClientResponse.Status.BAD_REQUEST.getStatusCode() ) { + errorMessage = + Messages.getInstance().getErrorString( "CommandLineProcessor.ERROR_0009_INVALID_LOG_FILE_PATH", logFile ); + badLogFilePath = true; } StringBuilder message = new StringBuilder( errorMessage ); - message.append( 
System.getProperty( "line.separator" ) ); - if ( response.hasEntity() ) { - message.append( Messages.getInstance().getString( "CommandLineProcessor.INFO_REST_RESPONSE_RECEIVED", - response.getEntity( String.class ) ) ); - } - System.out.println( message ); - if ( StringUtils.isNotBlank( logFile ) ) { - writeToFile( message.toString(), logFile ); + if ( !badLogFilePath ) { + message.append( System.getProperty( "line.separator" ) ); + if ( response.hasEntity() ) { + message.append( Messages.getInstance().getString( "CommandLineProcessor.INFO_REST_RESPONSE_RECEIVED", + response.getEntity( String.class ) ) ); + } + System.out.println( message ); + if ( StringUtils.isNotBlank( logFile ) ) { + writeToFile( message.toString(), logFile ); + } + } else { + System.out.println( message ); } } @@ -730,7 +756,7 @@ private void logResponseMessage( String logFile, String path, ClientResponse res private void performBackup() throws ParseException, KettleException, URISyntaxException { String contextURL = getOptionValue( INFO_OPTION_URL_NAME, true, false ); String logFile = getOptionValue( INFO_OPTION_LOGFILE_NAME, false, true ); - + String logLevel = getOptionValue( INFO_OPTION_LOGLEVEL_NAME, false, true ); // Output file is validated before executing String outputFile = getOptionValue( INFO_OPTION_FILEPATH_NAME, true, false ); @@ -748,23 +774,28 @@ private void performBackup() throws ParseException, KettleException, URISyntaxEx // Build the complete URL to use String backupURL = buildURL( contextURL, API_REPO_FILES_BACKUP ); - WebResource resource = client.resource( backupURL ); // Response response - Builder builder = resource.type( MediaType.APPLICATION_FORM_URLENCODED ).accept( MediaType.TEXT_HTML_TYPE ); - ClientResponse response = builder.get( ClientResponse.class ); + MultivaluedMap postBody = new MultivaluedMapImpl(); + postBody.add( MULTIVALUE_FIELD_LOG_FILE, logFile ); + postBody.add( MULTIVALUE_FIELD_LOG_LEVEL, logLevel != null && logLevel.length() > 0 ? 
logLevel : DEFAULT_LOG_LEVEL ); + postBody.add( MULTIVALUE_FIELD_OUTPUT_FILE_NAME_LEVEL, outputFile ); + + ClientResponse response = resource.type( MediaType.APPLICATION_FORM_URLENCODED_TYPE ).post( ClientResponse.class, postBody ); if ( response != null && response.getStatus() == 200 ) { writeEntityToFile( response, outputFile ); - String message = Messages.getInstance().getString( "CommandLineProcessor.INFO_EXPORT_COMPLETED" ).concat( "\n" ); - message += Messages.getInstance().getString( "CommandLineProcessor.INFO_RESPONSE_STATUS", response.getStatus() ); + String message = Messages.getInstance( ).getString( "CommandLineProcessor.INFO_EXPORT_COMPLETED" ).concat( "\n" ); + message += Messages.getInstance( ).getString( "CommandLineProcessor.INFO_RESPONSE_STATUS", response.getStatus( ) ); message += "\n"; - message += Messages.getInstance().getString( "CommandLineProcessor.INFO_EXPORT_WRITTEN_TO", outputFile ); + message += Messages.getInstance( ).getString( "CommandLineProcessor.INFO_EXPORT_WRITTEN_TO", outputFile ); if ( StringUtils.isNotBlank( logFile ) ) { System.out.println( message ); writeToFile( message, logFile ); } + } else if ( response != null && response.getStatus() == 400 ) { + System.out.println( Messages.getInstance().getErrorString( "CommandLineProcessor.ERROR_0009_INVALID_LOG_FILE_PATH", logFile ) ); } else { System.out.println( Messages.getInstance().getErrorString( "CommandLineProcessor.ERROR_0002_INVALID_RESPONSE" ) ); } @@ -799,6 +830,7 @@ private void performRestore() throws ParseException { String contextURL = getOptionValue( INFO_OPTION_URL_NAME, true, false ); String filePath = getOptionValue( INFO_OPTION_FILEPATH_NAME, true, false ); String logFile = getOptionValue( INFO_OPTION_LOGFILE_NAME, false, true ); + String logLevel = getOptionValue( INFO_OPTION_LOGLEVEL_NAME, false, true ); String importURL = contextURL + API_REPO_FILES_SYSTEM_RESTORE; File fileIS = new File( filePath ); @@ -821,10 +853,14 @@ private void performRestore() throws ParseException { String overwriteAclSettings = getOptionValue( INFO_OPTION_OVERWRITE_ACL_SETTINGS_NAME, false, true ); part.field( MULTIPART_FIELD_OVERWRITE_ACL_SETTINGS, "true".equals( overwriteAclSettings ) ? "true" : "false", MediaType.MULTIPART_FORM_DATA_TYPE ); - + part.field( MULTIVALUE_FIELD_LOG_FILE, logFile, MediaType.MULTIPART_FORM_DATA_TYPE ); + part.field( MULTIVALUE_FIELD_LOG_LEVEL, logLevel != null && logLevel.length() > 0 ? 
logLevel : DEFAULT_LOG_LEVEL, MediaType.MULTIPART_FORM_DATA_TYPE ); // Response response ClientResponse response = resource.type( MediaType.MULTIPART_FORM_DATA ).post( ClientResponse.class, part ); - if ( response != null ) { + if ( response != null && response.getStatus() == Response.BAD ) { + errorMessage = Messages.getInstance().getErrorString( "CommandLineProcessor.ERROR_0009_INVALID_LOG_FILE_PATH", logFile ); + System.out.println( errorMessage ); + } else if ( response != null ) { logResponseMessage( logFile, filePath, response, RequestType.RESTORE ); response.close(); } @@ -981,7 +1017,7 @@ private void writeToFile( String str, String pathName ) { * @see #writeToFile(String, String) */ private static void writeToFile( InputStream inputStream, File file ) throws IOException { - try ( FileOutputStream fos = new FileOutputStream( file ) ) { + try ( FileOutputStream fos = new FileOutputStream( file, true ) ) { IOUtils.copy( inputStream, fos ); } } diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/DefaultExportHandler.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/DefaultExportHandler.java index f7c67e6fb2f..4d928e557ab 100644 --- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/DefaultExportHandler.java +++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/DefaultExportHandler.java @@ -45,6 +45,7 @@ import org.pentaho.platform.api.repository2.unified.IRepositoryContentConverterHandler; import org.pentaho.platform.api.repository2.unified.IUnifiedRepository; import org.pentaho.platform.api.repository2.unified.RepositoryFile; +import org.pentaho.platform.api.importexport.ExportException; import org.pentaho.platform.engine.core.system.PentahoSystem; import org.pentaho.platform.repository.RepositoryFilenameUtils; diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ExportHandler.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ExportHandler.java index e18e9ea4a76..2ce13f3cc42 100644 --- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ExportHandler.java +++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ExportHandler.java @@ -34,6 +34,7 @@ */ import org.pentaho.platform.api.repository2.unified.RepositoryFile; +import org.pentaho.platform.api.importexport.ExportException; import java.io.IOException; import java.io.InputStream; diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/IRepositoryImportLogger.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/IRepositoryImportLogger.java index edb9f15e37c..1734ba3e784 100644 --- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/IRepositoryImportLogger.java +++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/IRepositoryImportLogger.java @@ -15,6 +15,7 @@ import org.apache.commons.logging.Log; import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.core.StringLayout; import java.io.OutputStream; @@ -49,6 +50,22 @@ */ public interface IRepositoryImportLogger extends Log { + + /** + * Initiates an import job. Each call creates a new log associated with the current thread. + * + * @param outputStream + * Will receive the html content of the log + * @param importRootPath + * The root import dir receiving the import + * @param logLevel + * The log level to be logged. 
+ * @param layout + * The layout to be used. + * + */ + void startJob( OutputStream outputStream, String importRootPath, Level logLevel, StringLayout layout ); + /** * Initiates an import job. Each call creates a new log associated with the current thread. * diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryExportLog.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryExportLog.java new file mode 100644 index 00000000000..83717993772 --- /dev/null +++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryExportLog.java @@ -0,0 +1,88 @@ +/*! ****************************************************************************** + * + * Pentaho + * + * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com + * + * Use of this software is governed by the Business Source License included + * in the LICENSE.TXT file. + * + * Change Date: 2029-07-20 + ******************************************************************************/ +package org.pentaho.platform.plugin.services.importexport; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.StringLayout; +import org.pentaho.platform.api.util.LogUtil; + +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.nio.charset.Charset; + + +public class Log4JRepositoryExportLog { + + private Logger logger; + private OutputStream outputStream; + private String logName; + private Level logLevel; + private Appender appender; + private StringLayout layout; + + /** + * Constructs an object that keeps track of additional fields for Log4j logging and writes/formats an html file to the + * output stream provided. + * + * @param outputStream + */ + Log4JRepositoryExportLog( OutputStream outputStream, Level logLevel, StringLayout layout ) { + this.outputStream = outputStream; + this.logLevel = logLevel; + this.layout = layout; + init(); + } + + /** + * Constructs an object that keeps track of additional fields for Log4j logging and writes/formats an html file to the + * output stream provided. + * + * @param outputStream + */ + Log4JRepositoryExportLog( OutputStream outputStream, Level logLevel ) { + this.outputStream = outputStream; + this.logLevel = logLevel; + RepositoryImportHTMLLayout htmlLayout = new RepositoryImportHTMLLayout( logLevel ); + htmlLayout.setTitle( "Repository Export Log" ); + this.layout = htmlLayout; + init(); + } + + private void init() { + logName = "RepositoryExportLog." + getThreadName(); + logger = LogManager.getLogger( logName ); + LogUtil.setLevel( logger, logLevel ); + appender = + LogUtil.makeAppender( logName, new OutputStreamWriter( outputStream, Charset.forName( "utf-8" ) ), this.layout ); + LogUtil.addAppender( appender, logger, logLevel ); + } + public Logger getLogger() { + return logger; + } + + + protected void endJob() { + try { + outputStream.write( appender.getLayout().getFooter() ); + } catch ( Exception e ) { + System.out.println( e ); + // Don't try logging a log error.
+ } + LogUtil.removeAppender( appender, logger ); + } + private String getThreadName() { + return Thread.currentThread().getName(); + } +} diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryExportLogger.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryExportLogger.java new file mode 100644 index 00000000000..cf6725807da --- /dev/null +++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryExportLogger.java @@ -0,0 +1,183 @@ +/*! ****************************************************************************** + * + * Pentaho + * + * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com + * + * Use of this software is governed by the Business Source License included + * in the LICENSE.TXT file. + * + * Change Date: 2029-07-20 + ******************************************************************************/ + + +package org.pentaho.platform.plugin.services.importexport; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.StringLayout; +import org.pentaho.platform.api.util.IRepositoryExportLogger; + +import java.io.OutputStream; + +/** + * {@inheritDoc} + * + * @author TKafalas + */ +public class Log4JRepositoryExportLogger implements IRepositoryExportLogger { + + private ThreadLocal repositoryExportLog = new ThreadLocal( ); + + public Log4JRepositoryExportLogger( ) { + } + + public void remove() { + repositoryExportLog.remove(); + } + public void startJob( OutputStream outputStream, Level logLevel, StringLayout layout ) { + repositoryExportLog.set( new Log4JRepositoryExportLog( outputStream, logLevel, layout ) ); + } + + public void startJob( OutputStream outputStream, Level logLevel ) { + repositoryExportLog.set( new Log4JRepositoryExportLog( outputStream, logLevel ) ); + } + + public void endJob( ) { + getLog4JRepositoryExportLog( ).endJob( ); + } + + public void info( String s ) { + getLogger( ).info( s ); + } + + public void error( String s ) { + getLogger( ).error( s ); + } + + public void debug( String s ) { + getLogger( ).debug( s ); + } + + public void warn( String s ) { + getLogger( ).warn( s ); + } + + @Override + public void error( Exception e ) { + getLogger( ).error( e.getMessage( ), e ); + + } + + private Log4JRepositoryExportLog getLog4JRepositoryExportLog( ) { + Log4JRepositoryExportLog currentLog = repositoryExportLog.get( ); + if ( currentLog == null ) { + throw new IllegalStateException( "No job started for current Thread" ); + } + return currentLog; + } + + private Logger getLogger( ) { + return getLog4JRepositoryExportLog( ).getLogger( ); + } + + public boolean hasLogger( ) { + return ( repositoryExportLog.get( ) == null ) ?
false : true; + } + + @Override + public void debug( Object arg0 ) { + getLogger( ).debug( arg0 ); + } + + @Override + public void debug( Object arg0, Throwable arg1 ) { + getLogger( ).debug( arg0, arg1 ); + } + + @Override + public void error( Object arg0 ) { + getLogger( ).error( arg0 ); + } + + @Override + public void error( Object arg0, Throwable arg1 ) { + getLogger( ).error( arg0, arg1 ); + + } + + @Override + public void fatal( Object arg0 ) { + getLogger( ).fatal( arg0 ); + + } + + @Override + public void fatal( Object arg0, Throwable arg1 ) { + getLogger( ).fatal( arg0, arg1 ); + + } + + @Override + public void info( Object arg0 ) { + getLogger( ).info( arg0 ); + + } + + @Override + public void info( Object arg0, Throwable arg1 ) { + getLogger( ).info( arg0, arg1 ); + + } + + @Override + public boolean isDebugEnabled( ) { + return getLogger( ).isDebugEnabled( ); + } + + @Override + public boolean isErrorEnabled( ) { + return Level.ERROR.isMoreSpecificThan( getLogger( ).getLevel( ) ); + } + + @Override + public boolean isFatalEnabled( ) { + return Level.FATAL.isMoreSpecificThan( getLogger( ).getLevel( ) ); + } + + @Override + public boolean isInfoEnabled( ) { + return getLogger( ).isInfoEnabled( ); + } + + @Override + public boolean isTraceEnabled( ) { + return getLogger( ).isTraceEnabled( ); + } + + @Override + public boolean isWarnEnabled( ) { + return Level.WARN.isMoreSpecificThan( getLogger( ).getLevel( ) ); + } + + @Override + public void trace( Object arg0 ) { + getLogger( ).trace( arg0 ); + } + + @Override + public void trace( Object arg0, Throwable arg1 ) { + getLogger( ).trace( arg0, arg1 ); + } + + @Override + public void warn( Object arg0 ) { + getLogger( ).warn( arg0 ); + } + + @Override + public void warn( Object arg0, Throwable arg1 ) { + getLogger( ).warn( arg0, arg1 ); + } + +} diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryImportLog.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryImportLog.java index f0c77a9840a..83956dd9759 100644 --- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryImportLog.java +++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryImportLog.java @@ -15,19 +15,13 @@ import java.io.OutputStream; import java.io.OutputStreamWriter; -import java.io.Writer; import java.nio.charset.Charset; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.Appender; -import org.apache.logging.log4j.core.Layout; -import org.apache.logging.log4j.core.LoggerContext; -import org.apache.logging.log4j.core.appender.WriterAppender; -import org.apache.logging.log4j.core.config.Configuration; -import org.apache.logging.log4j.core.config.Configurator; -import org.apache.logging.log4j.core.config.LoggerConfig; +import org.apache.logging.log4j.core.StringLayout; import org.pentaho.platform.api.util.LogUtil; import org.slf4j.MDC; @@ -41,45 +35,60 @@ public class Log4JRepositoryImportLog { private String importRootPath; private Level logLevel; private Appender appender; + private StringLayout layout; /** * Constructs an object that keeps track of additional fields for Log4j logging and writes/formats an html file to the * output stream provided. 
- * + * + * @param outputStream + */ + Log4JRepositoryImportLog( OutputStream outputStream, String importRootPath, Level logLevel, StringLayout layout ) { + this.outputStream = outputStream; + this.importRootPath = importRootPath; + this.logLevel = logLevel; + this.layout = layout; + init( ); + } + + /** + * Constructs an object that keeps track of additional fields for Log4j logging and writes/formats an html file to the + * output stream provided. + * * @param outputStream */ Log4JRepositoryImportLog( OutputStream outputStream, String importRootPath, Level logLevel ) { this.outputStream = outputStream; this.importRootPath = importRootPath; this.logLevel = logLevel; - init(); + RepositoryImportHTMLLayout htmlLayout = new RepositoryImportHTMLLayout( logLevel ); + htmlLayout.setTitle( "Repository Import Log" ); + this.layout = htmlLayout; + init( ); } - private void init() { - logName = "RepositoryImportLog." + getThreadName(); + private void init( ) { + logName = "RepositoryImportLog." + getThreadName( ); logger = LogManager.getLogger( logName ); LogUtil.setLevel( logger, logLevel ); - RepositoryImportHTMLLayout htmlLayout = new RepositoryImportHTMLLayout( logLevel ); - htmlLayout.setTitle( "Repository Import Log" ); appender = - LogUtil.makeAppender( logName, new OutputStreamWriter( outputStream, Charset.forName( "utf-8" ) ), htmlLayout ); + LogUtil.makeAppender( logName, new OutputStreamWriter( outputStream, Charset.forName( "utf-8" ) ), this.layout ); LogUtil.addAppender( appender, logger, logLevel ); } - public Logger getLogger() { + public Logger getLogger( ) { return logger; } /** * @return the currentFilePath */ - public String getCurrentFilePath() { + public String getCurrentFilePath( ) { return currentFilePath; } /** - * @param currentFilePath - * the currentFilePath to set + * @param currentFilePath the currentFilePath to set */ public void setCurrentFilePath( String currentFilePath ) { this.currentFilePath = currentFilePath; @@ -89,13 +98,13 @@ public void setCurrentFilePath( String currentFilePath ) { /** * @return the importRootPath */ - public String getImportRootPath() { + public String getImportRootPath( ) { return importRootPath; } - protected void endJob() { + protected void endJob( ) { try { - outputStream.write( appender.getLayout().getFooter() ); + outputStream.write( appender.getLayout( ).getFooter( ) ); } catch ( Exception e ) { System.out.println( e ); // Don't try logging a log error. 
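Both the import and export logs follow the same per-thread contract: startJob() binds a fresh log object to the calling thread, every subsequent info()/debug()/error() routes through it, and endJob() writes the layout footer and detaches the appender. A minimal lifecycle sketch, assuming only the classes introduced in this patch (RepositoryTextLayout is added further below; host wiring and error handling are elided):

import org.apache.logging.log4j.Level;
import org.pentaho.platform.api.util.IRepositoryExportLogger;
import org.pentaho.platform.plugin.services.importexport.Log4JRepositoryExportLogger;
import org.pentaho.platform.plugin.services.importexport.RepositoryTextLayout;

import java.io.ByteArrayOutputStream;

public class ExportLogLifecycleSketch {
  public static void main( String[] args ) {
    IRepositoryExportLogger logger = new Log4JRepositoryExportLogger();
    ByteArrayOutputStream sink = new ByteArrayOutputStream();

    // Binds a Log4JRepositoryExportLog to the current thread only; another
    // thread calling startJob() gets its own independent log.
    logger.startJob( sink, Level.DEBUG, new RepositoryTextLayout( Level.DEBUG ) );
    try {
      logger.info( "Start: Export process" );
      logger.debug( "Adding repository object [ sample.prpt ] to the manifest" );
    } finally {
      // Flushes the layout footer and removes the per-thread appender. Logging
      // on a thread with no started job throws IllegalStateException.
      logger.endJob();
    }
    System.out.println( sink.toString() );
  }
}

Note that endJob() does not clear the ThreadLocal itself; the concrete Log4JRepositoryExportLogger exposes remove() for that, which matters on pooled worker threads.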
@@ -103,7 +112,7 @@ protected void endJob() {
     LogUtil.removeAppender( appender, logger );
   }
 
-  private String getThreadName() {
-    return Thread.currentThread().getName();
+  private String getThreadName( ) {
+    return Thread.currentThread( ).getName( );
   }
 }
diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryImportLogger.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryImportLogger.java
index bd64224b180..da91e158dbd 100644
--- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryImportLogger.java
+++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/Log4JRepositoryImportLogger.java
@@ -15,170 +15,175 @@
 
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.StringLayout;
 
 import java.io.OutputStream;
 
 /**
  * {@inheritDoc}
- *
+ *
  * @author TKafalas
- *
 */
 public class Log4JRepositoryImportLogger implements IRepositoryImportLogger {
-  private ThreadLocal<Log4JRepositoryImportLog> repositoryImportLog = new ThreadLocal<>();
+  private ThreadLocal<Log4JRepositoryImportLog> repositoryImportLog = new ThreadLocal<>( );
+
+  public Log4JRepositoryImportLogger( ) {
+  }
 
-  public Log4JRepositoryImportLogger() {
+  public void startJob( OutputStream outputStream, String importRootPath, Level logLevel, StringLayout layout ) {
+    repositoryImportLog.set( new Log4JRepositoryImportLog( outputStream, importRootPath, logLevel, layout ) );
+    getLog4JRepositoryImportLog( ).setCurrentFilePath( getLog4JRepositoryImportLog( ).getImportRootPath( ) );
+    getLogger().info( "Start Import Job" );
   }
 
   public void startJob( OutputStream outputStream, String importRootPath, Level logLevel ) {
     repositoryImportLog.set( new Log4JRepositoryImportLog( outputStream, importRootPath, logLevel ) );
-    getLog4JRepositoryImportLog().setCurrentFilePath( getLog4JRepositoryImportLog().getImportRootPath() );
+    getLog4JRepositoryImportLog( ).setCurrentFilePath( getLog4JRepositoryImportLog( ).getImportRootPath( ) );
     getLogger().info( "Start Import Job" );
   }
 
-  public void endJob() {
-    getLog4JRepositoryImportLog().setCurrentFilePath( getLog4JRepositoryImportLog().getImportRootPath() );
+  public void endJob( ) {
+    getLog4JRepositoryImportLog( ).setCurrentFilePath( getLog4JRepositoryImportLog( ).getImportRootPath( ) );
+    getLog4JRepositoryImportLog( ).endJob( );
     getLogger().info( "End Import Job" );
-    getLog4JRepositoryImportLog().endJob();
   }
 
   public void setCurrentFilePath( String currentFilePath ) {
-    getLog4JRepositoryImportLog().setCurrentFilePath( currentFilePath );
-    getLogger().info( "Start File Import" );
+    getLog4JRepositoryImportLog( ).setCurrentFilePath( currentFilePath );
   }
 
   public void info( String s ) {
-    getLogger().info( s );
+    getLogger( ).info( s );
   }
 
   public void error( String s ) {
-    getLogger().error( s );
+    getLogger( ).error( s );
   }
 
   public void debug( String s ) {
-    getLogger().debug( s );
+    getLogger( ).debug( s );
   }
 
   public void warn( String s ) {
-    getLogger().debug( s );
+    getLogger( ).warn( s );
   }
 
   @Override
   public void error( Exception e ) {
-    getLogger().error( e.getMessage(), e );
+    getLogger( ).error( e.getMessage( ), e );
   }
 
-  private Log4JRepositoryImportLog getLog4JRepositoryImportLog() {
-    Log4JRepositoryImportLog currentLog = repositoryImportLog.get();
+  private Log4JRepositoryImportLog getLog4JRepositoryImportLog( ) {
+    Log4JRepositoryImportLog currentLog = repositoryImportLog.get( );
     if ( currentLog == null ) {
       throw new IllegalStateException( "No job
started for current Thread" ); } return currentLog; } - private Logger getLogger() { - return getLog4JRepositoryImportLog().getLogger(); + private Logger getLogger( ) { + return getLog4JRepositoryImportLog( ).getLogger( ); } - public boolean hasLogger() { - return ( repositoryImportLog.get() == null ) ? false : true; + public boolean hasLogger( ) { + return ( repositoryImportLog.get( ) == null ) ? false : true; } @Override public void debug( Object arg0 ) { - getLogger().debug( arg0 ); + getLogger( ).debug( arg0 ); } @Override public void debug( Object arg0, Throwable arg1 ) { - getLogger().debug( arg0, arg1 ); + getLogger( ).debug( arg0, arg1 ); } @Override public void error( Object arg0 ) { - getLogger().error( arg0 ); + getLogger( ).error( arg0 ); } @Override public void error( Object arg0, Throwable arg1 ) { - getLogger().error( arg0, arg1 ); + getLogger( ).error( arg0, arg1 ); } @Override public void fatal( Object arg0 ) { - getLogger().fatal( arg0 ); + getLogger( ).fatal( arg0 ); } @Override public void fatal( Object arg0, Throwable arg1 ) { - getLogger().fatal( arg0, arg1 ); + getLogger( ).fatal( arg0, arg1 ); } @Override public void info( Object arg0 ) { - getLogger().info( arg0 ); + getLogger( ).info( arg0 ); } @Override public void info( Object arg0, Throwable arg1 ) { - getLogger().info( arg0, arg1 ); + getLogger( ).info( arg0, arg1 ); } @Override - public boolean isDebugEnabled() { - return getLogger().isDebugEnabled(); + public boolean isDebugEnabled( ) { + return getLogger( ).isDebugEnabled( ); } @Override - public boolean isErrorEnabled() { - return Level.ERROR.isMoreSpecificThan( getLogger().getLevel() ); + public boolean isErrorEnabled( ) { + return Level.ERROR.isMoreSpecificThan( getLogger( ).getLevel( ) ); } @Override - public boolean isFatalEnabled() { - return Level.FATAL.isMoreSpecificThan( getLogger().getLevel() ); + public boolean isFatalEnabled( ) { + return Level.FATAL.isMoreSpecificThan( getLogger( ).getLevel( ) ); } @Override - public boolean isInfoEnabled() { - return getLogger().isInfoEnabled(); + public boolean isInfoEnabled( ) { + return getLogger( ).isInfoEnabled( ); } @Override - public boolean isTraceEnabled() { - return getLogger().isTraceEnabled(); + public boolean isTraceEnabled( ) { + return getLogger( ).isTraceEnabled( ); } @Override - public boolean isWarnEnabled() { - return Level.WARN.isMoreSpecificThan( getLogger().getLevel() ); + public boolean isWarnEnabled( ) { + return Level.WARN.isMoreSpecificThan( getLogger( ).getLevel( ) ); } @Override public void trace( Object arg0 ) { - getLogger().trace( arg0 ); + getLogger( ).trace( arg0 ); } @Override public void trace( Object arg0, Throwable arg1 ) { - getLogger().trace( arg0, arg1 ); + getLogger( ).trace( arg0, arg1 ); } @Override public void warn( Object arg0 ) { - getLogger().warn( arg0 ); + getLogger( ).warn( arg0 ); } @Override public void warn( Object arg0, Throwable arg1 ) { - getLogger().warn( arg0, arg1 ); + getLogger( ).warn( arg0, arg1 ); } } diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/RepositoryTextLayout.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/RepositoryTextLayout.java new file mode 100644 index 00000000000..683104ae63d --- /dev/null +++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/RepositoryTextLayout.java @@ -0,0 +1,182 @@ +/*! 
****************************************************************************** + * + * Pentaho + * + * Copyright (C) 2024 by Hitachi Vantara, LLC : http://www.pentaho.com + * + * Use of this software is governed by the Business Source License included + * in the LICENSE.TXT file. + * + * Change Date: 2029-07-20 + ******************************************************************************/ + +package org.pentaho.platform.plugin.services.importexport; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.ThreadContext; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.StringLayout; +import org.apache.logging.log4j.core.layout.ByteBufferDestination; +import org.apache.logging.log4j.status.StatusLogger; +import org.apache.logging.log4j.util.Strings; + +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Map; + +/** + * This class was derived from Log4j HTML + *

+ * Appenders using this layout should have their encoding set to UTF-8 or UTF-16, otherwise events containing non ASCII + * characters could result in corrupted log files. + * + * @author tkafalas + */ +public class RepositoryTextLayout implements StringLayout { + + protected static final int BUF_SIZE = 256; + protected static final int MAX_CAPACITY = 1024; + public static final String LINE_SEP = System.getProperty( "line.separator" ); + private static final String REGEXP = Strings.LINE_SEPARATOR.equals( "\n" ) ? "\n" : Strings.LINE_SEPARATOR + "|\n"; + + private Level loggerLogLevel = Level.DEBUG; + + // output buffer appended to when format() is invoked + private StringBuffer sbuf = new StringBuffer( BUF_SIZE ); + + String title = "Log4J Log Messages"; + + public RepositoryTextLayout( Level loggerLogLevel ) { + super( ); + this.loggerLogLevel = loggerLogLevel; + } + + /** + * The Title option takes a String value. This option sets the document title of the generated HTML document. + * + *

+ * Defaults to 'Log4J Log Messages'.
+ */
+  public void setTitle( String title ) {
+    this.title = title;
+  }
+
+  /**
+   * Returns the current value of the Title option.
+   */
+  public String getTitle( ) {
+    return title;
+  }
+
+  /**
+   * Returns the content type output by this layout, i.e. "text/plain".
+   */
+  public String getContentType( ) {
+    return "text/plain";
+  }
+
+  @Override
+  public Map<String, String> getContentFormat( ) {
+    return null;
+  }
+
+  /**
+   * No options to activate.
+   */
+  public void activateOptions( ) {
+  }
+
+  public String format( LogEvent event ) {
+
+    Level logLevel = event.getLevel( );
+    if ( sbuf.capacity( ) > MAX_CAPACITY ) {
+      sbuf = new StringBuffer( BUF_SIZE );
+    } else {
+      sbuf.setLength( 0 );
+    }
+
+    sbuf.append( LINE_SEP );
+
+    DateFormat df = new SimpleDateFormat( "MM/dd/yyyy HH:mm:ss" );
+    Date date = new Date( );
+    date.setTime( event.getTimeMillis( ) );
+    String time = null;
+    try {
+      time = df.format( date );
+    } catch ( Exception ex ) {
+      StatusLogger.getLogger( ).error( "Error occurred while converting date.", ex );
+    }
+
+    sbuf.append( time );
+
+    // File/Folder
+    String currentFile = ThreadContext.get( "currentFile" );
+    if ( currentFile != null && currentFile.length( ) > 0 ) {
+      sbuf.append( "\t" );
+      sbuf.append( currentFile );
+    }
+    // debug level
+    if ( showLevelColumn( ) ) {
+      sbuf.append( "\t" );
+      sbuf.append( String.valueOf( event.getLevel( ) ) );
+    }
+
+    // Message
+    sbuf.append( "\t" );
+    sbuf.append( event.getMessage( ) );
+
+    return sbuf.toString( );
+  }
+
+  /**
+   * Returns appropriate headers.
+   */
+  public byte[] getHeader( ) {
+    StringBuffer sbuf = new StringBuffer( );
+    sbuf.append( title );
+    return sbuf.toString( ).getBytes( StandardCharsets.UTF_8 );
+  }
+
+  @Override
+  public byte[] toByteArray( LogEvent event ) {
+    return format( event ).getBytes( StandardCharsets.UTF_8 );
+  }
+
+  @Override
+  public String toSerializable( LogEvent event ) {
+    return format( event );
+  }
+
+  /**
+   * Returns the appropriate footers.
+   */
+  public byte[] getFooter( ) {
+    StringBuffer sbuf = new StringBuffer( );
+    sbuf.append( "\n\nEnd of Log\n\n" );
+    return sbuf.toString( ).getBytes( StandardCharsets.UTF_8 );
+  }
+
+  /**
+   * The layout does not handle the throwable contained in logging events. Hence, this method returns true.
+   */
+  public boolean ignoresThrowable( ) {
+    return true;
+  }
+
+  private boolean showLevelColumn( ) {
+    return true;
+  }
+
+  @Override
+  public Charset getCharset( ) {
+    return StandardCharsets.UTF_8;
+  }
+
+  @Override
+  public void encode( LogEvent source, ByteBufferDestination destination ) {
+
+  }
+}
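To make the record format concrete, here is a hypothetical probe that feeds one synthetic event through the layout (Log4jLogEvent.Builder and SimpleMessage are standard log4j-core/log4j-api classes; the sample output line is an assumption based on format() above):

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.ThreadContext;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.impl.Log4jLogEvent;
import org.apache.logging.log4j.message.SimpleMessage;
import org.pentaho.platform.plugin.services.importexport.RepositoryTextLayout;

public class RepositoryTextLayoutSketch {
  public static void main( String[] args ) {
    RepositoryTextLayout layout = new RepositoryTextLayout( Level.INFO );
    layout.setTitle( "Repository Export Log" );

    // format() reads the current file/folder from the Log4j ThreadContext;
    // the import/export plumbing is expected to maintain this key, here we
    // set it by hand.
    ThreadContext.put( "currentFile", "/public/sample.prpt" );

    LogEvent event = Log4jLogEvent.newBuilder()
        .setLevel( Level.INFO )
        .setMessage( new SimpleMessage( "Start File Import" ) )
        .setTimeMillis( System.currentTimeMillis() )
        .build();

    // Prints a leading line break, then tab-separated columns, e.g.:
    // 07/20/2024 10:15:30	/public/sample.prpt	INFO	Start File Import
    System.out.print( layout.toSerializable( event ) );
  }
}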
diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/SimpleExportProcessor.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/SimpleExportProcessor.java
index 9598a5951c1..5ec80685927 100644
--- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/SimpleExportProcessor.java
+++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/SimpleExportProcessor.java
@@ -19,6 +19,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.pentaho.platform.api.repository2.unified.IUnifiedRepository;
 import org.pentaho.platform.api.repository2.unified.RepositoryFile;
+import org.pentaho.platform.api.importexport.ExportException;
 
 import java.io.File;
 import java.io.FileNotFoundException;
@@ -53,9 +54,8 @@ public SimpleExportProcessor( String path, IUnifiedRepository repository ) {
 
   /**
    * Performs the export process, returns a File object
-   *
-   * @throws ExportException
-   *           indicates an error in import processing
+   *
+   * @throws ExportException indicates an error in export processing
    */
   public File performExport( RepositoryFile exportRepositoryFile ) throws ExportException, IOException {
     OutputStream os;
@@ -63,10 +63,10 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx
 
     // create temp file
     exportFile = File.createTempFile( EXPORT_TEMP_FILENAME_PREFIX, EXPORT_TEMP_FILENAME_EXT );
-    exportFile.deleteOnExit();
+    exportFile.deleteOnExit( );
 
     // get the file path
-    String filePath = new File( this.path ).getParent();
+    String filePath = new File( this.path ).getParent( );
 
     // send a response right away if not found
     if ( exportRepositoryFile == null ) {
@@ -79,11 +79,11 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx
     try {
       exportFile( exportRepositoryFile, os, filePath );
     } catch ( Exception e ) {
-      log.error( e.getMessage() );
-      throw ( new ExportException() );
+      log.error( e.getMessage( ) );
+      throw ( new ExportException( ) );
     } finally {
       // make sure to close output stream
-      os.close();
+      os.close( );
     }
 
     // clean up
@@ -98,18 +98,17 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx
    */
   @Override
   public void exportDirectory( RepositoryFile repositoryDir, OutputStream outputStream, String filePath )
-    throws ExportException, IOException {
-    throw new UnsupportedOperationException();
+      throws ExportException, IOException {
+    throw new UnsupportedOperationException( );
   }
 
   /**
-   *
    * @param repositoryFile
    * @param outputStream
    */
   @Override
   public void exportFile( RepositoryFile repositoryFile, OutputStream outputStream, String filePath )
-    throws ExportException, IOException {
+      throws ExportException, IOException {
 
     // iterate through handlers to perform export
     for ( ExportHandler exportHandler : exportHandlerList ) {
@@ -120,7 +119,7 @@ public void exportFile( RepositoryFile repositoryFile, OutputStream outputStream
 
       IOUtils.copy( is, outputStream );
 
-      is.close();
+      is.close( );
     }
   }
 }
diff --git a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessor.java b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessor.java
index
0b0b5a1964a..a29ed233aa1 100644 --- a/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessor.java +++ b/extensions/src/main/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessor.java @@ -23,6 +23,7 @@ import org.pentaho.platform.api.repository2.unified.RepositoryFile; import org.pentaho.platform.api.repository2.unified.RepositoryFileAcl; import org.pentaho.platform.api.repository2.unified.RepositoryRequest; +import org.pentaho.platform.api.importexport.ExportException; import org.pentaho.platform.engine.core.system.PentahoSessionHolder; import org.pentaho.platform.engine.core.system.PentahoSystem; import org.pentaho.platform.plugin.services.importexport.exportManifest.ExportManifest; @@ -76,29 +77,29 @@ public ZipExportProcessor( String path, IUnifiedRepository repository, boolean w setUnifiedRepository( repository ); - this.exportHandlerList = new ArrayList<>(); + this.exportHandlerList = new ArrayList<>( ); - initManifest(); + initManifest( ); } - protected void initManifest() { - this.exportManifest = new ExportManifest(); + protected void initManifest( ) { + this.exportManifest = new ExportManifest( ); // set created by and create date in manifest information - IPentahoSession session = getSession(); + IPentahoSession session = getSession( ); - Date todaysDate = new Date(); + Date todaysDate = new Date( ); SimpleDateFormat dateFormat = new SimpleDateFormat( EXPORT_INFO_DATE_FORMAT ); SimpleDateFormat timeFormat = new SimpleDateFormat( EXPORT_INFO_TIME_FORMAT ); - exportManifest.getManifestInformation().setExportBy( session.getName() ); - exportManifest.getManifestInformation().setExportDate( - dateFormat.format( todaysDate ) + " " + timeFormat.format( todaysDate ) ); - exportManifest.getManifestInformation().setManifestVersion( "2" ); + exportManifest.getManifestInformation( ).setExportBy( session.getName( ) ); + exportManifest.getManifestInformation( ).setExportDate( + dateFormat.format( todaysDate ) + " " + timeFormat.format( todaysDate ) ); + exportManifest.getManifestInformation( ).setManifestVersion( "2" ); } - protected IPentahoSession getSession() { - return PentahoSessionHolder.getSession(); + protected IPentahoSession getSession( ) { + return PentahoSessionHolder.getSession( ); } /** @@ -111,10 +112,10 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx // create temp file exportFile = File.createTempFile( EXPORT_TEMP_FILENAME_PREFIX, EXPORT_TEMP_FILENAME_EXT ); - exportFile.deleteOnExit(); + exportFile.deleteOnExit( ); // get the file path - String filePath = new File( this.path ).getParent(); + String filePath = new File( this.path ).getParent( ); if ( filePath == null ) { filePath = "/"; } @@ -126,17 +127,17 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx } try ( ZipOutputStream zos = new ZipOutputStream( new FileOutputStream( exportFile ) ) ) { - if ( exportRepositoryFile.isFolder() ) { // Handle recursive export - exportManifest.getManifestInformation().setRootFolder( path.substring( 0, path.lastIndexOf( "/" ) + 1 ) ); + if ( exportRepositoryFile.isFolder( ) ) { // Handle recursive export + exportManifest.getManifestInformation( ).setRootFolder( path.substring( 0, path.lastIndexOf( "/" ) + 1 ) ); // don't zip root folder without name - if ( !ClientRepositoryPaths.getRootFolderPath().equals( exportRepositoryFile.getPath() ) ) { + if ( !ClientRepositoryPaths.getRootFolderPath( ).equals( exportRepositoryFile.getPath( ) ) ) { zos.putNextEntry( new 
ZipEntry( getFixedZipEntryName( exportRepositoryFile, filePath ) ) ); } exportDirectory( exportRepositoryFile, zos, filePath ); } else { - exportManifest.getManifestInformation().setRootFolder( path.substring( 0, path.lastIndexOf( "/" ) + 1 ) ); + exportManifest.getManifestInformation( ).setRootFolder( path.substring( 0, path.lastIndexOf( "/" ) + 1 ) ); exportFile( exportRepositoryFile, zos, filePath ); } @@ -153,7 +154,7 @@ public File performExport( RepositoryFile exportRepositoryFile ) throws ExportEx log.error( "Error generating export XML" ); } - zos.closeEntry(); + zos.closeEntry( ); } } @@ -180,13 +181,18 @@ public void exportFile( RepositoryFile repositoryFile, OutputStream outputStream try ( InputStream is = exportHandler.doExport( repositoryFile, filePath ) ) { // if we don't get a valid input stream back, skip it if ( is != null ) { + getRepositoryExportLogger( ).debug( "Adding repository object [ " + repositoryFile.getName( ) + " ] to the manifest" ); addToManifest( repositoryFile ); + getRepositoryExportLogger( ).debug( "Starting to add repository object [ " + repositoryFile.getName( ) + " ] to the export bundle" ); String zipEntryName = getFixedZipEntryName( repositoryFile, filePath ); ZipEntry entry = new ZipEntry( zipEntryName ); zos.putNextEntry( entry ); IOUtils.copy( is, outputStream ); - zos.closeEntry(); - createLocales( repositoryFile, filePath, repositoryFile.isFolder(), outputStream ); + zos.closeEntry( ); + getRepositoryExportLogger( ).debug( "Successfully added repository object [ " + repositoryFile.getName( ) + " ] to the export bundle" ); + getRepositoryExportLogger( ).trace( "Starting to create locale entry for repository object [ " + ( ( repositoryFile != null ) ? repositoryFile.getName( ) : "" ) + " ] " ); + createLocales( repositoryFile, filePath, repositoryFile.isFolder( ), outputStream ); + getRepositoryExportLogger( ).trace( "Finished creating locale entry for repository object [ " + ( ( repositoryFile != null ) ? 
repositoryFile.getName( ) : "" ) + " ] " ); } } } @@ -201,11 +207,11 @@ public void exportFile( RepositoryFile repositoryFile, OutputStream outputStream protected void addToManifest( RepositoryFile repositoryFile ) throws ExportException { if ( this.withManifest ) { // add this entity to the manifest - RepositoryFileAcl fileAcl = getUnifiedRepository().getAcl( repositoryFile.getId() ); + RepositoryFileAcl fileAcl = getUnifiedRepository( ).getAcl( repositoryFile.getId( ) ); try { - getExportManifest().add( repositoryFile, fileAcl ); + getExportManifest( ).add( repositoryFile, fileAcl ); } catch ( ExportManifestFormatException e ) { - throw new ExportException( e.getMessage() ); + throw new ExportException( e.getMessage( ) ); } } } @@ -217,35 +223,45 @@ protected void addToManifest( RepositoryFile repositoryFile ) throws ExportExcep @Override public void exportDirectory( RepositoryFile repositoryDir, OutputStream outputStream, String filePath ) throws ExportException, IOException { + getRepositoryExportLogger( ).debug( "Adding repository object [ " + repositoryDir.getName( ) + " ] to the manifest" ); addToManifest( repositoryDir ); - List children = getUnifiedRepository().getChildren( new RepositoryRequest( - String.valueOf( repositoryDir.getId() ), true, 1, null ) ); + List children = getUnifiedRepository( ).getChildren( new RepositoryRequest( + String.valueOf( repositoryDir.getId( ) ), true, 1, null ) ); + getRepositoryExportLogger( ).debug( "Found [ " + children.size( ) + " ] children in folder [ " + repositoryDir.getName( ) + " ]" ); for ( RepositoryFile repositoryFile : children ) { // exclude 'etc' folder - datasources and etc. - if ( isExportCandidate( repositoryFile.getPath() ) ) { - if ( repositoryFile.isFolder() ) { - if ( outputStream.getClass().isAssignableFrom( ZipOutputStream.class ) ) { + if ( isExportCandidate( repositoryFile.getPath( ) ) ) { + getRepositoryExportLogger( ).trace( "Repository object is a candidate for export [ " + repositoryFile.getName( ) + " ]" ); + if ( repositoryFile.isFolder( ) ) { + getRepositoryExportLogger( ).debug( "Repository Object [ " + repositoryFile.getName( ) + " ] is a folder. Adding it to the export bundle" ); + if ( outputStream.getClass( ).isAssignableFrom( ZipOutputStream.class ) ) { ZipOutputStream zos = (ZipOutputStream) outputStream; String zipEntryName = getFixedZipEntryName( repositoryFile, filePath ); ZipEntry entry = new ZipEntry( zipEntryName ); zos.putNextEntry( entry ); + getRepositoryExportLogger( ).debug( "Successfully added repository Object [ " + repositoryFile.getName( ) + " ] to the export bundle" ); } exportDirectory( repositoryFile, outputStream, filePath ); } else { try { + getRepositoryExportLogger( ).debug( "Repository Object [ " + repositoryFile.getName( ) + " ] is a file. 
Adding it to the export bundle" );
             exportFile( repositoryFile, outputStream, filePath );
           } catch ( ZipException e ) {
             // possible duplicate entry, log it and continue on with the other files in the directory
-            log.debug( e.getMessage(), e );
+            log.debug( e.getMessage( ), e );
           }
         }
+      } else {
+        getRepositoryExportLogger( ).trace( "Repository object is not a candidate for export [ " + repositoryFile.getName( ) + " ], skipping it" );
       }
     }
-    createLocales( repositoryDir, filePath, repositoryDir.isFolder(), outputStream );
+    getRepositoryExportLogger( ).trace( "Starting to create locale entry for repository object [ " + repositoryDir.getName( ) + " ] " );
+    createLocales( repositoryDir, filePath, repositoryDir.isFolder( ), outputStream );
+    getRepositoryExportLogger( ).trace( "Finished creating locale entry for repository object [ " + repositoryDir.getName( ) + " ] " );
   }
 
   protected boolean isExportCandidate( String path ) {
-    return !ClientRepositoryPaths.getEtcFolderPath().equals( path );
+    return !ClientRepositoryPaths.getEtcFolderPath( ).equals( path );
   }
 
   /**
@@ -270,15 +286,15 @@ protected String getZipEntryName( RepositoryFile repositoryFile, String filePath
     int filePathLength = 0;
 
     if ( filePath.equals( "/" ) || filePath.equals( "\\" ) ) {
-      filePathLength = filePath.length();
+      filePathLength = filePath.length( );
     } else {
-      filePathLength = filePath.length() + 1;
+      filePathLength = filePath.length( ) + 1;
     }
 
-    result = repositoryFile.getPath().substring( filePathLength );
+    result = repositoryFile.getPath( ).substring( filePathLength );
 
     // add trailing slash for folders
-    if ( repositoryFile.isFolder() ) {
+    if ( repositoryFile.isFolder( ) ) {
       result += "/";
     }
     return FilenameUtils.normalize( result, true );
@@ -294,7 +310,7 @@ protected String getZipEntryName( RepositoryFile repositoryFile, String filePath
    * @throws IOException
    */
   protected void createLocales( RepositoryFile repositoryFile, String filePath, boolean isFolder,
-    OutputStream outputStream ) throws IOException {
+      OutputStream outputStream ) throws IOException {
     ZipEntry entry;
     String zipEntryName;
     String name;
@@ -303,26 +319,26 @@ protected void createLocales( RepositoryFile repositoryFile, String filePath, bo
     ZipOutputStream zos = (ZipOutputStream) outputStream;
     // only process files and folders that we know will have locale settings
     if ( supportedLocaleFileExt( repositoryFile ) ) {
-      List<LocaleMapDto> locales = getAvailableLocales( repositoryFile.getId() );
+      List<LocaleMapDto> locales = getAvailableLocales( repositoryFile.getId( ) );
       zipEntryName = getFixedZipEntryName( repositoryFile, filePath );
-      name = repositoryFile.getName();
+      name = repositoryFile.getName( );
       for ( LocaleMapDto locale : locales ) {
-        localeName = locale.getLocale().equalsIgnoreCase( "default" ) ? "" : "_" + locale.getLocale();
+        localeName = locale.getLocale( ).equalsIgnoreCase( "default" ) ?
"" : "_" + locale.getLocale( ); if ( isFolder ) { zipEntryName = getFixedZipEntryName( repositoryFile, filePath ) + "index"; name = "index"; } - properties = getUnifiedRepository().getLocalePropertiesForFileById( repositoryFile.getId(), locale.getLocale() ); + properties = getUnifiedRepository( ).getLocalePropertiesForFileById( repositoryFile.getId( ), locale.getLocale( ) ); if ( properties != null ) { properties.remove( "jcr:primaryType" ); // Pentaho Type - try ( InputStream is = createLocaleFile( name + localeName, properties, locale.getLocale() ) ) { + try ( InputStream is = createLocaleFile( name + localeName, properties, locale.getLocale( ) ) ) { if ( is != null ) { entry = new ZipEntry( zipEntryName + localeName + LOCALE_EXT ); zos.putNextEntry( entry ); IOUtils.copy( is, outputStream ); - zos.closeEntry(); + zos.closeEntry( ); } } } @@ -339,13 +355,13 @@ protected void createLocales( RepositoryFile repositoryFile, String filePath, bo */ private boolean supportedLocaleFileExt( RepositoryFile repositoryFile ) { boolean ans = true; - String ext = repositoryFile.getName(); - if ( !repositoryFile.isFolder() ) { + String ext = repositoryFile.getName( ); + if ( !repositoryFile.isFolder( ) ) { int idx = ext.lastIndexOf( "." ); if ( idx > 0 ) { - ext = ext.substring( idx, ext.length() ); + ext = ext.substring( idx, ext.length( ) ); } - List exportList = getLocaleExportList(); + List exportList = getLocaleExportList( ); if ( exportList != null ) { ans = exportList.contains( ext ); } @@ -361,11 +377,11 @@ private boolean supportedLocaleFileExt( RepositoryFile repositoryFile ) { * @return */ private List getAvailableLocales( Serializable fileId ) { - List availableLocales = new ArrayList(); - List locales = getUnifiedRepository().getAvailableLocalesForFileById( fileId ); - if ( locales != null && !locales.isEmpty() ) { + List availableLocales = new ArrayList( ); + List locales = getUnifiedRepository( ).getAvailableLocalesForFileById( fileId ); + if ( locales != null && !locales.isEmpty( ) ) { for ( Locale locale : locales ) { - availableLocales.add( new LocaleMapDto( locale.toString(), null ) ); + availableLocales.add( new LocaleMapDto( locale.toString( ), null ) ); } } return availableLocales; @@ -386,7 +402,7 @@ private InputStream createLocaleFile( String name, Properties properties, String try { localeFile = PentahoSystem - .getApplicationContext().createTempFile( getSession(), ExportFileNameEncoder.encodeZipFileName( name ), LOCALE_EXT, true ); + .getApplicationContext( ).createTempFile( getSession( ), ExportFileNameEncoder.encodeZipFileName( name ), LOCALE_EXT, true ); } catch ( IOException e ) { // BISERVER-14140 - Retry when temp file name exceeds the limit of OS // Retry inside a catch because there isn't an accurate mechanism to determine the effective temp file max length @@ -394,10 +410,10 @@ private InputStream createLocaleFile( String name, Properties properties, String String smallerName = ExportFileNameEncoder.encodeZipFileName( name ).substring( 0, SAFETY_TMP_FILE_SIZE ); log.debug( "Error with original name file. 
Retrying with a smaller temp file name - " + smallerName );
       localeFile = PentahoSystem
-        .getApplicationContext().createTempFile( getSession(), smallerName, LOCALE_EXT, true );
+        .getApplicationContext( ).createTempFile( getSession( ), smallerName, LOCALE_EXT, true );
     } finally {
       if ( localeFile != null ) {
-        localeFile.deleteOnExit();
+        localeFile.deleteOnExit( );
       }
     }
 
@@ -414,10 +430,10 @@ private InputStream createLocaleFile( String name, Properties properties, String
    *
    * @return
    */
-  public List getLocaleExportList() {
-    if ( this.localeExportList == null || this.localeExportList.isEmpty() ) {
+  public List getLocaleExportList( ) {
+    if ( this.localeExportList == null || this.localeExportList.isEmpty( ) ) {
       for ( ExportHandler exportHandler : exportHandlerList ) {
-        this.localeExportList = ( (DefaultExportHandler) exportHandler ).getLocaleExportList();
+        this.localeExportList = ( (DefaultExportHandler) exportHandler ).getLocaleExportList( );
         break;
       }
     }
@@ -428,7 +444,7 @@ public void setLocaleExportList( List localeExportList ) {
     this.localeExportList = localeExportList;
   }
 
-  public ExportManifest getExportManifest() {
+  public ExportManifest getExportManifest( ) {
     return exportManifest;
   }
diff --git a/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/FileResource.java b/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/FileResource.java
index c5f1f69f1ad..c2c74ba8ff0 100755
--- a/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/FileResource.java
+++ b/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/FileResource.java
@@ -45,7 +45,7 @@
 import org.pentaho.platform.engine.core.system.PentahoSessionHolder;
 import org.pentaho.platform.engine.core.system.PentahoSystem;
 import org.pentaho.platform.plugin.services.importer.PlatformImportException;
-import org.pentaho.platform.plugin.services.importexport.ExportException;
+import org.pentaho.platform.api.importexport.ExportException;
 import org.pentaho.platform.plugin.services.importexport.Exporter;
 import org.pentaho.platform.repository.RepositoryDownloadWhitelist;
 import org.pentaho.platform.repository.RepositoryFilenameUtils;
@@ -83,6 +83,7 @@
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import javax.ws.rs.core.StreamingOutput;
+import javax.ws.rs.core.MultivaluedMap;
 import javax.ws.rs.ext.ContextResolver;
 import javax.ws.rs.ext.Providers;
 import javax.xml.bind.JAXBContext;
@@ -178,17 +179,22 @@ public static String idToPath( String pathId ) {
    *          Encrypted file stream
    *
    */
-  @GET
+  @POST
   @Path( "/backup" )
+  @Consumes( MediaType.APPLICATION_FORM_URLENCODED )
   @StatusCodes( {
     @ResponseCode( code = 200, condition = "Successfully exported the existing Pentaho System" ),
+    @ResponseCode( code = 400, condition = "User has provided an invalid file path" ),
     @ResponseCode( code = 403, condition = "User does not have administrative permissions" ),
    @ResponseCode( code = 500, condition = "Failure to complete the export." ) } )
-  public Response systemBackup( @HeaderParam ( "user-agent" ) String userAgent ) {
+  public Response systemBackup( @HeaderParam ( "user-agent" ) String userAgent, final MultivaluedMap<String, String> formParams ) {
     FileService.DownloadFileWrapper wrapper;
     try {
-      wrapper = fileService.systemBackup( userAgent );
+      wrapper = fileService.systemBackup( userAgent, formParams.getFirst( "logFile" ), formParams
+          .getFirst( "logLevel" ), formParams.getFirst( "outputFile" ) );
       return buildZipOkResponse( wrapper );
+    } catch ( IllegalArgumentException iae ) {
+      throw new WebApplicationException( iae, Response.Status.BAD_REQUEST );
     } catch ( IOException e ) {
       throw new WebApplicationException( e, Response.Status.INTERNAL_SERVER_ERROR );
     } catch ( ExportException e ) {
@@ -212,13 +218,17 @@ public Response systemBackup( @HeaderParam ( "user-agent" ) String userAgent ) {
   @Consumes( MediaType.MULTIPART_FORM_DATA )
   @StatusCodes( {
     @ResponseCode( code = 200, condition = "Successfully imported the Pentaho System" ),
+    @ResponseCode( code = 400, condition = "User has provided an invalid file path" ),
     @ResponseCode( code = 403, condition = "User does not have administrative permissions" ),
     @ResponseCode( code = 500, condition = "Failure to complete the import." ) } )
   public Response systemRestore( @FormDataParam( "fileUpload" ) InputStream fileUpload, @FormDataParam ( "overwriteFile" ) String overwriteFile,
-    @FormDataParam ( "applyAclSettings" ) String applyAclSettings, @FormDataParam ( "overwriteAclSettings" ) String overwriteAclSettings ) {
+    @FormDataParam ( "applyAclSettings" ) String applyAclSettings, @FormDataParam ( "overwriteAclSettings" ) String overwriteAclSettings,
+    @FormDataParam ( "logFile" ) String logFile, @FormDataParam ( "logLevel" ) String logLevel ) {
     try {
-      fileService.systemRestore( fileUpload, overwriteFile, applyAclSettings, overwriteAclSettings );
+      fileService.systemRestore( fileUpload, overwriteFile, applyAclSettings, overwriteAclSettings, logFile, logLevel );
       return Response.ok().build();
+    } catch ( IllegalArgumentException iae ) {
+      throw new WebApplicationException( iae, Response.Status.BAD_REQUEST );
     } catch ( PlatformImportException e ) {
       throw new WebApplicationException( e, Response.Status.INTERNAL_SERVER_ERROR );
     } catch ( SecurityException e ) {
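With the endpoint now accepting a form-encoded POST, a client call looks roughly like the following sketch (the /pentaho/api/repo/files prefix, host, and authentication are assumptions, not part of this diff; only the verb, content type, and the logFile/logLevel/outputFile fields come from the code above):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SystemBackupCallSketch {
  public static void main( String[] args ) throws Exception {
    // Matches @Consumes( MediaType.APPLICATION_FORM_URLENCODED ) on /backup.
    String form = "logFile=/tmp/backup.log&logLevel=INFO&outputFile=SystemBackup.zip";

    HttpRequest request = HttpRequest.newBuilder()
        .uri( URI.create( "http://localhost:8080/pentaho/api/repo/files/backup" ) ) // assumed mount point
        .header( "Content-Type", "application/x-www-form-urlencoded" )
        // Real calls also need administrator credentials (omitted here).
        .POST( HttpRequest.BodyPublishers.ofString( form ) )
        .build();

    HttpResponse<byte[]> response = HttpClient.newHttpClient()
        .send( request, HttpResponse.BodyHandlers.ofByteArray() );
    // Per the @StatusCodes above: 200 = zip stream, 400 = bad log file path,
    // 403 = not an administrator, 500 = export failure.
    System.out.println( response.statusCode() );
  }
}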
diff --git a/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/services/FileService.java b/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/services/FileService.java
index 9860af02bfc..087feb6b85a 100644
--- a/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/services/FileService.java
+++ b/extensions/src/main/java/org/pentaho/platform/web/http/api/resources/services/FileService.java
@@ -40,7 +40,9 @@
 import org.pentaho.platform.api.repository2.unified.webservices.RepositoryFileDto;
 import org.pentaho.platform.api.repository2.unified.webservices.RepositoryFileTreeDto;
 import org.pentaho.platform.api.repository2.unified.webservices.StringKeyStringValueDto;
+import org.pentaho.platform.api.importexport.ExportException;
 import org.pentaho.platform.api.util.IPentahoPlatformExporter;
+import org.pentaho.platform.api.util.IRepositoryExportLogger;
 import org.pentaho.platform.engine.core.system.PentahoSessionHolder;
 import org.pentaho.platform.engine.core.system.PentahoSystem;
 import org.pentaho.platform.plugin.services.exporter.PentahoPlatformExporter;
@@ -49,7 +51,7 @@
 import org.pentaho.platform.plugin.services.importer.RepositoryFileImportBundle;
 import org.pentaho.platform.plugin.services.importexport.BaseExportProcessor;
 import org.pentaho.platform.plugin.services.importexport.DefaultExportHandler;
-import org.pentaho.platform.plugin.services.importexport.ExportException;
+import org.pentaho.platform.plugin.services.importexport.RepositoryTextLayout;
 import org.pentaho.platform.plugin.services.importexport.ExportHandler;
 import org.pentaho.platform.plugin.services.importexport.IRepositoryImportLogger;
 import org.pentaho.platform.plugin.services.importexport.ImportSession;
@@ -82,6 +84,7 @@
 import javax.ws.rs.core.StreamingOutput;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -95,6 +98,8 @@
 import java.security.GeneralSecurityException;
 import java.security.InvalidParameterException;
 import java.text.Collator;
+import java.text.SimpleDateFormat;
+import java.util.Date;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -131,29 +136,76 @@ public class FileService {
 
   private PentahoPlatformExporter backupExporter;
 
-  public DownloadFileWrapper systemBackup( String userAgent ) throws IOException, ExportException {
+  private void validateFilePath( String logFile ) throws IllegalArgumentException {
+    if ( logFile.contains( ".." ) || logFile.contains( "//" ) || logFile.contains( "\\\\" ) || ( !logFile.endsWith( ".txt" ) && !logFile.endsWith( ".log" ) ) ) {
+      throw new IllegalArgumentException( Messages.getInstance().getString( "FileService.ERROR_INVALID_LOG_FILENAME", logFile ) );
+    }
+  }
+
+  public DownloadFileWrapper systemBackup( String userAgent, String logFile, String logLevel, String outputFile ) throws IllegalArgumentException, IOException, ExportException {
     if ( doCanAdminister() ) {
-      String originalFileName;
       String encodedFileName;
-      originalFileName = "SystemBackup.zip";
-      encodedFileName = makeEncodedFileName( originalFileName );
-      StreamingOutput streamingOutput = getBackupStream();
-      final String attachment = HttpMimeTypeListener.buildContentDispositionValue( originalFileName, true );
-
+      encodedFileName = makeEncodedFileName( outputFile );
+      IRepositoryExportLogger exportLogger;
+      Level level = Level.valueOf( logLevel );
+      FileOutputStream fileOutputStream = null;
+      try {
+        validateFilePath( logFile );
+        fileOutputStream = new FileOutputStream( logFile );
+      } catch ( FileNotFoundException e ) {
+        try {
+          fileOutputStream = retrieveFallbackLogFileLocation( "backup" );
+        } catch ( FileNotFoundException fileNotFoundException ) {
+          throw new ExportException( fileNotFoundException );
+        }
+      }
+      ByteArrayOutputStream exportLoggerStream = new ByteArrayOutputStream();
+      IPentahoPlatformExporter exporter = PentahoSystem.get( IPentahoPlatformExporter.class );
+      exportLogger = exporter.getRepositoryExportLogger();
+      RepositoryTextLayout stringLayout = new RepositoryTextLayout( level );
+      exportLogger.startJob( exportLoggerStream, level, stringLayout );
+      StreamingOutput streamingOutput = getBackupStream( );
+      exportLogger.endJob( );
+      try {
+        exportLoggerStream.writeTo( fileOutputStream );
+      } catch ( IOException e ) {
+        e.printStackTrace();
+      }
+      final String attachment = HttpMimeTypeListener.buildContentDispositionValue( outputFile, true );
       return new DownloadFileWrapper( streamingOutput, attachment, encodedFileName );
     } else {
       throw new SecurityException();
     }
   }
+  public FileOutputStream retrieveFallbackLogFileLocation( String filePrefix ) throws FileNotFoundException {
+    String defaultBaseDir = System.getProperty( "java.io.tmpdir" );
+    // Get the current timestamp
+    SimpleDateFormat dateFormat = new SimpleDateFormat( "yyyyMMdd_HHmmss" );
+    String timestamp = dateFormat.format( new Date() );
+    String fallbackLogFilePath = defaultBaseDir + File.separator + filePrefix + "_" + timestamp + ".log";
+    return new FileOutputStream( fallbackLogFilePath );
+  }
+
   public void systemRestore( final InputStream fileUpload, String overwriteFile,
-    String applyAclSettings, String overwriteAclSettings ) throws PlatformImportException, SecurityException {
+      String applyAclSettings, String overwriteAclSettings, String logFile, String logLevel ) throws IllegalArgumentException, PlatformImportException, SecurityException {
     if ( doCanAdminister() ) {
       boolean overwriteFileFlag = !"false".equals( overwriteFile );
       boolean applyAclSettingsFlag = !"false".equals( applyAclSettings );
       boolean overwriteAclSettingsFlag = "true".equals( overwriteAclSettings );
       IRepositoryImportLogger importLogger;
-      Level level = Level.ERROR;
+      Level level = Level.valueOf( logLevel );
+
+      FileOutputStream fileOutputStream = null;
+      try {
+        validateFilePath( logFile );
+        fileOutputStream = new FileOutputStream( logFile );
+      } catch ( FileNotFoundException e ) {
+        try {
+          fileOutputStream = retrieveFallbackLogFileLocation( "restore" );
+        } catch ( FileNotFoundException fileNotFoundException ) {
+          throw new PlatformImportException( fileNotFoundException.getLocalizedMessage() );
+        }
+      }
       ByteArrayOutputStream importLoggerStream = new ByteArrayOutputStream();
       String importDirectory = "/";
       RepositoryFileImportBundle.Builder bundleBuilder = new RepositoryFileImportBundle.Builder();
@@ -173,19 +225,25 @@ public void systemRestore( final InputStream fileUpload, String overwriteFile,
 
       IPlatformImporter importer = PentahoSystem.get( IPlatformImporter.class );
       importLogger = importer.getRepositoryImportLogger();
-      importLogger.startJob( importLoggerStream, importDirectory, level );
+      RepositoryTextLayout stringLayout = new RepositoryTextLayout( level );
+      importLogger.startJob( importLoggerStream, importDirectory, level, stringLayout );
       try {
         importer.importFile( bundleBuilder.build() );
       } finally {
         importLogger.endJob();
+        try {
+          importLoggerStream.writeTo( fileOutputStream );
+        } catch ( IOException e ) {
+          e.printStackTrace();
+        }
       }
     } else {
       throw new SecurityException();
     }
   }
 
-  private StreamingOutput getBackupStream() throws IOException, ExportException {
-    File zipFile = getBackupExporter().performExport();
+  private StreamingOutput getBackupStream( ) throws IOException, ExportException {
+    File zipFile = getBackupExporter().performExport( );
     final FileInputStream inputStream = new FileInputStream( zipFile );
 
     return new StreamingOutput() {
@@ -994,7 +1052,7 @@ public RepositoryFileDto doGetProperties( String pathId ) throws FileNotFoundExc
    */
   public String doGetCanEdit() {
     String editPermission = PentahoSystem.getSystemSetting( "edit-permission", "" );
-    if( editPermission != null && editPermission.length() > 0 ) {
+    if ( editPermission != null && editPermission.length() > 0 ) {
      return getPolicy().isAllowed( editPermission ) ? "true" : "false";
     } else {
       return "true";
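The validation rules are easiest to see with a few probes. A standalone transcription of validateFilePath() for illustration only (the helper name isValid is hypothetical; the predicate is copied from the method above, inverted to a boolean):

public class LogFilePathRuleSketch {
  // Same predicate as FileService.validateFilePath().
  static boolean isValid( String logFile ) {
    return !( logFile.contains( ".." ) || logFile.contains( "//" ) || logFile.contains( "\\\\" )
        || ( !logFile.endsWith( ".txt" ) && !logFile.endsWith( ".log" ) ) );
  }

  public static void main( String[] args ) {
    System.out.println( isValid( "/tmp/backup.log" ) );        // true
    System.out.println( isValid( "/tmp/backup.txt" ) );        // true
    System.out.println( isValid( "/tmp/../etc/backup.log" ) ); // false: path traversal
    System.out.println( isValid( "/var//log/backup.log" ) );   // false: doubled separator
    System.out.println( isValid( "/tmp/backup.zip" ) );        // false: extension must be .txt or .log
  }
}

When the supplied path fails to open, both systemBackup() and systemRestore() fall back to a timestamped file under java.io.tmpdir rather than aborting the job.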
"true" : "false"; } else { return "true"; diff --git a/extensions/src/main/resources/org/pentaho/platform/plugin/services/messages/messages.properties b/extensions/src/main/resources/org/pentaho/platform/plugin/services/messages/messages.properties index db771ce9548..836b9ebe7f9 100644 --- a/extensions/src/main/resources/org/pentaho/platform/plugin/services/messages/messages.properties +++ b/extensions/src/main/resources/org/pentaho/platform/plugin/services/messages/messages.properties @@ -204,6 +204,7 @@ CommandLineProcessor.ERROR_0005_INVALID_FILE_PATH=Invalid file-path: {0} CommandLineProcessor.ERROR_0006_NON_ADMIN_CREDENTIALS=Non admin credentials entered CommandLineProcessor.ERROR_0007_FORBIDDEN=User is not allowed to perform this operation: {0} CommandLineProcessor.ERROR_0008_INVALID_PARAMETER=Invalid parameter syntax: "{0}" +CommandLineProcessor.ERROR_0009_INVALID_LOG_FILE_PATH=Invalid log file path: "{0}" CommandLineProcessor.INFO_OPTION_HELP_DESCRIPTION=print this message CommandLineProcessor.INFO_OPTION_IMPORT_DESCRIPTION=import @@ -216,6 +217,7 @@ CommandLineProcessor.INFO_OPTION_URL_DESCRIPTION=url of repository (e.g. http:// CommandLineProcessor.INFO_OPTION_FILEPATH_DESCRIPTION=Path to directory of files for import, or path to .zip file for export CommandLineProcessor.INFO_OPTION_CHARSET_DESCRIPTION=charset to use for the repository (characters from external systems converted to this charset) CommandLineProcessor.INFO_OPTION_LOGFILE_DESCRIPTION=full path and filename of logfile messages +CommandLineProcessor.INFO_OPTION_LOGLEVEL_DESCRIPTION=Log Level CommandLineProcessor.INFO_OPTION_PATH_DESCRIPTION=repository path to which to add imported files, or to export from (e.g. /public) CommandLineProcessor.INFO_OPTION_OVERWRITE_DESCRIPTION=overwrite files (import only) CommandLineProcessor.INFO_OPTION_PERMISSION_DESCRIPTION=apply ACL manifest permissions to files and folders (import only) @@ -302,10 +304,85 @@ SolutionImportHandler.SkipLocaleFile=Skipping [{0}], it is a locale property fil SolutionImportHandler.ConnectionWithoutDatabaseType=Can't import connection [{0}] because it doesn't have a databaseType. SolutionImportHandler.SchedulesWithSpaces=Could not import schedule, attempting to replace spaces with underscores and retrying: {0} + +SolutionImportHandler.INFO_START_IMPORT_PROCESS=Starting the import process +SolutionImportHandler.INFO_START_IMPORT_FILEFOLDER=*********************** [ Start: Import File/Folder(s) ] ************************************** +SolutionImportHandler.INFO_COUNT_FILEFOLDER=Found [ {0} ] files to import +SolutionImportHandler.ERROR_IMPORTING_REPOSITORY_OBJECT=importing repository object with path [ {0} ] from the cache. Cause [ {1} ] +SolutionImportHandler.INFO_SUCCESSFUL_REPOSITORY_IMPORT_COUNT=Successfully imported [ {0}} ] out of [ {1} ] +SolutionImportHandler.INFO_START_IMPORT_LOCALEFILE=****************************[ Start: Import Locale File(s) ] ********************************** +SolutionImportHandler.ERROR_IMPORTING_LOCALE_FILE=Error importing locale files. 
+SolutionImportHandler.INFO_END_IMPORT_LOCALEFILE=****************************[ End: Import Locale File(s) ] **********************************
+SolutionImportHandler.INFO_END_IMPORT_FILEFOLDER=*********************** [ End: Import File/Folder(s) ] ***********************************
+SolutionImportHandler.INFO_START_IMPORT_DATASOURCE=****************************[ Start: Import DataSource(s) ] **********************************
+SolutionImportHandler.INFO_COUNT_DATASOURCE=Found [ {0} ] DataSource(s) to import
+SolutionImportHandler.ERROR_IMPORTING_JDBC_DATASOURCE=Error importing JDBC DataSource [ {0} ]. Cause [ {1} ]
+SolutionImportHandler.INFO_END_IMPORT_DATASOURCE=****************************[ End: Import DataSource(s) ] **********************************
+SolutionImportHandler.INFO_SUCCESSFUL_DATASOURCE_IMPORT_COUNT=Successfully imported [ {0} ] out of [ {1} ]
+SolutionImportHandler.INFO_START_IMPORT_SCHEDULE=*********************** [ Start: Import Schedule(s) ] **************************************
+SolutionImportHandler.INFO_END_IMPORT_SCHEDULE=*********************** [ End: Import Schedule(s) ] **************************************
+SolutionImportHandler.INFO_COUNT_SCHEDULUE=Found {0} schedules in the manifest
+SolutionImportHandler.ERROR_IMPORTING_SCHEDULE=Unable to import schedule [ {0} ]. Cause [ {1} ]
+SolutionImportHandler.INFO_SUCCESSFUL_SCHEDULE_IMPORT_COUNT=Successfully imported [ {0} ] out of [ {1} ]
+SolutionImportHandler.INFO_START_IMPORT_METASTORE=********************** [ Start: Import MetaStore ] ******************************************
+SolutionImportHandler.INFO_END_IMPORT_METASTORE=********************** [ End: Import MetaStore ] ******************************************
+SolutionImportHandler.INFO_SUCCESSFUL_IMPORT_METASTORE=Successfully imported metastore
+SolutionImportHandler.INFO_START_IMPORT_USER=******************************* [Start Import User(s)] ***************************
+SolutionImportHandler.INFO_COUNT_USER=Found [ {0} ] users to import
+SolutionImportHandler.INFO_SUCCESSFUL_USER_COUNT=Successfully imported [ {0} ] out of [ {1} ] user(s)
+SolutionImportHandler.INFO_END_IMPORT_USER=****************************** [End Import User(s)] ***************************
+SolutionImportHandler.INFO_START_IMPORT_USER_SETTING=************************[ Start: Import user specific settings] *************************
+SolutionImportHandler.INFO_COUNT_USER_SETTING=Found [ {0} ] user specific settings for user [ {1} ]
+SolutionImportHandler.INFO_SUCCESSFUL_USER_SETTING_IMPORT_COUNT=Successfully imported [ {0} ] out of [ {1} ] user specific settings
+SolutionImportHandler.INFO_SUCCESSFUL_ROLE_COUNT=Successfully imported [ {0} ] out of [ {1} ] roles
+SolutionImportHandler.INFO_START_IMPORT_ROLE=*********************** [ Start: Import Role(s) ] ***************************************
+SolutionImportHandler.INFO_END_IMPORT_ROLE=*********************** [ End: Import Role(s) ] ***************************************
+SolutionImportHandler.INFO_START_IMPORT_METADATA_DATASOURCE=*********************** [ Start: Import Metadata DataSource(s) ] *****************************
+SolutionImportHandler.INFO_COUNT_METADATA_DATASOURCE=Found [ {0} ] metadata models to import
+SolutionImportHandler.INFO_SUCCESSFUL_METDATA_DATASOURCE_COUNT=Successfully imported [ {0} ] out of [ {1} ] metadata models
+SolutionImportHandler.INFO_END_IMPORT_METADATA_DATASOURCE=*********************** [ End: Import Metadata DataSource(s) ] *****************************
+SolutionImportHandler.INFO_START_IMPORT_MONDRIAN_DATASOURCE=*********************** [ Start: Import Mondrian DataSource(s) ] *****************************
+SolutionImportHandler.INFO_COUNT_MONDRIAN_DATASOURCE=Found [ {0} ] Mondrian schemas to import
+SolutionImportHandler.INFO_SUCCESSFUL_MONDRIAN_DATASOURCE_IMPORT_COUNT=Successfully imported [ {0} ] out of [ {1} ] DataSource(s)
+SolutionImportHandler.INFO_END_IMPORT_MONDRIAN_DATASOURCE=*********************** [ End: Import Mondrian DataSource(s) ] *****************************
+SolutionImportHandler.INFO_START_IMPORT_REPOSITORY_OBJECT=****************************** [ Start: Import Repository File/Folder(s) ] **********************************
+SolutionImportHandler.INFO_END_IMPORT_REPOSITORY_OBJECT=****************************** [ End: Import Repository File/Folder(s) ] **********************************
+SolutionImportHandler.ERROR_NOT_=This is not a valid file name. Failing the import
 PentahoPlatformExporter.UNSUPPORTED_JobTrigger=Unsupported JobTrigger encountered during export, skipping it: {0}
 PentahoPlatformExporter.ERROR_EXPORTING_JOBS=There was an error while exporting scheduled jobs
 ScheduleExportUtil.JOB_MUST_NOT_BE_NULL=Job can not be null
-
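Most of these bundle entries carry positional placeholders, and they are easy to get wrong with a stray closing brace: java.text.MessageFormat treats an unmatched "}" as a literal, so a pattern like "[ {0}} ]" renders as the value followed by "}". A quick sanity check, assuming the Messages helper delegates to MessageFormat:

import java.text.MessageFormat;

public class ImportMessageSketch {
  public static void main( String[] args ) {
    String pattern = "Successfully imported [ {0} ] out of [ {1} ]";
    // -> Successfully imported [ 9 ] out of [ 10 ]
    System.out.println( MessageFormat.format( pattern, 9, 10 ) );

    // A stray brace would have produced "[ 9} ]" instead:
    System.out.println( MessageFormat.format( "Successfully imported [ {0}} ]", 9 ) );
  }
}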
+PentahoPlatformExporter.INFO_START_EXPORT_JDBC_DATASOURCE=*************************** [ Start: Export JDBC Datasource(s) ] *******************************
+PentahoPlatformExporter.INFO_COUNT_JDBC_DATASOURCE_TO_EXPORT=Found [ {0} ] JDBC DataSource(s) to export
+PentahoPlatformExporter.INFO_SUCCESSFUL_JDBC_DATASOURCE_EXPORT_COUNT=Successfully exported [ {0} ] out of [ {1} ] JDBC DataSource(s)
+PentahoPlatformExporter.INFO_END_EXPORT_JDBC_DATASOURCE=*************************** [ End: Export JDBC Datasource(s) ] *******************************
+PentahoPlatformExporter.INFO_START_EXPORT_REPOSITORY_OBJECT=********************************* [ Start: Export repository File(s)/Folder(s) ] *******************************
+PentahoPlatformExporter.INFO_END_EXPORT_REPOSITORY_OBJECT=********************************* [ End: Export repository File(s)/Folder(s) ] *******************************
+PentahoPlatformExporter.ERROR_EXPORT_REPOSITORY_OBJECT=Error while exporting file [ {0} ]
+PentahoPlatformExporter.INFO_START_EXPORT_USER=********************************* [ Start: Export User(s) ] *******************************
+PentahoPlatformExporter.INFO_COUNT_USER_TO_EXPORT=Found [ {0} ] User(s) to export
+PentahoPlatformExporter.INFO_SUCCESSFUL_USER_EXPORT_COUNT=Successfully exported [ {0} ] out of [ {1} ] User(s)
+PentahoPlatformExporter.INFO_END_EXPORT_USER=********************************* [ End: Export User(s) ] *******************************
+PentahoPlatformExporter.INFO_START_EXPORT_ROLE=********************************* [ Start: Export Role(s) ] *******************************
+PentahoPlatformExporter.INFO_COUNT_ROLE_TO_EXPORT=Found [ {0} ] Role(s) to export
+PentahoPlatformExporter.INFO_SUCCESSFUL_ROLE_EXPORT_COUNT=Successfully exported [ {0} ] out of [ {1} ] Role(s)
+PentahoPlatformExporter.INFO_END_EXPORT_ROLE=********************************* [ End: Export Role(s) ] *******************************
+PentahoPlatformExporter.INFO_START_EXPORT_METASTORE=********************************* [ Start: Export Metastore ] *******************************
+PentahoPlatformExporter.INFO_END_EXPORT_METASTORE=********************************* [ End: Export Metastore ] *******************************
+PentahoPlatformExporter.INFO_SUCCESSFUL_EXPORT_METASTORE=Finished adding the metastore to the export manifest
+PentahoPlatformExporter.INFO_START_EXPORT_MONDRIAN_DATASOURCE=********************************* [ Start: Export Mondrian datasource(s) ] *******************************
+PentahoPlatformExporter.INFO_COUNT_MONDRIAN_DATASOURCE_TO_EXPORT=Found [ {0} ] Mondrian DataSource(s) to export
+PentahoPlatformExporter.INFO_SUCCESSFUL_MONDRIAN_DATASOURCE_EXPORT_COUNT=Successfully exported [ {0} ] out of [ {1} ] Mondrian DataSource(s)
+PentahoPlatformExporter.ERROR_MONDRIAN_DATASOURCE_EXPORT=Error exporting Mondrian DataSource. Cause [ {0} ]
+PentahoPlatformExporter.INFO_END_EXPORT_MONDRIAN_DATASOURCE=********************************* [ End: Export Mondrian datasource(s) ] *******************************
+PentahoPlatformExporter.INFO_START_EXPORT_METADATA=********************************* [ Start: Export Metadata datasource(s) ] *******************************
+PentahoPlatformExporter.INFO_COUNT_METADATA_DATASOURCE_TO_EXPORT=Found [ {0} ] Metadata DataSource(s) to export
+PentahoPlatformExporter.INFO_SUCCESSFUL_METADATA_DATASOURCE_EXPORT_COUNT=Successfully exported [ {0} ] out of [ {1} ] Metadata DataSource(s)
+PentahoPlatformExporter.ERROR_METADATA_DATASOURCE_EXPORT=Error exporting Metadata DataSource. Cause [ {0} ]
+PentahoPlatformExporter.INFO_END_EXPORT_METADATA=*********************** [ End: Export metadata datasource(s) ]*************************
+PentahoPlatformExporter.ERROR_EXPORT_FILE_CONTENT=Error while exporting file content. Cause {0}
+PentahoPlatformExporter.ERROR_GENERATING_EXPORT_XML=Error generating export XML
+PentahoPlatformExporter.INFO_END_EXPORT_PROCESS=End: Export process
+PentahoPlatformExporter.INFO_START_EXPORT_PROCESS=Start: Export process
 ERROR.Encrypting_Password=Could not encrypt password for user {0}
 ERROR.Restoring_Password=Could restore password for user {0}. Setting a temporary password.
 ERROR.CreatingUser=Could not create user {0}.
diff --git a/extensions/src/main/resources/org/pentaho/platform/web/http/messages/messages.properties b/extensions/src/main/resources/org/pentaho/platform/web/http/messages/messages.properties
index e14d2b9bc4d..372fc237ffc 100644
--- a/extensions/src/main/resources/org/pentaho/platform/web/http/messages/messages.properties
+++ b/extensions/src/main/resources/org/pentaho/platform/web/http/messages/messages.properties
@@ -152,4 +152,5 @@ FileResource.INCORRECT_EXTENSION={0} has incorrect extension.
 FileResource.HOME_FOLDER_DISPLAY_TITLE=Home
 FileResource.PUBLIC_FOLDER_DISPLAY_TITLE=Public
-RepositoryResource.USER_NOT_AUTHORIZED_TO_EDIT=User is not authorized to edit the content. Please contact your system administrator.
\ No newline at end of file
+RepositoryResource.USER_NOT_AUTHORIZED_TO_EDIT=User is not authorized to edit the content. Please contact your system administrator.
+FileService.ERROR_INVALID_LOG_FILENAME=Invalid log file name {0}
\ No newline at end of file
diff --git a/extensions/src/test/java/org/pentaho/platform/plugin/services/exporter/PentahoPlatformExporterTest.java b/extensions/src/test/java/org/pentaho/platform/plugin/services/exporter/PentahoPlatformExporterTest.java
index 65ca89a6fbd..dc8c88f3e1b 100644
--- a/extensions/src/test/java/org/pentaho/platform/plugin/services/exporter/PentahoPlatformExporterTest.java
+++ b/extensions/src/test/java/org/pentaho/platform/plugin/services/exporter/PentahoPlatformExporterTest.java
@@ -13,6 +13,7 @@
 package org.pentaho.platform.plugin.services.exporter;
 
+import org.apache.logging.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -29,13 +30,16 @@
 import org.pentaho.platform.api.scheduler2.IScheduler;
 import org.pentaho.platform.api.usersettings.IAnyUserSettingService;
 import org.pentaho.platform.api.usersettings.pojo.IUserSetting;
+import org.pentaho.platform.api.util.IRepositoryExportLogger;
 import org.pentaho.platform.engine.core.system.PentahoSessionHolder;
 import org.pentaho.platform.engine.core.system.PentahoSystem;
 import org.pentaho.platform.plugin.action.mondrian.catalog.IMondrianCatalogService;
 import org.pentaho.platform.plugin.action.mondrian.catalog.MondrianCatalog;
 import org.pentaho.platform.plugin.services.importexport.ExportManifestUserSetting;
+import org.pentaho.platform.plugin.services.importexport.Log4JRepositoryExportLogger;
+import org.pentaho.platform.plugin.services.importexport.RepositoryTextLayout;
 import org.pentaho.platform.plugin.services.importexport.RoleExport;
 import org.pentaho.platform.plugin.services.importexport.UserExport;
 import org.pentaho.platform.plugin.services.importexport.exportManifest.ExportManifest;
 import org.pentaho.platform.plugin.services.importexport.exportManifest.bindings.DatabaseConnection;
 import org.pentaho.platform.plugin.services.importexport.exportManifest.bindings.ExportManifestMetaStore;
@@ -48,6 +52,7 @@
 import org.springframework.security.core.userdetails.UserDetails;
 import org.springframework.security.core.userdetails.UserDetailsService;
 
+import java.io.ByteArrayOutputStream;
 import java.io.InputStream;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -98,6 +103,9 @@ public void setUp() throws Exception {
     doReturn( "session name" ).when( session ).getName();
 
     exporter = new PentahoPlatformExporter( repo );
+    // use a real export logger so exporter methods do not NPE
+    IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+    exporter.setRepositoryExportLogger( exportLogger );
   }
 
   @After
@@ -154,7 +162,13 @@ public void testExportUsersAndRoles() {
     UserDetails userDetails = new User( "testUser", "testPassword", true, true, true, true, authList );
 
     when( userDetailsService.loadUserByUsername( nullable( String.class ) ) ).thenReturn( userDetails );
+    // use a real export logger so exporter methods do not NPE
+    IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+    exporter.setRepositoryExportLogger( exportLogger );
     exporter.exportUsersAndRoles();
+    exportLogger.endJob();
 
     verify( manifest ).addUserExport( userCaptor.capture() );
     verify( manifest ).addRoleExport( roleCaptor.capture() );
@@ -172,8 +186,13 @@ public void testExportMetadata_noModels() throws Exception {
     IMetadataDomainRepository mdr = mock( IMetadataDomainRepository.class );
     exporter.setMetadataDomainRepository( mdr );
-
+    // use a real export logger so exporter methods do not NPE
+    IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+    exporter.setRepositoryExportLogger( exportLogger );
     exporter.exportMetadataModels();
+    exportLogger.endJob();
 
     assertEquals( 0, exporter.getExportManifest().getMetadataList().size() );
   }
@@ -194,8 +213,13 @@ public void testExportMetadata() throws Exception {
     inputMap.put( "test1", is );
     doReturn( inputMap ).when( exporterSpy ).getDomainFilesData( "test1" );
-
+    // use a real export logger so exporter methods do not NPE
+    IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+    exporterSpy.setRepositoryExportLogger( exportLogger );
     exporterSpy.exportMetadataModels();
+    exportLogger.endJob();
 
     assertEquals( 1, exporterSpy.getExportManifest().getMetadataList().size() );
     assertEquals( "test1", exporterSpy.getExportManifest().getMetadataList().get( 0 ).getDomainId() );
@@ -217,8 +241,13 @@ public void testExportDatasources() throws Exception {
     datasources.add( icon );
     when( svc.getDatasources() ).thenReturn( datasources );
-
+    // use a real export logger so exporter methods do not NPE
+    IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+    exporterSpy.setRepositoryExportLogger( exportLogger );
     exporterSpy.exportDatasources();
+    exportLogger.endJob();
 
     assertEquals( 1, exporterSpy.getExportManifest().getDatasourceList().size() );
     DatabaseConnection exportedDatabaseConnection = exporterSpy.getExportManifest().getDatasourceList().get( 0 );
@@ -244,9 +273,13 @@ public void testParseXmlaEnabled() throws Exception {
   public void testExportMondrianSchemas_noCatalogs() throws Exception {
     PentahoSystem.registerObject( mondrianCatalogService );
     exporterSpy.setMondrianCatalogRepositoryHelper( mondrianCatalogRepositoryHelper );
-
+    // use a real export logger so exporter methods do not NPE
+    IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+    exporterSpy.setRepositoryExportLogger( exportLogger );
     exporterSpy.exportMondrianSchemas();
-
+    exportLogger.endJob();
     verify( exportManifest, never() ).addMondrian( ArgumentMatchers.any( ExportManifestMondrian.class ) );
     verify( mondrianCatalogRepositoryHelper, never() ).getModrianSchemaFiles( nullable( String.class ) );
   }
@@ -316,8 +349,13 @@ private void executeExportMondrianSchemasForDataSourceInfo( String catalogName,
     inputMap.put( catalogName, is );
     when( mondrianCatalogRepositoryHelper.getModrianSchemaFiles( catalogName ) ).thenReturn( inputMap );
     exporterSpy.zos = mock( ZipOutputStream.class );
-
+    // use a real export logger so exporter methods do not NPE
+    IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+    exporterSpy.setRepositoryExportLogger( exportLogger );
     exporterSpy.exportMondrianSchemas();
+    exportLogger.endJob();
   }
 
@@ -327,8 +365,13 @@ public void testExportMetaStore() throws Exception {
     exporterSpy.setRepoMetaStore( metastore );
     ExportManifest manifest = mock( ExportManifest.class );
     exporterSpy.setExportManifest( manifest );
-
+    // use a real export logger so exporter methods do not NPE
+    IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+    exporterSpy.setRepositoryExportLogger( exportLogger );
     exporterSpy.exportMetastore();
+    exportLogger.endJob();
     verify( exporterSpy.zos ).putNextEntry( ArgumentMatchers.any( ZipEntry.class ) );
     verify( manifest ).setMetaStore( ArgumentMatchers.any( ExportManifestMetaStore.class ) );
   }
diff --git a/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/BaseExportProcessorTest.java b/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/BaseExportProcessorTest.java
index 896ae97029b..78310edc0dc 100644
--- a/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/BaseExportProcessorTest.java
+++ b/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/BaseExportProcessorTest.java
@@ -33,6 +33,7 @@
 import org.junit.Before;
 import org.junit.Test;
 import org.pentaho.platform.api.repository2.unified.RepositoryFile;
+import org.pentaho.platform.api.importexport.ExportException;
 
 import java.io.File;
 import java.io.IOException;
diff --git a/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessorTest.java b/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessorTest.java
index ceeb44abef3..4f90a02fb79 100644
--- a/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessorTest.java
+++ b/extensions/src/test/java/org/pentaho/platform/plugin/services/importexport/ZipExportProcessorTest.java
@@ -23,11 +23,12 @@
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.mock;
 
 import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -40,6 +41,7 @@
 import java.util.zip.ZipInputStream;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.logging.log4j.Level;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -63,6 +65,7 @@
 import org.pentaho.platform.api.repository2.unified.RepositoryFileSid.Type;
 import org.pentaho.platform.api.repository2.unified.RepositoryRequest;
 import org.pentaho.platform.api.repository2.unified.data.simple.SimpleRepositoryFileData;
+import org.pentaho.platform.api.util.IRepositoryExportLogger;
 import org.pentaho.platform.core.mimetype.MimeType;
 import org.pentaho.platform.engine.core.system.PentahoSessionHolder;
 import org.pentaho.platform.engine.core.system.PentahoSystem;
@@ -300,8 +303,13 @@ public void testPerformExport_withoutManifest() throws Exception {
     RepositoryFile expFolder = repo.getFile( expFolderPath );
     assertNotNull( expFolder );
+    // use a real export logger so the export does not NPE
+    IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+    zipNoMF.setRepositoryExportLogger( exportLogger );
     File result = zipNoMF.performExport( repo.getFile( expFolderPath ) );
-
+    exportLogger.endJob();
 
     Set zipEntriesFiles = extractZipEntries( result );
 
     final String[] expectedEntries = new String[] { "two words/eval (+)%.prpt", "two words/eval (+)%.prpt_en.locale",
       "two words/index_en.locale" };
@@ -321,7 +329,12 @@ public void testPerformExport_withManifest() throws Exception {
     RepositoryFile expFolder = repo.getFile( expFolderPath );
     assertNotNull( expFolder );
-
+    // use a real export logger so the export does not NPE; end the job only
+    // after performExport() so the export actually logs into the open job
+    IRepositoryExportLogger exportLogger = new Log4JRepositoryExportLogger();
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    exportLogger.startJob( outputStream, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
+    zipMF.setRepositoryExportLogger( exportLogger );
     File result = zipMF.performExport( repo.getFile( expFolderPath ) );
+    exportLogger.endJob();
 
     Set zipEntriesFiles = extractZipEntries( result );
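
Note on the pattern above: every touched test follows the same IRepositoryExportLogger lifecycle that this patch introduces. A log is bound to an OutputStream with startJob(), the export runs and writes into that log, and endJob() flushes the closing entry and releases the log associated with the current thread. The sketch below illustrates that lifecycle outside the test harness. It is illustrative only: the demo class name is hypothetical, and it assumes, as the test imports suggest, that Log4JRepositoryExportLogger and RepositoryTextLayout live in org.pentaho.platform.plugin.services.importexport.

    import java.io.ByteArrayOutputStream;

    import org.apache.logging.log4j.Level;
    import org.pentaho.platform.api.util.IRepositoryExportLogger;
    import org.pentaho.platform.plugin.services.importexport.Log4JRepositoryExportLogger;
    import org.pentaho.platform.plugin.services.importexport.RepositoryTextLayout;

    // Hypothetical demo class, not part of the patch.
    public class ExportLoggerLifecycleDemo {
      public static void main( String[] args ) {
        IRepositoryExportLogger logger = new Log4JRepositoryExportLogger();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // startJob() creates a new log bound to the current thread and writes
        // entries to the supplied stream using the given layout.
        logger.startJob( out, Level.INFO, new RepositoryTextLayout( Level.INFO ) );
        try {
          logger.info( "Start: Export process" );
          // ... the export work itself would run here ...
          logger.info( "End: Export process" );
        } finally {
          // endJob() writes the closing entry and releases the per-thread log;
          // skipping it leaks the log, so it belongs in a finally block.
          logger.endJob();
        }
        System.out.print( out.toString() );
      }
    }

The try/finally makes explicit what the tests do inline; it also explains why the withManifest test above had to call endJob() after performExport() rather than before it.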