pax_global_header00006660000000000000000000000064114540436050014514gustar00rootroot0000000000000052 comment=36105c2878ddffa3d670929d24a5e3ccf99f4412 plexus-archiver-plexus-archiver-1.2/000077500000000000000000000000001145404360500176165ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/pom.xml000066400000000000000000000037731145404360500211450ustar00rootroot00000000000000 4.0.0 plexus-components org.codehaus.plexus 1.1.18 plexus-archiver 1.2 Plexus Archiver Component scm:svn:http://svn.codehaus.org/plexus/plexus-components/tags/plexus-archiver-1.2 scm:svn:https://svn.codehaus.org/plexus/plexus-components/tags/plexus-archiver-1.2 http://fisheye.codehaus.org/browse/plexus/plexus-components/tags/plexus-archiver-1.2 false Dan Tran Richard van der Hoff org.codehaus.plexus plexus-container-default org.codehaus.plexus plexus-utils 2.0.5 org.codehaus.plexus plexus-io 1.0.1 org.apache.maven.plugins maven-surefire-plugin 2.6 ${useJvmChmod} plexus-archiver-plexus-archiver-1.2/src/000077500000000000000000000000001145404360500204055ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/000077500000000000000000000000001145404360500213315ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/000077500000000000000000000000001145404360500222525ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/000077500000000000000000000000001145404360500230415ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/000077500000000000000000000000001145404360500246345ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/000077500000000000000000000000001145404360500261545ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/000077500000000000000000000000001145404360500277575ustar00rootroot00000000000000AbstractArchiveFinalizer.java000066400000000000000000000006211145404360500354530u
star00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiverpackage org.codehaus.plexus.archiver; public abstract class AbstractArchiveFinalizer implements ArchiveFinalizer { protected AbstractArchiveFinalizer() { } public void finalizeArchiveCreation( Archiver archiver ) throws ArchiverException { } public void finalizeArchiveExtraction( UnArchiver unarchiver ) throws ArchiverException { } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/AbstractArchiver.java000066400000000000000000000716221145404360500340610ustar00rootroot00000000000000package org.codehaus.plexus.archiver; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import org.codehaus.plexus.PlexusConstants; import org.codehaus.plexus.PlexusContainer; import org.codehaus.plexus.archiver.manager.ArchiverManager; import org.codehaus.plexus.archiver.manager.NoSuchArchiverException; import org.codehaus.plexus.archiver.util.DefaultArchivedFileSet; import org.codehaus.plexus.archiver.util.DefaultFileSet; import org.codehaus.plexus.archiver.util.FilterSupport; import org.codehaus.plexus.component.repository.exception.ComponentLookupException; import org.codehaus.plexus.components.io.attributes.PlexusIoResourceAttributes; import org.codehaus.plexus.components.io.resources.PlexusIoArchivedResourceCollection; import org.codehaus.plexus.components.io.resources.PlexusIoFileResourceCollection; import org.codehaus.plexus.components.io.resources.PlexusIoResource; import org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection; import org.codehaus.plexus.components.io.resources.PlexusIoResourceWithAttributes; import org.codehaus.plexus.components.io.resources.proxy.PlexusIoProxyResourceCollection; import org.codehaus.plexus.context.Context; import org.codehaus.plexus.context.ContextException; import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.logging.Logger; import org.codehaus.plexus.logging.console.ConsoleLogger; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Contextualizable; import org.codehaus.plexus.util.IOUtil; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.UndeclaredThrowableException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; /** * @version $Id$ */ public abstract class AbstractArchiver extends AbstractLogEnabled implements Archiver, Contextualizable, FilterEnabled, FinalizerEnabled { private Logger logger; private 
File destFile; /** * A list of the following objects: * */ private final List resources = new ArrayList(); private boolean includeEmptyDirs = true; private int fileMode = -1; private int directoryMode = -1; private int defaultFileMode = -1; private int defaultDirectoryMode = -1; private boolean forced = true; private FilterSupport filterSupport; private List finalizers; private File dotFileDirectory; private String duplicateBehavior = Archiver.DUPLICATES_SKIP; /** * @since 1.1 */ private boolean useJvmChmod = false; // contextualized. private ArchiverManager archiverManager; /** * @since 1.1 */ private boolean ignorePermissions = false; public String getDuplicateBehavior() { return duplicateBehavior; } public void setDuplicateBehavior( final String duplicate ) { if ( !Archiver.DUPLICATES_VALID_BEHAVIORS.contains( duplicate ) ) { throw new IllegalArgumentException( "Invalid duplicate-file behavior: \'" + duplicate + "\'. Please specify one of: " + Archiver.DUPLICATES_VALID_BEHAVIORS ); } duplicateBehavior = duplicate; } public final void setFileMode( final int mode ) { fileMode = ( mode & UnixStat.PERM_MASK ) | UnixStat.FILE_FLAG; } public final void setDefaultFileMode( final int mode ) { defaultFileMode = ( mode & UnixStat.PERM_MASK ) | UnixStat.FILE_FLAG; } public final int getOverrideFileMode() { return fileMode; } public final int getFileMode() { if ( fileMode < 0 ) { if ( defaultFileMode < 0 ) { return DEFAULT_FILE_MODE; } return defaultFileMode; } return fileMode; } public final int getDefaultFileMode() { return defaultFileMode; } /** * @deprecated Use {@link Archiver#getDefaultFileMode()}. 
*/ public final int getRawDefaultFileMode() { return getDefaultFileMode(); } public final void setDirectoryMode( final int mode ) { directoryMode = ( mode & UnixStat.PERM_MASK ) | UnixStat.DIR_FLAG; } public final void setDefaultDirectoryMode( final int mode ) { defaultDirectoryMode = ( mode & UnixStat.PERM_MASK ) | UnixStat.DIR_FLAG; } public final int getOverrideDirectoryMode() { return directoryMode; } public final int getDirectoryMode() { if ( directoryMode < 0 ) { if ( defaultDirectoryMode < 0 ) { return DEFAULT_DIR_MODE; } return defaultDirectoryMode; } return directoryMode; } public final int getDefaultDirectoryMode() { return defaultDirectoryMode; } /** * @deprecated Use {@link Archiver#getDefaultDirectoryMode()}. */ public final int getRawDefaultDirectoryMode() { return getDefaultDirectoryMode(); } public boolean getIncludeEmptyDirs() { return includeEmptyDirs; } public void setIncludeEmptyDirs( final boolean includeEmptyDirs ) { this.includeEmptyDirs = includeEmptyDirs; } public void addDirectory( final File directory ) throws ArchiverException { addDirectory( directory, "" ); } public void addDirectory( final File directory, final String prefix ) throws ArchiverException { addDirectory( directory, prefix, null, null ); } public void addDirectory( final File directory, final String[] includes, final String[] excludes ) throws ArchiverException { addDirectory( directory, "", includes, excludes ); } public void addDirectory( final File directory, final String prefix, final String[] includes, final String[] excludes ) throws ArchiverException { final DefaultFileSet fileSet = new DefaultFileSet(); fileSet.setDirectory( directory ); fileSet.setPrefix( prefix ); fileSet.setIncludes( includes ); fileSet.setExcludes( excludes ); fileSet.setIncludingEmptyDirectories( includeEmptyDirs ); addFileSet( fileSet ); } public void addFileSet( final FileSet fileSet ) throws ArchiverException { final File directory = fileSet.getDirectory(); if ( directory == null ) { throw 
new ArchiverException( "The file sets base directory is null." ); } if ( !directory.isDirectory() ) { throw new ArchiverException( directory.getAbsolutePath() + " isn't a directory." ); } final PlexusIoFileResourceCollection collection = new PlexusIoFileResourceCollection( getLogger() ); collection.setIncludes( fileSet.getIncludes() ); collection.setExcludes( fileSet.getExcludes() ); collection.setBaseDir( directory ); collection.setFileSelectors( fileSet.getFileSelectors() ); collection.setIncludingEmptyDirectories( fileSet.isIncludingEmptyDirectories() ); collection.setPrefix( fileSet.getPrefix() ); collection.setCaseSensitive( fileSet.isCaseSensitive() ); collection.setUsingDefaultExcludes( fileSet.isUsingDefaultExcludes() ); if ( getOverrideDirectoryMode() > -1 || getOverrideFileMode() > -1 ) { collection.setOverrideAttributes( -1, null, -1, null, getOverrideFileMode(), getOverrideDirectoryMode() ); } if ( getDefaultDirectoryMode() > -1 || getDefaultFileMode() > -1 ) { collection.setDefaultAttributes( -1, null, -1, null, getDefaultFileMode(), getDefaultDirectoryMode() ); } addResources( collection ); } public void addFile( final File inputFile, final String destFileName ) throws ArchiverException { final int fileMode = getOverrideFileMode(); addFile( inputFile, destFileName, fileMode ); } protected ArchiveEntry asArchiveEntry( final PlexusIoResource resource, final String destFileName, final int permissions ) throws ArchiverException { if ( !resource.isExisting() ) { throw new ArchiverException( resource.getName() + " not found." 
); } if ( resource.isFile() ) { return ArchiveEntry.createFileEntry( destFileName, resource, permissions ); } else { return ArchiveEntry.createDirectoryEntry( destFileName, resource, permissions ); } } protected ArchiveEntry asArchiveEntry( final PlexusIoResourceCollection collection, final PlexusIoResource resource ) throws ArchiverException { try { final String destFileName = collection.getName( resource ); int permissions = -1; if ( resource instanceof PlexusIoResourceWithAttributes ) { final PlexusIoResourceAttributes attrs = ( (PlexusIoResourceWithAttributes) resource ).getAttributes(); if ( attrs != null ) { permissions = attrs.getOctalMode(); } } return asArchiveEntry( resource, destFileName, permissions ); } catch ( final IOException e ) { throw new ArchiverException( e.getMessage(), e ); } } public void addResource( final PlexusIoResource resource, final String destFileName, final int permissions ) throws ArchiverException { resources.add( asArchiveEntry( resource, destFileName, permissions ) ); } public void addFile( final File inputFile, String destFileName, int permissions ) throws ArchiverException { if ( !inputFile.isFile() || !inputFile.exists() ) { throw new ArchiverException( inputFile.getAbsolutePath() + " isn't a file." ); } FileInputStream fileStream = null; destFileName = destFileName.replace( '\\', '/' ); if ( permissions < 0 ) { permissions = getOverrideFileMode(); } try { // do a null check here, to avoid creating a file stream if there are no filters... 
if ( filterSupport != null ) { fileStream = new FileInputStream( inputFile ); if ( include( fileStream, destFileName ) ) { resources.add( ArchiveEntry.createFileEntry( destFileName, inputFile, permissions ) ); } } else { resources.add( ArchiveEntry.createFileEntry( destFileName, inputFile, permissions ) ); } } catch ( final IOException e ) { throw new ArchiverException( "Failed to determine inclusion status for: " + inputFile, e ); } catch ( final ArchiveFilterException e ) { throw new ArchiverException( "Failed to determine inclusion status for: " + inputFile, e ); } finally { IOUtil.close( fileStream ); } } public ResourceIterator getResources() throws ArchiverException { return new ResourceIterator() { private final Iterator addedResourceIter = resources.iterator(); private PlexusIoResourceCollection currentResourceCollection; private Iterator ioResourceIter; private ArchiveEntry nextEntry; private final Set seenEntries = new HashSet(); public boolean hasNext() throws ArchiverException { if ( nextEntry == null ) { if ( ioResourceIter == null ) { if ( addedResourceIter.hasNext() ) { final Object o = addedResourceIter.next(); if ( o instanceof ArchiveEntry ) { nextEntry = (ArchiveEntry) o; } else if ( o instanceof PlexusIoResourceCollection ) { currentResourceCollection = (PlexusIoResourceCollection) o; try { ioResourceIter = currentResourceCollection.getResources(); } catch ( final IOException e ) { throw new ArchiverException( e.getMessage(), e ); } return hasNext(); } else { throw new IllegalStateException( "An invalid resource of type: " + o.getClass() .getName() + " was added to archiver: " + getClass().getName() ); } } else { nextEntry = null; } } else { if ( ioResourceIter.hasNext() ) { final PlexusIoResource resource = (PlexusIoResource) ioResourceIter.next(); nextEntry = asArchiveEntry( currentResourceCollection, resource ); } else { ioResourceIter = null; return hasNext(); } } } if ( nextEntry != null && seenEntries.contains( nextEntry.getName() ) ) { 
final String path = nextEntry.getName(); if ( Archiver.DUPLICATES_PRESERVE.equals( duplicateBehavior ) || Archiver.DUPLICATES_SKIP.equals( duplicateBehavior ) ) { getLogger().info( path + " already added, skipping" ); nextEntry = null; return hasNext(); } else if ( Archiver.DUPLICATES_FAIL.equals( duplicateBehavior ) ) { throw new ArchiverException( "Duplicate file " + path + " was found and the duplicate " + "attribute is 'fail'." ); } else { // duplicate equal to add, so we continue getLogger().debug( "duplicate file " + path + " found, adding." ); } } return nextEntry != null; } public ArchiveEntry next() throws ArchiverException { if ( !hasNext() ) { throw new NoSuchElementException(); } final ArchiveEntry next = nextEntry; nextEntry = null; seenEntries.add( next.getName() ); return next; } }; } public Map getFiles() { try { final Map map = new HashMap(); for ( final ResourceIterator iter = getResources(); iter.hasNext(); ) { final ArchiveEntry entry = iter.next(); if ( includeEmptyDirs || entry.getType() == ArchiveEntry.FILE ) { map.put( entry.getName(), entry ); } } return map; } catch ( final ArchiverException e ) { throw new UndeclaredThrowableException( e ); } } public File getDestFile() { return destFile; } public void setDestFile( final File destFile ) { this.destFile = destFile; if ( destFile != null ) { destFile.getParentFile() .mkdirs(); } } protected Logger getLogger() { if ( logger == null ) { if ( super.getLogger() != null ) { logger = super.getLogger(); } else { logger = new ConsoleLogger( Logger.LEVEL_INFO, "console" ); } } return logger; } public Map getDirs() { try { final Map map = new HashMap(); for ( final ResourceIterator iter = getResources(); iter.hasNext(); ) { final ArchiveEntry entry = iter.next(); if ( entry.getType() == ArchiveEntry.DIRECTORY ) { map.put( entry.getName(), entry ); } } return map; } catch ( final ArchiverException e ) { throw new UndeclaredThrowableException( e ); } } protected PlexusIoResourceCollection 
asResourceCollection( final ArchivedFileSet fileSet ) throws ArchiverException { final File archiveFile = fileSet.getArchive(); final PlexusIoResourceCollection resources; try { resources = archiverManager.getResourceCollection( archiveFile ); } catch ( final NoSuchArchiverException e ) { throw new ArchiverException( "Error adding archived file-set. PlexusIoResourceCollection not found for: " + archiveFile, e ); } if ( resources instanceof PlexusIoArchivedResourceCollection ) { ( (PlexusIoArchivedResourceCollection) resources ).setFile( fileSet.getArchive() ); } else { throw new ArchiverException( "Expected " + PlexusIoArchivedResourceCollection.class.getName() + ", got " + resources.getClass() .getName() ); } final PlexusIoProxyResourceCollection proxy = new PlexusIoProxyResourceCollection(); proxy.setSrc( resources ); proxy.setExcludes( fileSet.getExcludes() ); proxy.setIncludes( fileSet.getIncludes() ); proxy.setIncludingEmptyDirectories( fileSet.isIncludingEmptyDirectories() ); proxy.setCaseSensitive( fileSet.isCaseSensitive() ); proxy.setPrefix( fileSet.getPrefix() ); proxy.setUsingDefaultExcludes( fileSet.isUsingDefaultExcludes() ); proxy.setFileSelectors( fileSet.getFileSelectors() ); if ( getOverrideDirectoryMode() > -1 || getOverrideFileMode() > -1 ) { proxy.setOverrideAttributes( -1, null, -1, null, getOverrideFileMode(), getOverrideDirectoryMode() ); } if ( getDefaultDirectoryMode() > -1 || getDefaultFileMode() > -1 ) { proxy.setDefaultAttributes( -1, null, -1, null, getDefaultFileMode(), getDefaultDirectoryMode() ); } return proxy; } /** * Adds a resource collection to the archive. 
*/ public void addResources( final PlexusIoResourceCollection collection ) throws ArchiverException { resources.add( collection ); } public void addArchivedFileSet( final ArchivedFileSet fileSet ) throws ArchiverException { final PlexusIoResourceCollection resourceCollection = asResourceCollection( fileSet ); addResources( resourceCollection ); } /** * @since 1.0-alpha-7 */ public void addArchivedFileSet( final File archiveFile, final String prefix, final String[] includes, final String[] excludes ) throws ArchiverException { final DefaultArchivedFileSet fileSet = new DefaultArchivedFileSet(); fileSet.setArchive( archiveFile ); fileSet.setPrefix( prefix ); fileSet.setIncludes( includes ); fileSet.setExcludes( excludes ); fileSet.setIncludingEmptyDirectories( includeEmptyDirs ); addArchivedFileSet( fileSet ); } /** * @since 1.0-alpha-7 */ public void addArchivedFileSet( final File archiveFile, final String prefix ) throws ArchiverException { addArchivedFileSet( archiveFile, prefix, null, null ); } /** * @since 1.0-alpha-7 */ public void addArchivedFileSet( final File archiveFile, final String[] includes, final String[] excludes ) throws ArchiverException { addArchivedFileSet( archiveFile, null, includes, excludes ); } /** * @since 1.0-alpha-7 */ public void addArchivedFileSet( final File archiveFile ) throws ArchiverException { addArchivedFileSet( archiveFile, null, null, null ); } /** * Allows us to pull the ArchiverManager instance out of the container without causing a chicken-and-egg * instantiation/composition problem. 
*/ public void contextualize( final Context context ) throws ContextException { final PlexusContainer container = (PlexusContainer) context.get( PlexusConstants.PLEXUS_KEY ); try { archiverManager = (ArchiverManager) container.lookup( ArchiverManager.ROLE ); } catch ( final ComponentLookupException e ) { throw new ContextException( "Error retrieving ArchiverManager instance: " + e.getMessage(), e ); } } public boolean isForced() { return forced; } public void setForced( final boolean forced ) { this.forced = forced; } public void setArchiveFilters( final List filters ) { filterSupport = new FilterSupport( filters, getLogger() ); } public void addArchiveFinalizer( final ArchiveFinalizer finalizer ) { if ( finalizers == null ) { finalizers = new ArrayList(); } finalizers.add( finalizer ); } public void setArchiveFinalizers( final List archiveFinalizers ) { finalizers = archiveFinalizers; } public void setDotFileDirectory( final File dotFileDirectory ) { this.dotFileDirectory = dotFileDirectory; } protected boolean isUptodate() throws ArchiverException { final File zipFile = getDestFile(); final long destTimestamp = zipFile.lastModified(); if ( destTimestamp == 0 ) { getLogger().debug( "isUp2date: false (Destination " + zipFile.getPath() + " not found.)" ); return false; // File doesn't yet exist } final Iterator it = resources.iterator(); if ( !it.hasNext() ) { getLogger().debug( "isUp2date: false (No input files.)" ); return false; // No timestamp to compare } while ( it.hasNext() ) { final Object o = it.next(); final long l; if ( o instanceof ArchiveEntry ) { l = ( (ArchiveEntry) o ).getResource() .getLastModified(); } else if ( o instanceof PlexusIoResourceCollection ) { try { l = ( (PlexusIoResourceCollection) o ).getLastModified(); } catch ( final IOException e ) { throw new ArchiverException( e.getMessage(), e ); } } else { throw new IllegalStateException( "Invalid object type: " + o.getClass() .getName() ); } if ( l == 
PlexusIoResource.UNKNOWN_MODIFICATION_DATE ) { // Don't know what to do. Safe thing is to assume not up2date. getLogger().debug( "isUp2date: false (Resource with unknown modification date found.)" ); return false; } if ( l > destTimestamp ) { getLogger().debug( "isUp2date: false (Resource with newer modification date found.)" ); return false; } } getLogger().debug( "isUp2date: true" ); return true; } protected boolean checkForced() throws ArchiverException { if ( !isForced() && isSupportingForced() && isUptodate() ) { getLogger().debug( "Archive " + getDestFile() + " is uptodate." ); return false; } return true; } public boolean isSupportingForced() { return false; } protected List getArchiveFinalizers() { return finalizers; } protected void runArchiveFinalizers() throws ArchiverException { if ( finalizers != null ) { for ( final Iterator it = finalizers.iterator(); it.hasNext(); ) { final ArchiveFinalizer finalizer = (ArchiveFinalizer) it.next(); finalizer.finalizeArchiveCreation( this ); } } } private boolean include( final InputStream in, final String path ) throws ArchiveFilterException { return ( filterSupport == null ) || filterSupport.include( in, path ); } public final void createArchive() throws ArchiverException, IOException { validate(); try { try { if ( dotFileDirectory != null ) { addArchiveFinalizer( new DotDirectiveArchiveFinalizer( dotFileDirectory ) ); } runArchiveFinalizers(); execute(); } finally { close(); } } catch ( final IOException e ) { String msg = "Problem creating " + getArchiveType() + ": " + e.getMessage(); final StringBuffer revertBuffer = new StringBuffer(); if ( !revert( revertBuffer ) ) { msg += revertBuffer.toString(); } throw new ArchiverException( msg, e ); } finally { cleanUp(); } } protected boolean hasVirtualFiles() { if ( finalizers != null ) { for ( final Iterator it = finalizers.iterator(); it.hasNext(); ) { final ArchiveFinalizer finalizer = (ArchiveFinalizer) it.next(); final List virtualFiles = 
finalizer.getVirtualFiles(); if ( ( virtualFiles != null ) && !virtualFiles.isEmpty() ) { return true; } } } return false; } protected boolean revert( final StringBuffer messageBuffer ) { return true; } protected void validate() throws ArchiverException, IOException { } protected abstract String getArchiveType(); protected abstract void close() throws IOException; protected void cleanUp() { resources.clear(); } protected abstract void execute() throws ArchiverException, IOException; /** * @since 1.1 */ public boolean isUseJvmChmod() { return useJvmChmod; } /** * @since 1.1 */ public void setUseJvmChmod( final boolean useJvmChmod ) { this.useJvmChmod = useJvmChmod; } /** * @since 1.1 */ public boolean isIgnorePermissions() { return ignorePermissions; } /** * @since 1.1 */ public void setIgnorePermissions( final boolean ignorePermissions ) { this.ignorePermissions = ignorePermissions; } } AbstractUnArchiver.java000066400000000000000000000156761145404360500343140ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiverpackage org.codehaus.plexus.archiver; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import org.codehaus.plexus.archiver.util.FilterSupport; import org.codehaus.plexus.components.io.fileselectors.FileSelector; import org.codehaus.plexus.components.io.resources.PlexusIoResource; import org.codehaus.plexus.logging.AbstractLogEnabled; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Iterator; import java.util.List; /** * @author Emmanuel Venisse * @version $Revision$ $Date$ * @todo there should really be constructors which take the source file. */ public abstract class AbstractUnArchiver extends AbstractLogEnabled implements UnArchiver, FinalizerEnabled, FilterEnabled { private File destDirectory; private File destFile; private File sourceFile; private boolean overwrite = true; private FilterSupport filterSupport; private List finalizers; private FileSelector[] fileSelectors; /** * @since 1.1 */ private boolean useJvmChmod = false; /** * @since 1.1 */ private boolean ignorePermissions = false; public AbstractUnArchiver() { // no op } public AbstractUnArchiver( final File sourceFile ) { this.sourceFile = sourceFile; } public File getDestDirectory() { return destDirectory; } public void setDestDirectory( final File destDirectory ) { this.destDirectory = destDirectory; } public File getDestFile() { return destFile; } public void setDestFile( final File destFile ) { this.destFile = destFile; } public File getSourceFile() { return sourceFile; } public void setSourceFile( final File sourceFile ) { this.sourceFile = sourceFile; } public boolean isOverwrite() { return overwrite; } public void setOverwrite( final boolean b ) { overwrite = b; } public final void extract() throws ArchiverException { validate(); execute(); runArchiveFinalizers(); } public final void extract( final String path, final File outputDirectory ) throws ArchiverException { validate( path, outputDirectory ); execute( path, outputDirectory ); runArchiveFinalizers(); } public void setArchiveFilters( final List 
filters ) { filterSupport = new FilterSupport( filters, getLogger() ); } public void addArchiveFinalizer( final ArchiveFinalizer finalizer ) { if ( finalizers == null ) { finalizers = new ArrayList(); } finalizers.add( finalizer ); } public void setArchiveFinalizers( final List archiveFinalizers ) { finalizers = archiveFinalizers; } private final void runArchiveFinalizers() throws ArchiverException { if ( finalizers != null ) { for ( final Iterator it = finalizers.iterator(); it.hasNext(); ) { final ArchiveFinalizer finalizer = (ArchiveFinalizer) it.next(); finalizer.finalizeArchiveExtraction( this ); } } } protected boolean include( final InputStream inputStream, final String name ) throws ArchiveFilterException { return filterSupport == null || filterSupport.include( inputStream, name ); } protected void validate( final String path, final File outputDirectory ) { } protected void validate() throws ArchiverException { if ( sourceFile == null ) { throw new ArchiverException( "The source file isn't defined." ); } if ( sourceFile.isDirectory() ) { throw new ArchiverException( "The source must not be a directory." ); } if ( !sourceFile.exists() ) { throw new ArchiverException( "The source file " + sourceFile + " doesn't exist." ); } if ( destDirectory == null && destFile == null ) { throw new ArchiverException( "The destination isn't defined." ); } if ( destDirectory != null && destFile != null ) { throw new ArchiverException( "You must choose between a destination directory and a destination file." 
); } if ( destDirectory != null && !destDirectory.isDirectory() ) { destFile = destDirectory; destDirectory = null; } if ( destFile != null && destFile.isDirectory() ) { destDirectory = destFile; destFile = null; } } public void setFileSelectors( final FileSelector[] fileSelectors ) { this.fileSelectors = fileSelectors; } public FileSelector[] getFileSelectors() { return fileSelectors; } protected boolean isSelected( final String fileName, final PlexusIoResource fileInfo ) throws ArchiverException { if ( fileSelectors != null ) { for ( int i = 0; i < fileSelectors.length; i++ ) { try { if ( !fileSelectors[i].isSelected( fileInfo ) ) { return false; } } catch ( final IOException e ) { throw new ArchiverException( "Failed to check, whether " + fileInfo.getName() + " is selected: " + e.getMessage(), e ); } } } return true; } protected abstract void execute() throws ArchiverException; protected abstract void execute( String path, File outputDirectory ) throws ArchiverException; /** * @since 1.1 */ public boolean isUseJvmChmod() { return useJvmChmod; } /** * @since 1.1 */ public void setUseJvmChmod( final boolean useJvmChmod ) { this.useJvmChmod = useJvmChmod; } /** * @since 1.1 */ public boolean isIgnorePermissions() { return ignorePermissions; } /** * @since 1.1 */ public void setIgnorePermissions( final boolean ignorePermissions ) { this.ignorePermissions = ignorePermissions; } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/ArchiveEntry.java000066400000000000000000000156221145404360500332330ustar00rootroot00000000000000package org.codehaus.plexus.archiver; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.File; import java.io.IOException; import java.io.InputStream; import org.codehaus.plexus.components.io.attributes.PlexusIoResourceAttributeUtils; import org.codehaus.plexus.components.io.attributes.PlexusIoResourceAttributes; import org.codehaus.plexus.components.io.resources.PlexusIoFileResource; import org.codehaus.plexus.components.io.resources.PlexusIoResource; import org.codehaus.plexus.components.io.resources.PlexusIoResourceWithAttributes; /** * @version $Revision: 1502 $ $Date$ */ public class ArchiveEntry { public static final String ROLE = ArchiveEntry.class.getName(); public static final int FILE = 1; public static final int DIRECTORY = 2; private PlexusIoResource resource; private String name; private int type; private int mode; private PlexusIoResourceAttributes attributes; /** * @param name the filename as it will appear in the archive * @param original original filename * @param type FILE or DIRECTORY * @param mode octal unix style permissions */ private ArchiveEntry( String name, PlexusIoResource resource, int type, int mode ) { this.name = name; this.resource = resource; this.attributes = ( resource instanceof PlexusIoResourceWithAttributes ) ? ( (PlexusIoResourceWithAttributes) resource ).getAttributes() : null; this.type = type; int permissions = mode; if ( mode == -1 && this.attributes == null ) { permissions = resource.isFile() ? Archiver.DEFAULT_FILE_MODE : Archiver.DEFAULT_DIR_MODE; } this.mode = permissions == -1 ? permissions : ( permissions & UnixStat.PERM_MASK ) | ( type == FILE ? 
UnixStat.FILE_FLAG : UnixStat.DIR_FLAG ); } /** * @return the filename of this entry in the archive. */ public String getName() { return name; } /** * @return The original file that will be stored in the archive. * @deprecated As of 1.0-alpha-10, file entries are no longer backed * by files, but by instances of {@link PlexusIoResource}. * Consequently, you should use {@link #getInputStream()}- */ public File getFile() { if ( resource instanceof PlexusIoFileResource ) { return ((PlexusIoFileResource) resource).getFile(); } return null; } /** * @return The resource contents. */ public InputStream getInputStream() throws IOException { return resource.getContents(); } /** * TODO: support for SYMLINK? * * @return FILE or DIRECTORY */ public int getType() { return type; } /** * @return octal user/group/other unix like permissions. */ public int getMode() { if ( mode != -1 ) { return mode; } if ( attributes != null && attributes.getOctalMode() > -1 ) { return attributes.getOctalMode(); } return ( ( type == FILE ? Archiver.DEFAULT_FILE_MODE : Archiver.DEFAULT_DIR_MODE ) & UnixStat.PERM_MASK ) | ( type == FILE ? UnixStat.FILE_FLAG : UnixStat.DIR_FLAG ); } public static ArchiveEntry createFileEntry( String target, PlexusIoResource resource, int permissions ) throws ArchiverException { if ( resource.isDirectory() ) { throw new ArchiverException( "Not a file: " + resource.getName() ); } return new ArchiveEntry( target, resource, FILE, permissions ); } public static ArchiveEntry createFileEntry( String target, File file, int permissions ) throws ArchiverException { if ( ! 
file.isFile() ) { throw new ArchiverException( "Not a file: " + file ); } PlexusIoResourceAttributes attrs; try { attrs = PlexusIoResourceAttributeUtils.getFileAttributes( file ); } catch ( IOException e ) { throw new ArchiverException( "Failed to read filesystem attributes for: " + file, e ); } final PlexusIoFileResource res = new PlexusIoFileResource( file, attrs ); return new ArchiveEntry( target, res, FILE, permissions ); } public static ArchiveEntry createDirectoryEntry( String target, PlexusIoResource resource, int permissions ) throws ArchiverException { if ( ! resource.isDirectory() ) { throw new ArchiverException( "Not a directory: " + resource.getName() ); } return new ArchiveEntry( target, resource, DIRECTORY, permissions ); } public static ArchiveEntry createDirectoryEntry( String target, final File file, int permissions ) throws ArchiverException { if ( ! file.isDirectory() ) { throw new ArchiverException( "Not a directory: " + file ); } PlexusIoResourceAttributes attrs; try { attrs = PlexusIoResourceAttributeUtils.getFileAttributes( file ); } catch ( IOException e ) { throw new ArchiverException( "Failed to read filesystem attributes for: " + file, e ); } final PlexusIoFileResource res = new PlexusIoFileResource( file, attrs ); return new ArchiveEntry( target, res, DIRECTORY, permissions ); } public static ArchiveEntry createEntry( String target, File file, int filePerm, int dirPerm ) throws ArchiverException { if ( file.isDirectory() ) { return createDirectoryEntry( target, file, dirPerm ); } else if ( file.isFile() ) { return createFileEntry( target, file, filePerm ); } else // FIXME: handle symlinks? 
{ throw new ArchiverException( "Neither a file nor a directory: " + file ); } } public PlexusIoResourceAttributes getResourceAttributes() { return attributes; } public void setResourceAttributes( PlexusIoResourceAttributes attributes ) { this.attributes = attributes; } public PlexusIoResource getResource() { return resource; } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/ArchiveFile.java000066400000000000000000000023341145404360500330050ustar00rootroot00000000000000package org.codehaus.plexus.archiver; import java.io.IOException; import java.io.InputStream; import java.util.Enumeration; /** * Interface of a zip, or tar file. */ public interface ArchiveFile { /** * Interfave of a archive file entry. An entry may be a file, * or directory. */ public interface Entry { /** * Returns the entries name. */ String getName(); /** * Returns, whether the entry is a directory. */ boolean isDirectory(); /** * Returns the time of the entries last modification. * @return Modification time, or -1, if unknown. */ long getLastModificationTime(); /** * Returns the entries size. * @return File size; undefined for directories. */ long getSize(); } /** * Returns an enumeration with the archive files entries. * Any element returned by the enumeration is an instance * of {@link Entry}. */ public Enumeration getEntries() throws IOException; /** * Returns an {@link InputStream} with the given entries contents. 
*/ InputStream getInputStream(Entry entry) throws IOException; } ArchiveFileFilter.java000066400000000000000000000004101145404360500340650ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiverpackage org.codehaus.plexus.archiver; import java.io.InputStream; /** * @deprecated Use {@link FileSelector} */ public interface ArchiveFileFilter { boolean include( InputStream dataStream, String entryName ) throws ArchiveFilterException; } ArchiveFilterException.java000066400000000000000000000004641145404360500351550ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiverpackage org.codehaus.plexus.archiver; public class ArchiveFilterException extends Exception { public ArchiveFilterException( String message, Throwable cause ) { super( message, cause ); } public ArchiveFilterException( String message ) { super( message ); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/ArchiveFinalizer.java000066400000000000000000000005071145404360500340510ustar00rootroot00000000000000package org.codehaus.plexus.archiver; import java.util.List; public interface ArchiveFinalizer { void finalizeArchiveCreation( Archiver archiver ) throws ArchiverException; void finalizeArchiveExtraction( UnArchiver unarchiver ) throws ArchiverException; List getVirtualFiles(); } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/ArchivedFileSet.java000066400000000000000000000004521145404360500336240ustar00rootroot00000000000000package org.codehaus.plexus.archiver; import java.io.File; /** * A file set, which consists of the files and directories in * an archive. * @since 1.0-alpha-9 */ public interface ArchivedFileSet extends BaseFileSet { /** * Returns the archive file. 
*/ File getArchive(); } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/Archiver.java000066400000000000000000000225531145404360500323740ustar00rootroot00000000000000package org.codehaus.plexus.archiver; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.codehaus.plexus.components.io.resources.PlexusIoResource; import org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection; import java.io.File; import java.io.IOException; import java.util.HashSet; import java.util.Map; import java.util.Set; /** * @version $Revision$ $Date$ */ public interface Archiver { /** * Default value for the dirmode attribute. */ int DEFAULT_DIR_MODE = UnixStat.DIR_FLAG | UnixStat.DEFAULT_DIR_PERM; /** * Default value for the filemode attribute. */ int DEFAULT_FILE_MODE = UnixStat.FILE_FLAG | UnixStat.DEFAULT_FILE_PERM; String ROLE = Archiver.class.getName(); public static final String DUPLICATES_ADD = "add"; public static final String DUPLICATES_PRESERVE = "preserve"; public static final String DUPLICATES_SKIP = "skip"; public static final String DUPLICATES_FAIL = "fail"; public static final Set DUPLICATES_VALID_BEHAVIORS = new HashSet() { private static final long serialVersionUID = 1L; { add( DUPLICATES_ADD ); add( DUPLICATES_PRESERVE ); add( DUPLICATES_SKIP ); add( DUPLICATES_FAIL ); } }; void createArchive() throws ArchiverException, IOException; /** * Obsolete, use {@link #addFileSet(FileSet)}. 
*/ void addDirectory( File directory ) throws ArchiverException; /** * Obsolete, use {@link #addFileSet(FileSet)}. */ void addDirectory( File directory, String prefix ) throws ArchiverException; /** * Obsolete, use {@link #addFileSet(FileSet)}. */ void addDirectory( File directory, String[] includes, String[] excludes ) throws ArchiverException; /** * Obsolete, use {@link #addFileSet(FileSet)}. */ void addDirectory( File directory, String prefix, String[] includes, String[] excludes ) throws ArchiverException; /** * Adds the given file set to the archive. This method is basically obsoleting {@link #addDirectory(File)}, * {@link #addDirectory(File, String)}, {@link #addDirectory(File, String[], String[])}, and * {@link #addDirectory(File, String, String[], String[])}. However, as these methods are in widespread use, they * cannot easily be made deprecated. * * @throws ArchiverException * Adding the file set failed. * @since 1.0-alpha-9 */ void addFileSet( FileSet fileSet ) throws ArchiverException; void addFile( File inputFile, String destFileName ) throws ArchiverException; void addFile( File inputFile, String destFileName, int permissions ) throws ArchiverException; void addArchivedFileSet( File archiveFile ) throws ArchiverException; void addArchivedFileSet( File archiveFile, String prefix ) throws ArchiverException; void addArchivedFileSet( File archiveFile, String[] includes, String[] excludes ) throws ArchiverException; void addArchivedFileSet( File archiveFile, String prefix, String[] includes, String[] excludes ) throws ArchiverException; /** * Adds the given archive file set to the archive. This method is basically obsoleting * {@link #addArchivedFileSet(File)}, {@link #addArchivedFileSet(File, String[], String[])}, and * {@link #addArchivedFileSet(File, String, String[], String[])}. However, as these methods are in widespread use, * they cannot easily be made deprecated. 
* * @since 1.0-alpha-9 */ void addArchivedFileSet( ArchivedFileSet fileSet ) throws ArchiverException; /** * Adds the given resource collection to the archive. * * @since 1.0-alpha-10 */ void addResource( PlexusIoResource resource, String destFileName, int permissions ) throws ArchiverException; /** * Adds the given resource collection to the archive. * * @since 1.0-alpha-10 */ void addResources( PlexusIoResourceCollection resources ) throws ArchiverException; File getDestFile(); void setDestFile( File destFile ); void setFileMode( int mode ); int getFileMode(); int getOverrideFileMode(); void setDefaultFileMode( int mode ); int getDefaultFileMode(); void setDirectoryMode( int mode ); int getDirectoryMode(); int getOverrideDirectoryMode(); void setDefaultDirectoryMode( int mode ); int getDefaultDirectoryMode(); boolean getIncludeEmptyDirs(); void setIncludeEmptyDirs( boolean includeEmptyDirs ); void setDotFileDirectory( File dotFileDirectory ); /** * Returns an iterator over instances of {@link ArchiveEntry}, which have previously been added by calls to * {@link #addResources(PlexusIoResourceCollection)}, {@link #addResource(PlexusIoResource, String, int)}, * {@link #addFileSet(FileSet)}, etc. * * @since 1.0-alpha-10 */ ResourceIterator getResources() throws ArchiverException; /** * @deprecated Use {@link #getResources()} */ Map getFiles(); /** *

* Returns, whether recreating the archive is forced (default). Setting this option to false means, that the * archiver should compare the timestamps of included files with the timestamp of the target archive and rebuild the * archive only, if the latter timestamp precedes the former timestamps. Checking for timestamps will typically * offer a performance gain (in particular, if the following steps in a build can be suppressed, if an archive isn't * recrated) on the cost that you get inaccurate results from time to time. In particular, removal of source files * won't be detected. *

*

* An archiver doesn't necessarily support checks for uptodate. If so, setting this option to true will simply be * ignored. The method {@link #isSupportingForced()} may be called to check whether an archiver does support * uptodate checks. *

* * @return True, if the target archive should always be created; false otherwise * @see #setForced(boolean) * @see #isSupportingForced() */ boolean isForced(); /** *

* Sets, whether recreating the archive is forced (default). Setting this option to false means, that the archiver * should compare the timestamps of included files with the timestamp of the target archive and rebuild the archive * only, if the latter timestamp precedes the former timestamps. Checking for timestamps will typically offer a * performance gain (in particular, if the following steps in a build can be suppressed, if an archive isn't * recrated) on the cost that you get inaccurate results from time to time. In particular, removal of source files * won't be detected. *

*

* An archiver doesn't necessarily support checks for uptodate. If so, setting this option to true will simply be * ignored. The method {@link #isSupportingForced()} may be called to check whether an archiver does support * uptodate checks. *

* * @param forced * True, if the target archive should always be created; false otherwise * @see #isForced() * @see #isSupportingForced() */ void setForced( boolean forced ); /** * Returns, whether the archive supports uptodate checks. If so, you may set {@link #setForced(boolean)} to true. * * @return True, if the archiver does support uptodate checks, false otherwise * @see #setForced(boolean) * @see #isForced() */ boolean isSupportingForced(); /** * Returns the behavior of this archiver when duplicate files are detected. */ String getDuplicateBehavior(); /** * Set the behavior of this archiver when duplicate files are detected. One of:
* *
* See {@link Archiver#DUPLICATES_ADD}, {@link Archiver#DUPLICATES_SKIP}, {@link Archiver#DUPLICATES_PRESERVE}, * {@link Archiver#DUPLICATES_FAIL}. */ void setDuplicateBehavior( String duplicate ); /** * to use or not the jvm method for file permissions : user all not active for group permissions * * @since 1.1 * @param useJvmChmod */ void setUseJvmChmod( boolean useJvmChmod ); /** * @since 1.1 * @return */ boolean isUseJvmChmod(); /** * @since 1.1 */ boolean isIgnorePermissions(); /** * @since 1.1 */ void setIgnorePermissions( final boolean ignorePermissions ); } ArchiverException.java000066400000000000000000000016261145404360500341720ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiverpackage org.codehaus.plexus.archiver; /** * Copyright 2004 The Apache Software Foundation *

* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at *

* http://www.apache.org/licenses/LICENSE-2.0 *

* Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ public class ArchiverException extends Exception { public ArchiverException( String message ) { super( message ); } public ArchiverException( String message, Throwable cause ) { super( message, cause ); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/BaseFileSet.java000066400000000000000000000021601145404360500327470ustar00rootroot00000000000000package org.codehaus.plexus.archiver; import org.codehaus.plexus.components.io.fileselectors.FileSelector; /** * A file set is a set of files, which may be added to an * archive. * @since 1.0-alpha-9 */ public interface BaseFileSet { /** * Returns the prefix, which the file sets contents shall * have. */ String getPrefix(); /** * Returns a string of patterns, which included files * should match. */ String[] getIncludes(); /** * Returns a string of patterns, which excluded files * should match. */ String[] getExcludes(); /** * Returns, whether the include/exclude patterns are * case sensitive. */ boolean isCaseSensitive(); /** * Returns, whether the default excludes are being * applied. */ boolean isUsingDefaultExcludes(); /** * Returns, whether empty directories are being included. */ boolean isIncludingEmptyDirectories(); /** * Returns a set of file selectors, which should be used * to select the included files. 
*/ FileSelector[] getFileSelectors(); } DotDirectiveArchiveFinalizer.java000066400000000000000000000055701145404360500363050ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiverpackage org.codehaus.plexus.archiver; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.StringUtils; import java.io.File; import java.io.IOException; import java.io.BufferedReader; import java.io.FileReader; import java.util.Collections; import java.util.Iterator; import java.util.List; /** * An @{link ArchiveFinalizer} that process dot files with archiver directives * contained within. This basically means you can communicate archive creation * instructions between processes using dot files. * * @author Jason van Zyl */ public class DotDirectiveArchiveFinalizer extends AbstractArchiveFinalizer { private static String DEFAULT_DOT_FILE_PREFIX = ".plxarc"; private File dotFileDirectory; private String dotFilePrefix; public DotDirectiveArchiveFinalizer( File dotFileDirectory ) { this( dotFileDirectory, DEFAULT_DOT_FILE_PREFIX ); } public DotDirectiveArchiveFinalizer( File dotFileDirectory, String dotFilePrefix ) { this.dotFileDirectory = dotFileDirectory; this.dotFilePrefix = dotFilePrefix; } public void finalizeArchiveCreation( Archiver archiver ) throws ArchiverException { try { List dotFiles = FileUtils.getFiles( dotFileDirectory, dotFilePrefix + "*", null ); for ( Iterator i = dotFiles.iterator(); i.hasNext(); ) { File dotFile = (File) i.next(); BufferedReader in = new BufferedReader( new FileReader( dotFile ) ); String line; while ( ( line = in.readLine() ) != null ) { String[] s = StringUtils.split( line, ":" ); if ( s.length == 1 ) { File directory = new File( dotFileDirectory, s[0] ); System.out.println( "adding directory = " + directory ); archiver.addDirectory( directory ); } else { File directory = new File( dotFileDirectory, s[0] ); System.out.println( "adding directory = " + directory + " to: " + s[1] ); 
if ( s[1].endsWith( "/" ) ) { archiver.addDirectory( directory, s[1] ); } else { archiver.addDirectory( directory, s[1] + "/" ); } } } in.close(); } } catch ( IOException e ) { throw new ArchiverException( "Error processing dot files.", e ); } } public List getVirtualFiles() { return Collections.EMPTY_LIST; } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/FileSet.java000066400000000000000000000004751145404360500321630ustar00rootroot00000000000000package org.codehaus.plexus.archiver; import java.io.File; /** * A file set, which consists of the files and directories in * a common base directory. * @since 1.0-alpha-9 */ public interface FileSet extends BaseFileSet { /** * Returns the file sets base directory. */ File getDirectory(); } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/FilterEnabled.java000066400000000000000000000003251145404360500333220ustar00rootroot00000000000000package org.codehaus.plexus.archiver; import java.util.List; /** * @deprecated Use {@link FileSelector file selectors}. 
*/ public interface FilterEnabled { void setArchiveFilters( List filters ); } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/FinalizerEnabled.java000066400000000000000000000003331145404360500340170ustar00rootroot00000000000000package org.codehaus.plexus.archiver; import java.util.List; public interface FinalizerEnabled { void addArchiveFinalizer( ArchiveFinalizer finalizer ); void setArchiveFinalizers( List archiveFinalizers ); } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/ResourceIterator.java000066400000000000000000000002731145404360500341250ustar00rootroot00000000000000/** * */ package org.codehaus.plexus.archiver; public interface ResourceIterator { boolean hasNext() throws ArchiverException; ArchiveEntry next() throws ArchiverException; }plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/UnArchiver.java000066400000000000000000000057341145404360500327010ustar00rootroot00000000000000package org.codehaus.plexus.archiver; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.codehaus.plexus.components.io.fileselectors.FileSelector; import java.io.File; /** * @version $Revision$ $Date$ */ public interface UnArchiver { String ROLE = UnArchiver.class.getName(); /** * Extract the archive. 
* * @throws ArchiverException */ void extract() throws ArchiverException; /** * Take a patch into the archive and extract it to the specified directory. * * @param path * Path inside the archive to be extracted. * @param outputDirectory * Directory to extract to. * @throws ArchiverException */ void extract( String path, File outputDirectory ) throws ArchiverException; File getDestDirectory(); void setDestDirectory( File destDirectory ); // todo What is this? If you're extracting isn't it always to a directory. I think it would be cool to extract an // archive to another archive but I don't think we support this right now. File getDestFile(); void setDestFile( File destFile ); File getSourceFile(); void setSourceFile( File sourceFile ); /** * Should we overwrite files in dest, even if they are newer than the corresponding entries in the archive? */ void setOverwrite( boolean b ); /** * Sets a set of {@link FileSelector} instances, which may be used to select the files to extract from the archive. * If file selectors are present, then a file is only extracted, if it is confirmed by all file selectors. */ void setFileSelectors( FileSelector[] selectors ); /** * Returns a set of {@link FileSelector} instances, which may be used to select the files to extract from the * archive. If file selectors are present, then a file is only extracted, if it is confirmed by all file selectors. 
*/ FileSelector[] getFileSelectors(); /** * to use or not the jvm method for file permissions : user all not active for group permissions * * @since 1.1 * @param useJvmChmod */ void setUseJvmChmod( boolean useJvmChmod ); /** * @since 1.1 * @return */ boolean isUseJvmChmod(); /** * @since 1.1 */ boolean isIgnorePermissions(); /** * @since 1.1 */ void setIgnorePermissions( final boolean ignorePermissions ); } UnArchiverException.java000066400000000000000000000007111145404360500344670ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiverpackage org.codehaus.plexus.archiver; /** * @author Jason van Zyl */ public class UnArchiverException extends Exception { public UnArchiverException( String string ) { super( string ); } public UnArchiverException( String string, Throwable throwable ) { super( string, throwable ); } public UnArchiverException( Throwable throwable ) { super( throwable ); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/UnixStat.java000066400000000000000000000034701145404360500324050ustar00rootroot00000000000000package org.codehaus.plexus.archiver; /* * Copyright 2001,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /** * Constants from stat.h on Unix systems. 
* * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.UnixStat v1.9 */ public interface UnixStat { /** * Bits used for permissions (and sticky bit) * * @since 1.1 */ int PERM_MASK = 07777; /** * Indicates symbolic links. * * @since 1.1 */ int LINK_FLAG = 0120000; /** * Indicates plain files. * * @since 1.1 */ int FILE_FLAG = 0100000; /** * Indicates directories. * * @since 1.1 */ int DIR_FLAG = 040000; // ---------------------------------------------------------- // somewhat arbitrary choices that are quite common for shared // installations // ----------------------------------------------------------- /** * Default permissions for symbolic links. * * @since 1.1 */ int DEFAULT_LINK_PERM = 0777; /** * Default permissions for directories. * * @since 1.1 */ int DEFAULT_DIR_PERM = 0755; /** * Default permissions for plain files. * * @since 1.1 */ int DEFAULT_FILE_PERM = 0644; } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/bzip2/000077500000000000000000000000001145404360500310055ustar00rootroot00000000000000BZip2Archiver.java000066400000000000000000000033511145404360500342050ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/bzip2package org.codehaus.plexus.archiver.bzip2; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import java.io.IOException; import org.codehaus.plexus.archiver.AbstractArchiver; import org.codehaus.plexus.archiver.ArchiveEntry; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.ResourceIterator; /** * @version $Revision$ $Date$ */ public class BZip2Archiver extends AbstractArchiver { private BZip2Compressor compressor = new BZip2Compressor(); public void execute() throws ArchiverException, IOException { if ( ! checkForced() ) { return; } ResourceIterator iter = getResources(); ArchiveEntry entry = iter.next(); if ( iter.hasNext() ) { throw new ArchiverException( "There is more than one file in input." ); } compressor.setSource( entry.getResource() ); compressor.setDestFile( getDestFile() ); compressor.compress(); } public boolean isSupportingForced() { return true; } protected void close() { compressor.close(); } protected String getArchiveType() { return "bzip2"; } } BZip2Compressor.java000066400000000000000000000036361145404360500346040ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/bzip2package org.codehaus.plexus.archiver.bzip2; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.util.Compressor; import java.io.BufferedOutputStream; import java.io.FileOutputStream; import java.io.IOException; /** * @version $Revision$ $Date$ */ public class BZip2Compressor extends Compressor { private CBZip2OutputStream zOut; /** * perform the GZip compression operation. */ public void compress() throws ArchiverException { try { BufferedOutputStream bos = new BufferedOutputStream( new FileOutputStream( getDestFile() ) ); bos.write( 'B' ); bos.write( 'Z' ); zOut = new CBZip2OutputStream( bos ); compress( getSource(), zOut ); } catch ( IOException ioe ) { String msg = "Problem creating bzip2 " + ioe.getMessage(); throw new ArchiverException( msg, ioe ); } } public void close() { if ( zOut != null ) { try { // close up zOut.close(); } catch ( IOException e ) { //ignore } zOut = null; } } } BZip2Constants.java000066400000000000000000000104001145404360500344070ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/bzip2package org.codehaus.plexus.archiver.bzip2; /* * Copyright 2001,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* * This package is based on the work done by Keiron Liddle, Aftex Software * to whom the Ant project is very grateful for his * great code. */ /** * Base class for both the compress and decompress classes. * Holds common arrays, and static data. 
* * @version $Revision$ $Date$ * from org.apache.ant.tools.bzip2.BZip2Constants v1.8 */ public interface BZip2Constants { int baseBlockSize = 100000; int MAX_ALPHA_SIZE = 258; int MAX_CODE_LEN = 23; int RUNA = 0; int RUNB = 1; int N_GROUPS = 6; int G_SIZE = 50; int N_ITERS = 4; int MAX_SELECTORS = ( 2 + ( 900000 / G_SIZE ) ); int NUM_OVERSHOOT_BYTES = 20; int[] rNums = { 619, 720, 127, 481, 931, 816, 813, 233, 566, 247, 985, 724, 205, 454, 863, 491, 741, 242, 949, 214, 733, 859, 335, 708, 621, 574, 73, 654, 730, 472, 419, 436, 278, 496, 867, 210, 399, 680, 480, 51, 878, 465, 811, 169, 869, 675, 611, 697, 867, 561, 862, 687, 507, 283, 482, 129, 807, 591, 733, 623, 150, 238, 59, 379, 684, 877, 625, 169, 643, 105, 170, 607, 520, 932, 727, 476, 693, 425, 174, 647, 73, 122, 335, 530, 442, 853, 695, 249, 445, 515, 909, 545, 703, 919, 874, 474, 882, 500, 594, 612, 641, 801, 220, 162, 819, 984, 589, 513, 495, 799, 161, 604, 958, 533, 221, 400, 386, 867, 600, 782, 382, 596, 414, 171, 516, 375, 682, 485, 911, 276, 98, 553, 163, 354, 666, 933, 424, 341, 533, 870, 227, 730, 475, 186, 263, 647, 537, 686, 600, 224, 469, 68, 770, 919, 190, 373, 294, 822, 808, 206, 184, 943, 795, 384, 383, 461, 404, 758, 839, 887, 715, 67, 618, 276, 204, 918, 873, 777, 604, 560, 951, 160, 578, 722, 79, 804, 96, 409, 713, 940, 652, 934, 970, 447, 318, 353, 859, 672, 112, 785, 645, 863, 803, 350, 139, 93, 354, 99, 820, 908, 609, 772, 154, 274, 580, 184, 79, 626, 630, 742, 653, 282, 762, 623, 680, 81, 927, 626, 789, 125, 411, 521, 938, 300, 821, 78, 343, 175, 128, 250, 170, 774, 972, 275, 999, 639, 495, 78, 352, 126, 857, 956, 358, 619, 580, 124, 737, 594, 701, 612, 669, 112, 134, 694, 363, 992, 809, 743, 168, 974, 944, 375, 748, 52, 600, 747, 642, 182, 862, 81, 344, 805, 988, 739, 511, 655, 814, 334, 249, 515, 897, 955, 664, 981, 649, 113, 974, 459, 893, 228, 433, 837, 553, 268, 926, 240, 102, 654, 459, 51, 686, 754, 806, 760, 493, 403, 415, 394, 687, 700, 946, 670, 656, 610, 738, 392, 760, 799, 
887, 653, 978, 321, 576, 617, 626, 502, 894, 679, 243, 440, 680, 879, 194, 572, 640, 724, 926, 56, 204, 700, 707, 151, 457, 449, 797, 195, 791, 558, 945, 679, 297, 59, 87, 824, 713, 663, 412, 693, 342, 606, 134, 108, 571, 364, 631, 212, 174, 643, 304, 329, 343, 97, 430, 751, 497, 314, 983, 374, 822, 928, 140, 206, 73, 263, 980, 736, 876, 478, 430, 305, 170, 514, 364, 692, 829, 82, 855, 953, 676, 246, 369, 970, 294, 750, 807, 827, 150, 790, 288, 923, 804, 378, 215, 828, 592, 281, 565, 555, 710, 82, 896, 831, 547, 261, 524, 462, 293, 465, 502, 56, 661, 821, 976, 991, 658, 869, 905, 758, 745, 193, 768, 550, 608, 933, 378, 286, 215, 979, 792, 961, 61, 688, 793, 644, 986, 403, 106, 366, 905, 644, 372, 567, 466, 434, 645, 210, 389, 550, 919, 135, 780, 773, 635, 389, 707, 100, 626, 958, 165, 504, 920, 176, 193, 713, 857, 265, 203, 50, 668, 108, 645, 990, 626, 197, 510, 357, 358, 850, 858, 364, 936, 638 }; } BZip2UnArchiver.java000066400000000000000000000106161145404360500345120ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/bzip2package org.codehaus.plexus.archiver.bzip2; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import org.codehaus.plexus.archiver.AbstractUnArchiver; import org.codehaus.plexus.archiver.ArchiverException; /** * @author Emmanuel Venisse * @version $Revision$ $Date$ */ public class BZip2UnArchiver extends AbstractUnArchiver { public BZip2UnArchiver() { } public BZip2UnArchiver( File sourceFile ) { super( sourceFile ); } protected void execute() throws ArchiverException { if ( getSourceFile().lastModified() > getDestFile().lastModified() ) { getLogger().info( "Expanding " + getSourceFile().getAbsolutePath() + " to " + getDestFile().getAbsolutePath() ); FileOutputStream out = null; CBZip2InputStream zIn = null; FileInputStream fis = null; BufferedInputStream bis = null; try { out = new FileOutputStream( getDestFile() ); fis = new FileInputStream( getSourceFile() ); bis = new BufferedInputStream( fis ); zIn = getBZip2InputStream( bis ); if ( zIn == null ) { throw new ArchiverException( getSourceFile().getAbsolutePath() + " is an invalid bz2 file." 
); } byte[] buffer = new byte[8 * 1024]; int count = 0; do { out.write( buffer, 0, count ); count = zIn.read( buffer, 0, buffer.length ); } while ( count != -1 ); } catch ( IOException ioe ) { String msg = "Problem expanding bzip2 " + ioe.getMessage(); throw new ArchiverException( msg, ioe ); } finally { if ( bis != null ) { try { bis.close(); } catch ( IOException ioex ) { // ignore } } if ( fis != null ) { try { fis.close(); } catch ( IOException ioex ) { // ignore } } if ( out != null ) { try { out.close(); } catch ( IOException ioex ) { // ignore } } if ( zIn != null ) { try { zIn.close(); } catch ( IOException ioex ) { // ignore } } } } } public static CBZip2InputStream getBZip2InputStream( InputStream bis ) throws IOException { int b = bis.read(); if ( b != 'B' ) { return null; } b = bis.read(); if ( b != 'Z' ) { return null; } return new CBZip2InputStream( bis ); } protected void execute( String path, File outputDirectory ) { throw new UnsupportedOperationException( "Targeted extraction not supported in BZIP2 format." ); } } CBZip2InputStream.java000066400000000000000000000623071145404360500350260ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/bzip2package org.codehaus.plexus.archiver.bzip2; /* * Copyright 2001-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ /* * This package is based on the work done by Keiron Liddle, Aftex Software * to whom the Ant project is very grateful for his * great code. */ import java.io.IOException; import java.io.InputStream; /** * An input stream that decompresses from the BZip2 format (without the file * header chars) to be read as any other stream. * * @version $Revision$ $Date$ * from org.apache.ant.tools.bzip2.CBZip2InputStream v1.18 */ public class CBZip2InputStream extends InputStream implements BZip2Constants { private static void cadvise() { System.out.println( "CRC Error" ); //throw new CCoruptionError(); } private static void compressedStreamEOF() { cadvise(); } private void makeMaps() { int i; nInUse = 0; for ( i = 0; i < 256; i++ ) { if ( inUse[ i ] ) { seqToUnseq[ nInUse ] = (char) i; unseqToSeq[ i ] = (char) nInUse; nInUse++; } } } /* index of the last char in the block, so the block size == last + 1. */ private int last; /* index in zptr[] of original string after sorting. */ private int origPtr; /* always: in the range 0 .. 9. The current block size is 100000 * this number. */ private int blockSize100k; private boolean blockRandomised; private int bsBuff; private int bsLive; private CRC mCrc = new CRC(); private boolean[] inUse = new boolean[256]; private int nInUse; private char[] seqToUnseq = new char[256]; private char[] unseqToSeq = new char[256]; private char[] selector = new char[MAX_SELECTORS]; private char[] selectorMtf = new char[MAX_SELECTORS]; private int[] tt; private char[] ll8; /* freq table collected to save a pass over the data during decompression. 
*/ private int[] unzftab = new int[256]; private int[][] limit = new int[N_GROUPS][MAX_ALPHA_SIZE]; private int[][] base = new int[N_GROUPS][MAX_ALPHA_SIZE]; private int[][] perm = new int[N_GROUPS][MAX_ALPHA_SIZE]; private int[] minLens = new int[N_GROUPS]; private InputStream bsStream; private boolean streamEnd = false; private int currentChar = -1; private static final int START_BLOCK_STATE = 1; private static final int RAND_PART_A_STATE = 2; private static final int RAND_PART_B_STATE = 3; private static final int RAND_PART_C_STATE = 4; private static final int NO_RAND_PART_A_STATE = 5; private static final int NO_RAND_PART_B_STATE = 6; private static final int NO_RAND_PART_C_STATE = 7; private int currentState = START_BLOCK_STATE; private int storedBlockCRC; private int computedCombinedCRC; private int i2, count, chPrev, ch2; private int i, tPos; private int rNToGo = 0; private int rTPos = 0; private int j2; private char z; public CBZip2InputStream( InputStream zStream ) { bsSetStream( zStream ); initialize(); initBlock(); setupBlock(); } public int read() { if ( streamEnd ) { return -1; } else { int retChar = currentChar; switch ( currentState ) { case START_BLOCK_STATE: break; case RAND_PART_A_STATE: break; case RAND_PART_B_STATE: setupRandPartB(); break; case RAND_PART_C_STATE: setupRandPartC(); break; case NO_RAND_PART_A_STATE: break; case NO_RAND_PART_B_STATE: setupNoRandPartB(); break; case NO_RAND_PART_C_STATE: setupNoRandPartC(); break; default: break; } return retChar; } } private void initialize() { char magic3, magic4; magic3 = bsGetUChar(); magic4 = bsGetUChar(); if ( magic3 != 'h' || magic4 < '1' || magic4 > '9' ) { bsFinishedWithStream(); streamEnd = true; return; } setDecompressStructureSizes( magic4 - '0' ); computedCombinedCRC = 0; } private void initBlock() { char magic1, magic2, magic3, magic4; char magic5, magic6; magic1 = bsGetUChar(); magic2 = bsGetUChar(); magic3 = bsGetUChar(); magic4 = bsGetUChar(); magic5 = bsGetUChar(); magic6 = 
bsGetUChar(); if ( magic1 == 0x17 && magic2 == 0x72 && magic3 == 0x45 && magic4 == 0x38 && magic5 == 0x50 && magic6 == 0x90 ) { complete(); return; } if ( magic1 != 0x31 || magic2 != 0x41 || magic3 != 0x59 || magic4 != 0x26 || magic5 != 0x53 || magic6 != 0x59 ) { badBlockHeader(); streamEnd = true; return; } storedBlockCRC = bsGetInt32(); if ( bsR( 1 ) == 1 ) { blockRandomised = true; } else { blockRandomised = false; } // currBlockNo++; getAndMoveToFrontDecode(); mCrc.initialiseCRC(); currentState = START_BLOCK_STATE; } private void endBlock() { int computedBlockCRC = mCrc.getFinalCRC(); /* A bad CRC is considered a fatal error. */ if ( storedBlockCRC != computedBlockCRC ) { crcError(); } computedCombinedCRC = ( computedCombinedCRC << 1 ) | ( computedCombinedCRC >>> 31 ); computedCombinedCRC ^= computedBlockCRC; } private void complete() { int storedCombinedCRC = bsGetInt32(); if ( storedCombinedCRC != computedCombinedCRC ) { crcError(); } bsFinishedWithStream(); streamEnd = true; } private static void blockOverrun() { cadvise(); } private static void badBlockHeader() { cadvise(); } private static void crcError() { cadvise(); } private void bsFinishedWithStream() { try { if ( this.bsStream != null ) { if ( this.bsStream != System.in ) { this.bsStream.close(); this.bsStream = null; } } } catch ( IOException ioe ) { //ignore } } private void bsSetStream( InputStream f ) { bsStream = f; bsLive = 0; bsBuff = 0; } private int bsR( int n ) { int v; while ( bsLive < n ) { int zzi; char thech = 0; try { thech = (char) bsStream.read(); } catch ( IOException e ) { compressedStreamEOF(); } if ( thech == -1 ) { compressedStreamEOF(); } zzi = thech; bsBuff = ( bsBuff << 8 ) | ( zzi & 0xff ); bsLive += 8; } v = ( bsBuff >> ( bsLive - n ) ) & ( ( 1 << n ) - 1 ); bsLive -= n; return v; } private char bsGetUChar() { return (char) bsR( 8 ); } private int bsGetint() { int u = 0; u = ( u << 8 ) | bsR( 8 ); u = ( u << 8 ) | bsR( 8 ); u = ( u << 8 ) | bsR( 8 ); u = ( u << 8 ) | bsR( 8 
); return u; } private int bsGetIntVS( int numBits ) { return bsR( numBits ); } private int bsGetInt32() { return bsGetint(); } private void hbCreateDecodeTables( int[] limit, int[] base, int[] perm, char[] length, int minLen, int maxLen, int alphaSize ) { int pp, i, j, vec; pp = 0; for ( i = minLen; i <= maxLen; i++ ) { for ( j = 0; j < alphaSize; j++ ) { if ( length[ j ] == i ) { perm[ pp ] = j; pp++; } } } for ( i = 0; i < MAX_CODE_LEN; i++ ) { base[ i ] = 0; } for ( i = 0; i < alphaSize; i++ ) { base[ length[ i ] + 1 ]++; } for ( i = 1; i < MAX_CODE_LEN; i++ ) { base[ i ] += base[ i - 1 ]; } for ( i = 0; i < MAX_CODE_LEN; i++ ) { limit[ i ] = 0; } vec = 0; for ( i = minLen; i <= maxLen; i++ ) { vec += ( base[ i + 1 ] - base[ i ] ); limit[ i ] = vec - 1; vec <<= 1; } for ( i = minLen + 1; i <= maxLen; i++ ) { base[ i ] = ( ( limit[ i - 1 ] + 1 ) << 1 ) - base[ i ]; } } private void recvDecodingTables() { char len[][] = new char[N_GROUPS][MAX_ALPHA_SIZE]; int i, j, t, nGroups, nSelectors, alphaSize; int minLen, maxLen; boolean[] inUse16 = new boolean[16]; /* Receive the mapping table */ for ( i = 0; i < 16; i++ ) { if ( bsR( 1 ) == 1 ) { inUse16[ i ] = true; } else { inUse16[ i ] = false; } } for ( i = 0; i < 256; i++ ) { inUse[ i ] = false; } for ( i = 0; i < 16; i++ ) { if ( inUse16[ i ] ) { for ( j = 0; j < 16; j++ ) { if ( bsR( 1 ) == 1 ) { inUse[ i * 16 + j ] = true; } } } } makeMaps(); alphaSize = nInUse + 2; /* Now the selectors */ nGroups = bsR( 3 ); nSelectors = bsR( 15 ); for ( i = 0; i < nSelectors; i++ ) { j = 0; while ( bsR( 1 ) == 1 ) { j++; } selectorMtf[ i ] = (char) j; } /* Undo the MTF values for the selectors. 
*/ { char[] pos = new char[N_GROUPS]; char tmp, v; for ( v = 0; v < nGroups; v++ ) { pos[ v ] = v; } for ( i = 0; i < nSelectors; i++ ) { v = selectorMtf[ i ]; tmp = pos[ v ]; while ( v > 0 ) { pos[ v ] = pos[ v - 1 ]; v--; } pos[ 0 ] = tmp; selector[ i ] = tmp; } } /* Now the coding tables */ for ( t = 0; t < nGroups; t++ ) { int curr = bsR( 5 ); for ( i = 0; i < alphaSize; i++ ) { while ( bsR( 1 ) == 1 ) { if ( bsR( 1 ) == 0 ) { curr++; } else { curr--; } } len[ t ][ i ] = (char) curr; } } /* Create the Huffman decoding tables */ for ( t = 0; t < nGroups; t++ ) { minLen = 32; maxLen = 0; for ( i = 0; i < alphaSize; i++ ) { if ( len[ t ][ i ] > maxLen ) { maxLen = len[ t ][ i ]; } if ( len[ t ][ i ] < minLen ) { minLen = len[ t ][ i ]; } } hbCreateDecodeTables( limit[ t ], base[ t ], perm[ t ], len[ t ], minLen, maxLen, alphaSize ); minLens[ t ] = minLen; } } private void getAndMoveToFrontDecode() { char[] yy = new char[256]; int i, j, nextSym, limitLast; int EOB, groupNo, groupPos; limitLast = baseBlockSize * blockSize100k; origPtr = bsGetIntVS( 24 ); recvDecodingTables(); EOB = nInUse + 1; groupNo = -1; groupPos = 0; /* Setting up the unzftab entries here is not strictly necessary, but it does save having to do it later in a separate pass, and so saves a block's worth of cache misses. 
*/ for ( i = 0; i <= 255; i++ ) { unzftab[ i ] = 0; } for ( i = 0; i <= 255; i++ ) { yy[ i ] = (char) i; } last = -1; { int zt, zn, zvec, zj; if ( groupPos == 0 ) { groupNo++; groupPos = G_SIZE; } groupPos--; zt = selector[ groupNo ]; zn = minLens[ zt ]; zvec = bsR( zn ); while ( zvec > limit[ zt ][ zn ] ) { zn++; { { while ( bsLive < 1 ) { int zzi; char thech = 0; try { thech = (char) bsStream.read(); } catch ( IOException e ) { compressedStreamEOF(); } if ( thech == -1 ) { compressedStreamEOF(); } zzi = thech; bsBuff = ( bsBuff << 8 ) | ( zzi & 0xff ); bsLive += 8; } } zj = ( bsBuff >> ( bsLive - 1 ) ) & 1; bsLive--; } zvec = ( zvec << 1 ) | zj; } nextSym = perm[ zt ][ zvec - base[ zt ][ zn ] ]; } while ( true ) { if ( nextSym == EOB ) { break; } if ( nextSym == RUNA || nextSym == RUNB ) { char ch; int s = -1; int N = 1; do { if ( nextSym == RUNA ) { s = s + ( 0 + 1 ) * N; } else if ( nextSym == RUNB ) { s = s + ( 1 + 1 ) * N; } N = N * 2; { int zt, zn, zvec, zj; if ( groupPos == 0 ) { groupNo++; groupPos = G_SIZE; } groupPos--; zt = selector[ groupNo ]; zn = minLens[ zt ]; zvec = bsR( zn ); while ( zvec > limit[ zt ][ zn ] ) { zn++; { { while ( bsLive < 1 ) { int zzi; char thech = 0; try { thech = (char) bsStream.read(); } catch ( IOException e ) { compressedStreamEOF(); } if ( thech == -1 ) { compressedStreamEOF(); } zzi = thech; bsBuff = ( bsBuff << 8 ) | ( zzi & 0xff ); bsLive += 8; } } zj = ( bsBuff >> ( bsLive - 1 ) ) & 1; bsLive--; } zvec = ( zvec << 1 ) | zj; } nextSym = perm[ zt ][ zvec - base[ zt ][ zn ] ]; } } while ( nextSym == RUNA || nextSym == RUNB ); s++; ch = seqToUnseq[ yy[ 0 ] ]; unzftab[ ch ] += s; while ( s > 0 ) { last++; ll8[ last ] = ch; s--; } if ( last >= limitLast ) { blockOverrun(); } } else { char tmp; last++; if ( last >= limitLast ) { blockOverrun(); } tmp = yy[ nextSym - 1 ]; unzftab[ seqToUnseq[ tmp ] ]++; ll8[ last ] = seqToUnseq[ tmp ]; /* This loop is hammered during decompression, hence the unrolling. 
for (j = nextSym-1; j > 0; j--) yy[j] = yy[j-1]; */ j = nextSym - 1; for ( ; j > 3; j -= 4 ) { yy[ j ] = yy[ j - 1 ]; yy[ j - 1 ] = yy[ j - 2 ]; yy[ j - 2 ] = yy[ j - 3 ]; yy[ j - 3 ] = yy[ j - 4 ]; } for ( ; j > 0; j-- ) { yy[ j ] = yy[ j - 1 ]; } yy[ 0 ] = tmp; { int zt, zn, zvec, zj; if ( groupPos == 0 ) { groupNo++; groupPos = G_SIZE; } groupPos--; zt = selector[ groupNo ]; zn = minLens[ zt ]; zvec = bsR( zn ); while ( zvec > limit[ zt ][ zn ] ) { zn++; { { while ( bsLive < 1 ) { int zzi; char thech = 0; try { thech = (char) bsStream.read(); } catch ( IOException e ) { compressedStreamEOF(); } zzi = thech; bsBuff = ( bsBuff << 8 ) | ( zzi & 0xff ); bsLive += 8; } } zj = ( bsBuff >> ( bsLive - 1 ) ) & 1; bsLive--; } zvec = ( zvec << 1 ) | zj; } nextSym = perm[ zt ][ zvec - base[ zt ][ zn ] ]; } } } } private void setupBlock() { int[] cftab = new int[257]; char ch; cftab[ 0 ] = 0; for ( i = 1; i <= 256; i++ ) { cftab[ i ] = unzftab[ i - 1 ]; } for ( i = 1; i <= 256; i++ ) { cftab[ i ] += cftab[ i - 1 ]; } for ( i = 0; i <= last; i++ ) { ch = ll8[ i ]; tt[ cftab[ ch ] ] = i; cftab[ ch ]++; } tPos = tt[ origPtr ]; count = 0; i2 = 0; ch2 = 256; /* not a char and not EOF */ if ( blockRandomised ) { rNToGo = 0; rTPos = 0; setupRandPartA(); } else { setupNoRandPartA(); } } private void setupRandPartA() { if ( i2 <= last ) { chPrev = ch2; ch2 = ll8[ tPos ]; tPos = tt[ tPos ]; if ( rNToGo == 0 ) { rNToGo = rNums[ rTPos ]; rTPos++; if ( rTPos == 512 ) { rTPos = 0; } } rNToGo--; ch2 ^= ( rNToGo == 1 ) ? 
1 : 0; i2++; currentChar = ch2; currentState = RAND_PART_B_STATE; mCrc.updateCRC( ch2 ); } else { endBlock(); initBlock(); setupBlock(); } } private void setupNoRandPartA() { if ( i2 <= last ) { chPrev = ch2; ch2 = ll8[ tPos ]; tPos = tt[ tPos ]; i2++; currentChar = ch2; currentState = NO_RAND_PART_B_STATE; mCrc.updateCRC( ch2 ); } else { endBlock(); initBlock(); setupBlock(); } } private void setupRandPartB() { if ( ch2 != chPrev ) { currentState = RAND_PART_A_STATE; count = 1; setupRandPartA(); } else { count++; if ( count >= 4 ) { z = ll8[ tPos ]; tPos = tt[ tPos ]; if ( rNToGo == 0 ) { rNToGo = rNums[ rTPos ]; rTPos++; if ( rTPos == 512 ) { rTPos = 0; } } rNToGo--; z ^= ( ( rNToGo == 1 ) ? 1 : 0 ); j2 = 0; currentState = RAND_PART_C_STATE; setupRandPartC(); } else { currentState = RAND_PART_A_STATE; setupRandPartA(); } } } private void setupRandPartC() { if ( j2 < (int) z ) { currentChar = ch2; mCrc.updateCRC( ch2 ); j2++; } else { currentState = RAND_PART_A_STATE; i2++; count = 0; setupRandPartA(); } } private void setupNoRandPartB() { if ( ch2 != chPrev ) { currentState = NO_RAND_PART_A_STATE; count = 1; setupNoRandPartA(); } else { count++; if ( count >= 4 ) { z = ll8[ tPos ]; tPos = tt[ tPos ]; currentState = NO_RAND_PART_C_STATE; j2 = 0; setupNoRandPartC(); } else { currentState = NO_RAND_PART_A_STATE; setupNoRandPartA(); } } } private void setupNoRandPartC() { if ( j2 < (int) z ) { currentChar = ch2; mCrc.updateCRC( ch2 ); j2++; } else { currentState = NO_RAND_PART_A_STATE; i2++; count = 0; setupNoRandPartA(); } } private void setDecompressStructureSizes( int newSize100k ) { if ( !( 0 <= newSize100k && newSize100k <= 9 && 0 <= blockSize100k && blockSize100k <= 9 ) ) { // throw new IOException("Invalid block size"); } blockSize100k = newSize100k; if ( newSize100k == 0 ) { return; } int n = baseBlockSize * newSize100k; ll8 = new char[n]; tt = new int[n]; } } 
CBZip2OutputStream.java000066400000000000000000001421711145404360500352250ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/bzip2package org.codehaus.plexus.archiver.bzip2; /* * Copyright 2001-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* * This package is based on the work done by Keiron Liddle, Aftex Software * to whom the Ant project is very grateful for his * great code. */ import java.io.IOException; import java.io.OutputStream; /** * An output stream that compresses into the BZip2 format (without the file * header chars) into another stream. *

* TODO: Update to BZip2 1.0.1 * * @version $Revision$ $Date$ * from org.apache.ant.tools.bzip2.CBZip2OutputStream v1.22 */ public class CBZip2OutputStream extends OutputStream implements BZip2Constants { protected static final int SETMASK = ( 1 << 21 ); protected static final int CLEARMASK = ( ~SETMASK ); protected static final int GREATER_ICOST = 15; protected static final int LESSER_ICOST = 0; protected static final int SMALL_THRESH = 20; protected static final int DEPTH_THRESH = 10; /* If you are ever unlucky/improbable enough to get a stack overflow whilst sorting, increase the following constant and try again. In practice I have never seen the stack go above 27 elems, so the following limit seems very generous. */ protected static final int QSORT_STACK_SIZE = 1000; private static void panic() { System.out.println( "panic" ); //throw new CError(); } private void makeMaps() { int i; nInUse = 0; for ( i = 0; i < 256; i++ ) { if ( inUse[ i ] ) { seqToUnseq[ nInUse ] = (char) i; unseqToSeq[ i ] = (char) nInUse; nInUse++; } } } protected static void hbMakeCodeLengths( char[] len, int[] freq, int alphaSize, int maxLen ) { /* Nodes and heap entries run from 1. Entry 0 for both the heap and nodes is a sentinel. */ int nNodes, nHeap, n1, n2, i, j, k; boolean tooLong; int[] heap = new int[MAX_ALPHA_SIZE + 2]; int[] weight = new int[MAX_ALPHA_SIZE * 2]; int[] parent = new int[MAX_ALPHA_SIZE * 2]; for ( i = 0; i < alphaSize; i++ ) { weight[ i + 1 ] = ( freq[ i ] == 0 ? 
1 : freq[ i ] ) << 8; } while ( true ) { nNodes = alphaSize; nHeap = 0; heap[ 0 ] = 0; weight[ 0 ] = 0; parent[ 0 ] = -2; for ( i = 1; i <= alphaSize; i++ ) { parent[ i ] = -1; nHeap++; heap[ nHeap ] = i; { int zz, tmp; zz = nHeap; tmp = heap[ zz ]; while ( weight[ tmp ] < weight[ heap[ zz >> 1 ] ] ) { heap[ zz ] = heap[ zz >> 1 ]; zz >>= 1; } heap[ zz ] = tmp; } } if ( !( nHeap < ( MAX_ALPHA_SIZE + 2 ) ) ) { panic(); } while ( nHeap > 1 ) { n1 = heap[ 1 ]; heap[ 1 ] = heap[ nHeap ]; nHeap--; { int zz = 0, yy, tmp; zz = 1; tmp = heap[ zz ]; while ( true ) { yy = zz << 1; if ( yy > nHeap ) { break; } if ( yy < nHeap && weight[ heap[ yy + 1 ] ] < weight[ heap[ yy ] ] ) { yy++; } if ( weight[ tmp ] < weight[ heap[ yy ] ] ) { break; } heap[ zz ] = heap[ yy ]; zz = yy; } heap[ zz ] = tmp; } n2 = heap[ 1 ]; heap[ 1 ] = heap[ nHeap ]; nHeap--; { int zz, yy, tmp; zz = 1; tmp = heap[ zz ]; while ( true ) { yy = zz << 1; if ( yy > nHeap ) { break; } if ( yy < nHeap && weight[ heap[ yy + 1 ] ] < weight[ heap[ yy ] ] ) { yy++; } if ( weight[ tmp ] < weight[ heap[ yy ] ] ) { break; } heap[ zz ] = heap[ yy ]; zz = yy; } heap[ zz ] = tmp; } nNodes++; parent[ n1 ] = parent[ n2 ] = nNodes; weight[ nNodes ] = ( ( weight[ n1 ] & 0xffffff00 ) + ( weight[ n2 ] & 0xffffff00 ) ) | ( 1 + ( ( ( weight[ n1 ] & 0x000000ff ) > ( weight[ n2 ] & 0x000000ff ) ) ? 
( weight[ n1 ] & 0x000000ff ) : ( weight[ n2 ] & 0x000000ff ) ) ); parent[ nNodes ] = -1; nHeap++; heap[ nHeap ] = nNodes; { int zz, tmp; zz = nHeap; tmp = heap[ zz ]; while ( weight[ tmp ] < weight[ heap[ zz >> 1 ] ] ) { heap[ zz ] = heap[ zz >> 1 ]; zz >>= 1; } heap[ zz ] = tmp; } } if ( !( nNodes < ( MAX_ALPHA_SIZE * 2 ) ) ) { panic(); } tooLong = false; for ( i = 1; i <= alphaSize; i++ ) { j = 0; k = i; while ( parent[ k ] >= 0 ) { k = parent[ k ]; j++; } len[ i - 1 ] = (char) j; if ( j > maxLen ) { tooLong = true; } } if ( !tooLong ) { break; } for ( i = 1; i < alphaSize; i++ ) { j = weight[ i ] >> 8; j = 1 + ( j / 2 ); weight[ i ] = j << 8; } } } /* index of the last char in the block, so the block size == last + 1. */ int last; /* index in zptr[] of original string after sorting. */ int origPtr; /* always: in the range 0 .. 9. The current block size is 100000 * this number. */ int blockSize100k; boolean blockRandomised; int bytesOut; int bsBuff; int bsLive; CRC mCrc = new CRC(); private boolean[] inUse = new boolean[256]; private int nInUse; private char[] seqToUnseq = new char[256]; private char[] unseqToSeq = new char[256]; private char[] selector = new char[MAX_SELECTORS]; private char[] selectorMtf = new char[MAX_SELECTORS]; private char[] block; private int[] quadrant; private int[] zptr; private short[] szptr; private int[] ftab; private int nMTF; private int[] mtfFreq = new int[MAX_ALPHA_SIZE]; /* * Used when sorting. If too many long comparisons * happen, we stop sorting, randomise the block * slightly, and try again. 
*/ private int workFactor; private int workDone; private int workLimit; private boolean firstAttempt; private int nBlocksRandomised; private int currentChar = -1; private int runLength = 0; public CBZip2OutputStream( OutputStream inStream ) throws IOException { this( inStream, 9 ); } public CBZip2OutputStream( OutputStream inStream, int inBlockSize ) throws IOException { block = null; quadrant = null; zptr = null; ftab = null; bsSetStream( inStream ); workFactor = 50; if ( inBlockSize > 9 ) { inBlockSize = 9; } if ( inBlockSize < 1 ) { inBlockSize = 1; } blockSize100k = inBlockSize; allocateCompressStructures(); initialize(); initBlock(); } /** * modified by Oliver Merkel, 010128 */ public void write( int bv ) throws IOException { int b = ( 256 + bv ) % 256; if ( currentChar != -1 ) { if ( currentChar == b ) { runLength++; if ( runLength > 254 ) { writeRun(); currentChar = -1; runLength = 0; } } else { writeRun(); runLength = 1; currentChar = b; } } else { currentChar = b; runLength++; } } private void writeRun() throws IOException { if ( last < allowableBlockSize ) { inUse[ currentChar ] = true; for ( int i = 0; i < runLength; i++ ) { mCrc.updateCRC( (char) currentChar ); } switch ( runLength ) { case 1: last++; block[ last + 1 ] = (char) currentChar; break; case 2: last++; block[ last + 1 ] = (char) currentChar; last++; block[ last + 1 ] = (char) currentChar; break; case 3: last++; block[ last + 1 ] = (char) currentChar; last++; block[ last + 1 ] = (char) currentChar; last++; block[ last + 1 ] = (char) currentChar; break; default: inUse[ runLength - 4 ] = true; last++; block[ last + 1 ] = (char) currentChar; last++; block[ last + 1 ] = (char) currentChar; last++; block[ last + 1 ] = (char) currentChar; last++; block[ last + 1 ] = (char) currentChar; last++; block[ last + 1 ] = (char) ( runLength - 4 ); break; } } else { endBlock(); initBlock(); writeRun(); } } boolean closed = false; protected void finalize() throws Throwable { close(); super.finalize(); } public 
void close() throws IOException { if ( closed ) { return; } if ( runLength > 0 ) { writeRun(); } currentChar = -1; endBlock(); endCompression(); closed = true; super.close(); bsStream.close(); } public void flush() throws IOException { super.flush(); bsStream.flush(); } private int blockCRC, combinedCRC; private void initialize() throws IOException { bytesOut = 0; nBlocksRandomised = 0; /* Write `magic' bytes h indicating file-format == huffmanised, followed by a digit indicating blockSize100k. */ bsPutUChar( 'h' ); bsPutUChar( '0' + blockSize100k ); combinedCRC = 0; } private int allowableBlockSize; private void initBlock() { // blockNo++; mCrc.initialiseCRC(); last = -1; // ch = 0; for ( int i = 0; i < 256; i++ ) { inUse[ i ] = false; } /* 20 is just a paranoia constant */ allowableBlockSize = baseBlockSize * blockSize100k - 20; } private void endBlock() throws IOException { blockCRC = mCrc.getFinalCRC(); combinedCRC = ( combinedCRC << 1 ) | ( combinedCRC >>> 31 ); combinedCRC ^= blockCRC; /* sort the block and establish posn of original string */ doReversibleTransformation(); /* A 6-byte block header, the value chosen arbitrarily as 0x314159265359 :-). A 32 bit value does not really give a strong enough guarantee that the value will not appear by chance in the compressed datastream. Worst-case probability of this event, for a 900k block, is about 2.0e-3 for 32 bits, 1.0e-5 for 40 bits and 4.0e-8 for 48 bits. For a compressed file of size 100Gb -- about 100000 blocks -- only a 48-bit marker will do. NB: normal compression/ decompression do *not* rely on these statistical properties. They are only important when trying to recover blocks from damaged files. */ bsPutUChar( 0x31 ); bsPutUChar( 0x41 ); bsPutUChar( 0x59 ); bsPutUChar( 0x26 ); bsPutUChar( 0x53 ); bsPutUChar( 0x59 ); /* Now the block's CRC, so it is in a known place. */ bsPutint( blockCRC ); /* Now a single bit indicating randomisation. 
*/ if ( blockRandomised ) { bsW( 1, 1 ); nBlocksRandomised++; } else { bsW( 1, 0 ); } /* Finally, block's contents proper. */ moveToFrontCodeAndSend(); } private void endCompression() throws IOException { /* Now another magic 48-bit number, 0x177245385090, to indicate the end of the last block. (sqrt(pi), if you want to know. I did want to use e, but it contains too much repetition -- 27 18 28 18 28 46 -- for me to feel statistically comfortable. Call me paranoid.) */ bsPutUChar( 0x17 ); bsPutUChar( 0x72 ); bsPutUChar( 0x45 ); bsPutUChar( 0x38 ); bsPutUChar( 0x50 ); bsPutUChar( 0x90 ); bsPutint( combinedCRC ); bsFinishedWithStream(); } private void hbAssignCodes( int[] code, char[] length, int minLen, int maxLen, int alphaSize ) { int n, vec, i; vec = 0; for ( n = minLen; n <= maxLen; n++ ) { for ( i = 0; i < alphaSize; i++ ) { if ( length[ i ] == n ) { code[ i ] = vec; vec++; } } vec <<= 1; } } private void bsSetStream( OutputStream f ) { bsStream = f; bsLive = 0; bsBuff = 0; bytesOut = 0; } private void bsFinishedWithStream() throws IOException { while ( bsLive > 0 ) { int ch = ( bsBuff >> 24 ); try { bsStream.write( ch ); // write 8-bit } catch ( IOException e ) { throw e; } bsBuff <<= 8; bsLive -= 8; bytesOut++; } } private void bsW( int n, int v ) throws IOException { while ( bsLive >= 8 ) { int ch = ( bsBuff >> 24 ); try { bsStream.write( ch ); // write 8-bit } catch ( IOException e ) { throw e; } bsBuff <<= 8; bsLive -= 8; bytesOut++; } bsBuff |= ( v << ( 32 - bsLive - n ) ); bsLive += n; } private void bsPutUChar( int c ) throws IOException { bsW( 8, c ); } private void bsPutint( int u ) throws IOException { bsW( 8, ( u >> 24 ) & 0xff ); bsW( 8, ( u >> 16 ) & 0xff ); bsW( 8, ( u >> 8 ) & 0xff ); bsW( 8, u & 0xff ); } private void bsPutIntVS( int numBits, int c ) throws IOException { bsW( numBits, c ); } private void sendMTFValues() throws IOException { char len[][] = new char[N_GROUPS][MAX_ALPHA_SIZE]; int v, t, i, j, gs, ge, bt, bc, iter; int nSelectors = 
0, alphaSize, minLen, maxLen, selCtr; int nGroups; alphaSize = nInUse + 2; for ( t = 0; t < N_GROUPS; t++ ) { for ( v = 0; v < alphaSize; v++ ) { len[ t ][ v ] = (char) GREATER_ICOST; } } /* Decide how many coding tables to use */ if ( nMTF <= 0 ) { panic(); } if ( nMTF < 200 ) { nGroups = 2; } else if ( nMTF < 600 ) { nGroups = 3; } else if ( nMTF < 1200 ) { nGroups = 4; } else if ( nMTF < 2400 ) { nGroups = 5; } else { nGroups = 6; } /* Generate an initial set of coding tables */ { int nPart, remF, tFreq, aFreq; nPart = nGroups; remF = nMTF; gs = 0; while ( nPart > 0 ) { tFreq = remF / nPart; ge = gs - 1; aFreq = 0; while ( aFreq < tFreq && ge < alphaSize - 1 ) { ge++; aFreq += mtfFreq[ ge ]; } if ( ge > gs && nPart != nGroups && nPart != 1 && ( ( nGroups - nPart ) % 2 == 1 ) ) { aFreq -= mtfFreq[ ge ]; ge--; } for ( v = 0; v < alphaSize; v++ ) { if ( v >= gs && v <= ge ) { len[ nPart - 1 ][ v ] = (char) LESSER_ICOST; } else { len[ nPart - 1 ][ v ] = (char) GREATER_ICOST; } } nPart--; gs = ge + 1; remF -= aFreq; } } int[][] rfreq = new int[N_GROUPS][MAX_ALPHA_SIZE]; int[] fave = new int[N_GROUPS]; short[] cost = new short[N_GROUPS]; /* Iterate up to N_ITERS times to improve the tables. */ for ( iter = 0; iter < N_ITERS; iter++ ) { for ( t = 0; t < nGroups; t++ ) { fave[ t ] = 0; } for ( t = 0; t < nGroups; t++ ) { for ( v = 0; v < alphaSize; v++ ) { rfreq[ t ][ v ] = 0; } } nSelectors = 0; gs = 0; while ( true ) { /* Set group start & end marks. */ if ( gs >= nMTF ) { break; } ge = gs + G_SIZE - 1; if ( ge >= nMTF ) { ge = nMTF - 1; } /* Calculate the cost of this group as coded by each of the coding tables. 
*/ for ( t = 0; t < nGroups; t++ ) { cost[ t ] = 0; } if ( nGroups == 6 ) { short cost0, cost1, cost2, cost3, cost4, cost5; cost0 = cost1 = cost2 = cost3 = cost4 = cost5 = 0; for ( i = gs; i <= ge; i++ ) { short icv = szptr[ i ]; cost0 += len[ 0 ][ icv ]; cost1 += len[ 1 ][ icv ]; cost2 += len[ 2 ][ icv ]; cost3 += len[ 3 ][ icv ]; cost4 += len[ 4 ][ icv ]; cost5 += len[ 5 ][ icv ]; } cost[ 0 ] = cost0; cost[ 1 ] = cost1; cost[ 2 ] = cost2; cost[ 3 ] = cost3; cost[ 4 ] = cost4; cost[ 5 ] = cost5; } else { for ( i = gs; i <= ge; i++ ) { short icv = szptr[ i ]; for ( t = 0; t < nGroups; t++ ) { cost[ t ] += len[ t ][ icv ]; } } } /* Find the coding table which is best for this group, and record its identity in the selector table. */ bc = 999999999; bt = -1; for ( t = 0; t < nGroups; t++ ) { if ( cost[ t ] < bc ) { bc = cost[ t ]; bt = t; } } fave[ bt ]++; selector[ nSelectors ] = (char) bt; nSelectors++; /* Increment the symbol frequencies for the selected table. */ for ( i = gs; i <= ge; i++ ) { rfreq[ bt ][ szptr[ i ] ]++; } gs = ge + 1; } /* Recompute the tables based on the accumulated frequencies. */ for ( t = 0; t < nGroups; t++ ) { hbMakeCodeLengths( len[ t ], rfreq[ t ], alphaSize, 20 ); } } if ( !( nGroups < 8 ) ) { panic(); } if ( !( nSelectors < 32768 && nSelectors <= ( 2 + ( 900000 / G_SIZE ) ) ) ) { panic(); } /* Compute MTF values for the selectors. */ { char[] pos = new char[N_GROUPS]; char ll_i, tmp2, tmp; for ( i = 0; i < nGroups; i++ ) { pos[ i ] = (char) i; } for ( i = 0; i < nSelectors; i++ ) { ll_i = selector[ i ]; j = 0; tmp = pos[ j ]; while ( ll_i != tmp ) { j++; tmp2 = tmp; tmp = pos[ j ]; pos[ j ] = tmp2; } pos[ 0 ] = tmp; selectorMtf[ i ] = (char) j; } } int[][] code = new int[N_GROUPS][MAX_ALPHA_SIZE]; /* Assign actual codes for the tables. 
*/ for ( t = 0; t < nGroups; t++ ) { minLen = 32; maxLen = 0; for ( i = 0; i < alphaSize; i++ ) { if ( len[ t ][ i ] > maxLen ) { maxLen = len[ t ][ i ]; } if ( len[ t ][ i ] < minLen ) { minLen = len[ t ][ i ]; } } if ( maxLen > 20 ) { panic(); } if ( minLen < 1 ) { panic(); } hbAssignCodes( code[ t ], len[ t ], minLen, maxLen, alphaSize ); } /* Transmit the mapping table. */ { boolean[] inUse16 = new boolean[16]; for ( i = 0; i < 16; i++ ) { inUse16[ i ] = false; for ( j = 0; j < 16; j++ ) { if ( inUse[ i * 16 + j ] ) { inUse16[ i ] = true; } } } for ( i = 0; i < 16; i++ ) { if ( inUse16[ i ] ) { bsW( 1, 1 ); } else { bsW( 1, 0 ); } } for ( i = 0; i < 16; i++ ) { if ( inUse16[ i ] ) { for ( j = 0; j < 16; j++ ) { if ( inUse[ i * 16 + j ] ) { bsW( 1, 1 ); } else { bsW( 1, 0 ); } } } } } /* Now the selectors. */ bsW( 3, nGroups ); bsW( 15, nSelectors ); for ( i = 0; i < nSelectors; i++ ) { for ( j = 0; j < selectorMtf[ i ]; j++ ) { bsW( 1, 1 ); } bsW( 1, 0 ); } /* Now the coding tables. 
*/ for ( t = 0; t < nGroups; t++ ) { int curr = len[ t ][ 0 ]; bsW( 5, curr ); for ( i = 0; i < alphaSize; i++ ) { while ( curr < len[ t ][ i ] ) { bsW( 2, 2 ); curr++; /* 10 */ } while ( curr > len[ t ][ i ] ) { bsW( 2, 3 ); curr--; /* 11 */ } bsW( 1, 0 ); } } /* And finally, the block data proper */ selCtr = 0; gs = 0; while ( true ) { if ( gs >= nMTF ) { break; } ge = gs + G_SIZE - 1; if ( ge >= nMTF ) { ge = nMTF - 1; } for ( i = gs; i <= ge; i++ ) { bsW( len[ selector[ selCtr ] ][ szptr[ i ] ], code[ selector[ selCtr ] ][ szptr[ i ] ] ); } gs = ge + 1; selCtr++; } if ( !( selCtr == nSelectors ) ) { panic(); } } private void moveToFrontCodeAndSend() throws IOException { bsPutIntVS( 24, origPtr ); generateMTFValues(); sendMTFValues(); } private OutputStream bsStream; private void simpleSort( int lo, int hi, int d ) { int i, j, h, bigN, hp; int v; bigN = hi - lo + 1; if ( bigN < 2 ) { return; } hp = 0; while ( incs[ hp ] < bigN ) { hp++; } hp--; for ( ; hp >= 0; hp-- ) { h = incs[ hp ]; i = lo + h; while ( true ) { /* copy 1 */ if ( i > hi ) { break; } v = zptr[ i ]; j = i; while ( fullGtU( zptr[ j - h ] + d, v + d ) ) { zptr[ j ] = zptr[ j - h ]; j = j - h; if ( j <= ( lo + h - 1 ) ) { break; } } zptr[ j ] = v; i++; /* copy 2 */ if ( i > hi ) { break; } v = zptr[ i ]; j = i; while ( fullGtU( zptr[ j - h ] + d, v + d ) ) { zptr[ j ] = zptr[ j - h ]; j = j - h; if ( j <= ( lo + h - 1 ) ) { break; } } zptr[ j ] = v; i++; /* copy 3 */ if ( i > hi ) { break; } v = zptr[ i ]; j = i; while ( fullGtU( zptr[ j - h ] + d, v + d ) ) { zptr[ j ] = zptr[ j - h ]; j = j - h; if ( j <= ( lo + h - 1 ) ) { break; } } zptr[ j ] = v; i++; if ( workDone > workLimit && firstAttempt ) { return; } } } } private void vswap( int p1, int p2, int n ) { int temp; while ( n > 0 ) { temp = zptr[ p1 ]; zptr[ p1 ] = zptr[ p2 ]; zptr[ p2 ] = temp; p1++; p2++; n--; } } private char med3( char a, char b, char c ) { char t; if ( a > b ) { t = a; a = b; b = t; } if ( b > c ) { b = c; } if ( a > b ) 
{ b = a; } return b; } private static class StackElem { int ll; int hh; int dd; } private void qSort3( int loSt, int hiSt, int dSt ) { int unLo, unHi, ltLo, gtHi, med, n, m; int sp, lo, hi, d; StackElem[] stack = new StackElem[QSORT_STACK_SIZE]; for ( int count = 0; count < QSORT_STACK_SIZE; count++ ) { stack[ count ] = new StackElem(); } sp = 0; stack[ sp ].ll = loSt; stack[ sp ].hh = hiSt; stack[ sp ].dd = dSt; sp++; while ( sp > 0 ) { if ( sp >= QSORT_STACK_SIZE ) { panic(); } sp--; lo = stack[ sp ].ll; hi = stack[ sp ].hh; d = stack[ sp ].dd; if ( hi - lo < SMALL_THRESH || d > DEPTH_THRESH ) { simpleSort( lo, hi, d ); if ( workDone > workLimit && firstAttempt ) { return; } continue; } med = med3( block[ zptr[ lo ] + d + 1 ], block[ zptr[ hi ] + d + 1 ], block[ zptr[ ( lo + hi ) >> 1 ] + d + 1 ] ); unLo = ltLo = lo; unHi = gtHi = hi; while ( true ) { while ( true ) { if ( unLo > unHi ) { break; } n = ( (int) block[ zptr[ unLo ] + d + 1 ] ) - med; if ( n == 0 ) { int temp; temp = zptr[ unLo ]; zptr[ unLo ] = zptr[ ltLo ]; zptr[ ltLo ] = temp; ltLo++; unLo++; continue; } if ( n > 0 ) { break; } unLo++; } while ( true ) { if ( unLo > unHi ) { break; } n = ( (int) block[ zptr[ unHi ] + d + 1 ] ) - med; if ( n == 0 ) { int temp; temp = zptr[ unHi ]; zptr[ unHi ] = zptr[ gtHi ]; zptr[ gtHi ] = temp; gtHi--; unHi--; continue; } if ( n < 0 ) { break; } unHi--; } if ( unLo > unHi ) { break; } int temp; temp = zptr[ unLo ]; zptr[ unLo ] = zptr[ unHi ]; zptr[ unHi ] = temp; unLo++; unHi--; } if ( gtHi < ltLo ) { stack[ sp ].ll = lo; stack[ sp ].hh = hi; stack[ sp ].dd = d + 1; sp++; continue; } n = ( ( ltLo - lo ) < ( unLo - ltLo ) ) ? ( ltLo - lo ) : ( unLo - ltLo ); vswap( lo, unLo - n, n ); m = ( ( hi - gtHi ) < ( gtHi - unHi ) ) ? 
( hi - gtHi ) : ( gtHi - unHi ); vswap( unLo, hi - m + 1, m ); n = lo + unLo - ltLo - 1; m = hi - ( gtHi - unHi ) + 1; stack[ sp ].ll = lo; stack[ sp ].hh = n; stack[ sp ].dd = d; sp++; stack[ sp ].ll = n + 1; stack[ sp ].hh = m - 1; stack[ sp ].dd = d + 1; sp++; stack[ sp ].ll = m; stack[ sp ].hh = hi; stack[ sp ].dd = d; sp++; } } private void mainSort() { int i, j, ss, sb; int[] runningOrder = new int[256]; int[] copy = new int[256]; boolean[] bigDone = new boolean[256]; int c1, c2; /* In the various block-sized structures, live data runs from 0 to last+NUM_OVERSHOOT_BYTES inclusive. First, set up the overshoot area for block. */ // if (verbosity >= 4) fprintf ( stderr, " sort initialise ...\n" ); for ( i = 0; i < NUM_OVERSHOOT_BYTES; i++ ) { block[ last + i + 2 ] = block[ ( i % ( last + 1 ) ) + 1 ]; } for ( i = 0; i <= last + NUM_OVERSHOOT_BYTES; i++ ) { quadrant[ i ] = 0; } block[ 0 ] = block[ last + 1 ]; if ( last < 4000 ) { /* Use simpleSort(), since the full sorting mechanism has quite a large constant overhead. */ for ( i = 0; i <= last; i++ ) { zptr[ i ] = i; } firstAttempt = false; workDone = workLimit = 0; simpleSort( 0, last, 0 ); } else { for ( i = 0; i <= 255; i++ ) { bigDone[ i ] = false; } for ( i = 0; i <= 65536; i++ ) { ftab[ i ] = 0; } c1 = block[ 0 ]; for ( i = 0; i <= last; i++ ) { c2 = block[ i + 1 ]; ftab[ ( c1 << 8 ) + c2 ]++; c1 = c2; } for ( i = 1; i <= 65536; i++ ) { ftab[ i ] += ftab[ i - 1 ]; } c1 = block[ 1 ]; for ( i = 0; i < last; i++ ) { c2 = block[ i + 2 ]; j = ( c1 << 8 ) + c2; c1 = c2; ftab[ j ]--; zptr[ ftab[ j ] ] = i; } j = ( ( block[ last + 1 ] ) << 8 ) + ( block[ 1 ] ); ftab[ j ]--; zptr[ ftab[ j ] ] = last; /* Now ftab contains the first loc of every small bucket. Calculate the running order, from smallest to largest big bucket. 
*/ for ( i = 0; i <= 255; i++ ) { runningOrder[ i ] = i; } { int vv; int h = 1; do { h = 3 * h + 1; } while ( h <= 256 ); do { h = h / 3; for ( i = h; i <= 255; i++ ) { vv = runningOrder[ i ]; j = i; while ( ( ftab[ ( ( runningOrder[ j - h ] ) + 1 ) << 8 ] - ftab[ ( runningOrder[ j - h ] ) << 8 ] ) > ( ftab[ ( ( vv ) + 1 ) << 8 ] - ftab[ ( vv ) << 8 ] ) ) { runningOrder[ j ] = runningOrder[ j - h ]; j = j - h; if ( j <= ( h - 1 ) ) { break; } } runningOrder[ j ] = vv; } } while ( h != 1 ); } /* The main sorting loop. */ for ( i = 0; i <= 255; i++ ) { /* Process big buckets, starting with the least full. */ ss = runningOrder[ i ]; /* Complete the big bucket [ss] by quicksorting any unsorted small buckets [ss, j]. Hopefully previous pointer-scanning phases have already completed many of the small buckets [ss, j], so we don't have to sort them at all. */ for ( j = 0; j <= 255; j++ ) { sb = ( ss << 8 ) + j; if ( !( ( ftab[ sb ] & SETMASK ) == SETMASK ) ) { int lo = ftab[ sb ] & CLEARMASK; int hi = ( ftab[ sb + 1 ] & CLEARMASK ) - 1; if ( hi > lo ) { qSort3( lo, hi, 2 ); if ( workDone > workLimit && firstAttempt ) { return; } } ftab[ sb ] |= SETMASK; } } /* The ss big bucket is now done. Record this fact, and update the quadrant descriptors. Remember to update quadrants in the overshoot area too, if necessary. The "if (i < 255)" test merely skips this updating for the last bucket processed, since updating for the last bucket is pointless. 
*/ bigDone[ ss ] = true; if ( i < 255 ) { int bbStart = ftab[ ss << 8 ] & CLEARMASK; int bbSize = ( ftab[ ( ss + 1 ) << 8 ] & CLEARMASK ) - bbStart; int shifts = 0; while ( ( bbSize >> shifts ) > 65534 ) { shifts++; } for ( j = 0; j < bbSize; j++ ) { int a2update = zptr[ bbStart + j ]; int qVal = ( j >> shifts ); quadrant[ a2update ] = qVal; if ( a2update < NUM_OVERSHOOT_BYTES ) { quadrant[ a2update + last + 1 ] = qVal; } } if ( !( ( ( bbSize - 1 ) >> shifts ) <= 65535 ) ) { panic(); } } /* Now scan this big bucket so as to synthesise the sorted order for small buckets [t, ss] for all t != ss. */ for ( j = 0; j <= 255; j++ ) { copy[ j ] = ftab[ ( j << 8 ) + ss ] & CLEARMASK; } for ( j = ftab[ ss << 8 ] & CLEARMASK; j < ( ftab[ ( ss + 1 ) << 8 ] & CLEARMASK ); j++ ) { c1 = block[ zptr[ j ] ]; if ( !bigDone[ c1 ] ) { zptr[ copy[ c1 ] ] = zptr[ j ] == 0 ? last : zptr[ j ] - 1; copy[ c1 ]++; } } for ( j = 0; j <= 255; j++ ) { ftab[ ( j << 8 ) + ss ] |= SETMASK; } } } } private void randomiseBlock() { int i; int rNToGo = 0; int rTPos = 0; for ( i = 0; i < 256; i++ ) { inUse[ i ] = false; } for ( i = 0; i <= last; i++ ) { if ( rNToGo == 0 ) { rNToGo = (char) rNums[ rTPos ]; rTPos++; if ( rTPos == 512 ) { rTPos = 0; } } rNToGo--; block[ i + 1 ] ^= ( ( rNToGo == 1 ) ? 
1 : 0 ); // handle 16 bit signed numbers block[ i + 1 ] &= 0xFF; inUse[ block[ i + 1 ] ] = true; } } private void doReversibleTransformation() { int i; workLimit = workFactor * last; workDone = 0; blockRandomised = false; firstAttempt = true; mainSort(); if ( workDone > workLimit && firstAttempt ) { randomiseBlock(); workLimit = workDone = 0; blockRandomised = true; firstAttempt = false; mainSort(); } origPtr = -1; for ( i = 0; i <= last; i++ ) { if ( zptr[ i ] == 0 ) { origPtr = i; break; } } if ( origPtr == -1 ) { panic(); } } private boolean fullGtU( int i1, int i2 ) { int k; char c1, c2; int s1, s2; c1 = block[ i1 + 1 ]; c2 = block[ i2 + 1 ]; if ( c1 != c2 ) { return ( c1 > c2 ); } i1++; i2++; c1 = block[ i1 + 1 ]; c2 = block[ i2 + 1 ]; if ( c1 != c2 ) { return ( c1 > c2 ); } i1++; i2++; c1 = block[ i1 + 1 ]; c2 = block[ i2 + 1 ]; if ( c1 != c2 ) { return ( c1 > c2 ); } i1++; i2++; c1 = block[ i1 + 1 ]; c2 = block[ i2 + 1 ]; if ( c1 != c2 ) { return ( c1 > c2 ); } i1++; i2++; c1 = block[ i1 + 1 ]; c2 = block[ i2 + 1 ]; if ( c1 != c2 ) { return ( c1 > c2 ); } i1++; i2++; c1 = block[ i1 + 1 ]; c2 = block[ i2 + 1 ]; if ( c1 != c2 ) { return ( c1 > c2 ); } i1++; i2++; k = last + 1; do { c1 = block[ i1 + 1 ]; c2 = block[ i2 + 1 ]; if ( c1 != c2 ) { return ( c1 > c2 ); } s1 = quadrant[ i1 ]; s2 = quadrant[ i2 ]; if ( s1 != s2 ) { return ( s1 > s2 ); } i1++; i2++; c1 = block[ i1 + 1 ]; c2 = block[ i2 + 1 ]; if ( c1 != c2 ) { return ( c1 > c2 ); } s1 = quadrant[ i1 ]; s2 = quadrant[ i2 ]; if ( s1 != s2 ) { return ( s1 > s2 ); } i1++; i2++; c1 = block[ i1 + 1 ]; c2 = block[ i2 + 1 ]; if ( c1 != c2 ) { return ( c1 > c2 ); } s1 = quadrant[ i1 ]; s2 = quadrant[ i2 ]; if ( s1 != s2 ) { return ( s1 > s2 ); } i1++; i2++; c1 = block[ i1 + 1 ]; c2 = block[ i2 + 1 ]; if ( c1 != c2 ) { return ( c1 > c2 ); } s1 = quadrant[ i1 ]; s2 = quadrant[ i2 ]; if ( s1 != s2 ) { return ( s1 > s2 ); } i1++; i2++; if ( i1 > last ) { i1 -= last; i1--; } if ( i2 > last ) { i2 -= last; i2--; } k 
-= 4; workDone++; } while ( k >= 0 ); return false; } /* Knuth's increments seem to work better than Incerpi-Sedgewick here. Possibly because the number of elems to sort is usually small, typically <= 20. */ private int[] incs = {1, 4, 13, 40, 121, 364, 1093, 3280, 9841, 29524, 88573, 265720, 797161, 2391484}; private void allocateCompressStructures() { int n = baseBlockSize * blockSize100k; block = new char[( n + 1 + NUM_OVERSHOOT_BYTES )]; quadrant = new int[( n + NUM_OVERSHOOT_BYTES )]; zptr = new int[n]; ftab = new int[65537]; /* The back end needs a place to store the MTF values whilst it calculates the coding tables. We could put them in the zptr array. However, these values will fit in a short, so we overlay szptr at the start of zptr, in the hope of reducing the number of cache misses induced by the multiple traversals of the MTF values when calculating coding tables. Seems to improve compression speed by about 1%. */ // szptr = zptr; szptr = new short[2 * n]; } private void generateMTFValues() { char[] yy = new char[256]; int i, j; char tmp; char tmp2; int zPend; int wr; int EOB; makeMaps(); EOB = nInUse + 1; for ( i = 0; i <= EOB; i++ ) { mtfFreq[ i ] = 0; } wr = 0; zPend = 0; for ( i = 0; i < nInUse; i++ ) { yy[ i ] = (char) i; } for ( i = 0; i <= last; i++ ) { char ll_i; ll_i = unseqToSeq[ block[ zptr[ i ] ] ]; j = 0; tmp = yy[ j ]; while ( ll_i != tmp ) { j++; tmp2 = tmp; tmp = yy[ j ]; yy[ j ] = tmp2; } yy[ 0 ] = tmp; if ( j == 0 ) { zPend++; } else { if ( zPend > 0 ) { zPend--; while ( true ) { switch ( zPend % 2 ) { case 0: szptr[ wr ] = (short) RUNA; wr++; mtfFreq[ RUNA ]++; break; case 1: szptr[ wr ] = (short) RUNB; wr++; mtfFreq[ RUNB ]++; break; } if ( zPend < 2 ) { break; } zPend = ( zPend - 2 ) / 2; } zPend = 0; } szptr[ wr ] = (short) ( j + 1 ); wr++; mtfFreq[ j + 1 ]++; } } if ( zPend > 0 ) { zPend--; while ( true ) { switch ( zPend % 2 ) { case 0: szptr[ wr ] = (short) RUNA; wr++; mtfFreq[ RUNA ]++; break; case 1: szptr[ wr ] = (short) 
RUNB; wr++; mtfFreq[ RUNB ]++; break; } if ( zPend < 2 ) { break; } zPend = ( zPend - 2 ) / 2; } } szptr[ wr ] = (short) EOB; wr++; mtfFreq[ EOB ]++; nMTF = wr; } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/bzip2/CRC.java000066400000000000000000000122061145404360500322600ustar00rootroot00000000000000package org.codehaus.plexus.archiver.bzip2; /* * Copyright 2001-2002,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* * This package is based on the work done by Keiron Liddle, Aftex Software * to whom the Ant project is very grateful for his * great code. */ /** * A simple class the hold and calculate the CRC for sanity checking * of the data. 
* * @version $Revision$ $Date$ * from org.apache.ant.tools.bzip2.CRC v1.9 */ class CRC { public static int crc32Table[] = { 0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, 0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005, 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, 0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9, 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, 0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd, 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, 0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81, 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, 0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95, 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, 0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae, 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, 0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca, 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, 0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066, 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba, 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, 0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692, 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6, 0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, 0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e, 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2, 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, 0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a, 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, 0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f, 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, 0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b, 0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff, 
0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, 0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7, 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, 0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3, 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, 0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f, 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, 0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c, 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, 0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30, 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec, 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, 0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654, 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, 0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18, 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, 0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, 0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c, 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, 0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4 }; public CRC() { initialiseCRC(); } void initialiseCRC() { globalCrc = 0xffffffff; } int getFinalCRC() { return ~globalCrc; } int getGlobalCRC() { return globalCrc; } void setGlobalCRC( int newCrc ) { globalCrc = newCrc; } void updateCRC( int inCh ) { int temp = ( globalCrc >> 24 ) ^ inCh; if ( temp < 0 ) { temp = 256 + temp; } globalCrc = ( globalCrc << 8 ) ^ CRC.crc32Table[ temp ]; } int globalCrc; } PlexusIoBzip2ResourceCollection.java000066400000000000000000000023021145404360500377710ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/bzip2package org.codehaus.plexus.archiver.bzip2; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import 
org.codehaus.plexus.components.io.resources.PlexusIoCompressedFileResourceCollection; import org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection; import org.codehaus.plexus.util.IOUtil; /** * Implementation of {@link PlexusIoResourceCollection} for * bzip2 compressed files. */ public class PlexusIoBzip2ResourceCollection extends PlexusIoCompressedFileResourceCollection { protected InputStream getInputStream( File file ) throws IOException { InputStream fis = new FileInputStream( file ); try { final InputStream result = BZip2UnArchiver.getBZip2InputStream( fis ); if ( result == null ) { throw new IOException( file.getPath() + " is an invalid bzip2 file. " ); } fis = null; return result; } finally { IOUtil.close( fis ); } } protected String getDefaultExtension() { return ".bz2"; } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/dir/000077500000000000000000000000001145404360500305355ustar00rootroot00000000000000DirectoryArchiver.java000066400000000000000000000143731145404360500347610ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/dirpackage org.codehaus.plexus.archiver.dir; /* * Copyright 2001-2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import org.codehaus.plexus.archiver.AbstractArchiver; import org.codehaus.plexus.archiver.ArchiveEntry; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.ResourceIterator; import org.codehaus.plexus.archiver.util.ArchiveEntryUtils; import org.codehaus.plexus.archiver.util.ResourceUtils; import org.codehaus.plexus.components.io.resources.PlexusIoResource; import java.io.File; import java.io.IOException; /** * A plexus archiver implementation that stores the files to archive in a directory. */ public class DirectoryArchiver extends AbstractArchiver { public void resetArchiver() { cleanUp(); } public void execute() throws ArchiverException, IOException { // Most of this method was copied from org.codehaus.plexus.archiver.tar.TarArchiver // and modified to store files in a directory, not a tar archive. final ResourceIterator iter = getResources(); if ( !iter.hasNext() ) { throw new ArchiverException( "You must set at least one file." ); } final File destDirectory = getDestFile(); if ( destDirectory == null ) { throw new ArchiverException( "You must set the destination directory." ); } if ( destDirectory.exists() && !destDirectory.isDirectory() ) { throw new ArchiverException( destDirectory + " is not a directory." ); } if ( destDirectory.exists() && !destDirectory.canWrite() ) { throw new ArchiverException( destDirectory + " is not writable." ); } getLogger().info( "Copying files to " + destDirectory.getAbsolutePath() ); try { while ( iter.hasNext() ) { final ArchiveEntry f = iter.next(); // Check if we don't add directory file in itself if ( ResourceUtils.isSame( f.getResource(), destDirectory ) ) { throw new ArchiverException( "The destination directory cannot include itself." 
); } String fileName = f.getName(); final String destDir = destDirectory.getCanonicalPath(); fileName = destDir + File.separator + fileName; copyFile( f, fileName ); } } catch ( final IOException ioe ) { final String message = "Problem copying files : " + ioe.getMessage(); throw new ArchiverException( message, ioe ); } } /** * Copies the specified file to the specified path, creating any ancestor directory structure as necessary. * * @param file * The file to copy (IOException will be thrown if this does not exist) * @param vPath * The fully qualified path to copy the file to. * @throws ArchiverException * If there is a problem creating the directory structure * @throws IOException * If there is a problem copying the file */ protected void copyFile( final ArchiveEntry entry, final String vPath ) throws ArchiverException, IOException { // don't add "" to the archive if ( vPath.length() <= 0 ) { return; } final PlexusIoResource in = entry.getResource(); final File outFile = new File( vPath ); final long inLastModified = in.getLastModified(); final long outLastModified = outFile.lastModified(); if ( ResourceUtils.isUptodate( inLastModified, outLastModified ) ) { return; } outFile.setLastModified( inLastModified == PlexusIoResource.UNKNOWN_MODIFICATION_DATE ? System.currentTimeMillis() : inLastModified ); if ( !in.isDirectory() ) { if ( !outFile.getParentFile() .exists() ) { // create the parent directory... if ( !outFile.getParentFile() .mkdirs() ) { // Failure, unable to create specified directory for some unknown reason. throw new ArchiverException( "Unable to create directory or parent directory of " + outFile ); } } ResourceUtils.copyFile( in, outFile ); if ( !isIgnorePermissions() ) { ArchiveEntryUtils.chmod( outFile, entry.getMode(), getLogger(), isUseJvmChmod() ); } } else { // file is a directory if ( outFile.exists() ) { if ( !outFile.isDirectory() ) { // should we just delete the file and replace it with a directory? 
// throw an exception, let the user delete the file manually. throw new ArchiverException( "Expected directory and found file at copy destination of " + in.getName() + " to " + outFile ); } } else if ( !outFile.mkdirs() ) { // Failure, unable to create specified directory for some unknown reason. throw new ArchiverException( "Unable to create directory or parent directory of " + outFile ); } } } protected void cleanUp() { super.cleanUp(); setIncludeEmptyDirs( false ); setIncludeEmptyDirs( true ); } protected void close() throws IOException { } protected String getArchiveType() { return "directory"; } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/ear/000077500000000000000000000000001145404360500305265ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/ear/EarArchiver.java000066400000000000000000000102071145404360500335640ustar00rootroot00000000000000package org.codehaus.plexus.archiver.ear; /* * Copyright 2001-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import org.codehaus.plexus.archiver.ArchiveEntry; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.jar.JarArchiver; import org.codehaus.plexus.archiver.util.ResourceUtils; import org.codehaus.plexus.archiver.zip.ZipOutputStream; import java.io.File; import java.io.IOException; /** * Creates a EAR archive. 
Based on WAR task */ public class EarArchiver extends JarArchiver { private File deploymentDescriptor; private boolean descriptorAdded; /** * Create an Ear. */ public EarArchiver() { super(); archiveType = "ear"; } /** * File to incorporate as application.xml. */ public void setAppxml( File descr ) throws ArchiverException { deploymentDescriptor = descr; if ( !deploymentDescriptor.exists() ) { throw new ArchiverException( "Deployment descriptor: " + deploymentDescriptor + " does not exist." ); } addFile( descr, "META-INF/application.xml" ); } /** * Adds archive. */ public void addArchive( File fileName ) throws ArchiverException { addDirectory( fileName.getParentFile(), "/", new String[]{fileName.getName()}, null ); } /** * Adds archives. */ public void addArchives( File directoryName, String[] includes, String[] excludes ) throws ArchiverException { addDirectory( directoryName, "/", includes, excludes ); } protected void initZipOutputStream( ZipOutputStream zOut ) throws IOException, ArchiverException { // If no webxml file is specified, it's an error. if ( deploymentDescriptor == null && !isInUpdateMode() ) { throw new ArchiverException( "appxml attribute is required" ); } super.initZipOutputStream( zOut ); } /** * Overridden from ZipArchiver class to deal with application.xml */ protected void zipFile( ArchiveEntry entry, ZipOutputStream zOut, String vPath, int mode ) throws IOException, ArchiverException { // If the file being added is META-INF/application.xml, we // warn if it's not the one specified in the "appxml" // attribute - or if it's being added twice, meaning the same // file is specified by the "appxml" attribute and in a // element. 
if ( vPath.equalsIgnoreCase( "META-INF/application.xml" ) ) { if ( deploymentDescriptor == null || !ResourceUtils.isCanonicalizedSame( entry.getResource(), deploymentDescriptor ) || descriptorAdded ) { getLogger().warn( "Warning: selected " + archiveType + " files include a META-INF/application.xml which will" + " be ignored (please use appxml attribute to " + archiveType + " task)" ); } else { super.zipFile( entry, zOut, vPath ); descriptorAdded = true; } } else { super.zipFile( entry, zOut, vPath ); } } /** * Make sure we don't think we already have a application.xml next * time this task gets executed. */ protected void cleanUp() { descriptorAdded = false; super.cleanUp(); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/filters/000077500000000000000000000000001145404360500314275ustar00rootroot00000000000000JarSecurityFileFilter.java000066400000000000000000000016201145404360500364240ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/filterspackage org.codehaus.plexus.archiver.filters; import java.io.InputStream; import org.codehaus.plexus.archiver.ArchiveFileFilter; import org.codehaus.plexus.archiver.ArchiveFilterException; import org.codehaus.plexus.util.SelectorUtils; /** * @deprecated Use {@link JarSecurityFileSelector} */ public class JarSecurityFileFilter implements ArchiveFileFilter { public static final String[] SECURITY_FILE_PATTERNS = JarSecurityFileSelector.SECURITY_FILE_PATTERNS; public boolean include( InputStream dataStream, String entryName ) throws ArchiveFilterException { for ( int i = 0; i < SECURITY_FILE_PATTERNS.length; i++ ) { String pattern = SECURITY_FILE_PATTERNS[i]; if ( SelectorUtils.match( pattern, entryName ) ) { return false; } } return true; } } JarSecurityFileSelector.java000066400000000000000000000031771145404360500367700ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/filterspackage 
org.codehaus.plexus.archiver.filters; /* * Copyright 2007 The Codehaus Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ import java.io.IOException; import org.codehaus.plexus.components.io.fileselectors.FileInfo; import org.codehaus.plexus.components.io.fileselectors.FileSelector; import org.codehaus.plexus.util.SelectorUtils; /** * @version $Id$ * @since 1.0-alpha-9 */ public class JarSecurityFileSelector implements FileSelector { public static final String ROLE_HINT = "jar-security"; public static final String[] SECURITY_FILE_PATTERNS = { "META-INF/*.RSA", "META-INF/*.DSA", "META-INF/*.SF", "META-INF/*.rsa", "META-INF/*.dsa", "META-INF/*.sf" }; public boolean isSelected( FileInfo fileInfo ) throws IOException { String name = fileInfo.getName(); for ( int i = 0; i < SECURITY_FILE_PATTERNS.length; i++ ) { String pattern = SECURITY_FILE_PATTERNS[i]; if ( SelectorUtils.match( pattern, name ) ) { return false; } } return true; } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/gzip/000077500000000000000000000000001145404360500307305ustar00rootroot00000000000000GZipArchiver.java000066400000000000000000000033521145404360500340540ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/gzippackage org.codehaus.plexus.archiver.gzip; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in 
compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.codehaus.plexus.archiver.AbstractArchiver; import org.codehaus.plexus.archiver.ArchiveEntry; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.ResourceIterator; import java.io.IOException; /** * @version $Revision$ $Date$ */ public class GZipArchiver extends AbstractArchiver { GZipCompressor compressor = new GZipCompressor(); protected void execute() throws ArchiverException, IOException { if ( ! checkForced() ) { return; } ResourceIterator iter = getResources(); ArchiveEntry entry = iter.next(); if ( iter.hasNext() ) { throw new ArchiverException( "There is more than one file in input." ); } compressor.setSource( entry.getResource() ); compressor.setDestFile( getDestFile() ); compressor.compress(); } public boolean isSupportingForced() { return true; } protected void close() { compressor.close(); } protected String getArchiveType() { return "gzip"; } } GZipCompressor.java000066400000000000000000000034061145404360500344450ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/gzippackage org.codehaus.plexus.archiver.gzip; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.util.Compressor; import java.io.FileOutputStream; import java.io.IOException; import java.util.zip.GZIPOutputStream; /** * @version $Revision$ $Date$ */ public class GZipCompressor extends Compressor { private GZIPOutputStream zOut; /** * perform the GZip compression operation. */ public void compress() throws ArchiverException { try { zOut = new GZIPOutputStream( new FileOutputStream( getDestFile() ) ); compress( getSource(), zOut ); } catch ( IOException ioe ) { String msg = "Problem creating gzip " + ioe.getMessage(); throw new ArchiverException( msg, ioe ); } } public void close() { if ( zOut != null ) { try { // close up zOut.close(); } catch ( IOException e ) { // do nothing } zOut = null; } } } GZipUnArchiver.java000066400000000000000000000067021145404360500343610ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/gzippackage org.codehaus.plexus.archiver.gzip; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ import org.codehaus.plexus.archiver.AbstractUnArchiver; import org.codehaus.plexus.archiver.ArchiverException; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.File; import java.util.zip.GZIPInputStream; /** * @author Emmanuel Venisse * @version $Revision$ $Date$ */ public class GZipUnArchiver extends AbstractUnArchiver { public GZipUnArchiver() { } public GZipUnArchiver( File sourceFile ) { super( sourceFile ); } protected void execute() throws ArchiverException { if ( getSourceFile().lastModified() > getDestFile().lastModified() ) { getLogger().info( "Expanding " + getSourceFile().getAbsolutePath() + " to " + getDestFile().getAbsolutePath() ); FileOutputStream out = null; GZIPInputStream zIn = null; FileInputStream fis = null; try { out = new FileOutputStream( getDestFile() ); fis = new FileInputStream( getSourceFile() ); zIn = new GZIPInputStream( fis ); byte[] buffer = new byte[8 * 1024]; int count = 0; do { out.write( buffer, 0, count ); count = zIn.read( buffer, 0, buffer.length ); } while ( count != -1 ); } catch ( IOException ioe ) { String msg = "Problem expanding gzip " + ioe.getMessage(); throw new ArchiverException( msg, ioe ); } finally { if ( fis != null ) { try { fis.close(); } catch ( IOException ioex ) { //ignore } } if ( out != null ) { try { out.close(); } catch ( IOException ioex ) { //ignore } } if ( zIn != null ) { try { zIn.close(); } catch ( IOException ioex ) { //ignore } } } } } protected void execute( String path, File outputDirectory ) { throw new UnsupportedOperationException( "Targeted extraction not supported in GZIP format." 
); } } PlexusIoGzipResourceCollection.java000066400000000000000000000016771145404360500376550ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/gzippackage org.codehaus.plexus.archiver.gzip; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.zip.GZIPInputStream; import org.codehaus.plexus.components.io.resources.PlexusIoCompressedFileResourceCollection; import org.codehaus.plexus.util.IOUtil; /** * Abstract base class for compressed files, aka singleton * resource collections. */ public class PlexusIoGzipResourceCollection extends PlexusIoCompressedFileResourceCollection { protected String getDefaultExtension() { return ".gz"; } protected InputStream getInputStream( File file ) throws IOException { InputStream fis = new FileInputStream( file ); try { InputStream result = new GZIPInputStream( fis ); fis = null; return result; } finally { IOUtil.close( fis ); } } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/jar/000077500000000000000000000000001145404360500305335ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/jar/JarArchiver.java000066400000000000000000000756621145404360500336160ustar00rootroot00000000000000package org.codehaus.plexus.archiver.jar; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.Reader; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.Enumeration; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.StringTokenizer; import java.util.TreeMap; import java.util.Vector; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.util.EnumeratedAttribute; import org.codehaus.plexus.archiver.zip.ZipArchiver; import org.codehaus.plexus.archiver.zip.ZipEntry; import org.codehaus.plexus.archiver.zip.ZipFile; import org.codehaus.plexus.archiver.zip.ZipOutputStream; /** * Base class for tasks that build archives in JAR file format. * * @version $Revision$ $Date$ */ public class JarArchiver extends ZipArchiver { /** * the name of the meta-inf dir */ private static final String META_INF_NAME = "META-INF"; /** * The index file name. */ private static final String INDEX_NAME = "META-INF/INDEX.LIST"; /** * The manifest file name. */ private static final String MANIFEST_NAME = "META-INF/MANIFEST.MF"; /** * merged manifests added through addConfiguredManifest */ private Manifest configuredManifest; /** * shadow of the above if upToDate check alters the value */ private Manifest savedConfiguredManifest; /** * merged manifests added through filesets */ private Manifest filesetManifest; /** * Manifest of original archive, will be set to null if not in * update mode. 
*/ private Manifest originalManifest; /** * whether to merge fileset manifests; * value is true if filesetmanifest is 'merge' or 'mergewithoutmain' */ private FilesetManifestConfig filesetManifestConfig; /** * whether to merge the main section of fileset manifests; * value is true if filesetmanifest is 'merge' */ private boolean mergeManifestsMain = true; /** * the manifest specified by the 'manifest' attribute * */ private Manifest manifest; /** * The encoding to use when reading in a manifest file */ private String manifestEncoding; /** * The file found from the 'manifest' attribute. This can be * either the location of a manifest, or the name of a jar added * through a fileset. If its the name of an added jar, the * manifest is looked for in META-INF/MANIFEST.MF */ private File manifestFile; /** * jar index is JDK 1.3+ only */ private boolean index = false; /** * whether to really create the archive in createEmptyZip, will * get set in getResourcesToAdd. */ private boolean createEmpty = false; /** * Stores all files that are in the root of the archive (i.e. that * have a name that doesn't contain a slash) so they can get * listed in the index. *

* Will not be filled unless the user has asked for an index. */ private Vector rootEntries; /** * Path containing jars that shall be indexed in addition to this archive. */ private ArrayList indexJars; /** * constructor */ public JarArchiver() { super(); archiveType = "jar"; setEncoding( "UTF8" ); rootEntries = new Vector(); } /** * Set whether or not to create an index list for classes. * This may speed up classloading in some cases. */ public void setIndex( boolean flag ) { index = flag; } /** * Set whether or not to create an index list for classes. * This may speed up classloading in some cases. */ public void setManifestEncoding( String manifestEncoding ) { this.manifestEncoding = manifestEncoding; } /** * Allows the manifest for the archive file to be provided inline * in the build file rather than in an external file. * * @param newManifest * @throws ManifestException */ public void addConfiguredManifest( Manifest newManifest ) throws ManifestException { if ( configuredManifest == null ) { configuredManifest = newManifest; } else { configuredManifest.merge( newManifest ); } savedConfiguredManifest = configuredManifest; } /** * The manifest file to use. This can be either the location of a manifest, * or the name of a jar added through a fileset. If its the name of an added * jar, the task expects the manifest to be in the jar at META-INF/MANIFEST.MF. * * @param manifestFile the manifest file to use. */ public void setManifest( File manifestFile ) throws ArchiverException { if ( !manifestFile.exists() ) { throw new ArchiverException( "Manifest file: " + manifestFile + " does not exist." 
); } this.manifestFile = manifestFile; } private Manifest getManifest( File manifestFile ) throws ArchiverException { Manifest newManifest = null; FileInputStream fis; InputStreamReader isr = null; try { fis = new FileInputStream( manifestFile ); if ( manifestEncoding == null ) { isr = new InputStreamReader( fis ); } else { isr = new InputStreamReader( fis, manifestEncoding ); } newManifest = getManifest( isr ); } catch ( UnsupportedEncodingException e ) { throw new ArchiverException( "Unsupported encoding while reading manifest: " + e.getMessage(), e ); } catch ( IOException e ) { throw new ArchiverException( "Unable to read manifest file: " + manifestFile + " (" + e.getMessage() + ")", e ); } finally { if ( isr != null ) { try { isr.close(); } catch ( IOException e ) { // do nothing } } } return newManifest; } private Manifest getManifest( Reader r ) throws ArchiverException { Manifest newManifest; try { newManifest = new Manifest( r ); } catch ( ManifestException e ) { getLogger().error( "Manifest is invalid: " + e.getMessage() ); throw new ArchiverException( "Invalid Manifest: " + manifestFile, e ); } catch ( IOException e ) { throw new ArchiverException( "Unable to read manifest file" + " (" + e.getMessage() + ")", e ); } return newManifest; } /** * Behavior when a Manifest is found in a zipfileset or zipgroupfileset file. * Valid values are "skip", "merge", and "mergewithoutmain". * "merge" will merge all of manifests together, and merge this into any * other specified manifests. * "mergewithoutmain" merges everything but the Main section of the manifests. * Default value is "skip". *

* Note: if this attribute's value is not "skip", the created jar will not * be readable by using java.util.jar.JarInputStream * * @param config setting for found manifest behavior. */ public void setFilesetmanifest( FilesetManifestConfig config ) { filesetManifestConfig = config; mergeManifestsMain = "merge".equals( config.getValue() ); if ( ( filesetManifestConfig != null ) && !filesetManifestConfig.getValue().equals( "skip" ) ) { doubleFilePass = true; } } /** * Adds a zipfileset to include in the META-INF directory. * * @param fs zipfileset to add */ /* public void addMetainf(ZipFileSet fs) { // We just set the prefix for this fileset, and pass it up. fs.setPrefix("META-INF/"); super.addFileset(fs); } */ /** * */ public void addConfiguredIndexJars( File indexJar ) { if ( indexJars == null ) { indexJars = new ArrayList(); } indexJars.add( indexJar.getAbsolutePath() ); } protected void initZipOutputStream( ZipOutputStream zOut ) throws IOException, ArchiverException { if ( !skipWriting ) { Manifest jarManifest = createManifest(); writeManifest( zOut, jarManifest ); } } protected boolean hasVirtualFiles() { getLogger().debug( "\n\n\nChecking for jar manifest virtual files...\n\n\n" ); System.out.flush(); return ( configuredManifest != null ) || ( manifest != null ) || ( manifestFile != null ) || super.hasVirtualFiles(); } private Manifest createManifest() throws ArchiverException { try { Manifest finalManifest = Manifest.getDefaultManifest(); if ( manifest == null ) { if ( manifestFile != null ) { // if we haven't got the manifest yet, attempt to // get it now and have manifest be the final merge manifest = getManifest( manifestFile ); } } /* * Precedence: manifestFile wins over inline manifest, * over manifests read from the filesets over the original * manifest. 
* * merge with null argument is a no-op */ if ( isInUpdateMode() ) { finalManifest.merge( originalManifest ); } finalManifest.merge( filesetManifest ); finalManifest.merge( configuredManifest ); finalManifest.merge( manifest, !mergeManifestsMain ); return finalManifest; } catch ( ManifestException e ) { getLogger().error( "Manifest is invalid: " + e.getMessage() ); throw new ArchiverException( "Invalid Manifest", e ); } } private void writeManifest( ZipOutputStream zOut, Manifest manifest ) throws IOException, ArchiverException { for ( Enumeration e = manifest.getWarnings(); e.hasMoreElements(); ) { getLogger().warn( "Manifest warning: " + e.nextElement() ); } zipDir( null, zOut, "META-INF/", DEFAULT_DIR_MODE ); // time to write the manifest ByteArrayOutputStream baos = new ByteArrayOutputStream(); OutputStreamWriter osw = new OutputStreamWriter( baos, "UTF-8" ); PrintWriter writer = new PrintWriter( osw ); manifest.write( writer ); writer.flush(); ByteArrayInputStream bais = new ByteArrayInputStream( baos.toByteArray() ); super.zipFile( bais, zOut, MANIFEST_NAME, System.currentTimeMillis(), null, DEFAULT_FILE_MODE ); super.initZipOutputStream( zOut ); } protected void finalizeZipOutputStream( ZipOutputStream zOut ) throws IOException, ArchiverException { if ( index ) { createIndexList( zOut ); } } /** * Create the index list to speed up classloading. * This is a JDK 1.3+ specific feature and is enabled by default. See * * the JAR index specification for more details. * * @param zOut the zip stream representing the jar being built. * @throws IOException thrown if there is an error while creating the * index and adding it to the zip stream. */ private void createIndexList( ZipOutputStream zOut ) throws IOException, ArchiverException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); // encoding must be UTF8 as specified in the specs. 
PrintWriter writer = new PrintWriter( new OutputStreamWriter( baos, "UTF8" ) ); // version-info blankline writer.println( "JarIndex-Version: 1.0" ); writer.println(); // header newline writer.println( getDestFile().getName() ); // filter out META-INF if it doesn't contain anything other than the index and manifest. // this is what sun.misc.JarIndex does, guess we ought to be consistent. HashSet filteredDirs = new HashSet(addedDirs.keySet()); // our added dirs always have a trailing slash if(filteredDirs.contains(META_INF_NAME+"/")) { boolean add = false; Iterator i = entries.keySet().iterator(); while(i.hasNext()) { String entry = (String)i.next(); if(entry.startsWith(META_INF_NAME+"/") && !entry.equals(INDEX_NAME) && !entry.equals(MANIFEST_NAME)) { add = true; break; } } if(!add) { filteredDirs.remove(META_INF_NAME+"/"); } } writeIndexLikeList( new ArrayList( filteredDirs ), rootEntries, writer ); writer.println(); if ( indexJars != null ) { Manifest mf = createManifest(); Manifest.Attribute classpath = mf.getMainSection().getAttribute( Manifest.ATTRIBUTE_CLASSPATH ); String[] cpEntries = null; if ( classpath != null ) { StringTokenizer tok = new StringTokenizer( classpath.getValue(), " " ); cpEntries = new String[tok.countTokens()]; int c = 0; while ( tok.hasMoreTokens() ) { cpEntries[ c++ ] = tok.nextToken(); } } for ( Iterator i = indexJars.iterator(); i.hasNext(); ) { String indexJar = (String)i.next(); String name = findJarName( indexJar, cpEntries ); if ( name != null ) { ArrayList dirs = new ArrayList(); ArrayList files = new ArrayList(); grabFilesAndDirs( indexJar, dirs, files ); if ( dirs.size() + files.size() > 0 ) { writer.println( name ); writeIndexLikeList( dirs, files, writer ); writer.println(); } } } } writer.flush(); ByteArrayInputStream bais = new ByteArrayInputStream( baos.toByteArray() ); super.zipFile( bais, zOut, INDEX_NAME, System.currentTimeMillis(), null, DEFAULT_FILE_MODE ); } /** * Overridden from Zip class to deal with manifests and 
index lists. */ protected void zipFile( InputStream is, ZipOutputStream zOut, String vPath, long lastModified, File fromArchive, int mode ) throws IOException, ArchiverException { if ( MANIFEST_NAME.equalsIgnoreCase( vPath ) ) { if ( !doubleFilePass || skipWriting ) { filesetManifest( fromArchive, is ); } } else if ( INDEX_NAME.equalsIgnoreCase( vPath ) && index ) { getLogger().warn( "Warning: selected " + archiveType + " files include a META-INF/INDEX.LIST which will" + " be replaced by a newly generated one." ); } else { if ( index && ( vPath.indexOf( "/" ) == -1 ) ) { rootEntries.addElement( vPath ); } super.zipFile( is, zOut, vPath, lastModified, fromArchive, mode ); } } private void filesetManifest( File file, InputStream is ) throws ArchiverException { if ( ( manifestFile != null ) && manifestFile.equals( file ) ) { // If this is the same name specified in 'manifest', this // is the manifest to use getLogger().debug( "Found manifest " + file ); try { if ( is != null ) { InputStreamReader isr; if ( manifestEncoding == null ) { isr = new InputStreamReader( is ); } else { isr = new InputStreamReader( is, manifestEncoding ); } manifest = getManifest( isr ); } else { manifest = getManifest( file ); } } catch ( UnsupportedEncodingException e ) { throw new ArchiverException( "Unsupported encoding while reading " + "manifest: " + e.getMessage(), e ); } } else if ( ( filesetManifestConfig != null ) && !filesetManifestConfig.getValue().equals( "skip" ) ) { // we add this to our group of fileset manifests getLogger().debug( "Found manifest to merge in file " + file ); try { Manifest newManifest; if ( is != null ) { InputStreamReader isr; if ( manifestEncoding == null ) { isr = new InputStreamReader( is ); } else { isr = new InputStreamReader( is, manifestEncoding ); } newManifest = getManifest( isr ); } else { newManifest = getManifest( file ); } if ( filesetManifest == null ) { filesetManifest = newManifest; } else { filesetManifest.merge( newManifest ); } } catch ( 
UnsupportedEncodingException e ) { throw new ArchiverException( "Unsupported encoding while reading " + "manifest: " + e.getMessage(), e ); } catch ( ManifestException e ) { getLogger().error( "Manifest in file " + file + " is invalid: " + e.getMessage() ); throw new ArchiverException( "Invalid Manifest", e ); } } else { // assuming 'skip' otherwise // don't warn if skip has been requested explicitly, warn if user // didn't set the attribute // Hide warning also as it makes no sense since // the filesetmanifest attribute itself has been // hidden //int logLevel = filesetManifestConfig == null ? // Project.MSG_WARN : Project.MSG_VERBOSE; //log("File " + file // + " includes a META-INF/MANIFEST.MF which will be ignored. " // + "To include this file, set filesetManifest to a value other " // + "than 'skip'.", logLevel); } } /** * Collect the resources that are newer than the corresponding * entries (or missing) in the original archive. *

*

If we are going to recreate the archive instead of updating * it, all resources should be considered as new, if a single one * is. Because of this, subclasses overriding this method must * call super.getResourcesToAdd and indicate with the * third arg if they already know that the archive is * out-of-date.

* * @param filesets The filesets to grab resources from * @param zipFile intended archive file (may or may not exist) * @param needsUpdate whether we already know that the archive is * out-of-date. Subclasses overriding this method are supposed to * set this value correctly in their call to * super.getResourcesToAdd. * @return an map of resources to add for each fileset passed in as well * as a flag that indicates whether the archive is uptodate. * @throws ArchiverException if it likes */ /* protected Map getResourcesToAdd(FileSet[] filesets, File zipFile, boolean needsUpdate) throws ArchiverException { // need to handle manifest as a special check if (zipFile.exists()) { // if it doesn't exist, it will get created anyway, don't // bother with any up-to-date checks. try { originalManifest = getManifestFromJar(zipFile); if (originalManifest == null) { getLogger().debug("Updating jar since the current jar has no manifest"); needsUpdate = true; } else { Manifest mf = createManifest(); if (!mf.equals(originalManifest)) { getLogger().debug("Updating jar since jar manifest has changed"); needsUpdate = true; } } } catch (Throwable t) { getLogger().warn("error while reading original manifest: " + t.getMessage()); needsUpdate = true; } } else { // no existing archive needsUpdate = true; } createEmpty = needsUpdate; return super.getResourcesToAdd(filesets, zipFile, needsUpdate); } */ /** */ protected boolean createEmptyZip( File zipFile ) throws ArchiverException { if ( !createEmpty ) { return true; } ZipOutputStream zOut = null; try { getLogger().debug( "Building MANIFEST-only jar: " + getDestFile().getAbsolutePath() ); zOut = new ZipOutputStream( new FileOutputStream( getDestFile() ) ); zOut.setEncoding( getEncoding() ); if ( isCompress() ) { zOut.setMethod( ZipOutputStream.DEFLATED ); } else { zOut.setMethod( ZipOutputStream.STORED ); } initZipOutputStream( zOut ); finalizeZipOutputStream( zOut ); } catch ( IOException ioe ) { throw new ArchiverException( "Could not 
create almost empty JAR archive" + " (" + ioe.getMessage() + ")", ioe ); } finally { // Close the output stream. try { if ( zOut != null ) { zOut.close(); } } catch ( IOException ex ) { } createEmpty = false; } return true; } /** * Make sure we don't think we already have a MANIFEST next time this task * gets executed. * * @see ZipArchiver#cleanUp */ protected void cleanUp() { super.cleanUp(); // we want to save this info if we are going to make another pass if ( !doubleFilePass || !skipWriting ) { manifest = null; configuredManifest = savedConfiguredManifest; filesetManifest = null; originalManifest = null; } rootEntries.removeAllElements(); } /** * reset to default values. * * @see ZipArchiver#reset */ public void reset() { super.reset(); configuredManifest = null; filesetManifestConfig = null; mergeManifestsMain = false; manifestFile = null; index = false; } public static class FilesetManifestConfig extends EnumeratedAttribute { public String[] getValues() { return new String[]{"skip", "merge", "mergewithoutmain"}; } } /** * Writes the directory entries from the first and the filenames * from the second list to the given writer, one entry per line. */ protected final void writeIndexLikeList( List dirs, List files, PrintWriter writer ) { // JarIndex is sorting the directories by ascending order. // it has no value but cosmetic since it will be read into a // hashtable by the classloader, but we'll do so anyway. 
Collections.sort( dirs ); Collections.sort( files ); Iterator iter = dirs.iterator(); while ( iter.hasNext() ) { String dir = (String) iter.next(); // try to be smart, not to be fooled by a weird directory name dir = dir.replace( '\\', '/' ); if ( dir.startsWith( "./" ) ) { dir = dir.substring( 2 ); } while ( dir.startsWith( "/" ) ) { dir = dir.substring( 1 ); } int pos = dir.lastIndexOf( '/' ); if ( pos != -1 ) { dir = dir.substring( 0, pos ); } // name newline writer.println( dir ); } iter = files.iterator(); while ( iter.hasNext() ) { writer.println( iter.next() ); } } /** * try to guess the name of the given file. *

*

If this jar has a classpath attribute in its manifest, we * can assume that it will only require an index of jars listed * there. try to find which classpath entry is most likely the * one the given file name points to.

*

*

In the absence of a classpath attribute, assume the other * files will be placed inside the same directory as this jar and * use their basename.

*

*

if there is a classpath and the given file doesn't match any * of its entries, return null.

*/ protected static final String findJarName( String fileName, String[] classpath ) { if ( classpath == null ) { return ( new File( fileName ) ).getName(); } fileName = fileName.replace( File.separatorChar, '/' ); TreeMap matches = new TreeMap( new Comparator() { // longest match comes first public int compare( Object o1, Object o2 ) { if ( ( o1 instanceof String ) && ( o2 instanceof String ) ) { return ( (String) o2 ).length() - ( (String) o1 ).length(); } return 0; } } ); for ( int i = 0; i < classpath.length; i++ ) { if ( fileName.endsWith( classpath[ i ] ) ) { matches.put( classpath[ i ], classpath[ i ] ); } else { int slash = classpath[ i ].indexOf( "/" ); String candidate = classpath[ i ]; while ( slash > -1 ) { candidate = candidate.substring( slash + 1 ); if ( fileName.endsWith( candidate ) ) { matches.put( candidate, classpath[ i ] ); break; } slash = candidate.indexOf( "/" ); } } } return matches.size() == 0 ? null : (String) matches.get( matches.firstKey() ); } /** * Grab lists of all root-level files and all directories * contained in the given archive. */ protected static final void grabFilesAndDirs( String file, List dirs, List files ) throws IOException { ZipFile zf = null; try { zf = new ZipFile( file, "utf-8" ); Enumeration entries = zf.getEntries(); HashSet dirSet = new HashSet(); while ( entries.hasMoreElements() ) { ZipEntry ze = (ZipEntry) entries.nextElement(); String name = ze.getName(); // avoid index for manifest-only jars. if (!name.equals(META_INF_NAME) && !name.equals(META_INF_NAME+"/") && !name.equals(INDEX_NAME) && !name.equals(MANIFEST_NAME)) { if ( ze.isDirectory() ) { dirSet.add( name ); } else if ( name.indexOf( "/" ) == -1 ) { files.add( name ); } else { // a file, not in the root // since the jar may be one without directory // entries, add the parent dir of this file as // well. 
dirSet.add( name.substring( 0, name.lastIndexOf( "/" ) + 1 ) ); } } } dirs.addAll( dirSet ); } finally { if ( zf != null ) { zf.close(); } } } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/jar/Manifest.java000066400000000000000000001152011145404360500331440ustar00rootroot00000000000000package org.codehaus.plexus.archiver.jar; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.codehaus.plexus.archiver.ArchiverException; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; import java.io.Reader; import java.io.StringWriter; import java.io.UnsupportedEncodingException; import java.util.Enumeration; import java.util.Hashtable; import java.util.StringTokenizer; import java.util.Vector; /** * Holds the data of a jar manifest. *

* Manifests are processed according to the * {@link Jar * file specification.}. * Specifically, a manifest element consists of * a set of attributes and sections. These sections in turn may contain * attributes. Note in particular that this may result in manifest lines * greater than 72 bytes being wrapped and continued on the next * line. If an application can not handle the continuation mechanism, it * is a defect in the application, not this task. * * @since Ant 1.4 */ public class Manifest { /** * The standard manifest version header */ public static final String ATTRIBUTE_MANIFEST_VERSION = "Manifest-Version"; /** * The standard Signature Version header */ public static final String ATTRIBUTE_SIGNATURE_VERSION = "Signature-Version"; /** * The Name Attribute is the first in a named section */ public static final String ATTRIBUTE_NAME = "Name"; /** * The From Header is disallowed in a Manifest */ public static final String ATTRIBUTE_FROM = "From"; /** * The Class-Path Header is special - it can be duplicated */ public static final String ATTRIBUTE_CLASSPATH = "Class-Path"; /** * Default Manifest version if one is not specified */ public static final String DEFAULT_MANIFEST_VERSION = "1.0"; /** * The max length of a line in a Manifest */ public static final int MAX_LINE_LENGTH = 72; /** * Max length of a line section which is continued. Need to allow * for the CRLF. */ public static final int MAX_SECTION_LENGTH = MAX_LINE_LENGTH - 2; /** * The End-Of-Line marker in manifests */ public static final String EOL = "\r\n"; /** * An attribute for the manifest. * Those attributes that are not nested into a section will be added to the "Main" section. */ public static class Attribute { /** * The attribute's name */ private String name = null; /** * The attribute's value */ private Vector values = new Vector(); /** * For multivalued attributes, this is the index of the attribute * currently being defined. 
*/ private int currentIndex = 0; /** * Construct an empty attribute */ public Attribute() { } /** * Construct an attribute by parsing a line from the Manifest * * @param line the line containing the attribute name and value * @throws ManifestException if the line is not valid */ public Attribute( String line ) throws ManifestException { parse( line ); } /** * Construct a manifest by specifying its name and value * * @param name the attribute's name * @param value the Attribute's value */ public Attribute( String name, String value ) { this.name = name; setValue( value ); } /** * @see java.lang.Object#hashCode */ public int hashCode() { int hashCode = 0; if ( name != null ) { hashCode += name.hashCode(); } hashCode += values.hashCode(); return hashCode; } /** * @see java.lang.Object#equals */ public boolean equals( Object rhs ) { if ( rhs == null || rhs.getClass() != getClass() ) { return false; } if ( rhs == this ) { return true; } Attribute rhsAttribute = (Attribute) rhs; String lhsKey = getKey(); String rhsKey = rhsAttribute.getKey(); if ( ( lhsKey == null && rhsKey != null ) || ( lhsKey != null && rhsKey == null ) || !lhsKey.equals( rhsKey ) ) { return false; } if ( rhsAttribute.values == null ) { return false; } return values.equals( rhsAttribute.values ); } /** * Parse a line into name and value pairs * * @param line the line to be parsed * @throws ManifestException if the line does not contain a colon * separating the name and value */ public void parse( String line ) throws ManifestException { int index = line.indexOf( ": " ); if ( index == -1 ) { throw new ManifestException( "Manifest line \"" + line + "\" is not valid as it does not " + "contain a name and a value separated by ': ' " ); } name = line.substring( 0, index ); setValue( line.substring( index + 2 ) ); } /** * Set the Attribute's name; required * * @param name the attribute's name */ public void setName( String name ) { this.name = name; } /** * Get the Attribute's name * * @return the 
attribute's name. */ public String getName() { return name; } /** * Get the attribute's Key - its name in lower case. * * @return the attribute's key. */ public String getKey() { if ( name == null ) { return null; } return name.toLowerCase(); } /** * Set the Attribute's value; required * * @param value the attribute's value */ public void setValue( String value ) { if ( currentIndex >= values.size() ) { values.addElement( value ); currentIndex = values.size() - 1; } else { values.setElementAt( value, currentIndex ); } } /** * Get the Attribute's value. * * @return the attribute's value. */ public String getValue() { if ( values.size() == 0 ) { return null; } String fullValue = ""; for ( Enumeration e = getValues(); e.hasMoreElements(); ) { String value = (String) e.nextElement(); fullValue += value + " "; } return fullValue.trim(); } /** * Add a new value to this attribute - making it multivalued. * * @param value the attribute's additional value */ public void addValue( String value ) { currentIndex++; setValue( value ); } /** * Get all the attribute's values. * * @return an enumeration of the attributes values */ public Enumeration getValues() { return values.elements(); } /** * Add a continuation line from the Manifest file. *

* When lines are too long in a manifest, they are continued on the * next line by starting with a space. This method adds the continuation * data to the attribute value by skipping the first character. * * @param line the continuation line. */ public void addContinuation( String line ) { String currentValue = (String) values.elementAt( currentIndex ); setValue( currentValue + line.substring( 1 ) ); } /** * Write the attribute out to a print writer. * * @param writer the Writer to which the attribute is written * @throws IOException if the attribute value cannot be written */ public void write( PrintWriter writer ) throws IOException { StringWriter sWriter = new StringWriter(); PrintWriter bufferWriter = new PrintWriter( sWriter ); for ( Enumeration e = getValues(); e.hasMoreElements(); ) { writeValue( bufferWriter, (String) e.nextElement() ); } byte[] convertedToUtf8 = sWriter.toString().getBytes( "UTF-8" ); writer.print( new String( convertedToUtf8, "UTF-8" ) ); } /** * Write a single attribute value out. Should handle multiple lines of attribute value. * * @param writer the Writer to which the attribute is written * @param value the attribute value * @throws IOException if the attribute value cannot be written */ private void writeValue( PrintWriter writer, String value ) throws IOException { String nameValue = name + ": " + value; StringTokenizer tokenizer = new StringTokenizer( nameValue, "\n\r" ); String prefix = ""; while ( tokenizer.hasMoreTokens() ) { writeLine( writer, prefix + tokenizer.nextToken() ); prefix = " "; } } /** * Write a single Manifest line. 
Should handle more than 72 characters of line * * @param writer the Writer to which the attribute is written * @param line the manifest line to be written */ private void writeLine( PrintWriter writer, String line ) throws IOException { while ( line.getBytes().length > MAX_LINE_LENGTH ) { // try to find a MAX_LINE_LENGTH byte section int breakIndex = MAX_SECTION_LENGTH; String section = line.substring( 0, breakIndex ); while ( section.getBytes().length > MAX_SECTION_LENGTH && breakIndex > 0 ) { breakIndex--; section = line.substring( 0, breakIndex ); } if ( breakIndex == 0 ) { throw new IOException( "Unable to write manifest line " + line ); } writer.print( section + EOL ); line = " " + line.substring( breakIndex ); } writer.print( line + EOL ); } } /** * A manifest section - you can nest attribute elements into sections. * A section consists of a set of attribute values, * separated from other sections by a blank line. */ public static class Section { /** * Warnings for this section */ private Vector warnings = new Vector(); /** * The section's name if any. The main section in a * manifest is unnamed. */ private String name = null; /** * The section's attributes. */ private Hashtable attributes = new Hashtable(); /** * Index used to retain the attribute ordering */ private Vector attributeIndex = new Vector(); /** * The name of the section; optional -default is the main section. * * @param name the section's name */ public void setName( String name ) { this.name = name; } /** * Get the Section's name. * * @return the section's name. */ public String getName() { return name; } /** * Read a section through a reader. * * @param reader the reader from which the section is read * @return the name of the next section if it has been read as * part of this section - This only happens if the * Manifest is malformed. * @throws ManifestException if the section is not valid according * to the JAR spec * @throws IOException if the section cannot be read from the reader. 
*/ public String read( BufferedReader reader ) throws ManifestException, IOException { Attribute attribute = null; while ( true ) { String line = reader.readLine(); if ( line == null || line.length() == 0 ) { return null; } if ( line.charAt( 0 ) == ' ' ) { // continuation line if ( attribute == null ) { if ( name != null ) { // a continuation on the first line is a // continuation of the name - concatenate this // line and the name name += line.substring( 1 ); } else { throw new ManifestException( "Can't start an " + "attribute with a continuation line " + line ); } } else { attribute.addContinuation( line ); } } else { attribute = new Attribute( line ); String nameReadAhead = addAttributeAndCheck( attribute ); // refresh attribute in case of multivalued attributes. attribute = getAttribute( attribute.getKey() ); if ( nameReadAhead != null ) { return nameReadAhead; } } } } /** * Merge in another section * * @param section the section to be merged with this one. * @throws ManifestException if the sections cannot be merged. 
*/ public void merge( Section section ) throws ManifestException { if ( name == null && section.getName() != null || name != null && !( name.equalsIgnoreCase( section.getName() ) ) ) { throw new ManifestException( "Unable to merge sections " + "with different names" ); } Enumeration e = section.getAttributeKeys(); Attribute classpathAttribute = null; while ( e.hasMoreElements() ) { String attributeName = (String) e.nextElement(); Attribute attribute = section.getAttribute( attributeName ); if ( attributeName.equalsIgnoreCase( ATTRIBUTE_CLASSPATH ) ) { if ( classpathAttribute == null ) { classpathAttribute = new Attribute(); classpathAttribute.setName( ATTRIBUTE_CLASSPATH ); } Enumeration cpe = attribute.getValues(); while ( cpe.hasMoreElements() ) { String value = (String) cpe.nextElement(); classpathAttribute.addValue( value ); } } else { // the merge file always wins storeAttribute( attribute ); } } if ( classpathAttribute != null ) { // the merge file *always* wins, even for Class-Path storeAttribute( classpathAttribute ); } // add in the warnings Enumeration warnEnum = section.warnings.elements(); while ( warnEnum.hasMoreElements() ) { warnings.addElement( warnEnum.nextElement() ); } } /** * Write the section out to a print writer. * * @param writer the Writer to which the section is written * @throws IOException if the section cannot be written */ public void write( PrintWriter writer ) throws IOException { if ( name != null ) { Attribute nameAttr = new Attribute( ATTRIBUTE_NAME, name ); nameAttr.write( writer ); } Enumeration e = getAttributeKeys(); while ( e.hasMoreElements() ) { String key = (String) e.nextElement(); Attribute attribute = getAttribute( key ); attribute.write( writer ); } writer.print( EOL ); } /** * Get a attribute of the section * * @param attributeName the name of the attribute * @return a Manifest.Attribute instance if the attribute is * single-valued, otherwise a Vector of Manifest.Attribute * instances. 
*/ public Attribute getAttribute( String attributeName ) { return (Attribute) attributes.get( attributeName.toLowerCase() ); } /** * Get the attribute keys. * * @return an Enumeration of Strings, each string being the lower case * key of an attribute of the section. */ public Enumeration getAttributeKeys() { return attributeIndex.elements(); } /** * Get the value of the attribute with the name given. * * @param attributeName the name of the attribute to be returned. * @return the attribute's value or null if the attribute does not exist * in the section */ public String getAttributeValue( String attributeName ) { Attribute attribute = getAttribute( attributeName.toLowerCase() ); if ( attribute == null ) { return null; } return attribute.getValue(); } /** * Remove tge given attribute from the section * * @param attributeName the name of the attribute to be removed. */ public void removeAttribute( String attributeName ) { String key = attributeName.toLowerCase(); attributes.remove( key ); attributeIndex.removeElement( key ); } /** * Add an attribute to the section. * * @param attribute the attribute to be added to the section * @throws ManifestException if the attribute is not valid. */ public void addConfiguredAttribute( Attribute attribute ) throws ManifestException { String check = addAttributeAndCheck( attribute ); if ( check != null ) { throw new ManifestException( "Specify the section name using " + "the \"name\" attribute of the

element rather " + "than using a \"Name\" manifest attribute" ); } } /** * Add an attribute to the section * * @param attribute the attribute to be added. * @return the value of the attribute if it is a name * attribute - null other wise * @throws ManifestException if the attribute already * exists in this section. */ public String addAttributeAndCheck( Attribute attribute ) throws ManifestException { if ( attribute.getName() == null || attribute.getValue() == null ) { throw new ManifestException( "Attributes must have name and value" ); } if ( attribute.getKey().equalsIgnoreCase( ATTRIBUTE_NAME ) ) { warnings.addElement( "\"" + ATTRIBUTE_NAME + "\" attributes " + "should not occur in the main section and must be the " + "first element in all other sections: \"" + attribute.getName() + ": " + attribute.getValue() + "\"" ); return attribute.getValue(); } if ( attribute.getKey().startsWith( ATTRIBUTE_FROM.toLowerCase() ) ) { warnings.addElement( "Manifest attributes should not start " + "with \"" + ATTRIBUTE_FROM + "\" in \"" + attribute.getName() + ": " + attribute.getValue() + "\"" ); } else { // classpath attributes go into a vector String attributeKey = attribute.getKey(); if ( attributeKey.equalsIgnoreCase( ATTRIBUTE_CLASSPATH ) ) { Attribute classpathAttribute = (Attribute) attributes.get( attributeKey ); if ( classpathAttribute == null ) { storeAttribute( attribute ); } else { warnings.addElement( "Multiple Class-Path attributes " + "are supported but violate the Jar " + "specification and may not be correctly " + "processed in all environments" ); Enumeration e = attribute.getValues(); while ( e.hasMoreElements() ) { String value = (String) e.nextElement(); classpathAttribute.addValue( value ); } } } else if ( attributes.containsKey( attributeKey ) ) { throw new ManifestException( "The attribute \"" + attribute.getName() + "\" may not occur more " + "than once in the same section" ); } else { storeAttribute( attribute ); } } return null; } /** * Clone this 
section * * @return the cloned Section * @since Ant 1.5.2 */ public Object clone() { Section cloned = new Section(); cloned.setName( name ); Enumeration e = getAttributeKeys(); while ( e.hasMoreElements() ) { String key = (String) e.nextElement(); Attribute attribute = getAttribute( key ); cloned.storeAttribute( new Attribute( attribute.getName(), attribute.getValue() ) ); } return cloned; } /** * Store an attribute and update the index. * * @param attribute the attribute to be stored */ private void storeAttribute( Attribute attribute ) { if ( attribute == null ) { return; } String attributeKey = attribute.getKey(); attributes.put( attributeKey, attribute ); if ( !attributeIndex.contains( attributeKey ) ) { attributeIndex.addElement( attributeKey ); } } /** * Get the warnings for this section. * * @return an Enumeration of warning strings. */ public Enumeration getWarnings() { return warnings.elements(); } /** * @see java.lang.Object#hashCode */ public int hashCode() { int hashCode = 0; if ( name != null ) { hashCode += name.hashCode(); } hashCode += attributes.hashCode(); return hashCode; } /** * @see java.lang.Object#equals */ public boolean equals( Object rhs ) { if ( rhs == null || rhs.getClass() != getClass() ) { return false; } if ( rhs == this ) { return true; } Section rhsSection = (Section) rhs; if ( rhsSection.attributes == null ) { return false; } return attributes.equals( rhsSection.attributes ); } } /** * The version of this manifest */ private String manifestVersion = DEFAULT_MANIFEST_VERSION; /** * The main section of this manifest */ private Section mainSection = new Section(); /** * The named sections of this manifest */ private Hashtable sections = new Hashtable(); /** * Index of sections - used to retain order of sections in manifest */ private Vector sectionIndex = new Vector(); /** * Construct a manifest from Ant's default manifest file. * * @return the default manifest. 
* @throws ArchiverException if there is a problem loading the * default manifest */ public static Manifest getDefaultManifest() throws ArchiverException { try { String defManifest = "/org/codehaus/plexus/archiver/jar/defaultManifest.mf"; InputStream in = Manifest.class.getResourceAsStream( defManifest ); if ( in == null ) { throw new ArchiverException( "Could not find default manifest: " + defManifest ); } try { Manifest defaultManifest = new Manifest( new InputStreamReader( in, "UTF-8" ) ); Attribute createdBy = new Attribute( "Created-By", System.getProperty( "java.vm.version" ) + " (" + System.getProperty( "java.vm.vendor" ) + ")" ); defaultManifest.getMainSection().storeAttribute( createdBy ); return defaultManifest; } catch ( UnsupportedEncodingException e ) { return new Manifest( new InputStreamReader( in ) ); } } catch ( ManifestException e ) { throw new ArchiverException( "Default manifest is invalid !!", e ); } catch ( IOException e ) { throw new ArchiverException( "Unable to read default manifest", e ); } } /** * Construct an empty manifest */ public Manifest() { } /** * Read a manifest file from the given reader * * @param r is the reader from which the Manifest is read * @throws ManifestException if the manifest is not valid according * to the JAR spec * @throws IOException if the manifest cannot be read from the reader. 
*/ public Manifest( Reader r ) throws ManifestException, IOException { BufferedReader reader = new BufferedReader( r ); // This should be the manifest version String nextSectionName = mainSection.read( reader ); String readManifestVersion = mainSection.getAttributeValue( ATTRIBUTE_MANIFEST_VERSION ); if ( readManifestVersion != null ) { manifestVersion = readManifestVersion; mainSection.removeAttribute( ATTRIBUTE_MANIFEST_VERSION ); } String line; while ( ( line = reader.readLine() ) != null ) { if ( line.length() == 0 ) { continue; } Section section = new Section(); if ( nextSectionName == null ) { Attribute sectionName = new Attribute( line ); if ( !sectionName.getName().equalsIgnoreCase( ATTRIBUTE_NAME ) ) { throw new ManifestException( "Manifest sections should " + "start with a \"" + ATTRIBUTE_NAME + "\" attribute and not \"" + sectionName.getName() + "\"" ); } nextSectionName = sectionName.getValue(); } else { // we have already started reading this section // this line is the first attribute. set it and then // let the normal read handle the rest Attribute firstAttribute = new Attribute( line ); section.addAttributeAndCheck( firstAttribute ); } section.setName( nextSectionName ); nextSectionName = section.read( reader ); addConfiguredSection( section ); } } /** * Add a section to the manifest * * @param section the manifest section to be added * @throws ManifestException if the secti0on is not valid. */ public void addConfiguredSection( Section section ) throws ManifestException { String sectionName = section.getName(); if ( sectionName == null ) { throw new ManifestException( "Sections must have a name" ); } sections.put( sectionName, section ); if ( !sectionIndex.contains( sectionName ) ) { sectionIndex.addElement( sectionName ); } } /** * Add an attribute to the manifest - it is added to the main section. * * @param attribute the attribute to be added. * @throws ManifestException if the attribute is not valid. 
*/ public void addConfiguredAttribute( Attribute attribute ) throws ManifestException { if ( attribute.getKey() == null || attribute.getValue() == null ) { throw new ManifestException( "Attributes must have name and value" ); } if ( attribute.getKey().equalsIgnoreCase( ATTRIBUTE_MANIFEST_VERSION ) ) { manifestVersion = attribute.getValue(); } else { mainSection.addConfiguredAttribute( attribute ); } } /** * Merge the contents of the given manifest into this manifest * * @param other the Manifest to be merged with this one. * @throws ManifestException if there is a problem merging the * manifest according to the Manifest spec. */ public void merge( Manifest other ) throws ManifestException { merge( other, false ); } /** * Merge the contents of the given manifest into this manifest * * @param other the Manifest to be merged with this one. * @param overwriteMain whether to overwrite the main section * of the current manifest * @throws ManifestException if there is a problem merging the * manifest according to the Manifest spec. */ public void merge( Manifest other, boolean overwriteMain ) throws ManifestException { if ( other != null ) { if ( overwriteMain ) { mainSection = (Section) other.mainSection.clone(); } else { mainSection.merge( other.mainSection ); } if ( other.manifestVersion != null ) { manifestVersion = other.manifestVersion; } Enumeration e = other.getSectionNames(); while ( e.hasMoreElements() ) { String sectionName = (String) e.nextElement(); Section ourSection = (Section) sections.get( sectionName ); Section otherSection = (Section) other.sections.get( sectionName ); if ( ourSection == null ) { if ( otherSection != null ) { addConfiguredSection( (Section) otherSection.clone() ); } } else { ourSection.merge( otherSection ); } } } } /** * Write the manifest out to a print writer. 
* * @param writer the Writer to which the manifest is written * @throws IOException if the manifest cannot be written */ public void write( PrintWriter writer ) throws IOException { writer.print( ATTRIBUTE_MANIFEST_VERSION + ": " + manifestVersion + EOL ); String signatureVersion = mainSection.getAttributeValue( ATTRIBUTE_SIGNATURE_VERSION ); if ( signatureVersion != null ) { writer.print( ATTRIBUTE_SIGNATURE_VERSION + ": " + signatureVersion + EOL ); mainSection.removeAttribute( ATTRIBUTE_SIGNATURE_VERSION ); } mainSection.write( writer ); // add it back if ( signatureVersion != null ) { try { Attribute svAttr = new Attribute( ATTRIBUTE_SIGNATURE_VERSION, signatureVersion ); mainSection.addConfiguredAttribute( svAttr ); } catch ( ManifestException e ) { // shouldn't happen - ignore } } Enumeration e = sectionIndex.elements(); while ( e.hasMoreElements() ) { String sectionName = (String) e.nextElement(); Section section = getSection( sectionName ); section.write( writer ); } } /** * Convert the manifest to its string representation * * @return a multiline string with the Manifest as it * appears in a Manifest file. */ public String toString() { StringWriter sw = new StringWriter(); try { write( new PrintWriter( sw ) ); } catch ( IOException e ) { return null; } return sw.toString(); } /** * Get the warnings for this manifest. 
* * @return an enumeration of warning strings */ public Enumeration getWarnings() { Vector warnings = new Vector(); Enumeration warnEnum = mainSection.getWarnings(); while ( warnEnum.hasMoreElements() ) { warnings.addElement( warnEnum.nextElement() ); } // create a vector and add in the warnings for all the sections Enumeration e = sections.elements(); while ( e.hasMoreElements() ) { Section section = (Section) e.nextElement(); Enumeration e2 = section.getWarnings(); while ( e2.hasMoreElements() ) { warnings.addElement( e2.nextElement() ); } } return warnings.elements(); } /** * @see java.lang.Object#hashCode */ public int hashCode() { int hashCode = 0; if ( manifestVersion != null ) { hashCode += manifestVersion.hashCode(); } hashCode += mainSection.hashCode(); hashCode += sections.hashCode(); return hashCode; } /** * @see java.lang.Object#equals */ public boolean equals( Object rhs ) { if ( rhs == null || rhs.getClass() != getClass() ) { return false; } if ( rhs == this ) { return true; } Manifest rhsManifest = (Manifest) rhs; if ( manifestVersion == null ) { if ( rhsManifest.manifestVersion != null ) { return false; } } else if ( !manifestVersion.equals( rhsManifest.manifestVersion ) ) { return false; } if ( !mainSection.equals( rhsManifest.mainSection ) ) { return false; } if ( rhsManifest.sections == null ) { return false; } return sections.equals( rhsManifest.sections ); } /** * Get the version of the manifest * * @return the manifest's version string */ public String getManifestVersion() { return manifestVersion; } /** * Get the main section of the manifest * * @return the main section of the manifest */ public Section getMainSection() { return mainSection; } /** * Get a particular section from the manifest * * @param name the name of the section desired. 
* @return the specified section or null if that section * does not exist in the manifest */ public Section getSection( String name ) { return (Section) sections.get( name ); } /** * Get the section names in this manifest. * * @return an Enumeration of section names */ public Enumeration getSectionNames() { return sectionIndex.elements(); } } ManifestException.java000066400000000000000000000020051145404360500347410ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/jarpackage org.codehaus.plexus.archiver.jar; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Exception thrown indicating problems in a JAR Manifest */ public class ManifestException extends Exception { /** * Constructs an exception with the given descriptive message. * * @param msg Description of or information about the exception. 
*/ public ManifestException( String msg ) { super( msg ); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/manager/000077500000000000000000000000001145404360500313715ustar00rootroot00000000000000ArchiverManager.java000066400000000000000000000030711145404360500352140ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/managerpackage org.codehaus.plexus.archiver.manager; /* * Copyright 2001,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ import java.io.File; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.UnArchiver; import org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection; /** * @author dantran * @version $Revision: */ public interface ArchiverManager { String ROLE = ArchiverManager.class.getName(); Archiver getArchiver( String archiverName ) throws NoSuchArchiverException; Archiver getArchiver( File file ) throws NoSuchArchiverException; UnArchiver getUnArchiver( String unArchiverName ) throws NoSuchArchiverException; UnArchiver getUnArchiver( File file ) throws NoSuchArchiverException; PlexusIoResourceCollection getResourceCollection( File file ) throws NoSuchArchiverException; PlexusIoResourceCollection getResourceCollection( String unArchiverName ) throws NoSuchArchiverException; } DefaultArchiverManager.java000066400000000000000000000100151145404360500365150ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/managerpackage org.codehaus.plexus.archiver.manager; /* * Copyright 2001,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ import java.io.File; import org.codehaus.plexus.PlexusConstants; import org.codehaus.plexus.PlexusContainer; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.UnArchiver; import org.codehaus.plexus.component.repository.exception.ComponentLookupException; import org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection; import org.codehaus.plexus.context.Context; import org.codehaus.plexus.context.ContextException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Contextualizable; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.StringUtils; /** * @author dantran * @version $Revision: */ public class DefaultArchiverManager implements ArchiverManager, Contextualizable { private PlexusContainer container; // ---------------------------------------------------------------------- // Component Lifecycle // ---------------------------------------------------------------------- public void contextualize( Context context ) throws ContextException { container = (PlexusContainer) context.get( PlexusConstants.PLEXUS_KEY ); } public Archiver getArchiver( String archiverName ) throws NoSuchArchiverException { try { return (Archiver) container.lookup( Archiver.ROLE, archiverName ); } catch ( ComponentLookupException e ) { throw new NoSuchArchiverException( archiverName ); } } public UnArchiver getUnArchiver( String unArchiverName ) throws NoSuchArchiverException { try { return (UnArchiver) container.lookup( UnArchiver.ROLE, unArchiverName ); } catch ( ComponentLookupException e ) { throw new NoSuchArchiverException( unArchiverName ); } } public PlexusIoResourceCollection getResourceCollection( String resourceCollectionName ) throws NoSuchArchiverException { try { return (PlexusIoResourceCollection) container.lookup( PlexusIoResourceCollection.ROLE, resourceCollectionName ); } catch ( ComponentLookupException e ) { throw new NoSuchArchiverException( resourceCollectionName ); } } private static 
String getFileExtention ( File file ) { String path = file.getAbsolutePath(); String archiveExt = FileUtils.getExtension( path ).toLowerCase(); if ( "gz".equals( archiveExt ) || "bz2".equals( archiveExt ) ) { String [] tokens = StringUtils.split( path, "." ); if ( tokens.length > 2 && "tar".equals( tokens[tokens.length -2].toLowerCase() ) ) { archiveExt = "tar." + archiveExt; } } return archiveExt; } public Archiver getArchiver( File file ) throws NoSuchArchiverException { return getArchiver( getFileExtention( file ) ); } public UnArchiver getUnArchiver( File file ) throws NoSuchArchiverException { return getUnArchiver( getFileExtention( file ) ); } public PlexusIoResourceCollection getResourceCollection( File file ) throws NoSuchArchiverException { return getResourceCollection( getFileExtention( file ) ); } }NoSuchArchiverException.java000066400000000000000000000020661145404360500367230ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/managerpackage org.codehaus.plexus.archiver.manager; /* * Copyright 2001,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /** * @author dantran * @version $Revision: */ public class NoSuchArchiverException extends Exception { private String archiverName; public NoSuchArchiverException( String archiverName ) { super( "No such archiver: '" + archiverName + "'." 
); this.archiverName = archiverName; } public String getArchiver() { return archiverName; } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tar/000077500000000000000000000000001145404360500305455ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tar/BZip2TarFile.java000066400000000000000000000011451145404360500336060ustar00rootroot00000000000000package org.codehaus.plexus.archiver.tar; import java.io.File; import java.io.IOException; import java.io.InputStream; import org.codehaus.plexus.archiver.bzip2.BZip2UnArchiver; /** * Extension of {@link TarFile} for bzip2 compressed files. */ public class BZip2TarFile extends TarFile { /** * Creates a new instance with the given file. */ public BZip2TarFile( File file ) { super( file ); } protected InputStream getInputStream( File file ) throws IOException { return BZip2UnArchiver.getBZip2InputStream( super.getInputStream( file ) ); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tar/GZipTarFile.java000066400000000000000000000013061145404360500335300ustar00rootroot00000000000000package org.codehaus.plexus.archiver.tar; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.zip.GZIPInputStream; /** * Extension of {@link TarFile} for bzip2 compressed files. */ public class GZipTarFile extends TarFile { /** * Creates a new instance with the given file. 
*/ public GZipTarFile( File file ) { super( file ); } protected InputStream getInputStream( File file ) throws IOException { return new GZIPInputStream( super.getInputStream( file ) ){ public void close() throws IOException { super.close(); } }; } } PlexusIoTarBZip2FileResourceCollection.java000066400000000000000000000004171145404360500407450ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; import java.io.File; public class PlexusIoTarBZip2FileResourceCollection extends PlexusIoTarFileResourceCollection { protected TarFile newTarFile( File file ) { return new BZip2TarFile( file ); } } PlexusIoTarFileResourceCollection.java000066400000000000000000000026341145404360500401010ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; import java.io.File; import java.io.IOException; import java.util.Enumeration; import java.util.Iterator; import org.codehaus.plexus.components.io.resources.AbstractPlexusIoArchiveResourceCollection; public class PlexusIoTarFileResourceCollection extends AbstractPlexusIoArchiveResourceCollection { /** * The zip file resource collections role hint. */ public static final String ROLE_HINT = "tar"; protected TarFile newTarFile( File file ) { return new TarFile( file ); } protected Iterator getEntries() throws IOException { final File f = getFile(); if ( f == null ) { throw new IOException( "The tar archive file has not been set." ); } final TarFile tarFile = newTarFile( f ); final Enumeration en = tarFile.getEntries(); return new Iterator(){ public boolean hasNext() { return en.hasMoreElements(); } public Object next() { final TarEntry entry = (TarEntry) en.nextElement(); final TarResource res = new TarResource( tarFile, entry ); return res; } public void remove() { throw new UnsupportedOperationException( "Removing isn't implemented." 
); } }; } } PlexusIoTarGZipFileResourceCollection.java000066400000000000000000000004101145404360500406610ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; import java.io.File; public class PlexusIoTarGZipFileResourceCollection extends PlexusIoTarFileResourceCollection { protected TarFile newTarFile( File file ) { return new GZipTarFile( file ); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tar/TarArchiver.java000066400000000000000000000423041145404360500336250ustar00rootroot00000000000000package org.codehaus.plexus.archiver.tar; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.zip.GZIPOutputStream; import org.codehaus.plexus.archiver.AbstractArchiver; import org.codehaus.plexus.archiver.ArchiveEntry; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.ResourceIterator; import org.codehaus.plexus.archiver.UnixStat; import org.codehaus.plexus.archiver.bzip2.CBZip2OutputStream; import org.codehaus.plexus.archiver.util.EnumeratedAttribute; import org.codehaus.plexus.archiver.util.ResourceUtils; import org.codehaus.plexus.components.io.attributes.PlexusIoResourceAttributes; import org.codehaus.plexus.components.io.resources.PlexusIoResource; import org.codehaus.plexus.util.StringUtils; /** * @author Emmanuel Venisse * @version $Revision$ $Date$ */ public class TarArchiver extends AbstractArchiver { /** * Indicates whether the user has been warned about long files already. */ private boolean longWarningGiven = false; private TarLongFileMode longFileMode = new TarLongFileMode(); private TarCompressionMethod compression = new TarCompressionMethod(); private TarOptions options = new TarOptions(); private TarOutputStream tOut; /** * */ public TarOptions getOptions() { return options; } /** * Set all tar options * * @param options options */ public void setOptions( TarOptions options ) { this.options = options; // FIXME: do these options have precedence over // setDefaultFileMode / setDefaultDirMode // or the other way around? Assuming these // take precedende since they're more specific. // Better refactor this when usage is known. setDefaultFileMode( options.getMode() ); setDefaultDirectoryMode( options.getMode() ); } // /** // * Override AbstractArchiver.setDefaultFileMode to // * update TarOptions. 
// */ // public void setDefaultFileMode( int mode ) // { // super.setDefaultFileMode( mode ); // // options.setMode( mode ); // } // // /** // * Override AbstractArchiver.setDefaultDirectoryMode to // * update TarOptions. // */ // public void setDefaultDirectoryMode( int mode ) // { // super.setDefaultDirectoryMode( mode ); // // options.setDirMode( mode ); // } // // /** * Set how to handle long files, those with a path>100 chars. * Optional, default=warn. *

* Allowable values are *

* * @param mode the mode to handle long file names. */ public void setLongfile( TarLongFileMode mode ) { this.longFileMode = mode; } /** * Set compression method. * Allowable values are * * * @param mode the compression method. */ public void setCompression( TarCompressionMethod mode ) { this.compression = mode; } protected void execute() throws ArchiverException, IOException { if ( ! checkForced() ) { return; } ResourceIterator iter = getResources(); if ( !iter.hasNext() ) { throw new ArchiverException( "You must set at least one file." ); } File tarFile = getDestFile(); if ( tarFile == null ) { throw new ArchiverException( "You must set the destination tar file." ); } if ( tarFile.exists() && !tarFile.isFile() ) { throw new ArchiverException( tarFile + " isn't a file." ); } if ( tarFile.exists() && !tarFile.canWrite() ) { throw new ArchiverException( tarFile + " is read-only." ); } getLogger().info( "Building tar : " + tarFile.getAbsolutePath() ); tOut = new TarOutputStream( compression.compress( new BufferedOutputStream( new FileOutputStream( tarFile ) ) ) ); tOut.setDebug( true ); if ( longFileMode.isTruncateMode() ) { tOut.setLongFileMode( TarOutputStream.LONGFILE_TRUNCATE ); } else if ( longFileMode.isFailMode() || longFileMode.isOmitMode() ) { tOut.setLongFileMode( TarOutputStream.LONGFILE_ERROR ); } else { // warn or GNU tOut.setLongFileMode( TarOutputStream.LONGFILE_GNU ); } longWarningGiven = false; while ( iter.hasNext() ) { ArchiveEntry entry = iter.next(); // Check if we don't add tar file in inself if ( ResourceUtils.isSame( entry.getResource(), tarFile ) ) { throw new ArchiverException( "A tar file cannot include itself." 
); } String fileName = entry.getName(); String name = StringUtils.replace( fileName, File.separatorChar, '/' ); tarFile( entry, tOut, name ); } } /** * tar a file * * @param entry the file to tar * @param tOut the output stream * @param vPath the path name of the file to tar * @throws IOException on error */ protected void tarFile( ArchiveEntry entry, TarOutputStream tOut, String vPath ) throws ArchiverException, IOException { InputStream fIn = null; // don't add "" to the archive if ( vPath.length() <= 0 ) { return; } if ( entry.getResource().isDirectory() && !vPath.endsWith( "/" ) ) { vPath += "/"; } if ( vPath.startsWith( "/" ) && !options.getPreserveLeadingSlashes() ) { int l = vPath.length(); if ( l <= 1 ) { // we would end up adding "" to the archive return; } vPath = vPath.substring( 1, l ); } try { if ( vPath.length() >= TarConstants.NAMELEN ) { if ( longFileMode.isOmitMode() ) { getLogger().info( "Omitting: " + vPath ); return; } else if ( longFileMode.isWarnMode() ) { getLogger().warn( "Entry: " + vPath + " longer than " + TarConstants.NAMELEN + " characters." ); if ( !longWarningGiven ) { getLogger().warn( "Resulting tar file can only be processed " + "successfully by GNU compatible tar commands" ); longWarningGiven = true; } } else if ( longFileMode.isFailMode() ) { throw new ArchiverException( "Entry: " + vPath + " longer than " + TarConstants.NAMELEN + "characters." ); } } TarEntry te = new TarEntry( vPath ); long teLastModified = entry.getResource().getLastModified(); te.setModTime( teLastModified == PlexusIoResource.UNKNOWN_MODIFICATION_DATE ? System.currentTimeMillis() : teLastModified ); if ( !entry.getResource().isDirectory() ) { final long size = entry.getResource().getSize(); te.setSize( size == PlexusIoResource.UNKNOWN_RESOURCE_SIZE ? 
0 : size ); te.setMode( entry.getMode() ); } else { te.setMode( entry.getMode() ); } PlexusIoResourceAttributes attributes = entry.getResourceAttributes(); te.setUserName( ( attributes != null && attributes.getUserName() != null ) ? attributes.getUserName() : options.getUserName() ); te.setGroupName( ( attributes != null && attributes.getGroupName() != null ) ? attributes.getGroupName() : options.getGroup() ); te.setUserId( ( attributes != null && attributes.getUserId() != -1 ) ? attributes.getUserId() : options.getUid() ); te.setGroupId( ( attributes != null && attributes.getGroupId() != -1 ) ? attributes.getGroupId() : options.getGid() ); tOut.putNextEntry( te ); if ( !entry.getResource().isDirectory() ) { fIn = entry.getInputStream(); byte[] buffer = new byte[8 * 1024]; int count = 0; do { tOut.write( buffer, 0, count ); count = fIn.read( buffer, 0, buffer.length ); } while ( count != -1 ); } tOut.closeEntry(); } finally { if ( fIn != null ) { fIn.close(); } } } /** * Valid Modes for Compression attribute to Tar Task */ public class TarOptions { /** * @deprecated */ private int fileMode = UnixStat.FILE_FLAG | UnixStat.DEFAULT_FILE_PERM; /** * @deprecated */ private int dirMode = UnixStat.DIR_FLAG | UnixStat.DEFAULT_DIR_PERM; private String userName = ""; private String groupName = ""; private int uid; private int gid; private boolean preserveLeadingSlashes = false; /** * A 3 digit octal string, specify the user, group and * other modes in the standard Unix fashion; * optional, default=0644 * * @param octalString a 3 digit octal string. * @deprecated use AbstractArchiver.setDefaultFileMode(int) */ public void setMode( String octalString ) { setMode( Integer.parseInt( octalString, 8 ) ); } /** * @param mode unix file mode * @deprecated use AbstractArchiver.setDefaultFileMode(int) */ public void setMode( int mode ) { this.fileMode = UnixStat.FILE_FLAG | ( mode & UnixStat.PERM_MASK ); } /** * @return the current mode. 
* @deprecated use AbstractArchiver.getDefaultFileMode() */ public int getMode() { return fileMode; } /** * A 3 digit octal string, specify the user, group and * other modes in the standard Unix fashion; * optional, default=0755 * * @param octalString a 3 digit octal string. * @since Ant 1.6 * @deprecated use AbstractArchiver.setDefaultDirectoryMode(int) */ public void setDirMode( String octalString ) { setDirMode( Integer.parseInt( octalString, 8 ) ); } /** * @param mode unix directory mode * @deprecated use AbstractArchiver.setDefaultDirectoryMode(int) */ public void setDirMode( int mode ) { this.dirMode = UnixStat.DIR_FLAG | ( mode & UnixStat.PERM_MASK ); } /** * @return the current directory mode * @since Ant 1.6 * @deprecated use AbstractArchiver.getDefaultDirectoryMode() */ public int getDirMode() { return dirMode; } /** * The username for the tar entry * This is not the same as the UID. * * @param userName the user name for the tar entry. */ public void setUserName( String userName ) { this.userName = userName; } /** * @return the user name for the tar entry */ public String getUserName() { return userName; } /** * The uid for the tar entry * This is not the same as the User name. * * @param uid the id of the user for the tar entry. */ public void setUid( int uid ) { this.uid = uid; } /** * @return the uid for the tar entry */ public int getUid() { return uid; } /** * The groupname for the tar entry; optional, default="" * This is not the same as the GID. * * @param groupName the group name string. */ public void setGroup( String groupName ) { this.groupName = groupName; } /** * @return the group name string. */ public String getGroup() { return groupName; } /** * The GID for the tar entry; optional, default="0" * This is not the same as the group name. * * @param gid the group id. */ public void setGid( int gid ) { this.gid = gid; } /** * @return the group identifier. */ public int getGid() { return gid; } /** * @return the leading slashes flag. 
*/ public boolean getPreserveLeadingSlashes() { return preserveLeadingSlashes; } /** * Flag to indicates whether leading `/'s should * be preserved in the file names. * Optional, default is false. * * @param preserveLeadingSlashes the leading slashes flag. */ public void setPreserveLeadingSlashes( boolean preserveLeadingSlashes ) { this.preserveLeadingSlashes = preserveLeadingSlashes; } } /** * Valid Modes for Compression attribute to Tar Task */ public static final class TarCompressionMethod extends EnumeratedAttribute { // permissible values for compression attribute /** * No compression */ private static final String NONE = "none"; /** * GZIP compression */ private static final String GZIP = "gzip"; /** * BZIP2 compression */ private static final String BZIP2 = "bzip2"; /** * Default constructor */ public TarCompressionMethod() { super(); try { setValue( NONE ); } catch ( ArchiverException ae ) { //Do nothing } } /** * Get valid enumeration values. * * @return valid enumeration values */ public String[] getValues() { return new String[]{NONE, GZIP, BZIP2}; } /** * This method wraps the output stream with the * corresponding compression method * * @param ostream output stream * @return output stream with on-the-fly compression * @throws IOException thrown if file is not writable */ private OutputStream compress( final OutputStream ostream ) throws IOException { final String value = getValue(); if ( GZIP.equals( value ) ) { return new GZIPOutputStream( ostream ); } else { if ( BZIP2.equals( value ) ) { ostream.write( 'B' ); ostream.write( 'Z' ); return new CBZip2OutputStream( ostream ); } } return ostream; } } public boolean isSupportingForced() { return true; } protected void cleanUp() { super.cleanUp(); tOut = null; } protected void close() throws IOException { if ( tOut != null ) { try { // close up tOut.close(); } catch ( IOException e ) { // ignore } } } protected String getArchiveType() { return "TAR"; } } 
TarBZip2UnArchiver.java000066400000000000000000000030151145404360500347140ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.bzip2.BZip2UnArchiver; import org.codehaus.plexus.archiver.tar.TarUnArchiver.UntarCompressionMethod; import java.io.File; import java.io.IOException; /** * Extract files in tar with bzip2 compression * @author Dan Tran * @version $Revision: $ */ public class TarBZip2UnArchiver extends TarUnArchiver { public TarBZip2UnArchiver() { this.setupCompressionMethod(); } public TarBZip2UnArchiver( File sourceFile ) { super( sourceFile ); this.setupCompressionMethod(); } private void setupCompressionMethod() { UntarCompressionMethod untarCompressionMethod = new UntarCompressionMethod( UntarCompressionMethod.BZIP2 ); this.setCompression( untarCompressionMethod ); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tar/TarBuffer.java000066400000000000000000000307011145404360500332710ustar00rootroot00000000000000package org.codehaus.plexus.archiver.tar; /* * Copyright 2000,2002,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* * This package is based on the work done by Timothy Gerard Endres * (time@ice.com) to whom the Ant project is very grateful for his great code. */ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; /** * The TarBuffer class implements the tar archive concept * of a buffered input stream. This concept goes back to the * days of blocked tape drives and special io devices. In the * Java universe, the only real function that this class * performs is to ensure that files have the correct "block" * size, or other tars will complain. *

* You should never have a need to access this class directly. * TarBuffers are created by Tar IO Streams. * * @version $Revision$ $Date$ * from org.apache.ant.tools.tar.TarBuffer v1.12 */ public class TarBuffer { public static final int DEFAULT_RCDSIZE = ( 512 ); public static final int DEFAULT_BLKSIZE = ( DEFAULT_RCDSIZE * 20 ); private InputStream inStream; private OutputStream outStream; private byte[] blockBuffer; private int currBlkIdx; private int currRecIdx; private int blockSize; private int recordSize; private int recsPerBlock; private boolean debug; public TarBuffer( InputStream inStream ) { this( inStream, TarBuffer.DEFAULT_BLKSIZE ); } public TarBuffer( InputStream inStream, int blockSize ) { this( inStream, blockSize, TarBuffer.DEFAULT_RCDSIZE ); } public TarBuffer( InputStream inStream, int blockSize, int recordSize ) { this.inStream = inStream; this.outStream = null; this.initialize( blockSize, recordSize ); } public TarBuffer( OutputStream outStream ) { this( outStream, TarBuffer.DEFAULT_BLKSIZE ); } public TarBuffer( OutputStream outStream, int blockSize ) { this( outStream, blockSize, TarBuffer.DEFAULT_RCDSIZE ); } public TarBuffer( OutputStream outStream, int blockSize, int recordSize ) { this.inStream = null; this.outStream = outStream; this.initialize( blockSize, recordSize ); } /** * Initialization common to all constructors. */ private void initialize( int blockSize, int recordSize ) { this.debug = false; this.blockSize = blockSize; this.recordSize = recordSize; this.recsPerBlock = ( this.blockSize / this.recordSize ); this.blockBuffer = new byte[this.blockSize]; if ( this.inStream != null ) { this.currBlkIdx = -1; this.currRecIdx = this.recsPerBlock; } else { this.currBlkIdx = 0; this.currRecIdx = 0; } } /** * Get the TAR Buffer's block size. Blocks consist of multiple records. */ public int getBlockSize() { return this.blockSize; } /** * Get the TAR Buffer's record size. 
*/ public int getRecordSize() { return this.recordSize; } /** * Set the debugging flag for the buffer. * * @param debug If true, print debugging output. */ public void setDebug( boolean debug ) { this.debug = debug; } /** * Determine if an archive record indicate End of Archive. End of * archive is indicated by a record that consists entirely of null bytes. * * @param record The record data to check. */ public boolean isEOFRecord( byte[] record ) { for ( int i = 0, sz = this.getRecordSize(); i < sz; ++i ) { if ( record[ i ] != 0 ) { return false; } } return true; } /** * Skip over a record on the input stream. */ public void skipRecord() throws IOException { if ( this.debug ) { System.err.println( "SkipRecord: recIdx = " + this.currRecIdx + " blkIdx = " + this.currBlkIdx ); } if ( this.inStream == null ) { throw new IOException( "reading (via skip) from an output buffer" ); } if ( this.currRecIdx >= this.recsPerBlock ) { if ( !this.readBlock() ) { return; // UNDONE } } this.currRecIdx++; } /** * Read a record from the input stream and return the data. * * @return The record data. 
*/ public byte[] readRecord() throws IOException { if ( this.debug ) { System.err.println( "ReadRecord: recIdx = " + this.currRecIdx + " blkIdx = " + this.currBlkIdx ); } if ( this.inStream == null ) { throw new IOException( "reading from an output buffer" ); } if ( this.currRecIdx >= this.recsPerBlock ) { if ( !this.readBlock() ) { return null; } } byte[] result = new byte[this.recordSize]; System.arraycopy( this.blockBuffer, ( this.currRecIdx * this.recordSize ), result, 0, this.recordSize ); this.currRecIdx++; return result; } /** * @return false if End-Of-File, else true */ private boolean readBlock() throws IOException { if ( this.debug ) { System.err.println( "ReadBlock: blkIdx = " + this.currBlkIdx ); } if ( this.inStream == null ) { throw new IOException( "reading from an output buffer" ); } this.currRecIdx = 0; int offset = 0; int bytesNeeded = this.blockSize; while ( bytesNeeded > 0 ) { long numBytes = this.inStream.read( this.blockBuffer, offset, bytesNeeded ); // // NOTE // We have fit EOF, and the block is not full! // // This is a broken archive. It does not follow the standard // blocking algorithm. However, because we are generous, and // it requires little effort, we will simply ignore the error // and continue as if the entire block were read. This does // not appear to break anything upstream. We used to return // false in this case. // // Thanks to 'Yohann.Roussel@alcatel.fr' for this fix. // if ( numBytes == -1 ) { break; } offset += numBytes; bytesNeeded -= numBytes; if ( numBytes != this.blockSize ) { if ( this.debug ) { System.err.println( "ReadBlock: INCOMPLETE READ " + numBytes + " of " + this.blockSize + " bytes read." ); } } } this.currBlkIdx++; return true; } /** * Get the current block number, zero based. * * @return The current zero based block number. */ public int getCurrentBlockNum() { return this.currBlkIdx; } /** * Get the current record number, within the current block, zero based. 
* Thus, current offset = (currentBlockNum * recsPerBlk) + currentRecNum. * * @return The current zero based record number. */ public int getCurrentRecordNum() { return this.currRecIdx - 1; } /** * Write an archive record to the archive. * * @param record The record data to write to the archive. */ public void writeRecord( byte[] record ) throws IOException { if ( this.debug ) { System.err.println( "WriteRecord: recIdx = " + this.currRecIdx + " blkIdx = " + this.currBlkIdx ); } if ( this.outStream == null ) { throw new IOException( "writing to an input buffer" ); } if ( record.length != this.recordSize ) { throw new IOException( "record to write has length '" + record.length + "' which is not the record size of '" + this.recordSize + "'" ); } if ( this.currRecIdx >= this.recsPerBlock ) { this.writeBlock(); } System.arraycopy( record, 0, this.blockBuffer, ( this.currRecIdx * this.recordSize ), this.recordSize ); this.currRecIdx++; } /** * Write an archive record to the archive, where the record may be * inside of a larger array buffer. The buffer must be "offset plus * record size" long. * * @param buf The buffer containing the record data to write. * @param offset The offset of the record data within buf. */ public void writeRecord( byte[] buf, int offset ) throws IOException { if ( this.debug ) { System.err.println( "WriteRecord: recIdx = " + this.currRecIdx + " blkIdx = " + this.currBlkIdx ); } if ( this.outStream == null ) { throw new IOException( "writing to an input buffer" ); } if ( ( offset + this.recordSize ) > buf.length ) { throw new IOException( "record has length '" + buf.length + "' with offset '" + offset + "' which is less than the record size of '" + this.recordSize + "'" ); } if ( this.currRecIdx >= this.recsPerBlock ) { this.writeBlock(); } System.arraycopy( buf, offset, this.blockBuffer, ( this.currRecIdx * this.recordSize ), this.recordSize ); this.currRecIdx++; } /** * Write a TarBuffer block to the archive. 
*/ private void writeBlock() throws IOException { if ( this.debug ) { System.err.println( "WriteBlock: blkIdx = " + this.currBlkIdx ); } if ( this.outStream == null ) { throw new IOException( "writing to an input buffer" ); } this.outStream.write( this.blockBuffer, 0, this.blockSize ); this.outStream.flush(); this.currRecIdx = 0; this.currBlkIdx++; } /** * Flush the current data block if it has any data in it. */ private void flushBlock() throws IOException { if ( this.debug ) { System.err.println( "TarBuffer.flushBlock() called." ); } if ( this.outStream == null ) { throw new IOException( "writing to an input buffer" ); } if ( this.currRecIdx > 0 ) { this.writeBlock(); } } /** * Close the TarBuffer. If this is an output buffer, also flush the * current block before closing. */ public void close() throws IOException { if ( this.debug ) { System.err.println( "TarBuffer.closeBuffer()." ); } if ( this.outStream != null ) { this.flushBlock(); if ( this.outStream != System.out && this.outStream != System.err ) { this.outStream.close(); this.outStream = null; } } else if ( this.inStream != null ) { if ( this.inStream != System.in ) { this.inStream.close(); this.inStream = null; } } } } TarCompressionMethod.java000066400000000000000000000031101145404360500354350ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; /* * Copyright 2000-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. * */ import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.util.EnumeratedAttribute; /** * Valid Modes for Compression attribute to Tar Task */ public final class TarCompressionMethod extends EnumeratedAttribute { // permissible values for compression attribute /** * No compression */ private static final String NONE = "none"; /** * GZIP compression */ private static final String GZIP = "gzip"; /** * BZIP2 compression */ private static final String BZIP2 = "bzip2"; /** * Default constructor */ public TarCompressionMethod() throws ArchiverException { super(); setValue( NONE ); } /** * Get valid enumeration values. * * @return valid enumeration values */ public String[] getValues() { return new String[]{NONE, GZIP, BZIP2}; } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tar/TarConstants.java000066400000000000000000000065111145404360500340360ustar00rootroot00000000000000package org.codehaus.plexus.archiver.tar; /* * Copyright 2000-2002,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* * This package is based on the work done by Timothy Gerard Endres * (time@ice.com) to whom the Ant project is very grateful for his great code. */ /** * This interface contains all the definitions used in the package. 
 *
 * @version $Revision$ $Date$
 * from org.apache.ant.tools.tar.TarConstants v1.13
 */
public interface TarConstants
{
    /**
     * The length of the name field in a header buffer.
     */
    int NAMELEN = 100;

    /**
     * The length of the mode field in a header buffer.
     */
    int MODELEN = 8;

    /**
     * The length of the user id field in a header buffer.
     */
    int UIDLEN = 8;

    /**
     * The length of the group id field in a header buffer.
     */
    int GIDLEN = 8;

    /**
     * The length of the checksum field in a header buffer.
     */
    int CHKSUMLEN = 8;

    /**
     * The length of the size field in a header buffer.
     */
    int SIZELEN = 12;

    /**
     * The length of the magic field in a header buffer.
     */
    int MAGICLEN = 8;

    /**
     * The length of the modification time field in a header buffer.
     */
    int MODTIMELEN = 12;

    /**
     * The length of the user name field in a header buffer.
     */
    int UNAMELEN = 32;

    /**
     * The length of the group name field in a header buffer.
     */
    int GNAMELEN = 32;

    /**
     * The length of the devices field in a header buffer.
     */
    int DEVLEN = 8;

    /**
     * LF_ constants represent the "link flag" of an entry, or more commonly,
     * the "entry type". This is the "old way" of indicating a normal file.
     */
    byte LF_OLDNORM = 0;

    /**
     * Normal file type.
     */
    byte LF_NORMAL = (byte) '0';

    /**
     * Link file type.
     */
    byte LF_LINK = (byte) '1';

    /**
     * Symbolic link file type.
     */
    byte LF_SYMLINK = (byte) '2';

    /**
     * Character device file type.
     */
    byte LF_CHR = (byte) '3';

    /**
     * Block device file type.
     */
    byte LF_BLK = (byte) '4';

    /**
     * Directory file type.
     */
    byte LF_DIR = (byte) '5';

    /**
     * FIFO (pipe) file type.
     */
    byte LF_FIFO = (byte) '6';

    /**
     * Contiguous file type.
     */
    byte LF_CONTIG = (byte) '7';

    /**
     * The magic tag representing a POSIX tar archive.
     */
    String TMAGIC = "ustar";

    /**
     * The magic tag representing a GNU tar archive.
     */
    String GNU_TMAGIC = "ustar  ";

    /**
     * The name of the GNU tar entry which contains a long name.
     */
    String GNU_LONGLINK = "././@LongLink";

    /**
     * Identifies the *next* file on the tape as having a long name.
*/ byte LF_GNUTYPE_LONGNAME = (byte) 'L'; } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tar/TarEntry.java000066400000000000000000000457011145404360500331670ustar00rootroot00000000000000package org.codehaus.plexus.archiver.tar; /* * Copyright 2000-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* * This package is based on the work done by Timothy Gerard Endres * (time@ice.com) to whom the Ant project is very grateful for his great code. */ import java.io.File; import java.util.Date; import java.util.Locale; import org.codehaus.plexus.archiver.ArchiveFile; /** * This class represents an entry in a Tar archive. It consists * of the entry's header, as well as the entry's File. Entries * can be instantiated in one of three ways, depending on how * they are to be used. *

* TarEntries that are created from the header bytes read from * an archive are instantiated with the TarEntry( byte[] ) * constructor. These entries will be used when extracting from * or listing the contents of an archive. These entries have their * header filled in using the header bytes. They also set the File * to null, since they reference an archive entry not a file. *

* TarEntries that are created from Files that are to be written * into an archive are instantiated with the TarEntry( File ) * constructor. These entries have their header filled in using * the File's information. They also keep a reference to the File * for convenience when writing entries. *

* Finally, TarEntries can be constructed from nothing but a name. * This allows the programmer to construct the entry by hand, for * instance when only an InputStream is available for writing to * the archive, and the header information is constructed from * other information. In this case the header fields are set to * defaults and the File is set to null. *

*

* The C structure for a Tar Entry's header is: *

 * struct header {
 * char name[NAMSIZ];
 * char mode[8];
 * char uid[8];
 * char gid[8];
 * char size[12];
 * char mtime[12];
 * char chksum[8];
 * char linkflag;
 * char linkname[NAMSIZ];
 * char magic[8];
 * char uname[TUNMLEN];
 * char gname[TGNMLEN];
 * char devmajor[8];
 * char devminor[8];
 * } header;
 * 
* * @version $Revision$ $Date$ * from org.apache.ant.tools.tar.TarEntry v1.22 */ public class TarEntry implements TarConstants, ArchiveFile.Entry { /** * The entry's name. */ private StringBuffer name; /** * The entry's permission mode. */ private int mode; /** * The entry's user id. */ private int userId; /** * The entry's group id. */ private int groupId; /** * The entry's size. */ private long size; /** * The entry's modification time. */ private long modTime; /** * The entry's checksum. */ private int checkSum; /** * The entry's link flag. */ private byte linkFlag; /** * The entry's link name. */ private StringBuffer linkName; /** * The entry's magic tag. */ private StringBuffer magic; /** * The entry's user name. */ private StringBuffer userName; /** * The entry's group name. */ private StringBuffer groupName; /** * The entry's major device number. */ private int devMajor; /** * The entry's minor device number. */ private int devMinor; /** * The entry's file reference */ private File file; /** * Maximum length of a user's name in the tar file */ public static final int MAX_NAMELEN = 31; /** * Default permissions bits for directories */ public static final int DEFAULT_DIR_MODE = 040755; /** * Default permissions bits for files */ public static final int DEFAULT_FILE_MODE = 0100644; /** * Convert millis to seconds */ public static final int MILLIS_PER_SECOND = 1000; /** * Construct an empty entry and prepares the header values. */ private TarEntry() { this.magic = new StringBuffer( TMAGIC ); this.name = new StringBuffer(); this.linkName = new StringBuffer(); String user = System.getProperty( "user.name", "" ); if ( user.length() > MAX_NAMELEN ) { user = user.substring( 0, MAX_NAMELEN ); } this.userId = 0; this.groupId = 0; this.userName = new StringBuffer( user ); this.groupName = new StringBuffer( "" ); this.file = null; } /** * Construct an entry with only a name. This allows the programmer * to construct the entry's header "by hand". File is set to null. 
* * @param name the entry name */ public TarEntry( String name ) { this(); boolean isDir = name.endsWith( "/" ); this.devMajor = 0; this.devMinor = 0; this.name = new StringBuffer( name ); this.mode = isDir ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE; this.linkFlag = isDir ? LF_DIR : LF_NORMAL; this.userId = 0; this.groupId = 0; this.size = 0; this.modTime = ( new Date() ).getTime() / MILLIS_PER_SECOND; this.linkName = new StringBuffer( "" ); this.userName = new StringBuffer( "" ); this.groupName = new StringBuffer( "" ); this.devMajor = 0; this.devMinor = 0; } /** * Construct an entry with a name an a link flag. * * @param name the entry name * @param linkFlag the entry link flag. */ public TarEntry( String name, byte linkFlag ) { this( name ); this.linkFlag = linkFlag; } /** * Construct an entry for a file. File is set to file, and the * header is constructed from information from the file. * * @param file The file that the entry represents. */ public TarEntry( File file ) { this(); this.file = file; String name = file.getPath(); String osname = System.getProperty( "os.name" ).toLowerCase( Locale.US ); if ( osname != null ) { // Strip off drive letters! // REVIEW Would a better check be "(File.separator == '\')"? if ( osname.startsWith( "windows" ) ) { if ( name.length() > 2 ) { char ch1 = name.charAt( 0 ); char ch2 = name.charAt( 1 ); if ( ch2 == ':' && ( ( ch1 >= 'a' && ch1 <= 'z' ) || ( ch1 >= 'A' && ch1 <= 'Z' ) ) ) { name = name.substring( 2 ); } } } else if ( osname.indexOf( "netware" ) > -1 ) { int colon = name.indexOf( ':' ); if ( colon != -1 ) { name = name.substring( colon + 1 ); } } } name = name.replace( File.separatorChar, '/' ); // No absolute pathnames // Windows (and Posix?) paths can start with "\\NetworkDrive\", // so we loop on starting /'s. 
while ( name.startsWith( "/" ) ) { name = name.substring( 1 ); } this.linkName = new StringBuffer( "" ); this.name = new StringBuffer( name ); if ( file.isDirectory() ) { this.mode = DEFAULT_DIR_MODE; this.linkFlag = LF_DIR; if ( this.name.charAt( this.name.length() - 1 ) != '/' ) { this.name.append( "/" ); } } else { this.mode = DEFAULT_FILE_MODE; this.linkFlag = LF_NORMAL; } this.size = file.length(); this.modTime = file.lastModified() / MILLIS_PER_SECOND; this.devMajor = 0; this.devMinor = 0; } /** * Construct an entry from an archive's header bytes. File is set * to null. * * @param headerBuf The header bytes from a tar archive entry. */ public TarEntry( byte[] headerBuf ) { this(); this.parseTarHeader( headerBuf ); } /** * Determine if the two entries are equal. Equality is determined * by the header names being equal. * * @param it Entry to be checked for equality. * @return True if the entries are equal. */ public boolean equals( TarEntry it ) { return this.getName().equals( it.getName() ); } /** * Determine if the two entries are equal. Equality is determined * by the header names being equal. * * @param it Entry to be checked for equality. * @return True if the entries are equal. */ public boolean equals( Object it ) { if ( it == null || getClass() != it.getClass() ) { return false; } return equals( (TarEntry) it ); } /** * Hashcodes are based on entry names. * * @return the entry hashcode */ public int hashCode() { return getName().hashCode(); } /** * Determine if the given entry is a descendant of this entry. * Descendancy is determined by the name of the descendant * starting with this entry's name. * * @param desc Entry to be checked as a descendent of this. * @return True if entry is a descendant of this. */ public boolean isDescendent( TarEntry desc ) { return desc.getName().startsWith( this.getName() ); } /** * Get this entry's name. * * @return This entry's name. 
     */
    public String getName()
    {
        return this.name.toString();
    }

    /**
     * Set this entry's name.
     *
     * @param name This entry's new name.
     */
    public void setName( String name )
    {
        this.name = new StringBuffer( name );
    }

    /**
     * Set the mode for this entry
     *
     * @param mode the mode for this entry
     */
    public void setMode( int mode )
    {
        this.mode = mode;
    }

    /**
     * Get this entry's link name.
     *
     * @return This entry's link name.
     */
    public String getLinkName()
    {
        return this.linkName.toString();
    }

    /**
     * Get this entry's user id.
     *
     * @return This entry's user id.
     */
    public int getUserId()
    {
        return this.userId;
    }

    /**
     * Set this entry's user id.
     *
     * @param userId This entry's new user id.
     */
    public void setUserId( int userId )
    {
        this.userId = userId;
    }

    /**
     * Get this entry's group id.
     *
     * @return This entry's group id.
     */
    public int getGroupId()
    {
        return this.groupId;
    }

    /**
     * Set this entry's group id.
     *
     * @param groupId This entry's new group id.
     */
    public void setGroupId( int groupId )
    {
        this.groupId = groupId;
    }

    /**
     * Get this entry's user name.
     *
     * @return This entry's user name.
     */
    public String getUserName()
    {
        return this.userName.toString();
    }

    /**
     * Set this entry's user name.
     *
     * @param userName This entry's new user name.
     */
    public void setUserName( String userName )
    {
        this.userName = new StringBuffer( userName );
    }

    /**
     * Get this entry's group name.
     *
     * @return This entry's group name.
     */
    public String getGroupName()
    {
        return this.groupName.toString();
    }

    /**
     * Set this entry's group name.
     *
     * @param groupName This entry's new group name.
     */
    public void setGroupName( String groupName )
    {
        this.groupName = new StringBuffer( groupName );
    }

    /**
     * Convenience method to set this entry's group and user ids.
     *
     * @param userId This entry's new user id.
     * @param groupId This entry's new group id.
     */
    public void setIds( int userId, int groupId )
    {
        this.setUserId( userId );
        this.setGroupId( groupId );
    }

    /**
     * Convenience method to set this entry's group and user names.
     *
     * @param userName This entry's new user name.
     * @param groupName This entry's new group name.
     */
    public void setNames( String userName, String groupName )
    {
        this.setUserName( userName );
        this.setGroupName( groupName );
    }

    /**
     * Set this entry's modification time. The parameter passed
     * to this method is in "Java time".
     *
     * @param time This entry's new modification time.
     */
    public void setModTime( long time )
    {
        // Tar stores modification times in seconds, not milliseconds.
        this.modTime = time / MILLIS_PER_SECOND;
    }

    /**
     * Set this entry's modification time.
     *
     * @param time This entry's new modification time.
     */
    public void setModTime( Date time )
    {
        this.modTime = time.getTime() / MILLIS_PER_SECOND;
    }

    /**
     * Get this entry's modification time.
     *
     * @return This entry's modification time.
     */
    public Date getModTime()
    {
        return new Date( this.modTime * MILLIS_PER_SECOND );
    }

    /**
     * Get this entry's checksum.
     *
     * @return This entry's checksum, as parsed from the header.
     */
    public int getChecksum()
    {
        return checkSum;
    }

    /**
     * Get this entry's file.
     *
     * @return This entry's file.
     */
    public File getFile()
    {
        return this.file;
    }

    /**
     * Get this entry's mode.
     *
     * @return This entry's mode.
     */
    public int getMode()
    {
        return this.mode;
    }

    /**
     * Get this entry's file size.
     *
     * @return This entry's file size.
     */
    public long getSize()
    {
        return this.size;
    }

    /**
     * Set this entry's file size.
     *
     * @param size This entry's new file size.
     */
    public void setSize( long size )
    {
        this.size = size;
    }

    /**
     * Indicate if this entry is a GNU long name block
     *
     * @return true if this is a long name extension provided by GNU tar
     */
    public boolean isGNULongNameEntry()
    {
        return linkFlag == LF_GNUTYPE_LONGNAME
            && name.toString().equals( GNU_LONGLINK );
    }

    /**
     * Return whether or not this entry represents a directory.
     *
     * @return True if this entry is a directory.
*/ public boolean isDirectory() { if ( this.file != null ) { return this.file.isDirectory(); } if ( this.linkFlag == LF_DIR ) { return true; } if ( this.getName().endsWith( "/" ) ) { return true; } return false; } /** * If this entry represents a file, and the file is a directory, return * an array of TarEntries for this entry's children. * * @return An array of TarEntry's for this entry's children. */ public TarEntry[] getDirectoryEntries() { if ( this.file == null || !this.file.isDirectory() ) { return new TarEntry[0]; } String[] list = this.file.list(); TarEntry[] result = new TarEntry[list.length]; for ( int i = 0; i < list.length; ++i ) { result[ i ] = new TarEntry( new File( this.file, list[ i ] ) ); } return result; } /** * Write an entry's header information to a header buffer. * * @param outbuf The tar entry header buffer to fill in. */ public void writeEntryHeader( byte[] outbuf ) { int offset = 0; offset = TarUtils.getNameBytes( this.name, outbuf, offset, NAMELEN ); offset = TarUtils.getOctalBytes( this.mode, outbuf, offset, MODELEN ); offset = TarUtils.getOctalBytes( this.userId, outbuf, offset, UIDLEN ); offset = TarUtils.getOctalBytes( this.groupId, outbuf, offset, GIDLEN ); offset = TarUtils.getLongOctalBytes( this.size, outbuf, offset, SIZELEN ); offset = TarUtils.getLongOctalBytes( this.modTime, outbuf, offset, MODTIMELEN ); int csOffset = offset; for ( int c = 0; c < CHKSUMLEN; ++c ) { outbuf[ offset++ ] = (byte) ' '; } outbuf[ offset++ ] = this.linkFlag; offset = TarUtils.getNameBytes( this.linkName, outbuf, offset, NAMELEN ); offset = TarUtils.getNameBytes( this.magic, outbuf, offset, MAGICLEN ); offset = TarUtils.getNameBytes( this.userName, outbuf, offset, UNAMELEN ); offset = TarUtils.getNameBytes( this.groupName, outbuf, offset, GNAMELEN ); offset = TarUtils.getOctalBytes( this.devMajor, outbuf, offset, DEVLEN ); offset = TarUtils.getOctalBytes( this.devMinor, outbuf, offset, DEVLEN ); while ( offset < outbuf.length ) { outbuf[ offset++ ] = 
0; } long checkSum = TarUtils.computeCheckSum( outbuf ); TarUtils.getCheckSumOctalBytes( checkSum, outbuf, csOffset, CHKSUMLEN ); } /** * Parse an entry's header information from a header buffer. * * @param header The tar entry header buffer to get information from. */ public void parseTarHeader( byte[] header ) { int offset = 0; this.name = TarUtils.parseName( header, offset, NAMELEN ); offset += NAMELEN; this.mode = (int) TarUtils.parseOctal( header, offset, MODELEN ); offset += MODELEN; this.userId = (int) TarUtils.parseOctal( header, offset, UIDLEN ); offset += UIDLEN; this.groupId = (int) TarUtils.parseOctal( header, offset, GIDLEN ); offset += GIDLEN; this.size = TarUtils.parseOctal( header, offset, SIZELEN ); offset += SIZELEN; this.modTime = TarUtils.parseOctal( header, offset, MODTIMELEN ); offset += MODTIMELEN; this.checkSum = (int) TarUtils.parseOctal( header, offset, CHKSUMLEN ); offset += CHKSUMLEN; this.linkFlag = header[ offset++ ]; this.linkName = TarUtils.parseName( header, offset, NAMELEN ); offset += NAMELEN; this.magic = TarUtils.parseName( header, offset, MAGICLEN ); offset += MAGICLEN; this.userName = TarUtils.parseName( header, offset, UNAMELEN ); offset += UNAMELEN; this.groupName = TarUtils.parseName( header, offset, GNAMELEN ); offset += GNAMELEN; this.devMajor = (int) TarUtils.parseOctal( header, offset, DEVLEN ); offset += DEVLEN; this.devMinor = (int) TarUtils.parseOctal( header, offset, DEVLEN ); } public long getLastModificationTime() { return modTime == 0 ? 
-1 : (modTime * MILLIS_PER_SECOND); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tar/TarFile.java000066400000000000000000000133641145404360500327450ustar00rootroot00000000000000package org.codehaus.plexus.archiver.tar; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.FilterInputStream; import java.lang.reflect.UndeclaredThrowableException; import java.util.Enumeration; import java.util.NoSuchElementException; import org.codehaus.plexus.archiver.ArchiveFile; /** *

Implementation of {@link ArchiveFile} for tar files.

*

Compared to
 * {@link ZipFile}, this one should be used with some care, due to the
 * nature of a tar file: While a zip file contains a catalog, a tar
 * file does not. In other words, the only way to read a tar file
 * efficiently is by iterating over it from the beginning to
 * the end. If you try to open an entry other than the "next" entry,
 * you force the reader to skip entries until the requested entry is found.
 * This may require rereading the entire file!

*

In other words, the recommended use of this class is to use * {@link #getEntries()} and invoke {@link #getInputStream(TarEntry)} * only for the current entry. Basically, this is to handle it like * {@link TarInputStream}.

*

The advantage of this class is that you may write code for the * {@link ArchiveFile}, which is valid for both tar files and zip files.

*/ public class TarFile implements ArchiveFile { private final java.io.File file; private TarInputStream inputStream; private TarEntry currentEntry; /** * Creates a new instance with the given file. */ public TarFile( File file ) { this.file = file; } /** * Implementation of {@link ArchiveFile#getEntries()}. Note, that there is * an interaction between this method and {@link #getInputStream(TarEntry)}, * or {@link #getInputStream(org.codehaus.plexus.archiver.ArchiveFile.Entry)}: * If an input stream is opened for any other entry than the enumerations * current entry, then entries may be skipped. */ public Enumeration getEntries() throws IOException { if ( inputStream != null ) { close(); } open(); return new Enumeration() { boolean currentEntryValid; public boolean hasMoreElements() { if ( !currentEntryValid ) { try { currentEntry = inputStream.getNextEntry(); } catch ( IOException e ) { throw new UndeclaredThrowableException( e ); } } return currentEntry != null; } public Object nextElement() { if ( currentEntry == null ) { throw new NoSuchElementException(); } currentEntryValid = false; return currentEntry; } }; } public void close() throws IOException { if ( inputStream != null ) { inputStream.close(); inputStream = null; } } public InputStream getInputStream( Entry entry ) throws IOException { return getInputStream( (TarEntry) entry ); } /** * Returns an {@link InputStream} with the given entries * contents. This {@link InputStream} may be closed: Nothing * happens in that case, because an actual close would invalidate * the underlying {@link TarInputStream}. */ public InputStream getInputStream( TarEntry entry ) throws IOException { if ( entry.equals( (Object) currentEntry ) && inputStream != null ) { return new FilterInputStream( inputStream ) { public void close() throws IOException { // Does nothing. 
} }; } return getInputStream( entry, currentEntry ); } protected InputStream getInputStream( File file ) throws IOException { return new FileInputStream( file ); } private InputStream getInputStream( TarEntry entry, TarEntry currentEntry ) throws IOException { if ( currentEntry == null || inputStream == null ) { // Search for the entry from the beginning of the file to the end. if ( inputStream != null ) { close(); } open(); if ( !findEntry( entry, null ) ) { throw new IOException( "Unknown entry: " + entry.getName() ); } } else { // Search for the entry from the current position to the end of the file. if ( findEntry( entry, null ) ) { return getInputStream( entry ); } close(); open(); if ( !findEntry( entry, currentEntry ) ) { throw new IOException( "No such entry: " + entry.getName() ); } } return getInputStream( entry ); } private void open() throws IOException { inputStream = new TarInputStream( getInputStream( file ) ); } private boolean findEntry( TarEntry entry, TarEntry currentEntry) throws IOException { for (;;) { this.currentEntry = inputStream.getNextEntry(); if ( this.currentEntry == null || (currentEntry != null && this.currentEntry.equals( currentEntry ) ) ) { return false; } if ( this.currentEntry.equals( entry ) ) { return true; } } } } TarGZipUnArchiver.java000066400000000000000000000024531145404360500346440ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ import java.io.File; /** * Extract files in tar with gzip compression * @author Dan Tran * @version $Revision: $ */ public class TarGZipUnArchiver extends TarUnArchiver { public TarGZipUnArchiver() { this.setupCompressionMethod(); } public TarGZipUnArchiver( File sourceFile ) { super( sourceFile ); this.setupCompressionMethod(); } private void setupCompressionMethod() { UntarCompressionMethod untarCompressionMethod = new UntarCompressionMethod( UntarCompressionMethod.GZIP ); this.setCompression( untarCompressionMethod ); } } TarInputStream.java000066400000000000000000000271461145404360500342650ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; /* * Copyright 2000-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* * This package is based on the work done by Timothy Gerard Endres * (time@ice.com) to whom the Ant project is very grateful for his great code. */ import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; /** * The TarInputStream reads a UNIX tar archive as an InputStream. * methods are provided to position at each successive entry in * the archive, and the read each entry as a normal input stream * using read(). 
* * @version $Revision$ $Date$ * from org.apache.ant.tools.tar.TarInputStream v1.16 */ public class TarInputStream extends FilterInputStream { protected boolean debug; protected boolean hasHitEOF; protected int entrySize; protected int entryOffset; protected byte[] oneBuf; protected byte[] readBuf; protected TarBuffer buffer; protected TarEntry currEntry; public TarInputStream( InputStream is ) { this( is, TarBuffer.DEFAULT_BLKSIZE, TarBuffer.DEFAULT_RCDSIZE ); } public TarInputStream( InputStream is, int blockSize ) { this( is, blockSize, TarBuffer.DEFAULT_RCDSIZE ); } public TarInputStream( InputStream is, int blockSize, int recordSize ) { super( is ); this.buffer = new TarBuffer( is, blockSize, recordSize ); this.readBuf = null; this.oneBuf = new byte[1]; this.debug = false; this.hasHitEOF = false; } /** * Sets the debugging flag. * * @param debug True to turn on debugging. */ public void setDebug( boolean debug ) { this.debug = debug; this.buffer.setDebug( debug ); } /** * Closes this stream. Calls the TarBuffer's close() method. */ public void close() throws IOException { this.buffer.close(); } /** * Get the record size being used by this stream's TarBuffer. * * @return The TarBuffer record size. */ public int getRecordSize() { return this.buffer.getRecordSize(); } /** * Get the available data that can be read from the current * entry in the archive. This does not indicate how much data * is left in the entire archive, only in the current entry. * This value is determined from the entry's size header field * and the amount of data already read from the current entry. * * @return The number of available bytes for the current entry. */ public int available() throws IOException { return this.entrySize - this.entryOffset; } /** * Skip bytes in the input buffer. This skips bytes in the * current entry's data, not the entire archive, and will * stop at the end of the current entry's data if the number * to skip extends beyond that point. 
* * @param numToSkip The number of bytes to skip. */ public long skip( long numToSkip ) throws IOException { // REVIEW // This is horribly inefficient, but it ensures that we // properly skip over bytes via the TarBuffer... // byte[] skipBuf = new byte[8 * 1024]; long skip = numToSkip; while ( skip > 0 ) { int realSkip = (int) ( skip > skipBuf.length ? skipBuf.length : skip ); int numRead = this.read( skipBuf, 0, realSkip ); if ( numRead == -1 ) { break; } skip -= numRead; } return ( numToSkip - skip ); } /** * Since we do not support marking just yet, we return false. * * @return False. */ public boolean markSupported() { return false; } /** * Since we do not support marking just yet, we do nothing. * * @param markLimit The limit to mark. */ public void mark( int markLimit ) { } /** * Since we do not support marking just yet, we do nothing. */ public void reset() { } /** * Get the next entry in this tar archive. This will skip * over any remaining data in the current entry, if there * is one, and place the input stream at the header of the * next entry, and read the header and instantiate a new * TarEntry from the header bytes and return that entry. * If there are no more entries in the archive, null will * be returned to indicate that the end of the archive has * been reached. * * @return The next TarEntry in the archive, or null. 
*/ public TarEntry getNextEntry() throws IOException { if ( this.hasHitEOF ) { return null; } if ( this.currEntry != null ) { int numToSkip = this.entrySize - this.entryOffset; if ( this.debug ) { System.err.println( "TarInputStream: SKIP currENTRY '" + this.currEntry.getName() + "' SZ " + this.entrySize + " OFF " + this.entryOffset + " skipping " + numToSkip + " bytes" ); } if ( numToSkip > 0 ) { this.skip( numToSkip ); } this.readBuf = null; } byte[] headerBuf = this.buffer.readRecord(); if ( headerBuf == null ) { if ( this.debug ) { System.err.println( "READ NULL RECORD" ); } this.hasHitEOF = true; } else if ( this.buffer.isEOFRecord( headerBuf ) ) { if ( this.debug ) { System.err.println( "READ EOF RECORD" ); } this.hasHitEOF = true; } if ( this.hasHitEOF ) { this.currEntry = null; } else { this.currEntry = new TarEntry( headerBuf ); if ( this.debug ) { System.err.println( "TarInputStream: SET CURRENTRY '" + this.currEntry.getName() + "' size = " + this.currEntry.getSize() ); } this.entryOffset = 0; // REVIEW How do we resolve this discrepancy?! this.entrySize = (int) this.currEntry.getSize(); } if ( this.currEntry != null && this.currEntry.isGNULongNameEntry() ) { // read in the name StringBuffer longName = new StringBuffer(); byte[] buffer = new byte[256]; int length; while ( ( length = read( buffer ) ) >= 0 ) { longName.append( new String( buffer, 0, length ) ); } getNextEntry(); // remove trailing null terminator if ( longName.length() > 0 && longName.charAt( longName.length() - 1 ) == 0 ) { longName.deleteCharAt( longName.length() - 1 ); } this.currEntry.setName( longName.toString() ); } return this.currEntry; } /** * Reads a byte from the current tar archive entry. *

* This method simply calls read( byte[], int, int ). * * @return The byte read, or -1 at EOF. */ public int read() throws IOException { int num = this.read( this.oneBuf, 0, 1 ); if ( num == -1 ) { return num; } else { return (int) this.oneBuf[ 0 ]; } } /** * Reads bytes from the current tar archive entry. *

* This method simply calls read( byte[], int, int ). * * @param buf The buffer into which to place bytes read. * @return The number of bytes read, or -1 at EOF. */ public int read( byte[] buf ) throws IOException { return this.read( buf, 0, buf.length ); } /** * Reads bytes from the current tar archive entry. *

* This method is aware of the boundaries of the current * entry in the archive and will deal with them as if they * were this stream's start and EOF. * * @param buf The buffer into which to place bytes read. * @param offset The offset at which to place bytes read. * @param numToRead The number of bytes to read. * @return The number of bytes read, or -1 at EOF. */ public int read( byte[] buf, int offset, int numToRead ) throws IOException { int totalRead = 0; if ( this.entryOffset >= this.entrySize ) { return -1; } if ( ( numToRead + this.entryOffset ) > this.entrySize ) { numToRead = ( this.entrySize - this.entryOffset ); } if ( this.readBuf != null ) { int sz = ( numToRead > this.readBuf.length ) ? this.readBuf.length : numToRead; System.arraycopy( this.readBuf, 0, buf, offset, sz ); if ( sz >= this.readBuf.length ) { this.readBuf = null; } else { int newLen = this.readBuf.length - sz; byte[] newBuf = new byte[newLen]; System.arraycopy( this.readBuf, sz, newBuf, 0, newLen ); this.readBuf = newBuf; } totalRead += sz; numToRead -= sz; offset += sz; } while ( numToRead > 0 ) { byte[] rec = this.buffer.readRecord(); if ( rec == null ) { // Unexpected EOF! throw new IOException( "unexpected EOF with " + numToRead + " bytes unread" ); } int sz = numToRead; int recLen = rec.length; if ( recLen > sz ) { System.arraycopy( rec, 0, buf, offset, sz ); this.readBuf = new byte[recLen - sz]; System.arraycopy( rec, sz, this.readBuf, 0, recLen - sz ); } else { sz = recLen; System.arraycopy( rec, 0, buf, offset, recLen ); } totalRead += sz; numToRead -= sz; offset += sz; } this.entryOffset += totalRead; return totalRead; } /** * Copies the contents of the current tar archive entry directly into * an output stream. * * @param out The OutputStream into which to write the entry's data. 
*/ public void copyEntryContents( OutputStream out ) throws IOException { byte[] buf = new byte[32 * 1024]; while ( true ) { int numRead = this.read( buf, 0, buf.length ); if ( numRead == -1 ) { break; } out.write( buf, 0, numRead ); } } } TarLongFileMode.java000066400000000000000000000045521145404360500343120ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; /* * Copyright 2000-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.util.EnumeratedAttribute; /** * Set of options for long file handling in the task. */ public class TarLongFileMode extends EnumeratedAttribute { /** * permissible values for longfile attribute */ public static final String WARN = "warn" , FAIL = "fail" , TRUNCATE = "truncate" , GNU = "gnu" , OMIT = "omit"; private final String[] validModes = {WARN, FAIL, TRUNCATE, GNU, OMIT}; /** * Constructor, defaults to "warn" */ public TarLongFileMode() { super(); try { setValue( WARN ); } catch ( ArchiverException ae ) { //Do nothing } } /** * @return the possible values for this enumerated type. */ public String[] getValues() { return validModes; } /** * @return true if value is "truncate". */ public boolean isTruncateMode() { return TRUNCATE.equalsIgnoreCase( getValue() ); } /** * @return true if value is "warn". 
*/ public boolean isWarnMode() { return WARN.equalsIgnoreCase( getValue() ); } /** * @return true if value is "gnu". */ public boolean isGnuMode() { return GNU.equalsIgnoreCase( getValue() ); } /** * @return true if value is "fail". */ public boolean isFailMode() { return FAIL.equalsIgnoreCase( getValue() ); } /** * @return true if value is "omit". */ public boolean isOmitMode() { return OMIT.equalsIgnoreCase( getValue() ); } } TarOutputStream.java000066400000000000000000000254371145404360500344670ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; /* * Copyright 2000-2002,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* * This package is based on the work done by Timothy Gerard Endres * (time@ice.com) to whom the Ant project is very grateful for his great code. */ import java.io.FilterOutputStream; import java.io.IOException; import java.io.OutputStream; /** * The TarOutputStream writes a UNIX tar archive as an OutputStream. * Methods are provided to put entries, and then write their contents * by writing to this stream using write(). * * @version $Revision$ $Date$ * from org.apache.ant.tools.tar.TarOutputStream v1.16 */ public class TarOutputStream extends FilterOutputStream { /** * Fail if a long file name is required in the archive. */ public static final int LONGFILE_ERROR = 0; /** * Long paths will be truncated in the archive. 
*/ public static final int LONGFILE_TRUNCATE = 1; /** * GNU tar extensions are used to store long file names in the archive. */ public static final int LONGFILE_GNU = 2; protected boolean debug; protected int currSize; protected int currBytes; protected byte[] oneBuf; protected byte[] recordBuf; protected int assemLen; protected byte[] assemBuf; protected TarBuffer buffer; protected int longFileMode = LONGFILE_ERROR; public TarOutputStream( OutputStream os ) { this( os, TarBuffer.DEFAULT_BLKSIZE, TarBuffer.DEFAULT_RCDSIZE ); } public TarOutputStream( OutputStream os, int blockSize ) { this( os, blockSize, TarBuffer.DEFAULT_RCDSIZE ); } public TarOutputStream( OutputStream os, int blockSize, int recordSize ) { super( os ); this.buffer = new TarBuffer( os, blockSize, recordSize ); this.debug = false; this.assemLen = 0; this.assemBuf = new byte[recordSize]; this.recordBuf = new byte[recordSize]; this.oneBuf = new byte[1]; } public void setLongFileMode( int longFileMode ) { this.longFileMode = longFileMode; } /** * Sets the debugging flag. * * @param debugF True to turn on debugging. */ public void setDebug( boolean debugF ) { this.debug = debugF; } /** * Sets the debugging flag in this stream's TarBuffer. * * @param debug True to turn on debugging. */ public void setBufferDebug( boolean debug ) { this.buffer.setDebug( debug ); } /** * Ends the TAR archive without closing the underlying OutputStream. * The result is that two EOF record of nulls are written. */ public void finish() throws IOException { this.writeEOFRecord(); this.writeEOFRecord(); } /** * Ends the TAR archive and closes the underlying OutputStream. * This means that finish() is called followed by calling the * TarBuffer's close(). */ public void close() throws IOException { this.finish(); this.buffer.close(); } /** * Get the record size being used by this stream's TarBuffer. * * @return The TarBuffer record size. 
*/ public int getRecordSize() { return this.buffer.getRecordSize(); } /** * Put an entry on the output stream. This writes the entry's * header record and positions the output stream for writing * the contents of the entry. Once this method is called, the * stream is ready for calls to write() to write the entry's * contents. Once the contents are written, closeEntry() * MUST be called to ensure that all buffered data * is completely written to the output stream. * * @param entry The TarEntry to be written to the archive. */ public void putNextEntry( TarEntry entry ) throws IOException { if ( entry.getName().length() >= TarConstants.NAMELEN ) { if ( longFileMode == LONGFILE_GNU ) { // create a TarEntry for the LongLink, the contents // of which are the entry's name TarEntry longLinkEntry = new TarEntry( TarConstants.GNU_LONGLINK, TarConstants.LF_GNUTYPE_LONGNAME ); longLinkEntry.setSize( entry.getName().length() + 1 ); putNextEntry( longLinkEntry ); write( entry.getName().getBytes() ); write( 0 ); closeEntry(); } else if ( longFileMode != LONGFILE_TRUNCATE ) { throw new RuntimeException( "file name '" + entry.getName() + "' is too long ( > " + TarConstants.NAMELEN + " bytes)" ); } } entry.writeEntryHeader( this.recordBuf ); this.buffer.writeRecord( this.recordBuf ); this.currBytes = 0; if ( entry.isDirectory() ) { this.currSize = 0; } else { this.currSize = (int) entry.getSize(); } } /** * Close an entry. This method MUST be called for all file * entries that contain data. The reason is that we must * buffer data written to the stream in order to satisfy * the buffer's record based writes. Thus, there may be * data fragments still being assembled that must be written * to the output stream before this entry is closed and the * next entry written. 
*/ public void closeEntry() throws IOException { if ( this.assemLen > 0 ) { for ( int i = this.assemLen; i < this.assemBuf.length; ++i ) { this.assemBuf[ i ] = 0; } this.buffer.writeRecord( this.assemBuf ); this.currBytes += this.assemLen; this.assemLen = 0; } if ( this.currBytes < this.currSize ) { throw new IOException( "entry closed at '" + this.currBytes + "' before the '" + this.currSize + "' bytes specified in the header were written" ); } } /** * Writes a byte to the current tar archive entry. *

* This method simply calls read( byte[], int, int ). * * @param b The byte written. */ public void write( int b ) throws IOException { this.oneBuf[ 0 ] = (byte) b; this.write( this.oneBuf, 0, 1 ); } /** * Writes bytes to the current tar archive entry. *

* This method simply calls write( byte[], int, int ). * * @param wBuf The buffer to write to the archive. */ public void write( byte[] wBuf ) throws IOException { this.write( wBuf, 0, wBuf.length ); } /** * Writes bytes to the current tar archive entry. This method * is aware of the current entry and will throw an exception if * you attempt to write bytes past the length specified for the * current entry. The method is also (painfully) aware of the * record buffering required by TarBuffer, and manages buffers * that are not a multiple of recordsize in length, including * assembling records from small buffers. * * @param wBuf The buffer to write to the archive. * @param wOffset The offset in the buffer from which to get bytes. * @param numToWrite The number of bytes to write. */ public void write( byte[] wBuf, int wOffset, int numToWrite ) throws IOException { if ( ( this.currBytes + numToWrite ) > this.currSize ) { throw new IOException( "request to write '" + numToWrite + "' bytes exceeds size in header of '" + this.currSize + "' bytes" ); // // We have to deal with assembly!!! // The programmer can be writing little 32 byte chunks for all // we know, and we must assemble complete records for writing. // REVIEW Maybe this should be in TarBuffer? Could that help to // eliminate some of the buffer copying. // } if ( this.assemLen > 0 ) { if ( ( this.assemLen + numToWrite ) >= this.recordBuf.length ) { int aLen = this.recordBuf.length - this.assemLen; System.arraycopy( this.assemBuf, 0, this.recordBuf, 0, this.assemLen ); System.arraycopy( wBuf, wOffset, this.recordBuf, this.assemLen, aLen ); this.buffer.writeRecord( this.recordBuf ); this.currBytes += this.recordBuf.length; wOffset += aLen; numToWrite -= aLen; this.assemLen = 0; } else { System.arraycopy( wBuf, wOffset, this.assemBuf, this.assemLen, numToWrite ); wOffset += numToWrite; this.assemLen += numToWrite; numToWrite -= numToWrite; } } // // When we get here we have EITHER: // o An empty "assemble" buffer. 
// o No bytes to write (numToWrite == 0) // while ( numToWrite > 0 ) { if ( numToWrite < this.recordBuf.length ) { System.arraycopy( wBuf, wOffset, this.assemBuf, this.assemLen, numToWrite ); this.assemLen += numToWrite; break; } this.buffer.writeRecord( wBuf, wOffset ); int num = this.recordBuf.length; this.currBytes += num; numToWrite -= num; wOffset += num; } } /** * Write an EOF (end of archive) record to the tar archive. * An EOF record consists of a record of all zeros. */ private void writeEOFRecord() throws IOException { for ( int i = 0; i < this.recordBuf.length; ++i ) { this.recordBuf[ i ] = 0; } this.buffer.writeRecord( this.recordBuf ); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tar/TarResource.java000066400000000000000000000041321145404360500336460ustar00rootroot00000000000000package org.codehaus.plexus.archiver.tar; import java.io.IOException; import java.io.InputStream; import java.net.URL; import org.codehaus.plexus.components.io.attributes.PlexusIoResourceAttributes; import org.codehaus.plexus.components.io.attributes.SimpleResourceAttributes; import org.codehaus.plexus.components.io.resources.AbstractPlexusIoResource; import org.codehaus.plexus.components.io.resources.PlexusIoResource; import org.codehaus.plexus.components.io.resources.PlexusIoResourceWithAttributes; public class TarResource extends AbstractPlexusIoResource implements PlexusIoResourceWithAttributes { private final TarFile tarFile; private final TarEntry entry; private PlexusIoResourceAttributes attributes; public TarResource( TarFile tarFile, TarEntry entry ) { this.tarFile = tarFile; this.entry = entry; final boolean dir = entry.isDirectory(); setName( entry.getName() ); setDirectory( dir ); setExisting( true ); setFile( !dir ); long l = entry.getLastModificationTime(); setLastModified( l == -1 ? PlexusIoResource.UNKNOWN_MODIFICATION_DATE : l ); setSize( dir ? 
PlexusIoResource.UNKNOWN_RESOURCE_SIZE : entry.getSize() ); } public synchronized PlexusIoResourceAttributes getAttributes() { if ( attributes == null ) { attributes = new SimpleResourceAttributes(); attributes.setUserId( entry.getUserId() ); attributes.setUserName( entry.getUserName() ); attributes.setGroupId( entry.getGroupId() ); attributes.setGroupName( entry.getGroupName() ); attributes.setOctalMode( entry.getMode() ); } return attributes; } public synchronized void setAttributes( PlexusIoResourceAttributes attributes ) { this.attributes = attributes; } public URL getURL() throws IOException { return null; } public InputStream getContents() throws IOException { return tarFile.getInputStream( entry ); } } TarUnArchiver.java000066400000000000000000000137541145404360500340600ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.bzip2.CBZip2InputStream; import org.codehaus.plexus.archiver.util.EnumeratedAttribute; import org.codehaus.plexus.archiver.zip.AbstractZipUnArchiver; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.zip.GZIPInputStream; /** * @author Emmanuel Venisse * @version $Revision$ $Date$ */ public class TarUnArchiver extends AbstractZipUnArchiver { public TarUnArchiver() { } public TarUnArchiver( File sourceFile ) { super( sourceFile ); } /** * compression method */ private UntarCompressionMethod compression = new UntarCompressionMethod(); /** * Set decompression algorithm to use; default=none. *

* Allowable values are *

* * @param method compression method */ public void setCompression( UntarCompressionMethod method ) { compression = method; } /** * No encoding support in Untar. */ public void setEncoding( String encoding ) { getLogger().warn( "The TarUnArchiver doesn't support the encoding attribute" ); } protected void execute() throws ArchiverException { TarInputStream tis = null; try { getLogger().info( "Expanding: " + getSourceFile() + " into " + getDestDirectory() ); tis = new TarInputStream( compression.decompress( getSourceFile(), new BufferedInputStream( new FileInputStream( getSourceFile() ) ) ) ); TarEntry te; while ( ( te = tis.getNextEntry() ) != null ) { extractFile( getSourceFile(), getDestDirectory(), tis, te.getName(), te.getModTime(), te.isDirectory(), new Integer( te.getMode() ) ); } getLogger().debug( "expand complete" ); } catch ( IOException ioe ) { throw new ArchiverException( "Error while expanding " + getSourceFile().getAbsolutePath(), ioe ); } finally { if ( tis != null ) { try { tis.close(); } catch ( IOException e ) { // ignore } } } } /** * Valid Modes for Compression attribute to Untar Task */ public static final class UntarCompressionMethod extends EnumeratedAttribute { // permissible values for compression attribute /** * No compression */ public static final String NONE = "none"; /** * GZIP compression */ public static final String GZIP = "gzip"; /** * BZIP2 compression */ public static final String BZIP2 = "bzip2"; /** * Constructor */ public UntarCompressionMethod() { super(); try { setValue( NONE ); } catch ( ArchiverException ae ) { //Do nothing } } /** * Constructor */ public UntarCompressionMethod( String method ) { super(); try { setValue( method ); } catch ( ArchiverException ae ) { //Do nothing } } /** * Get valid enumeration values * * @return valid values */ public String[] getValues() { return new String[]{NONE, GZIP, BZIP2}; } /** * This method wraps the input stream with the * corresponding decompression method * * @param file 
provides location information for BuildException * @param istream input stream * @return input stream with on-the-fly decompression * @throws IOException thrown by GZIPInputStream constructor */ private InputStream decompress( final File file, final InputStream istream ) throws IOException, ArchiverException { final String value = getValue(); if ( GZIP.equals( value ) ) { return new GZIPInputStream( istream ); } else { if ( BZIP2.equals( value ) ) { final char[] magic = new char[]{'B', 'Z'}; for ( int i = 0; i < magic.length; i++ ) { if ( istream.read() != magic[ i ] ) { throw new ArchiverException( "Invalid bz2 file." + file.toString() ); } } return new CBZip2InputStream( istream ); } } return istream; } } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/tar/TarUtils.java000066400000000000000000000140121145404360500331550ustar00rootroot00000000000000package org.codehaus.plexus.archiver.tar; /* * Copyright 2000,2002,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* * This package is based on the work done by Timothy Gerard Endres * (time@ice.com) to whom the Ant project is very grateful for his great code. */ /** * This class provides static utility methods to work with byte streams. * * @version $Revision$ $Date$ * from org.apache.ant.tools.tar.TarUtils v1.11 */ public class TarUtils { /** * Parse an octal string from a header buffer. This is used for the * file permission mode value. 
* * @param header The header buffer from which to parse. * @param offset The offset into the buffer from which to parse. * @param length The number of header bytes to parse. * @return The long value of the octal string. */ public static long parseOctal( byte[] header, int offset, int length ) { long result = 0; boolean stillPadding = true; int end = offset + length; for ( int i = offset; i < end; ++i ) { if ( header[ i ] == 0 ) { break; } if ( header[ i ] == (byte) ' ' || header[ i ] == '0' ) { if ( stillPadding ) { continue; } if ( header[ i ] == (byte) ' ' ) { break; } } stillPadding = false; result = ( result << 3 ) + ( header[ i ] - '0' ); } return result; } /** * Parse an entry name from a header buffer. * * @param header The header buffer from which to parse. * @param offset The offset into the buffer from which to parse. * @param length The number of header bytes to parse. * @return The header's entry name. */ public static StringBuffer parseName( byte[] header, int offset, int length ) { StringBuffer result = new StringBuffer( length ); int end = offset + length; for ( int i = offset; i < end; ++i ) { if ( header[ i ] == 0 ) { break; } result.append( (char) header[ i ] ); } return result; } /** * Determine the number of bytes in an entry name. * * @param name The header name from which to parse. * @param offset The offset into the buffer from which to parse. * @param length The number of header bytes to parse. * @return The number of bytes in a header's entry name. */ public static int getNameBytes( StringBuffer name, byte[] buf, int offset, int length ) { int i; for ( i = 0; i < length && i < name.length(); ++i ) { buf[ offset + i ] = (byte) name.charAt( i ); } for ( ; i < length; ++i ) { buf[ offset + i ] = 0; } return offset + length; } /** * Parse an octal integer from a header buffer. * * @param value The header value * @param offset The offset into the buffer from which to parse. * @param length The number of header bytes to parse. 
* @return The integer value of the octal bytes. */ public static int getOctalBytes( long value, byte[] buf, int offset, int length ) { int idx = length - 1; buf[ offset + idx ] = 0; --idx; buf[ offset + idx ] = (byte) ' '; --idx; if ( value == 0 ) { buf[ offset + idx ] = (byte) '0'; --idx; } else { for ( long val = value; idx >= 0 && val > 0; --idx ) { buf[ offset + idx ] = (byte) ( (byte) '0' + (byte) ( val & 7 ) ); val = val >> 3; } } for ( ; idx >= 0; --idx ) { buf[ offset + idx ] = (byte) ' '; } return offset + length; } /** * Parse an octal long integer from a header buffer. * * @param value The header value * @param offset The offset into the buffer from which to parse. * @param length The number of header bytes to parse. * @return The long value of the octal bytes. */ public static int getLongOctalBytes( long value, byte[] buf, int offset, int length ) { byte[] temp = new byte[length + 1]; getOctalBytes( value, temp, 0, length + 1 ); System.arraycopy( temp, 0, buf, offset, length ); return offset + length; } /** * Parse the checksum octal integer from a header buffer. * * @param value The header value * @param offset The offset into the buffer from which to parse. * @param length The number of header bytes to parse. * @return The integer value of the entry's checksum. */ public static int getCheckSumOctalBytes( long value, byte[] buf, int offset, int length ) { getOctalBytes( value, buf, offset, length ); buf[ offset + length - 1 ] = (byte) ' '; buf[ offset + length - 2 ] = 0; return offset + length; } /** * Compute the checksum of a tar entry header. * * @param buf The tar entry's header buffer. * @return The computed checksum. 
*/ public static long computeCheckSum( byte[] buf ) { long sum = 0; for ( int i = 0; i < buf.length; ++i ) { sum += 255 & buf[ i ]; } return sum; } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/util/000077500000000000000000000000001145404360500307345ustar00rootroot00000000000000AbstractFileSet.java000066400000000000000000000051741145404360500345460ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/utilpackage org.codehaus.plexus.archiver.util; import org.codehaus.plexus.archiver.BaseFileSet; import org.codehaus.plexus.components.io.fileselectors.FileSelector; /** * Default implementation of {@link BaseFileSet}. * @since 1.0-alpha-9 */ public abstract class AbstractFileSet implements BaseFileSet { private String prefix; private String[] includes; private String[] excludes; private FileSelector[] fileSelectors; private boolean caseSensitive = true; private boolean usingDefaultExcludes = true; private boolean includingEmptyDirectories = true; /** * Sets a string of patterns, which excluded files * should match. */ public void setExcludes( String[] excludes ) { this.excludes = excludes; } public String[] getExcludes() { return excludes; } /** * Sets a set of file selectors, which should be used * to select the included files. */ public void setFileSelectors( FileSelector[] fileSelectors ) { this.fileSelectors = fileSelectors; } public FileSelector[] getFileSelectors() { return fileSelectors; } /** * Sets a string of patterns, which included files * should match. */ public void setIncludes( String[] includes ) { this.includes = includes; } public String[] getIncludes() { return includes; } /** * Sets the prefix, which the file sets contents shall * have. */ public void setPrefix( String prefix ) { this.prefix = prefix; } public String getPrefix() { return prefix; } /** * Sets, whether the include/exclude patterns are * case sensitive. Defaults to true. 
*/ public void setCaseSensitive( boolean caseSensitive ) { this.caseSensitive = caseSensitive; } public boolean isCaseSensitive() { return caseSensitive; } /** * Sets, whether the default excludes are being * applied. Defaults to true. */ public void setUsingDefaultExcludes( boolean usingDefaultExcludes ) { this.usingDefaultExcludes = usingDefaultExcludes; } public boolean isUsingDefaultExcludes() { return usingDefaultExcludes; } /** * Sets, whether empty directories are being included. Defaults * to true. */ public void setIncludingEmptyDirectories( boolean includingEmptyDirectories ) { this.includingEmptyDirectories = includingEmptyDirectories; } public boolean isIncludingEmptyDirectories() { return includingEmptyDirectories; } } ArchiveEntryUtils.java000066400000000000000000000131641145404360500351510ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/utilpackage org.codehaus.plexus.archiver.util; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.logging.Logger; import org.codehaus.plexus.util.Os; import org.codehaus.plexus.util.cli.CommandLineException; import org.codehaus.plexus.util.cli.CommandLineUtils; import org.codehaus.plexus.util.cli.Commandline; import java.io.File; import java.lang.reflect.Method; public final class ArchiveEntryUtils { public static boolean jvmFilePermAvailable = false; static { try { jvmFilePermAvailable = File.class.getMethod( "setReadable", new Class[] { Boolean.TYPE } ) != null; } catch ( final Exception e ) { // ignore exception log this ? 
} } private ArchiveEntryUtils() { // no op } /** * @since 1.1 * @param file * @param mode * @param logger * @param useJvmChmod * will use jvm file permissions not available for group level * @throws ArchiverException */ public static void chmod( final File file, final int mode, final Logger logger, boolean useJvmChmod ) throws ArchiverException { if ( !Os.isFamily( Os.FAMILY_UNIX ) ) { return; } final String m = Integer.toOctalString( mode & 0xfff ); if ( useJvmChmod && !jvmFilePermAvailable ) { logger.info( "you want to use jvmChmod but it's not possible where your current jvm" ); useJvmChmod = false; } if ( useJvmChmod && jvmFilePermAvailable ) { applyPermissionsWithJvm( file, m, logger ); return; } try { final Commandline commandline = new Commandline(); commandline.setWorkingDirectory( file.getParentFile() .getAbsolutePath() ); if ( logger.isDebugEnabled() ) { logger.debug( file + ": mode " + Integer.toOctalString( mode ) + ", chmod " + m ); } commandline.setExecutable( "chmod" ); commandline.createArg() .setValue( m ); final String path = file.getAbsolutePath(); commandline.createArg() .setValue( path ); // commenting this debug statement, since it can produce VERY verbose output... // this method is called often during archive creation. 
// logger.debug( "Executing:\n\n" + commandline.toString() + "\n\n" ); final CommandLineUtils.StringStreamConsumer stderr = new CommandLineUtils.StringStreamConsumer(); final CommandLineUtils.StringStreamConsumer stdout = new CommandLineUtils.StringStreamConsumer(); final int exitCode = CommandLineUtils.executeCommandLine( commandline, stderr, stdout ); if ( exitCode != 0 ) { logger.warn( "-------------------------------" ); logger.warn( "Standard error:" ); logger.warn( "-------------------------------" ); logger.warn( stderr.getOutput() ); logger.warn( "-------------------------------" ); logger.warn( "Standard output:" ); logger.warn( "-------------------------------" ); logger.warn( stdout.getOutput() ); logger.warn( "-------------------------------" ); throw new ArchiverException( "chmod exit code was: " + exitCode ); } } catch ( final CommandLineException e ) { throw new ArchiverException( "Error while executing chmod.", e ); } } /** * jvm chmod will be used only if System property useJvmChmod set to true * * @param file * @param mode * @param logger * @throws ArchiverException */ public static void chmod( final File file, final int mode, final Logger logger ) throws ArchiverException { chmod( file, mode, logger, Boolean.getBoolean( "useJvmChmod" ) && jvmFilePermAvailable ); } private static void applyPermissionsWithJvm( final File file, final String mode, final Logger logger ) { final FilePermission filePermission = FilePermissionUtils.getFilePermissionFromMode( mode, logger ); Method method; try { method = File.class.getMethod( "setReadable", new Class[] { Boolean.TYPE, Boolean.TYPE } ); method.invoke( file, new Object[] { Boolean.valueOf( filePermission.isReadable() ), Boolean.valueOf( filePermission.isOwnerOnlyReadable() ) } ); method = File.class.getMethod( "setExecutable", new Class[] { Boolean.TYPE, Boolean.TYPE } ); method.invoke( file, new Object[] { Boolean.valueOf( filePermission.isExecutable() ), Boolean.valueOf( 
filePermission.isOwnerOnlyExecutable() ) } ); method = File.class.getMethod( "setWritable", new Class[] { Boolean.TYPE, Boolean.TYPE } ); method.invoke( file, new Object[] { Boolean.valueOf( filePermission.isWritable() ), Boolean.valueOf( filePermission.isOwnerOnlyWritable() ) } ); } catch ( final Exception e ) { logger.error( "error calling dynamically file permissons with jvm " + e.getMessage(), e ); throw new RuntimeException( "error calling dynamically file permissons with jvm " + e.getMessage(), e ); } } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/util/Compressor.java000066400000000000000000000135631145404360500337430ustar00rootroot00000000000000package org.codehaus.plexus.archiver.util; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.components.io.resources.PlexusIoFileResource; import org.codehaus.plexus.components.io.resources.PlexusIoResource; import org.codehaus.plexus.logging.AbstractLogEnabled; /** * @version $Revision$ $Date$ */ public abstract class Compressor extends AbstractLogEnabled { private File destFile; private PlexusIoResource source; /** * the required destination file. 
* * @param compressFile */ public void setDestFile( File compressFile ) { this.destFile = compressFile; } public File getDestFile() { return destFile; } /** * The resource to compress; required. */ public void setSource( PlexusIoResource source ) { this.source = source; } /** * The resource to compress; required. */ public PlexusIoResource getSource() { return source; } /** * the file to compress; required. * @deprecated Use {@link #getSource()}. */ public void setSourceFile( File srcFile ) { final PlexusIoFileResource res = new PlexusIoFileResource( srcFile ); setSource( res ); } /** * @deprecated Use {@link #getSource()}. */ public File getSourceFile() { final PlexusIoResource res = getSource(); if ( res instanceof PlexusIoFileResource ) { return ((PlexusIoFileResource) res).getFile(); } return null; } /** * validation routine * * @throws ArchiverException if anything is invalid */ private void validate() throws ArchiverException { if ( destFile == null ) { throw new ArchiverException( "Destination file attribute is required" ); } if ( destFile.isDirectory() ) { throw new ArchiverException( "Destination file attribute must not " + "represent a directory!" ); } if ( source == null ) { throw new ArchiverException( "Source file attribute is required" ); } if ( source.isDirectory() ) { throw new ArchiverException( "Source file attribute must not " + "represent a directory!" ); } } /** * validate, then hand off to the subclass * * @throws BuildException */ public void execute() throws ArchiverException { validate(); try { if ( !source.isExisting() ) { // getLogger().info( "Nothing to do: " + sourceFile.getAbsolutePath() // + " doesn't exist." ); } else { final long l = source.getLastModified(); if ( l == PlexusIoResource.UNKNOWN_MODIFICATION_DATE || destFile.lastModified() == 0 || destFile.lastModified() < l ) { compress(); } else { // getLogger().info( "Nothing to do: " + destFile.getAbsolutePath() // + " is up to date." 
); } } } finally { close(); } } /** * compress a stream to an output stream * * @param in * @param zOut * @throws IOException */ private void compressFile( InputStream in, OutputStream zOut ) throws IOException { byte[] buffer = new byte[8 * 1024]; int count = 0; do { zOut.write( buffer, 0, count ); count = in.read( buffer, 0, buffer.length ); } while ( count != -1 ); } /** * compress a file to an output stream * @deprecated Use {@link #compress(PlexusIoResource, OutputStream)}. */ protected void compressFile( File file, OutputStream zOut ) throws IOException { FileInputStream fIn = new FileInputStream( file ); try { compressFile( fIn, zOut ); } finally { fIn.close(); } } /** * compress a resource to an output stream */ protected void compress( PlexusIoResource resource, OutputStream zOut ) throws IOException { InputStream fIn = resource.getContents(); try { compressFile( fIn, zOut ); } finally { fIn.close(); } } /** * subclasses must implement this method to do their compression * * this is public so the process of compression and closing can be dealt with separately. */ public abstract void compress() throws ArchiverException; /** * subclasses must implement this method to cleanup after compression * * this is public so the process of compression and closing can be dealt with separately. */ public abstract void close(); } DefaultArchivedFileSet.java000066400000000000000000000010171145404360500360250ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/utilpackage org.codehaus.plexus.archiver.util; import java.io.File; import org.codehaus.plexus.archiver.ArchivedFileSet; /** * Default implementation of {@link ArchivedFileSet}. * @since 1.0-alpha-9 */ public class DefaultArchivedFileSet extends AbstractFileSet implements ArchivedFileSet { private File archive; /** * Sets the file sets archive. 
*/ public void setArchive( File archive ) { this.archive = archive; } public File getArchive() { return archive; } } DefaultFileSet.java000066400000000000000000000010041145404360500343530ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/utilpackage org.codehaus.plexus.archiver.util; import java.io.File; import org.codehaus.plexus.archiver.FileSet; /** * Default implementation of {@link FileSet}. * @since 1.0-alpha-9 */ public class DefaultFileSet extends AbstractFileSet implements FileSet { private File directory; /** * Sets the file sets base directory. */ public void setDirectory( File directory ) { this.directory = directory; } public File getDirectory() { return directory; } } EnumeratedAttribute.java000066400000000000000000000063701145404360500355030ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/utilpackage org.codehaus.plexus.archiver.util; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.codehaus.plexus.archiver.ArchiverException; /** * Helper class for attributes that can only take one of a fixed list * of values. *

*

See {@link org.apache.tools.ant.taskdefs.FixCRLF FixCRLF} for an * example. */ public abstract class EnumeratedAttribute { /** * The selected value in this enumeration. */ protected String value; /** * the index of the selected value in the array. */ private int index = -1; /** * This is the only method a subclass needs to implement. * * @return an array holding all possible values of the enumeration. * The order of elements must be fixed so that indexOfValue(String) * always return the same index for the same value. */ public abstract String[] getValues(); /** * bean constructor */ protected EnumeratedAttribute() { } /** * Invoked by {@link org.apache.tools.ant.IntrospectionHelper IntrospectionHelper}. */ public final void setValue( String value ) throws ArchiverException { int index = indexOfValue( value ); if ( index == -1 ) { throw new ArchiverException( value + " is not a legal value for this attribute" ); } this.index = index; this.value = value; } /** * Is this value included in the enumeration? */ public final boolean containsValue( String value ) { return ( indexOfValue( value ) != -1 ); } /** * get the index of a value in this enumeration. * * @param value the string value to look for. * @return the index of the value in the array of strings * or -1 if it cannot be found. * @see #getValues() */ public final int indexOfValue( String value ) { String[] values = getValues(); if ( values == null || value == null ) { return -1; } for ( int i = 0; i < values.length; i++ ) { if ( value.equals( values[ i ] ) ) { return i; } } return -1; } /** * @return the selected value. */ public final String getValue() { return value; } /** * @return the index of the selected value in the array. * @see #getValues() */ public final int getIndex() { return index; } /** * Convert the value to its string form. * * @return the string form of the value. 
*/ public String toString() { return getValue(); } } FilePermission.java000066400000000000000000000061211145404360500344500ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/utilpackage org.codehaus.plexus.archiver.util; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. 
*/ /** * @author Olivier Lamy * @since 1.1 * */ public class FilePermission { private boolean executable; private boolean ownerOnlyExecutable; private boolean ownerOnlyReadable; private boolean readable; private boolean ownerOnlyWritable; private boolean writable; public FilePermission( boolean executable, boolean ownerOnlyExecutable, boolean ownerOnlyReadable, boolean readable, boolean ownerOnlyWritable, boolean writable ) { this.executable = executable; this.ownerOnlyExecutable = ownerOnlyExecutable; this.ownerOnlyReadable = ownerOnlyReadable; this.readable = readable; this.ownerOnlyWritable = ownerOnlyWritable; this.writable = writable; } public boolean isExecutable() { return executable; } public void setExecutable( boolean executable ) { this.executable = executable; } public boolean isOwnerOnlyExecutable() { return ownerOnlyExecutable; } public void setOwnerOnlyExecutable( boolean ownerOnlyExecutable ) { this.ownerOnlyExecutable = ownerOnlyExecutable; } public boolean isOwnerOnlyReadable() { return ownerOnlyReadable; } public void setOwnerOnlyReadable( boolean ownerOnlyReadable ) { this.ownerOnlyReadable = ownerOnlyReadable; } public boolean isReadable() { return readable; } public void setReadable( boolean readable ) { this.readable = readable; } public boolean isOwnerOnlyWritable() { return ownerOnlyWritable; } public void setOwnerOnlyWritable( boolean ownerOnlyWritable ) { this.ownerOnlyWritable = ownerOnlyWritable; } public boolean isWritable() { return writable; } public void setWritable( boolean writable ) { this.writable = writable; } public String toString() { return "FilePermission [executable=" + executable + ", ownerOnlyExecutable=" + ownerOnlyExecutable + ", ownerOnlyReadable=" + ownerOnlyReadable + ", readable=" + readable + ", ownerOnlyWritable=" + ownerOnlyWritable + ", writable=" + writable + "]"; } } 
FilePermissionUtils.java000066400000000000000000000131341145404360500354730ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/utilpackage org.codehaus.plexus.archiver.util; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.ArrayList; import java.util.List; import org.codehaus.plexus.logging.Logger; import org.codehaus.plexus.util.StringUtils; /** * @author Olivier Lamy * @since 1.0.1 * */ public class FilePermissionUtils { private FilePermissionUtils() { // no op } /** * @param mode file mode "a la" unix ie 664, 440, etc * @return FilePermission associated to the mode (group permission are ignored here) */ public static FilePermission getFilePermissionFromMode( String mode, Logger logger ) { if ( StringUtils.isBlank( mode ) ) { throw new IllegalArgumentException( " file mode cannot be empty" ); } // 4 characters works on some unix (ie solaris) if ( mode.length() != 3 && mode.length() != 4 ) { throw new IllegalArgumentException( " file mode must be 3 or 4 characters" ); } List modes = new ArrayList(mode.length()); for (int i = 0,size = mode.length();i s; } /** * Queries, whether the given source is uptodate relative to * the given modification date. 
*/ public static boolean isUptodate( long sourceDate, long destinationDate ) { if ( sourceDate == PlexusIoResource.UNKNOWN_MODIFICATION_DATE ) { return false; } if ( destinationDate == 0 ) { return false; } return destinationDate > sourceDate; } /** * Copies the sources contents to the given destination file. */ public static void copyFile( PlexusIoResource in, File outFile ) throws IOException { InputStream input = null; OutputStream output = null; try { input = in.getContents(); output = new FileOutputStream( outFile ); IOUtil.copy( input, output ); input.close(); input = null; output.close(); output = null; } finally { IOUtil.close( input ); IOUtil.close( output ); } } /** * Checks, whether the resource and the file are identical. */ public static boolean isSame( PlexusIoResource resource, File file ) { if ( resource instanceof PlexusIoFileResource ) { File resourceFile = ((PlexusIoFileResource) resource).getFile(); return file.equals( resourceFile ); } return false; } /** * Checks, whether the resource and the file are identical. * Uses {@link File#getCanonicalFile()} for comparison, which is much * slower than comparing the files. 
*/ public static boolean isCanonicalizedSame( PlexusIoResource resource, File file ) throws IOException { if ( resource instanceof PlexusIoFileResource ) { File resourceFile = ((PlexusIoFileResource) resource).getFile(); return file.getCanonicalFile().equals( resourceFile.getCanonicalFile() ); } return false; } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/war/000077500000000000000000000000001145404360500305505ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/war/WarArchiver.java000066400000000000000000000141451145404360500336350ustar00rootroot00000000000000package org.codehaus.plexus.archiver.war; /* * Copyright 2000-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import org.codehaus.plexus.archiver.ArchiveEntry; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.jar.JarArchiver; import org.codehaus.plexus.archiver.util.ResourceUtils; import org.codehaus.plexus.archiver.zip.ZipOutputStream; import java.io.File; import java.io.IOException; /** * An extension of <jar> to create a WAR archive. * Contains special treatment for files that should end up in the * WEB-INF/lib, WEB-INF/classes or * WEB-INF directories of the Web Application Archive.

*

(The War task is a shortcut for specifying the particular layout of a WAR file. * The same thing can be accomplished by using the prefix and fullpath * attributes of zipfilesets in a Zip or Jar task.)

*

The extended zipfileset element from the zip task * (with attributes prefix, fullpath, and src) * is available in the War task.

* * @see JarArchiver */ public class WarArchiver extends JarArchiver { /** * our web.xml deployment descriptor */ private File deploymentDescriptor; /** * flag set if finding the webxml is to be expected. */ private boolean ignoreWebxml = true; /** * flag set if the descriptor is added */ private boolean descriptorAdded; public void setIgnoreWebxml( boolean ignore ) { ignoreWebxml = ignore; } public WarArchiver() { super(); archiveType = "war"; } /** * set the deployment descriptor to use (WEB-INF/web.xml); * required unless update=true */ public void setWebxml( File descr ) throws ArchiverException { deploymentDescriptor = descr; if ( !deploymentDescriptor.exists() ) { throw new ArchiverException( "Deployment descriptor: " + deploymentDescriptor + " does not exist." ); } addFile( descr, "WEB-INF/web.xml" ); } /** * add a file under WEB-INF/lib/ */ public void addLib( File fileName ) throws ArchiverException { addDirectory( fileName.getParentFile(), "WEB-INF/lib/", new String[]{fileName.getName()}, null ); } /** * add files under WEB-INF/lib/ */ public void addLibs( File directoryName, String[] includes, String[] excludes ) throws ArchiverException { addDirectory( directoryName, "WEB-INF/lib/", includes, excludes ); } /** * add a file under WEB-INF/lib/ */ public void addClass( File fileName ) throws ArchiverException { addDirectory( fileName.getParentFile(), "WEB-INF/classes/", new String[]{fileName.getName()}, null ); } /** * add files under WEB-INF/classes */ public void addClasses( File directoryName, String[] includes, String[] excludes ) throws ArchiverException { addDirectory( directoryName, "WEB-INF/classes/", includes, excludes ); } /** * files to add under WEB-INF; */ public void addWebinf( File directoryName, String[] includes, String[] excludes ) throws ArchiverException { addDirectory( directoryName, "WEB-INF/", includes, excludes ); } /** * override of parent; validates configuration * before initializing the output stream. 
*/ protected void initZipOutputStream( ZipOutputStream zOut ) throws IOException, ArchiverException { // If no webxml file is specified, it's an error. if ( ignoreWebxml && deploymentDescriptor == null && !isInUpdateMode() ) { throw new ArchiverException( "webxml attribute is required (or pre-existing WEB-INF/web.xml if executing in update mode)" ); } super.initZipOutputStream( zOut ); } /** * Overridden from ZipArchiver class to deal with web.xml */ protected void zipFile( ArchiveEntry entry, ZipOutputStream zOut, String vPath ) throws IOException, ArchiverException { // If the file being added is WEB-INF/web.xml, we warn if it's // not the one specified in the "webxml" attribute - or if // it's being added twice, meaning the same file is specified // by the "webxml" attribute and in a element. if ( vPath.equalsIgnoreCase( "WEB-INF/web.xml" ) ) { if ( descriptorAdded || ( ignoreWebxml && ( deploymentDescriptor == null || !ResourceUtils.isCanonicalizedSame( entry.getResource(), deploymentDescriptor ) ) ) ) { getLogger().warn( "Warning: selected " + archiveType + " files include a WEB-INF/web.xml which will be ignored " + "\n(webxml attribute is missing from " + archiveType + " task, or ignoreWebxml attribute is specified as 'true')" ); } else { super.zipFile( entry, zOut, vPath ); descriptorAdded = true; } } else { super.zipFile( entry, zOut, vPath ); } } /** * Make sure we don't think we already have a web.xml next time this task * gets executed. 
*/ protected void cleanUp() { descriptorAdded = false; ignoreWebxml = true; super.cleanUp(); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zip/000077500000000000000000000000001145404360500305615ustar00rootroot00000000000000AbstractZipArchiver.java000066400000000000000000000563751145404360500352770ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.Hashtable; import java.util.Map; import java.util.Stack; import java.util.Vector; import java.util.zip.CRC32; import org.codehaus.plexus.archiver.AbstractArchiver; import org.codehaus.plexus.archiver.ArchiveEntry; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.ResourceIterator; import org.codehaus.plexus.archiver.UnixStat; import org.codehaus.plexus.archiver.util.ResourceUtils; import org.codehaus.plexus.components.io.resources.PlexusIoFileResource; import org.codehaus.plexus.components.io.resources.PlexusIoResource; import org.codehaus.plexus.util.FileUtils; /** * @version $Revision$ $Date$ */ public abstract class AbstractZipArchiver extends AbstractArchiver { private String comment; /** * Encoding to use for filenames, defaults to the platform's * default encoding. */ private String encoding; private boolean doCompress = true; private boolean doUpdate = false; // shadow of the above if the value is altered in execute private boolean savedDoUpdate = false; protected String archiveType = "zip"; /* * Whether the original compression of entries coming from a ZIP * archive should be kept (for example when updating an archive). 
*/ //not used: private boolean keepCompression = false; private boolean doFilesonly = false; protected Hashtable entries = new Hashtable(); protected Hashtable addedDirs = new Hashtable(); private Vector addedFiles = new Vector(); private static final long EMPTY_CRC = new CRC32().getValue(); protected boolean doubleFilePass = false; protected boolean skipWriting = false; /** * @deprecated Use {@link Archiver#setDuplicateBehavior(String)} instead. */ protected String duplicate = Archiver.DUPLICATES_SKIP; /** * true when we are adding new files into the Zip file, as opposed * to adding back the unchanged files */ protected boolean addingNewFiles = false; /** * Whether the file modification times will be rounded up to the * next even number of seconds. */ private boolean roundUp = true; // Renamed version of original file, if it exists private File renamedFile = null; private File zipFile; private boolean success; private ZipOutputStream zOut; public String getComment() { return comment; } public void setComment( String comment ) { this.comment = comment; } public String getEncoding() { return encoding; } public void setEncoding( String encoding ) { this.encoding = encoding; } public void setCompress( boolean compress ) { this.doCompress = compress; } public boolean isCompress() { return doCompress; } public void setUpdateMode( boolean update ) { this.doUpdate = update; savedDoUpdate = doUpdate; } public boolean isInUpdateMode() { return doUpdate; } // /** // * A 3 digit octal string, specify the user, group and // * other modes in the standard Unix fashion; // * optional, default=0644 // * // * @deprecated use AbstractArchiver.setDefaultFileMode(int) instead. // */ // public void setFileMode( String octalString ) // { // setDefaultFileMode( Integer.parseInt( octalString, 8 ) ); // } // // /** // * @deprecated use AbstractArchiver.getDefaultFileMode() instead. 
// */ // public int getFileMode() // { // return getDefaultFileMode(); // } // // /** // * A 3 digit octal string, specify the user, group and // * other modes in the standard Unix fashion; // * optional, default=0755 // * // * @deprecated use AbstractArchiver.setDefaultDirectoryMode(int). // */ // public void setDirMode( String octalString ) // { // setDefaultDirectoryMode( Integer.parseInt( octalString, 8 ) ); // } // // /** // * @deprecated use AbstractArchiver.getDefaultDirectoryMode() instead. // */ // public int getDirMode() // { // return getDefaultDirectoryMode(); // } // /** * If true, emulate Sun's jar utility by not adding parent directories; * optional, defaults to false. */ public void setFilesonly( boolean f ) { doFilesonly = f; } public boolean isFilesonly() { return doFilesonly; } /** * Whether the file modification times will be rounded up to the * next even number of seconds. *

*

Zip archives store file modification times with a * granularity of two seconds, so the times will either be rounded * up or down. If you round down, the archive will always seem * out-of-date when you rerun the task, so the default is to round * up. Rounding up may lead to a different type of problems like * JSPs inside a web archive that seem to be slightly more recent * than precompiled pages, rendering precompilation useless.

*/ public void setRoundUp( boolean r ) { roundUp = r; } public boolean isRoundUp() { return roundUp; } protected void execute() throws ArchiverException, IOException { if ( ! checkForced() ) { return; } if ( doubleFilePass ) { skipWriting = true; createArchiveMain(); skipWriting = false; createArchiveMain(); } else { createArchiveMain(); } finalizeZipOutputStream( zOut ); } protected void finalizeZipOutputStream( ZipOutputStream zOut ) throws IOException, ArchiverException { } private void createArchiveMain() throws ArchiverException, IOException { if ( !Archiver.DUPLICATES_SKIP.equals( duplicate ) ) { setDuplicateBehavior( duplicate ); } ResourceIterator iter = getResources(); if ( !iter.hasNext() && !hasVirtualFiles() ) { throw new ArchiverException( "You must set at least one file." ); } zipFile = getDestFile(); if ( zipFile == null ) { throw new ArchiverException( "You must set the destination " + archiveType + "file." ); } if ( zipFile.exists() && !zipFile.isFile() ) { throw new ArchiverException( zipFile + " isn't a file." ); } if ( zipFile.exists() && !zipFile.canWrite() ) { throw new ArchiverException( zipFile + " is read-only." ); } // Whether or not an actual update is required - // we don't need to update if the original file doesn't exist addingNewFiles = true; if ( doUpdate && !zipFile.exists() ) { doUpdate = false; getLogger().debug( "ignoring update attribute as " + archiveType + " doesn't exist." 
); } success = false; if ( doUpdate ) { renamedFile = FileUtils.createTempFile( "zip", ".tmp", zipFile.getParentFile() ); renamedFile.deleteOnExit(); try { FileUtils.rename( zipFile, renamedFile ); } catch ( SecurityException e ) { getLogger().debug( e.toString() ); throw new ArchiverException( "Not allowed to rename old file (" + zipFile.getAbsolutePath() + ") to temporary file", e ); } catch ( IOException e ) { getLogger().debug( e.toString() ); throw new ArchiverException( "Unable to rename old file (" + zipFile.getAbsolutePath() + ") to temporary file", e ); } } String action = doUpdate ? "Updating " : "Building "; getLogger().info( action + archiveType + ": " + zipFile.getAbsolutePath() ); if ( !skipWriting ) { zOut = new ZipOutputStream( zipFile ); zOut.setEncoding( encoding ); if ( doCompress ) { zOut.setMethod( ZipOutputStream.DEFLATED ); } else { zOut.setMethod( ZipOutputStream.STORED ); } } initZipOutputStream( zOut ); // Add the new files to the archive. addResources( iter, zOut ); // If we've been successful on an update, delete the // temporary file if ( doUpdate ) { if ( !renamedFile.delete() ) { getLogger().warn( "Warning: unable to delete temporary file " + renamedFile.getName() ); } } success = true; } protected Map getZipEntryNames( File file ) throws IOException { if ( !file.exists() || !doUpdate ) { return Collections.EMPTY_MAP; } final Map entries = new HashMap(); final ZipFile zipFile = new ZipFile( file ); for ( Enumeration en = zipFile.getEntries(); en.hasMoreElements(); ) { ZipEntry ze = (ZipEntry) en.nextElement(); entries.put( ze.getName(), new Long( ze.getLastModificationTime() ) ); } return entries; } protected boolean isFileAdded( ArchiveEntry entry, Map entries ) { return !entries.containsKey( entry.getName() ); } protected boolean isFileUpdated( ArchiveEntry entry, Map entries ) { Long l = (Long) entries.get( entry.getName() ); if ( l == null ) { return false; } return l.longValue() == -1 || !ResourceUtils.isUptodate( 
entry.getResource(), l.longValue() ); } /** * Add the given resources. * * @param resources the resources to add * @param zOut the stream to write to */ protected final void addResources( ResourceIterator resources, ZipOutputStream zOut ) throws IOException, ArchiverException { File base = null; while ( resources.hasNext() ) { ArchiveEntry entry = resources.next(); String name = entry.getName(); name = name.replace( File.separatorChar, '/' ); if ( "".equals( name ) ) { continue; } if ( entry.getResource().isDirectory() && !name.endsWith( "/" ) ) { name = name + "/"; } addParentDirs( base, name, zOut, "" ); if ( entry.getResource().isFile() ) { zipFile( entry, zOut, name ); } else { zipDir( entry.getResource(), zOut, name, entry.getMode() ); } } } /** * Ensure all parent dirs of a given entry have been added. */ protected final void addParentDirs( File baseDir, String entry, ZipOutputStream zOut, String prefix ) throws IOException { if ( !doFilesonly && getIncludeEmptyDirs() ) { Stack directories = new Stack(); // Don't include the last entry itself if it's // a dir; it will be added on its own. int slashPos = entry.length() - ( entry.endsWith( "/" ) ? 1 : 0 ); while ( ( slashPos = entry.lastIndexOf( '/', slashPos - 1 ) ) != -1 ) { String dir = entry.substring( 0, slashPos + 1 ); if ( addedDirs.contains( prefix + dir ) ) { break; } directories.push( dir ); } while ( !directories.isEmpty() ) { String dir = (String) directories.pop(); File f; if ( baseDir != null ) { f = new File( baseDir, dir ); } else { f = new File( dir ); } final PlexusIoFileResource res = new PlexusIoFileResource( f ); zipDir( res, zOut, prefix + dir, getRawDefaultDirectoryMode() ); } } } /** * Adds a new entry to the archive, takes care of duplicates as well. * * @param in the stream to read data for the entry from. * @param zOut the stream to write to. * @param vPath the name this entry shall have in the archive. * @param lastModified last modification time for the entry. 
* @param fromArchive the original archive we are copying this * entry from, will be null if we are not copying from an archive. * @param mode the Unix permissions to set. */ protected void zipFile( InputStream in, ZipOutputStream zOut, String vPath, long lastModified, File fromArchive, int mode ) throws IOException, ArchiverException { getLogger().debug( "adding entry " + vPath ); entries.put( vPath, vPath ); if ( !skipWriting ) { ZipEntry ze = new ZipEntry( vPath ); ze.setTime( lastModified ); ze.setMethod( doCompress ? ZipEntry.DEFLATED : ZipEntry.STORED ); /* * ZipOutputStream.putNextEntry expects the ZipEntry to * know its size and the CRC sum before you start writing * the data when using STORED mode - unless it is seekable. * * This forces us to process the data twice. */ if ( !zOut.isSeekable() && !doCompress ) { long size = 0; CRC32 cal = new CRC32(); if ( !in.markSupported() ) { // Store data into a byte[] ByteArrayOutputStream bos = new ByteArrayOutputStream(); byte[] buffer = new byte[8 * 1024]; int count = 0; do { size += count; cal.update( buffer, 0, count ); bos.write( buffer, 0, count ); count = in.read( buffer, 0, buffer.length ); } while ( count != -1 ); in = new ByteArrayInputStream( bos.toByteArray() ); } else { in.mark( Integer.MAX_VALUE ); byte[] buffer = new byte[8 * 1024]; int count = 0; do { size += count; cal.update( buffer, 0, count ); count = in.read( buffer, 0, buffer.length ); } while ( count != -1 ); in.reset(); } ze.setSize( size ); ze.setCrc( cal.getValue() ); } ze.setUnixMode( UnixStat.FILE_FLAG | mode ); zOut.putNextEntry( ze ); byte[] buffer = new byte[8 * 1024]; int count = 0; do { if ( count != 0 ) { zOut.write( buffer, 0, count ); } count = in.read( buffer, 0, buffer.length ); } while ( count != -1 ); } addedFiles.addElement( vPath ); } /** * Method that gets called when adding from java.io.File instances. *

*

This implementation delegates to the six-arg version.

* * @param entry the file to add to the archive * @param zOut the stream to write to * @param vPath the name this entry shall have in the archive */ protected void zipFile( ArchiveEntry entry, ZipOutputStream zOut, String vPath ) throws IOException, ArchiverException { if ( ResourceUtils.isSame( entry.getResource(), getDestFile() ) ) { throw new ArchiverException( "A zip file cannot include itself" ); } InputStream fIn = entry.getInputStream(); try { // ZIPs store time with a granularity of 2 seconds, round up final long lastModified = entry.getResource().getLastModified() + ( roundUp ? 1999 : 0 ); zipFile( fIn, zOut, vPath, lastModified, null, entry.getMode() ); } finally { fIn.close(); } } /** * */ protected void zipDir( PlexusIoResource dir, ZipOutputStream zOut, String vPath, int mode ) throws IOException { if ( addedDirs.get( vPath ) != null ) { // don't add directories we've already added. // no warning if we try, it is harmless in and of itself return; } getLogger().debug( "adding directory " + vPath ); addedDirs.put( vPath, vPath ); if ( !skipWriting ) { ZipEntry ze = new ZipEntry( vPath ); if ( dir != null && dir.isExisting() ) { // ZIPs store time with a granularity of 2 seconds, round up final long lastModified = dir.getLastModified() + ( roundUp ? 1999 : 0 ); ze.setTime( lastModified ); } else { // ZIPs store time with a granularity of 2 seconds, round up ze.setTime( System.currentTimeMillis() + ( roundUp ? 1999 : 0 ) ); } ze.setSize( 0 ); ze.setMethod( ZipEntry.STORED ); // This is faintly ridiculous: ze.setCrc( EMPTY_CRC ); ze.setUnixMode( mode ); zOut.putNextEntry( ze ); } } /** * Create an empty zip file * * @return true for historic reasons */ protected boolean createEmptyZip( File zipFile ) throws ArchiverException { // In this case using java.util.zip will not work // because it does not permit a zero-entry archive. // Must create it manually. 
getLogger().info( "Note: creating empty " + archiveType + " archive " + zipFile ); OutputStream os = null; try { os = new FileOutputStream( zipFile ); // Cf. PKZIP specification. byte[] empty = new byte[22]; empty[0] = 80; // P empty[1] = 75; // K empty[2] = 5; empty[3] = 6; // remainder zeros os.write( empty ); } catch ( IOException ioe ) { throw new ArchiverException( "Could not create empty ZIP archive " + "(" + ioe.getMessage() + ")", ioe ); } finally { if ( os != null ) { try { os.close(); } catch ( IOException e ) { //ignore } } } return true; } /** * Do any clean up necessary to allow this instance to be used again. *

*

When we get here, the Zip file has been closed and all we * need to do is to reset some globals.

*

*

This method will only reset globals that have been changed * during execute(), it will not alter the attributes or nested * child elements. If you want to reset the instance so that you * can later zip a completely different set of files, you must use * the reset method.

* * @see #reset */ protected void cleanUp() { super.cleanUp(); addedDirs.clear(); addedFiles.removeAllElements(); entries.clear(); addingNewFiles = false; doUpdate = savedDoUpdate; success = false; zOut = null; renamedFile = null; zipFile = null; } /** * Makes this instance reset all attributes to their default * values and forget all children. * * @see #cleanUp */ public void reset() { setDestFile( null ); // duplicate = "add"; archiveType = "zip"; doCompress = true; doUpdate = false; doFilesonly = false; encoding = null; } /** * method for subclasses to override */ protected void initZipOutputStream( ZipOutputStream zOut ) throws IOException, ArchiverException { } /** * method for subclasses to override */ public boolean isSupportingForced() { return true; } protected boolean revert( StringBuffer messageBuffer ) { int initLength = messageBuffer.length(); // delete a bogus ZIP file (but only if it's not the original one) if ( ( !doUpdate || renamedFile != null ) && !zipFile.delete() ) { messageBuffer.append( " (and the archive is probably corrupt but I could not delete it)" ); } if ( doUpdate && renamedFile != null ) { try { FileUtils.rename( renamedFile, zipFile ); } catch ( IOException e ) { messageBuffer.append( " (and I couldn't rename the temporary file " ); messageBuffer.append( renamedFile.getName() ); messageBuffer.append( " back)" ); } } return messageBuffer.length() == initLength; } protected void close() throws IOException { // Close the output stream. try { if ( zOut != null ) { zOut.close(); } } catch ( IOException ex ) { // If we're in this finally clause because of an // exception, we don't really care if there's an // exception when closing the stream. E.g. if it // throws "ZIP file must have at least one entry", // because an exception happened before we added // any files, then we must swallow this // exception. Otherwise, the error that's reported // will be the close() error, which is not the // real cause of the problem. 
if ( success ) { throw ex; } } } protected String getArchiveType() { return archiveType; } } AbstractZipUnArchiver.java000066400000000000000000000217601145404360500355700ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.codehaus.plexus.archiver.AbstractUnArchiver; import org.codehaus.plexus.archiver.ArchiveFilterException; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.util.ArchiveEntryUtils; import org.codehaus.plexus.components.io.resources.PlexusIoResource; import org.codehaus.plexus.util.FileUtils; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.Date; import java.util.Enumeration; /** * @author Emmanuel Venisse * @version $Id$ */ public abstract class AbstractZipUnArchiver extends AbstractUnArchiver { private static final String NATIVE_ENCODING = "native-encoding"; private String encoding = "UTF8"; public AbstractZipUnArchiver() { } public AbstractZipUnArchiver( final File sourceFile ) { super( sourceFile ); } /** * Sets the encoding to assume for file names and comments. *

*

* Set to native-encoding if you want your platform's native encoding, defaults to UTF8. *

*/ public void setEncoding( String encoding ) { if ( NATIVE_ENCODING.equals( encoding ) ) { encoding = null; } this.encoding = encoding; } private static class ZipEntryFileInfo implements PlexusIoResource { private final ZipFile zipFile; private final ZipEntry zipEntry; ZipEntryFileInfo( final ZipFile zipFile, final ZipEntry zipEntry ) { this.zipFile = zipFile; this.zipEntry = zipEntry; } public String getName() { return zipEntry.getName(); } public boolean isDirectory() { return zipEntry.isDirectory(); } public boolean isFile() { return !zipEntry.isDirectory(); } public InputStream getContents() throws IOException { return zipFile.getInputStream( zipEntry ); } public long getLastModified() { final long l = zipEntry.getTime(); return l == 0 ? PlexusIoResource.UNKNOWN_MODIFICATION_DATE : l; } public long getSize() { final long l = zipEntry.getSize(); return l == -1 ? PlexusIoResource.UNKNOWN_RESOURCE_SIZE : l; } public URL getURL() throws IOException { return null; } public boolean isExisting() { return true; } } protected void execute() throws ArchiverException { getLogger().debug( "Expanding: " + getSourceFile() + " into " + getDestDirectory() ); ZipFile zf = null; try { zf = new ZipFile( getSourceFile(), encoding ); final Enumeration e = zf.getEntries(); while ( e.hasMoreElements() ) { final ZipEntry ze = (ZipEntry) e.nextElement(); final ZipEntryFileInfo fileInfo = new ZipEntryFileInfo( zf, ze ); if ( !isSelected( ze.getName(), fileInfo ) ) { continue; } extractFileIfIncluded( getSourceFile(), getDestDirectory(), zf.getInputStream( ze ), ze.getName(), new Date( ze.getTime() ), ze.isDirectory(), null ); } getLogger().debug( "expand complete" ); } catch ( final IOException ioe ) { throw new ArchiverException( "Error while expanding " + getSourceFile().getAbsolutePath(), ioe ); } finally { if ( zf != null ) { try { zf.close(); } catch ( final IOException e ) { // ignore } } } } private void extractFileIfIncluded( final File sourceFile, final File destDirectory, 
final InputStream inputStream, final String name, final Date time, final boolean isDirectory, final Integer mode ) throws IOException, ArchiverException { try { if ( include( inputStream, name ) ) { extractFile( sourceFile, destDirectory, inputStream, name, time, isDirectory, mode ); } } catch ( final ArchiveFilterException e ) { throw new ArchiverException( "Error verifying \'" + name + "\' for inclusion: " + e.getMessage(), e ); } } protected void extractFile( final File srcF, final File dir, final InputStream compressedInputStream, final String entryName, final Date entryDate, final boolean isDirectory, final Integer mode ) throws IOException, ArchiverException { final File f = FileUtils.resolveFile( dir, entryName ); try { if ( !isOverwrite() && f.exists() && ( f.lastModified() >= entryDate.getTime() ) ) { return; } // create intermediary directories - sometimes zip don't add them final File dirF = f.getParentFile(); if ( dirF != null ) { dirF.mkdirs(); } if ( isDirectory ) { f.mkdirs(); } else { final byte[] buffer = new byte[1024]; int length; FileOutputStream fos = null; try { fos = new FileOutputStream( f ); while ( ( length = compressedInputStream.read( buffer ) ) >= 0 ) { fos.write( buffer, 0, length ); } fos.close(); fos = null; } finally { if ( fos != null ) { try { fos.close(); } catch ( final IOException e ) { // ignore } } } } f.setLastModified( entryDate.getTime() ); if ( !isIgnorePermissions() && mode != null ) { ArchiveEntryUtils.chmod( f, mode.intValue(), getLogger(), isUseJvmChmod() ); } } catch ( final FileNotFoundException ex ) { getLogger().warn( "Unable to expand to file " + f.getPath() ); } } protected void execute( final String path, final File outputDirectory ) throws ArchiverException { ZipFile zipFile = null; try { zipFile = new ZipFile( getSourceFile(), encoding ); final Enumeration e = zipFile.getEntries(); while ( e.hasMoreElements() ) { final ZipEntry ze = (ZipEntry) e.nextElement(); final ZipEntryFileInfo fileInfo = new 
ZipEntryFileInfo( zipFile, ze ); if ( !isSelected( ze.getName(), fileInfo ) ) { continue; } if ( ze.getName() .startsWith( path ) ) { extractFileIfIncluded( getSourceFile(), outputDirectory, zipFile.getInputStream( ze ), ze.getName(), new Date( ze.getTime() ), ze.isDirectory(), null ); } } } catch ( final IOException ioe ) { throw new ArchiverException( "Error while expanding " + getSourceFile().getAbsolutePath(), ioe ); } finally { if ( zipFile != null ) { try { zipFile.close(); } catch ( final IOException e ) { // ignore } } } } } AsiExtraField.java000066400000000000000000000211761145404360500340400ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; /* * Copyright 2001-2002,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import org.codehaus.plexus.archiver.UnixStat; import java.util.zip.CRC32; import java.util.zip.ZipException; /** * Adds Unix file permission and UID/GID fields as well as symbolic * link handling. *

*

This class uses the ASi extra field in the format: *

 *         Value         Size            Description
 *         -----         ----            -----------
 * (Unix3) 0x756e        Short           tag for this extra block type
 *         TSize         Short           total data size for this block
 *         CRC           Long            CRC-32 of the remaining data
 *         Mode          Short           file permissions
 *         SizDev        Long            symlink'd size OR major/minor dev num
 *         UID           Short           user ID
 *         GID           Short           group ID
 *         (var.)        variable        symbolic link filename
 * 
* taken from appnote.iz (Info-ZIP note, 981119) found at ftp://ftp.uu.net/pub/archiving/zip/doc/

*

*

*

Short is two bytes and Long is four bytes in big endian byte and * word order, device numbers are currently not supported.

* * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.AsiExtraField v1.10 */ public class AsiExtraField implements ZipExtraField, UnixStat, Cloneable { private static final ZipShort HEADER_ID = new ZipShort( 0x756E ); /** * Standard Unix stat(2) file mode. * * @since 1.1 */ private int mode = 0; /** * User ID. * * @since 1.1 */ private int uid = 0; /** * Group ID. * * @since 1.1 */ private int gid = 0; /** * File this entry points to, if it is a symbolic link. *

*

empty string - if entry is not a symbolic link.

* * @since 1.1 */ private String link = ""; /** * Is this an entry for a directory? * * @since 1.1 */ private boolean dirFlag = false; /** * Instance used to calculate checksums. * * @since 1.1 */ private CRC32 crc = new CRC32(); public AsiExtraField() { } /** * The Header-ID. * * @since 1.1 */ public ZipShort getHeaderId() { return HEADER_ID; } /** * Length of the extra field in the local file data - without * Header-ID or length specifier. * * @since 1.1 */ public ZipShort getLocalFileDataLength() { return new ZipShort( 4 // CRC + 2 // Mode + 4 // SizDev + 2 // UID + 2 // GID + getLinkedFile().getBytes().length ); } /** * Delegate to local file data. * * @since 1.1 */ public ZipShort getCentralDirectoryLength() { return getLocalFileDataLength(); } /** * The actual data to put into local file data - without Header-ID * or length specifier. * * @since 1.1 */ public byte[] getLocalFileDataData() { // CRC will be added later byte[] data = new byte[getLocalFileDataLength().getValue() - 4]; System.arraycopy( ( new ZipShort( getMode() ) ).getBytes(), 0, data, 0, 2 ); byte[] linkArray = getLinkedFile().getBytes(); System.arraycopy( ( new ZipLong( linkArray.length ) ).getBytes(), 0, data, 2, 4 ); System.arraycopy( ( new ZipShort( getUserId() ) ).getBytes(), 0, data, 6, 2 ); System.arraycopy( ( new ZipShort( getGroupId() ) ).getBytes(), 0, data, 8, 2 ); System.arraycopy( linkArray, 0, data, 10, linkArray.length ); crc.reset(); crc.update( data ); long checksum = crc.getValue(); byte[] result = new byte[data.length + 4]; System.arraycopy( ( new ZipLong( checksum ) ).getBytes(), 0, result, 0, 4 ); System.arraycopy( data, 0, result, 4, data.length ); return result; } /** * Delegate to local file data. * * @since 1.1 */ public byte[] getCentralDirectoryData() { return getLocalFileDataData(); } /** * Set the user id. * * @since 1.1 */ public void setUserId( int uid ) { this.uid = uid; } /** * Get the user id. 
* * @since 1.1 */ public int getUserId() { return uid; } /** * Set the group id. * * @since 1.1 */ public void setGroupId( int gid ) { this.gid = gid; } /** * Get the group id. * * @since 1.1 */ public int getGroupId() { return gid; } /** * Indicate that this entry is a symbolic link to the given filename. * * @param name Name of the file this entry links to, empty String * if it is not a symbolic link. * @since 1.1 */ public void setLinkedFile( String name ) { link = name; mode = getMode( mode ); } /** * Name of linked file * * @return name of the file this entry links to if it is a * symbolic link, the empty string otherwise. * @since 1.1 */ public String getLinkedFile() { return link; } /** * Is this entry a symbolic link? * * @since 1.1 */ public boolean isLink() { return getLinkedFile().length() != 0; } /** * File mode of this file. * * @since 1.1 */ public void setMode( int mode ) { this.mode = getMode( mode ); } /** * File mode of this file. * * @since 1.1 */ public int getMode() { return mode; } /** * Indicate whether this entry is a directory. * * @since 1.1 */ public void setDirectory( boolean dirFlag ) { this.dirFlag = dirFlag; mode = getMode( mode ); } /** * Is this entry a directory? * * @since 1.1 */ public boolean isDirectory() { return dirFlag && !isLink(); } /** * Populate data from this array as if it was in local file data. 
* * @since 1.1 */ public void parseFromLocalFileData( byte[] data, int offset, int length ) throws ZipException { long givenChecksum = ( new ZipLong( data, offset ) ).getValue(); byte[] tmp = new byte[length - 4]; System.arraycopy( data, offset + 4, tmp, 0, length - 4 ); crc.reset(); crc.update( tmp ); long realChecksum = crc.getValue(); if ( givenChecksum != realChecksum ) { throw new ZipException( "bad CRC checksum " + Long.toHexString( givenChecksum ) + " instead of " + Long.toHexString( realChecksum ) ); } int newMode = ( new ZipShort( tmp, 0 ) ).getValue(); byte[] linkArray = new byte[(int) ( new ZipLong( tmp, 2 ) ).getValue()]; uid = ( new ZipShort( tmp, 6 ) ).getValue(); gid = ( new ZipShort( tmp, 8 ) ).getValue(); if ( linkArray.length == 0 ) { link = ""; } else { System.arraycopy( tmp, 10, linkArray, 0, linkArray.length ); link = new String( linkArray ); } setDirectory( ( newMode & DIR_FLAG ) != 0 ); setMode( newMode ); } /** * Get the file mode for given permissions with the correct file type. * * @since 1.1 */ protected int getMode( int mode ) { int type = FILE_FLAG; if ( isLink() ) { type = LINK_FLAG; } else if ( isDirectory() ) { type = DIR_FLAG; } return type | ( mode & PERM_MASK ); } } ExtraFieldUtils.java000066400000000000000000000137251145404360500344250ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; /* * Copyright 2001-2002,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. * */ import java.util.Hashtable; import java.util.Vector; import java.util.zip.ZipException; /** * ZipExtraField related methods * * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.ExtraFieldUtils v1.9 */ public class ExtraFieldUtils { /** * Static registry of known extra fields. * * @since 1.1 */ private static Hashtable implementations; static { implementations = new Hashtable(); register( AsiExtraField.class ); } /** * Register a ZipExtraField implementation. *

*

The given class must have a no-arg constructor and implement * the {@link ZipExtraField ZipExtraField interface}.

* * @since 1.1 */ public static void register( Class c ) { try { ZipExtraField ze = (ZipExtraField) c.newInstance(); implementations.put( ze.getHeaderId(), c ); } catch ( ClassCastException cc ) { throw new RuntimeException( c + " doesn\'t implement ZipExtraField" ); } catch ( InstantiationException ie ) { throw new RuntimeException( c + " is not a concrete class" ); } catch ( IllegalAccessException ie ) { throw new RuntimeException( c + "\'s no-arg constructor is not public" ); } } /** * Create an instance of the approriate ExtraField, falls back to * {@link UnrecognizedExtraField UnrecognizedExtraField}. * * @since 1.1 */ public static ZipExtraField createExtraField( ZipShort headerId ) throws InstantiationException, IllegalAccessException { Class c = (Class) implementations.get( headerId ); if ( c != null ) { return (ZipExtraField) c.newInstance(); } UnrecognizedExtraField u = new UnrecognizedExtraField(); u.setHeaderId( headerId ); return u; } /** * Split the array into ExtraFields and populate them with the * give data. 
* * @since 1.1 */ public static ZipExtraField[] parse( byte[] data ) throws ZipException { Vector v = new Vector(); int start = 0; while ( start <= data.length - 4 ) { ZipShort headerId = new ZipShort( data, start ); int length = ( new ZipShort( data, start + 2 ) ).getValue(); if ( start + 4 + length > data.length ) { throw new ZipException( "data starting at " + start + " is in unknown format" ); } try { ZipExtraField ze = createExtraField( headerId ); ze.parseFromLocalFileData( data, start + 4, length ); v.addElement( ze ); } catch ( InstantiationException ie ) { throw new ZipException( ie.getMessage() ); } catch ( IllegalAccessException iae ) { throw new ZipException( iae.getMessage() ); } start += ( length + 4 ); } if ( start != data.length ) { // array not exhausted throw new ZipException( "data starting at " + start + " is in unknown format" ); } ZipExtraField[] result = new ZipExtraField[v.size()]; v.copyInto( result ); return result; } /** * Merges the local file data fields of the given ZipExtraFields. * * @since 1.1 */ public static byte[] mergeLocalFileDataData( ZipExtraField[] data ) { int sum = 4 * data.length; for ( int i = 0; i < data.length; i++ ) { sum += data[ i ].getLocalFileDataLength().getValue(); } byte[] result = new byte[sum]; int start = 0; for ( int i = 0; i < data.length; i++ ) { System.arraycopy( data[ i ].getHeaderId().getBytes(), 0, result, start, 2 ); System.arraycopy( data[ i ].getLocalFileDataLength().getBytes(), 0, result, start + 2, 2 ); byte[] local = data[ i ].getLocalFileDataData(); System.arraycopy( local, 0, result, start + 4, local.length ); start += ( local.length + 4 ); } return result; } /** * Merges the central directory fields of the given ZipExtraFields. 
* * @since 1.1 */ public static byte[] mergeCentralDirectoryData( ZipExtraField[] data ) { int sum = 4 * data.length; for ( int i = 0; i < data.length; i++ ) { sum += data[ i ].getCentralDirectoryLength().getValue(); } byte[] result = new byte[sum]; int start = 0; for ( int i = 0; i < data.length; i++ ) { System.arraycopy( data[ i ].getHeaderId().getBytes(), 0, result, start, 2 ); System.arraycopy( data[ i ].getCentralDirectoryLength().getBytes(), 0, result, start + 2, 2 ); byte[] local = data[ i ].getCentralDirectoryData(); System.arraycopy( local, 0, result, start + 4, local.length ); start += ( local.length + 4 ); } return result; } } PlexusIoZipFileResourceCollection.java000066400000000000000000000025071145404360500401300ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; import org.codehaus.plexus.components.io.resources.AbstractPlexusIoArchiveResourceCollection; import java.io.File; import java.io.IOException; import java.util.Enumeration; import java.util.Iterator; public class PlexusIoZipFileResourceCollection extends AbstractPlexusIoArchiveResourceCollection { /** * The zip file resource collections role hint. */ public static final String ROLE_HINT = "zip"; protected Iterator getEntries() throws IOException { final File f = getFile(); if ( f == null ) { throw new IOException( "The tar archive file has not been set." ); } final ZipFile zipFile = new ZipFile( f ); final Enumeration en = zipFile.getEntries(); return new Iterator(){ public boolean hasNext() { return en.hasMoreElements(); } public Object next() { final ZipEntry entry = (ZipEntry) en.nextElement(); final ZipResource res = new ZipResource( zipFile, entry ); return res; } public void remove() { throw new UnsupportedOperationException( "Removing isn't implemented." 
); } }; } } UnrecognizedExtraField.java000066400000000000000000000051251145404360500357540ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; /* * Copyright 2001-2002,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /** * Simple placeholder for all those extra fields we don't want to deal * with. *

*

Assumes local file data and central directory entries are * identical - unless told the opposite.

* * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.UnrecognizedExtraField v1.8 */ public class UnrecognizedExtraField implements ZipExtraField { /** * The Header-ID. * * @since 1.1 */ private ZipShort headerId; public void setHeaderId( ZipShort headerId ) { this.headerId = headerId; } public ZipShort getHeaderId() { return headerId; } /** * Extra field data in local file data - without * Header-ID or length specifier. * * @since 1.1 */ private byte[] localData; public void setLocalFileDataData( byte[] data ) { localData = data; } public ZipShort getLocalFileDataLength() { return new ZipShort( localData.length ); } public byte[] getLocalFileDataData() { return localData; } /** * Extra field data in central directory - without * Header-ID or length specifier. * * @since 1.1 */ private byte[] centralData; public void setCentralDirectoryData( byte[] data ) { centralData = data; } public ZipShort getCentralDirectoryLength() { if ( centralData != null ) { return new ZipShort( centralData.length ); } return getLocalFileDataLength(); } public byte[] getCentralDirectoryData() { if ( centralData != null ) { return centralData; } return getLocalFileDataData(); } public void parseFromLocalFileData( byte[] data, int offset, int length ) { byte[] tmp = new byte[length]; System.arraycopy( data, offset, tmp, 0, length ); setLocalFileDataData( tmp ); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zip/ZipArchiver.java000066400000000000000000000013641145404360500336560ustar00rootroot00000000000000package org.codehaus.plexus.archiver.zip; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @version $Id$ */ public class ZipArchiver extends AbstractZipArchiver { } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zip/ZipEntry.java000066400000000000000000000316671145404360500332250ustar00rootroot00000000000000package org.codehaus.plexus.archiver.zip; /* * Copyright 2001-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Vector; import java.util.zip.ZipException; import org.codehaus.plexus.archiver.ArchiveFile; /** * Extension that adds better handling of extra fields and provides * access to the internal and external file attributes. 
* * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.ZipEntry v1.16 */ public class ZipEntry extends java.util.zip.ZipEntry implements Cloneable, ArchiveFile.Entry { private static final int PLATFORM_UNIX = 3; private static final int PLATFORM_FAT = 0; private int internalAttributes = 0; private int platform = PLATFORM_FAT; private long externalAttributes = 0; private Vector extraFields = new Vector(); private String name = null; /** * Creates a new zip entry with the specified name. * * @since 1.1 */ public ZipEntry( String name ) { super( name ); } /** * Creates a new zip entry with fields taken from the specified zip entry. * * @since 1.1 */ public ZipEntry( java.util.zip.ZipEntry entry ) throws ZipException { /* * REVISIT: call super(entry) instead of this stuff in Ant2, * "copy constructor" has not been available in JDK 1.1 */ super( entry.getName() ); setComment( entry.getComment() ); setMethod( entry.getMethod() ); setTime( entry.getTime() ); long size = entry.getSize(); if ( size > 0 ) { setSize( size ); } long cSize = entry.getCompressedSize(); if ( cSize > 0 ) { setComprSize( cSize ); } long crc = entry.getCrc(); if ( crc > 0 ) { setCrc( crc ); } byte[] extra = entry.getExtra(); if ( extra != null ) { setExtraFields( ExtraFieldUtils.parse( extra ) ); } else { // initializes extra data to an empty byte array setExtra(); } } /** * Creates a new zip entry with fields taken from the specified zip entry. 
* * @since 1.1 */ public ZipEntry( ZipEntry entry ) throws ZipException { this( (java.util.zip.ZipEntry) entry ); setInternalAttributes( entry.getInternalAttributes() ); setExternalAttributes( entry.getExternalAttributes() ); setExtraFields( entry.getExtraFields() ); } /** * @since 1.9 */ protected ZipEntry() { super( "" ); } /** * Overwrite clone * * @since 1.1 */ public Object clone() { try { ZipEntry e = (ZipEntry) super.clone(); e.setName( getName() ); e.setComment( getComment() ); e.setMethod( getMethod() ); e.setTime( getTime() ); long size = getSize(); if ( size > 0 ) { e.setSize( size ); } long cSize = getCompressedSize(); if ( cSize > 0 ) { e.setComprSize( cSize ); } long crc = getCrc(); if ( crc > 0 ) { e.setCrc( crc ); } e.extraFields = (Vector) extraFields.clone(); e.setInternalAttributes( getInternalAttributes() ); e.setExternalAttributes( getExternalAttributes() ); e.setExtraFields( getExtraFields() ); return e; } catch ( Throwable t ) { // in JDK 1.1 ZipEntry is not Cloneable, so super.clone declares // to throw CloneNotSupported - since JDK 1.2 it is overridden to // not throw that exception return null; } } /** * Retrieves the internal file attributes. * * @since 1.1 */ public int getInternalAttributes() { return internalAttributes; } /** * Sets the internal file attributes. * * @since 1.1 */ public void setInternalAttributes( int value ) { internalAttributes = value; } /** * Retrieves the external file attributes. * * @since 1.1 */ public long getExternalAttributes() { return externalAttributes; } /** * Sets the external file attributes. * * @since 1.1 */ public void setExternalAttributes( long value ) { externalAttributes = value; } /** * Sets Unix permissions in a way that is understood by Info-Zip's * unzip command. * * @since Ant 1.5.2 */ public void setUnixMode( int mode ) { setExternalAttributes( ( mode << 16 ) // MS-DOS read-only attribute | ( ( mode & 0200 ) == 0 ? 1 : 0 ) // MS-DOS directory flag | ( isDirectory() ? 
0x10 : 0 ) ); platform = PLATFORM_UNIX; } /** * Unix permission. * * @since Ant 1.6 */ public int getUnixMode() { return (int) ( ( getExternalAttributes() >> 16 ) & 0xFFFF ); } /** * Platform specification to put into the "version made * by" part of the central file header. * * @return 0 (MS-DOS FAT) unless {@link #setUnixMode setUnixMode} * has been called, in which case 3 (Unix) will be returned. * @since Ant 1.5.2 */ public int getPlatform() { return platform; } /** * @since 1.9 */ protected void setPlatform( int platform ) { this.platform = platform; } /** * Replaces all currently attached extra fields with the new array. * * @since 1.1 */ public void setExtraFields( ZipExtraField[] fields ) { extraFields.removeAllElements(); for ( int i = 0; i < fields.length; i++ ) { extraFields.addElement( fields[ i ] ); } setExtra(); } /** * Retrieves extra fields. * * @since 1.1 */ public ZipExtraField[] getExtraFields() { ZipExtraField[] result = new ZipExtraField[extraFields.size()]; extraFields.copyInto( result ); return result; } /** * Adds an extra fields - replacing an already present extra field * of the same type. * * @since 1.1 */ public void addExtraField( ZipExtraField ze ) { ZipShort type = ze.getHeaderId(); boolean done = false; for ( int i = 0; !done && i < extraFields.size(); i++ ) { if ( ( (ZipExtraField) extraFields.elementAt( i ) ).getHeaderId().equals( type ) ) { extraFields.setElementAt( ze, i ); done = true; } } if ( !done ) { extraFields.addElement( ze ); } setExtra(); } /** * Remove an extra fields. * * @since 1.1 */ public void removeExtraField( ZipShort type ) { boolean done = false; for ( int i = 0; !done && i < extraFields.size(); i++ ) { if ( ( (ZipExtraField) extraFields.elementAt( i ) ).getHeaderId().equals( type ) ) { extraFields.removeElementAt( i ); done = true; } } if ( !done ) { throw new java.util.NoSuchElementException(); } setExtra(); } /** * Throws an Exception if extra data cannot be parsed into extra fields. 
* * @since 1.1 */ public void setExtra( byte[] extra ) throws RuntimeException { try { setExtraFields( ExtraFieldUtils.parse( extra ) ); } catch ( Exception e ) { throw new RuntimeException( e.getMessage() ); } } /** * Unfortunately {@link java.util.zip.ZipOutputStream * java.util.zip.ZipOutputStream} seems to access the extra data * directly, so overriding getExtra doesn't help - we need to * modify super's data directly. * * @since 1.1 */ protected void setExtra() { super.setExtra( ExtraFieldUtils.mergeLocalFileDataData( getExtraFields() ) ); } /** * Retrieves the extra data for the local file data. * * @since 1.1 */ public byte[] getLocalFileDataExtra() { byte[] extra = getExtra(); return extra != null ? extra : new byte[0]; } /** * Retrieves the extra data for the central directory. * * @since 1.1 */ public byte[] getCentralDirectoryExtra() { return ExtraFieldUtils.mergeCentralDirectoryData( getExtraFields() ); } /** * Helper for JDK 1.1 <-> 1.2 incompatibility. * * @since 1.2 */ private Long compressedSize = null; /** * Make this class work in JDK 1.1 like a 1.2 class. *

*

This either stores the size for later usage or invokes * setCompressedSize via reflection.

* * @since 1.2 */ public void setComprSize( long size ) { if ( haveSetCompressedSize() ) { performSetCompressedSize( this, size ); } else { compressedSize = new Long( size ); } } /** * Override to make this class work in JDK 1.1 like a 1.2 class. * * @since 1.2 */ public long getCompressedSize() { if ( compressedSize != null ) { // has been set explicitly and we are running in a 1.1 VM return compressedSize.longValue(); } return super.getCompressedSize(); } /** * @since 1.9 */ public String getName() { return name == null ? super.getName() : name; } /** * @since 1.10 */ public boolean isDirectory() { return getName().endsWith( "/" ); } protected void setName( String name ) { this.name = name; } /** * Helper for JDK 1.1 * * @since 1.2 */ private static Method setCompressedSizeMethod = null; /** * Helper for JDK 1.1 * * @since 1.2 */ private final static Object lockReflection = new Object(); /** * Helper for JDK 1.1 * * @since 1.2 */ private static boolean triedToGetMethod = false; /** * Are we running JDK 1.2 or higher? * * @since 1.2 */ private static boolean haveSetCompressedSize() { checkSCS(); return setCompressedSizeMethod != null; } /** * Invoke setCompressedSize via reflection. * * @since 1.2 */ private static void performSetCompressedSize( ZipEntry ze, long size ) { Long[] s = {new Long( size )}; try { setCompressedSizeMethod.invoke( ze, s ); } catch ( InvocationTargetException ite ) { Throwable nested = ite.getTargetException(); throw new RuntimeException( "Exception setting the compressed size " + "of " + ze + ": " + nested.getMessage() ); } catch ( Throwable other ) { throw new RuntimeException( "Exception setting the compressed size " + "of " + ze + ": " + other.getMessage() ); } } /** * Try to get a handle to the setCompressedSize method. 
* * @since 1.2 */ private static void checkSCS() { if ( !triedToGetMethod ) { synchronized ( lockReflection ) { triedToGetMethod = true; try { setCompressedSizeMethod = java.util.zip.ZipEntry.class.getMethod( "setCompressedSize", new Class[]{Long.TYPE} ); } catch ( NoSuchMethodException nse ) { // ignored } } } } public long getLastModificationTime() { return getTime(); } } ZipExtraField.java000066400000000000000000000042661145404360500340670ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; /* * Copyright 2001,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import java.util.zip.ZipException; /** * General format of extra field data. *

*

Extra fields usually appear twice per file, once in the local * file data and once in the central directory. Usually they are the * same, but they don't have to be. {@link * java.util.zip.ZipOutputStream java.util.zip.ZipOutputStream} will * only use the local file data in both places.

* * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.ZipExtraField v1.9 */ public interface ZipExtraField { /** * The Header-ID. * * @since 1.1 */ ZipShort getHeaderId(); /** * Length of the extra field in the local file data - without * Header-ID or length specifier. * * @since 1.1 */ ZipShort getLocalFileDataLength(); /** * Length of the extra field in the central directory - without * Header-ID or length specifier. * * @since 1.1 */ ZipShort getCentralDirectoryLength(); /** * The actual data to put into local file data - without Header-ID * or length specifier. * * @since 1.1 */ byte[] getLocalFileDataData(); /** * The actual data to put central directory - without Header-ID or * length specifier. * * @since 1.1 */ byte[] getCentralDirectoryData(); /** * Populate data from this array as if it was in local file data. * * @since 1.1 */ void parseFromLocalFileData( byte[] data, int offset, int length ) throws ZipException; } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zip/ZipFile.java000066400000000000000000000465771145404360500330110ustar00rootroot00000000000000package org.codehaus.plexus.archiver.zip; /* * Copyright 2003-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.RandomAccessFile; import java.io.UnsupportedEncodingException; import java.util.Calendar; import java.util.Date; import java.util.Enumeration; import java.util.Hashtable; import java.util.zip.Inflater; import java.util.zip.InflaterInputStream; import java.util.zip.ZipException; import org.codehaus.plexus.archiver.ArchiveFile; /** * Replacement for java.util.ZipFile. *

*

This class adds support for file name encodings other than UTF-8 * (which is required to work on ZIP files created by native zip tools * and is able to skip a preamble like the one found in self * extracting archives. Furthermore it returns instances of * org.apache.tools.zip.ZipEntry instead of * java.util.zip.ZipEntry.

*

*

It doesn't extend java.util.zip.ZipFile as it would * have to reimplement all methods anyway. Like * java.util.ZipFile, it uses RandomAccessFile under the * covers and supports compressed and uncompressed entries.

*

*

The method signatures mimic the ones of * java.util.zip.ZipFile, with a couple of exceptions: *

*

* * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.ZipFile v1.13 */ public class ZipFile implements ArchiveFile { /** * Maps ZipEntrys to Longs, recording the offsets of the local * file headers. */ private Hashtable entries = new Hashtable(); /** * Maps String to ZipEntrys, name -> actual entry. */ private Hashtable nameMap = new Hashtable(); /** * Maps ZipEntrys to Longs, recording the offsets of the actual file data. */ private Hashtable dataOffsets = new Hashtable(); /** * The encoding to use for filenames and the file comment. *

*

For a list of possible values see http://java.sun.com/products/jdk/1.2/docs/guide/internat/encoding.doc.html. * Defaults to the platform's default character encoding.

*/ private String encoding = null; /** * The actual data source. */ private RandomAccessFile archive; /** * Opens the given file for reading, assuming the platform's * native encoding for file names. * * @param f the archive. * @throws IOException if an error occurs while reading the file. */ public ZipFile( File f ) throws IOException { this( f, null ); } /** * Opens the given file for reading, assuming the platform's * native encoding for file names. * * @param name name of the archive. * @throws IOException if an error occurs while reading the file. */ public ZipFile( String name ) throws IOException { this( new File( name ), null ); } /** * Opens the given file for reading, assuming the specified * encoding for file names. * * @param name name of the archive. * @param encoding the encoding to use for file names * @throws IOException if an error occurs while reading the file. */ public ZipFile( String name, String encoding ) throws IOException { this( new File( name ), encoding ); } /** * Opens the given file for reading, assuming the specified * encoding for file names. * * @param f the archive. * @param encoding the encoding to use for file names * @throws IOException if an error occurs while reading the file. */ public ZipFile( File f, String encoding ) throws IOException { this.encoding = encoding; archive = new RandomAccessFile( f, "r" ); populateFromCentralDirectory(); resolveLocalFileHeaderData(); } /** * The encoding to use for filenames and the file comment. * * @return null if using the platform's default character encoding. */ public String getEncoding() { return encoding; } /** * Closes the archive. * * @throws IOException if an error occurs closing the archive. */ public void close() throws IOException { archive.close(); } /** * Returns all entries. * * @return all entries as {@link ZipEntry} instances */ public Enumeration getEntries() { return entries.keys(); } /** * Returns a named entry - or null if no entry by * that name exists. 
* * @param name name of the entry. * @return the ZipEntry corresponding to the given name - or * null if not present. */ public ZipEntry getEntry( String name ) { return (ZipEntry) nameMap.get( name ); } public InputStream getInputStream( ArchiveFile.Entry entry ) throws IOException { return getInputStream( (ZipEntry) entry ); } /** * Returns an InputStream for reading the contents of the given entry. * * @param ze the entry to get the stream for. * @return a stream to read the entry from. */ public InputStream getInputStream( ZipEntry ze ) throws IOException, ZipException { Long start = (Long) dataOffsets.get( ze ); if ( start == null ) { return null; } BoundedInputStream bis = new BoundedInputStream( start.longValue(), ze.getCompressedSize() ); switch ( ze.getMethod() ) { case ZipEntry.STORED: return bis; case ZipEntry.DEFLATED: bis.addDummy(); return new InflaterInputStream( bis, new Inflater( true ) ); default: throw new ZipException( "Found unsupported compression method " + ze.getMethod() ); } } private static final int CFH_LEN = /* version made by */ 2 + /* version needed to extract */ 2 + /* general purpose bit flag */ 2 + /* compression method */ 2 + /* last mod file time */ 2 + /* last mod file date */ 2 + /* crc-32 */ 4 + /* compressed size */ 4 + /* uncompressed size */ 4 + /* filename length */ 2 + /* extra field length */ 2 + /* file comment length */ 2 + /* disk number start */ 2 + /* internal file attributes */ 2 + /* external file attributes */ 4 + /* relative offset of local header */ 4; /** * Reads the central directory of the given archive and populates * the internal tables with ZipEntry instances. *

*

The ZipEntrys will know all data that can be obtained from * the central directory alone, but not the data that requires the * local file header or additional data to be read.

*/ private void populateFromCentralDirectory() throws IOException { positionAtCentralDirectory(); byte[] cfh = new byte[CFH_LEN]; byte[] signatureBytes = new byte[4]; archive.readFully( signatureBytes ); ZipLong sig = new ZipLong( signatureBytes ); while ( sig.equals( ZipOutputStream.CFH_SIG ) ) { archive.readFully( cfh ); int off = 0; ZipEntry ze = new ZipEntry(); ZipShort versionMadeBy = new ZipShort( cfh, off ); off += 2; ze.setPlatform( ( versionMadeBy.getValue() >> 8 ) & 0x0F ); off += 4; // skip version info and general purpose byte ze.setMethod( ( new ZipShort( cfh, off ) ).getValue() ); off += 2; ze.setTime( fromDosTime( new ZipLong( cfh, off ) ).getTime() ); off += 4; ze.setCrc( ( new ZipLong( cfh, off ) ).getValue() ); off += 4; ze.setCompressedSize( ( new ZipLong( cfh, off ) ).getValue() ); off += 4; ze.setSize( ( new ZipLong( cfh, off ) ).getValue() ); off += 4; int fileNameLen = ( new ZipShort( cfh, off ) ).getValue(); off += 2; int extraLen = ( new ZipShort( cfh, off ) ).getValue(); off += 2; int commentLen = ( new ZipShort( cfh, off ) ).getValue(); off += 2; off += 2; // disk number ze.setInternalAttributes( ( new ZipShort( cfh, off ) ).getValue() ); off += 2; ze.setExternalAttributes( ( new ZipLong( cfh, off ) ).getValue() ); off += 4; // LFH offset entries.put( ze, new Long( ( new ZipLong( cfh, off ) ).getValue() ) ); byte[] fileName = new byte[fileNameLen]; archive.readFully( fileName ); ze.setName( getString( fileName ) ); nameMap.put( ze.getName(), ze ); archive.skipBytes( extraLen ); byte[] comment = new byte[commentLen]; archive.readFully( comment ); ze.setComment( getString( comment ) ); archive.readFully( signatureBytes ); sig = new ZipLong( signatureBytes ); } } private static final int MIN_EOCD_SIZE = /* end of central dir signature */ 4 + /* number of this disk */ 2 + /* number of the disk with the */ + /* start of the central directory */ 2 + /* total number of entries in */ + /* the central dir on this disk */ 2 + /* total number of 
entries in */ + /* the central dir */ 2 + /* size of the central directory */ 4 + /* offset of start of central */ + /* directory with respect to */ + /* the starting disk number */ 4 + /* zipfile comment length */ 2; private static final int CFD_LOCATOR_OFFSET = /* end of central dir signature */ 4 + /* number of this disk */ 2 + /* number of the disk with the */ + /* start of the central directory */ 2 + /* total number of entries in */ + /* the central dir on this disk */ 2 + /* total number of entries in */ + /* the central dir */ 2 + /* size of the central directory */ 4; /** * Searches for the "End of central dir record", parses * it and positions the stream at the first central directory * record. */ private void positionAtCentralDirectory() throws IOException { long off = archive.length() - MIN_EOCD_SIZE; archive.seek( off ); byte[] sig = ZipOutputStream.EOCD_SIG.getBytes(); int curr = archive.read(); boolean found = false; while ( curr != -1 ) { if ( curr == sig[ 0 ] ) { curr = archive.read(); if ( curr == sig[ 1 ] ) { curr = archive.read(); if ( curr == sig[ 2 ] ) { curr = archive.read(); if ( curr == sig[ 3 ] ) { found = true; break; } } } } archive.seek( --off ); curr = archive.read(); } if ( !found ) { throw new ZipException( "archive is not a ZIP archive" ); } archive.seek( off + CFD_LOCATOR_OFFSET ); byte[] cfdOffset = new byte[4]; archive.readFully( cfdOffset ); archive.seek( ( new ZipLong( cfdOffset ) ).getValue() ); } /** * Number of bytes in local file header up to the "length of * filename" entry. */ private static final long LFH_OFFSET_FOR_FILENAME_LENGTH = /* local file header signature */ 4 + /* version needed to extract */ 2 + /* general purpose bit flag */ 2 + /* compression method */ 2 + /* last mod file time */ 2 + /* last mod file date */ 2 + /* crc-32 */ 4 + /* compressed size */ 4 + /* uncompressed size */ 4; /** * Walks through all recorded entries and adds the data available * from the local file header. *

*

Also records the offsets for the data to read from the * entries.

*/ private void resolveLocalFileHeaderData() throws IOException { Enumeration e = getEntries(); while ( e.hasMoreElements() ) { ZipEntry ze = (ZipEntry) e.nextElement(); long offset = ( (Long) entries.get( ze ) ).longValue(); archive.seek( offset + LFH_OFFSET_FOR_FILENAME_LENGTH ); byte[] b = new byte[2]; archive.readFully( b ); int fileNameLen = ( new ZipShort( b ) ).getValue(); archive.readFully( b ); int extraFieldLen = ( new ZipShort( b ) ).getValue(); archive.skipBytes( fileNameLen ); byte[] localExtraData = new byte[extraFieldLen]; archive.readFully( localExtraData ); ze.setExtra( localExtraData ); dataOffsets.put( ze, new Long( offset + LFH_OFFSET_FOR_FILENAME_LENGTH + 2 + 2 + fileNameLen + extraFieldLen ) ); } } /** * Convert a DOS date/time field to a Date object. * * @param l contains the stored DOS time. * @return a Date instance corresponding to the given time. */ protected static Date fromDosTime( ZipLong l ) { long dosTime = l.getValue(); Calendar cal = Calendar.getInstance(); cal.set( Calendar.YEAR, (int) ( ( dosTime >> 25 ) & 0x7f ) + 1980 ); cal.set( Calendar.MONTH, (int) ( ( dosTime >> 21 ) & 0x0f ) - 1 ); cal.set( Calendar.DATE, (int) ( dosTime >> 16 ) & 0x1f ); cal.set( Calendar.HOUR_OF_DAY, (int) ( dosTime >> 11 ) & 0x1f ); cal.set( Calendar.MINUTE, (int) ( dosTime >> 5 ) & 0x3f ); cal.set( Calendar.SECOND, (int) ( dosTime << 1 ) & 0x3e ); return cal.getTime(); } /** * Retrieve a String from the given bytes using the encoding set * for this ZipFile. * * @param bytes the byte array to transform * @return String obtained by using the given encoding * @throws ZipException if the encoding cannot be recognized. 
*/ protected String getString( byte[] bytes ) throws ZipException { if ( encoding == null ) { return new String( bytes ); } else { try { return new String( bytes, encoding ); } catch ( UnsupportedEncodingException uee ) { throw new ZipException( uee.getMessage() ); } } } /** * InputStream that delegates requests to the underlying * RandomAccessFile, making sure that only bytes from a certain * range can be read. */ private class BoundedInputStream extends InputStream { private long remaining; private long loc; private boolean addDummyByte = false; BoundedInputStream( long start, long remaining ) { this.remaining = remaining; loc = start; } public int read() throws IOException { if ( remaining-- <= 0 ) { if ( addDummyByte ) { addDummyByte = false; return 0; } return -1; } synchronized ( archive ) { archive.seek( loc++ ); return archive.read(); } } public int read( byte[] b, int off, int len ) throws IOException { if ( remaining <= 0 ) { if ( addDummyByte ) { addDummyByte = false; b[ off ] = 0; return 1; } return -1; } if ( len <= 0 ) { return 0; } if ( len > remaining ) { len = (int) remaining; } int ret; synchronized ( archive ) { archive.seek( loc ); ret = archive.read( b, off, len ); } if ( ret > 0 ) { loc += ret; remaining -= ret; } return ret; } /** * Inflater needs an extra dummy byte for nowrap - see * Inflater's javadocs. */ void addDummy() { addDummyByte = true; } } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zip/ZipLong.java000066400000000000000000000052251145404360500330120ustar00rootroot00000000000000package org.codehaus.plexus.archiver.zip; /* * Copyright 2001-2002,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /** * Utility class that represents a four byte integer with conversion * rules for the big endian byte order of ZIP files. * * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.ZipLong v1.10 */ public final class ZipLong implements Cloneable { private long value; /** * Create instance from a number. * * @since 1.1 */ public ZipLong( long value ) { this.value = value; } /** * Create instance from bytes. * * @since 1.1 */ public ZipLong( byte[] bytes ) { this( bytes, 0 ); } /** * Create instance from the four bytes starting at offset. * * @since 1.1 */ public ZipLong( byte[] bytes, int offset ) { value = ( bytes[ offset + 3 ] << 24 ) & 0xFF000000L; value += ( bytes[ offset + 2 ] << 16 ) & 0xFF0000; value += ( bytes[ offset + 1 ] << 8 ) & 0xFF00; value += ( bytes[ offset ] & 0xFF ); } /** * Get value as two bytes in big endian byte order. * * @since 1.1 */ public byte[] getBytes() { byte[] result = new byte[4]; result[ 0 ] = (byte) ( ( value & 0xFF ) ); result[ 1 ] = (byte) ( ( value & 0xFF00 ) >> 8 ); result[ 2 ] = (byte) ( ( value & 0xFF0000 ) >> 16 ); result[ 3 ] = (byte) ( ( value & 0xFF000000l ) >> 24 ); return result; } /** * Get value as Java int. * * @since 1.1 */ public long getValue() { return value; } /** * Override to make two instances with same value equal. * * @since 1.1 */ public boolean equals( Object o ) { if ( o == null || !( o instanceof ZipLong ) ) { return false; } return value == ( (ZipLong) o ).getValue(); } /** * Override to make two instances with same value equal. 
* * @since 1.1 */ public int hashCode() { return (int) value; } } ZipOutputStream.java000066400000000000000000000572611145404360500345170ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; /* * Copyright 2001-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import java.io.File; import java.io.FileOutputStream; import java.io.FilterOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.RandomAccessFile; import java.io.UnsupportedEncodingException; import java.util.Date; import java.util.Hashtable; import java.util.Vector; import java.util.zip.CRC32; import java.util.zip.Deflater; import java.util.zip.ZipException; /** * Reimplementation of {@link java.util.zip.ZipOutputStream * java.util.zip.ZipOutputStream} that does handle the extended * functionality of this package, especially internal/external file * attributes and extra fields with different layouts for local file * data and central directory entries. *

*

This class will try to use {@link java.io.RandomAccessFile * RandomAccessFile} when you know that the output is going to go to a * file.

*

*

If RandomAccessFile cannot be used, this implementation will use * a Data Descriptor to store size and CRC information for {@link * #DEFLATED DEFLATED} entries, this means, you don't need to * calculate them yourself. Unfortunately this is not possible for * the {@link #STORED STORED} method, here setting the CRC and * uncompressed size information is required before {@link * #putNextEntry putNextEntry} can be called.

* * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.ZipOutputStream v1.24 */ public class ZipOutputStream extends FilterOutputStream { /** * Current entry. * * @since 1.1 */ private ZipEntry entry; /** * The file comment. * * @since 1.1 */ private String comment = ""; /** * Compression level for next entry. * * @since 1.1 */ private int level = Deflater.DEFAULT_COMPRESSION; /** * Has the compression level changed when compared to the last * entry? * * @since 1.5 */ private boolean hasCompressionLevelChanged = false; /** * Default compression method for next entry. * * @since 1.1 */ private int method = DEFLATED; /** * List of ZipEntries written so far. * * @since 1.1 */ private Vector entries = new Vector(); /** * CRC instance to avoid parsing DEFLATED data twice. * * @since 1.1 */ private CRC32 crc = new CRC32(); /** * Count the bytes written to out. * * @since 1.1 */ private long written = 0; /** * Data for local header data * * @since 1.1 */ private long dataStart = 0; /** * Offset for CRC entry in the local file header data for the * current entry starts here. * * @since 1.15 */ private long localDataStart = 0; /** * Start of central directory. * * @since 1.1 */ private ZipLong cdOffset = new ZipLong( 0 ); /** * Length of central directory. * * @since 1.1 */ private ZipLong cdLength = new ZipLong( 0 ); /** * Helper, a 0 as ZipShort. * * @since 1.1 */ private static final byte[] ZERO = {0, 0}; /** * Helper, a 0 as ZipLong. * * @since 1.1 */ private static final byte[] LZERO = {0, 0, 0, 0}; /** * Holds the offsets of the LFH starts for each entry. * * @since 1.1 */ private Hashtable offsets = new Hashtable(); /** * The encoding to use for filenames and the file comment. *

*

For a list of possible values see http://java.sun.com/products/jdk/1.2/docs/guide/internat/encoding.doc.html. * Defaults to the platform's default character encoding.

* * @since 1.3 */ private String encoding = null; /** * This Deflater object is used for output. *

*

This attribute is only protected to provide a level of API * backwards compatibility. This class used to extend {@link * java.util.zip.DeflaterOutputStream DeflaterOutputStream} up to * Revision 1.13.

* * @since 1.14 */ protected Deflater def = new Deflater( Deflater.DEFAULT_COMPRESSION, true ); /** * This buffer servers as a Deflater. *

*

This attribute is only protected to provide a level of API * backwards compatibility. This class used to extend {@link * java.util.zip.DeflaterOutputStream DeflaterOutputStream} up to * Revision 1.13.

* * @since 1.14 */ protected byte[] buf = new byte[512]; /** * Optional random access output. * * @since 1.14 */ private RandomAccessFile raf = null; /** * Compression method for deflated entries. * * @since 1.1 */ public static final int DEFLATED = ZipEntry.DEFLATED; /** * Compression method for deflated entries. * * @since 1.1 */ public static final int STORED = ZipEntry.STORED; /** * Creates a new ZIP OutputStream filtering the underlying stream. * * @since 1.1 */ public ZipOutputStream( OutputStream out ) { super( out ); } /** * Creates a new ZIP OutputStream writing to a File. Will use * random access if possible. * * @since 1.14 */ public ZipOutputStream( File file ) throws IOException { super( null ); try { raf = new RandomAccessFile( file, "rw" ); raf.setLength( 0 ); } catch ( IOException e ) { if ( raf != null ) { try { raf.close(); } catch ( IOException inner ) { // ignore } raf = null; } out = new FileOutputStream( file ); } } /** * This method indicates whether this archive is writing to a seekable stream (i.e., to a random * access file). *

*

For seekable streams, you don't need to calculate the CRC or * uncompressed size for {@link #STORED} entries before * invoking {@link #putNextEntry}. * * @since 1.17 */ public boolean isSeekable() { return raf != null; } /** * The encoding to use for filenames and the file comment. *

*

For a list of possible values see http://java.sun.com/products/jdk/1.2/docs/guide/internat/encoding.doc.html. * Defaults to the platform's default character encoding.

* * @since 1.3 */ public void setEncoding( String encoding ) { this.encoding = encoding; } /** * The encoding to use for filenames and the file comment. * * @return null if using the platform's default character encoding. * @since 1.3 */ public String getEncoding() { return encoding; } /** * Finishs writing the contents and closes this as well as the * underlying stream. * * @since 1.1 */ public void finish() throws IOException { closeEntry(); cdOffset = new ZipLong( written ); for ( int i = 0; i < entries.size(); i++ ) { writeCentralFileHeader( (ZipEntry) entries.elementAt( i ) ); } cdLength = new ZipLong( written - cdOffset.getValue() ); writeCentralDirectoryEnd(); offsets.clear(); entries.removeAllElements(); } /** * Writes all necessary data for this entry. * * @since 1.1 */ public void closeEntry() throws IOException { if ( entry == null ) { return; } long realCrc = crc.getValue(); crc.reset(); if ( entry.getMethod() == DEFLATED ) { def.finish(); while ( !def.finished() ) { deflate(); } entry.setSize( def.getTotalIn() ); entry.setComprSize( def.getTotalOut() ); entry.setCrc( realCrc ); def.reset(); written += entry.getCompressedSize(); } else if ( raf == null ) { if ( entry.getCrc() != realCrc ) { throw new ZipException( "bad CRC checksum for entry " + entry.getName() + ": " + Long.toHexString( entry.getCrc() ) + " instead of " + Long.toHexString( realCrc ) ); } if ( entry.getSize() != written - dataStart ) { throw new ZipException( "bad size for entry " + entry.getName() + ": " + entry.getSize() + " instead of " + ( written - dataStart ) ); } } else { /* method is STORED and we used RandomAccessFile */ long size = written - dataStart; entry.setSize( size ); entry.setComprSize( size ); entry.setCrc( realCrc ); } // If random access output, write the local file header containing // the correct CRC and compressed/uncompressed sizes if ( raf != null ) { long save = raf.getFilePointer(); raf.seek( localDataStart ); writeOut( ( new ZipLong( entry.getCrc() ) 
).getBytes() ); writeOut( ( new ZipLong( entry.getCompressedSize() ) ).getBytes() ); writeOut( ( new ZipLong( entry.getSize() ) ).getBytes() ); raf.seek( save ); } writeDataDescriptor( entry ); entry = null; } /** * Begin writing next entry. * * @since 1.1 */ public void putNextEntry( ZipEntry ze ) throws IOException { closeEntry(); entry = ze; entries.addElement( entry ); if ( entry.getMethod() == -1 ) { // not specified entry.setMethod( method ); } if ( entry.getTime() == -1 ) { // not specified entry.setTime( System.currentTimeMillis() ); } // Size/CRC not required if RandomAccessFile is used if ( entry.getMethod() == STORED && raf == null ) { if ( entry.getSize() == -1 ) { throw new ZipException( "uncompressed size is required for" + " STORED method when not writing to a" + " file" ); } if ( entry.getCrc() == -1 ) { throw new ZipException( "crc checksum is required for STORED" + " method when not writing to a file" ); } entry.setComprSize( entry.getSize() ); } if ( entry.getMethod() == DEFLATED && hasCompressionLevelChanged ) { def.setLevel( level ); hasCompressionLevelChanged = false; } writeLocalFileHeader( entry ); } /** * Set the file comment. * * @since 1.1 */ public void setComment( String comment ) { this.comment = comment; } /** * Sets the compression level for subsequent entries. *

*

Default is Deflater.DEFAULT_COMPRESSION.

* * @since 1.1 */ public void setLevel( int level ) { hasCompressionLevelChanged = ( this.level != level ); this.level = level; } /** * Sets the default compression method for subsequent entries. *

*

Default is DEFLATED.

* * @since 1.1 */ public void setMethod( int method ) { this.method = method; } /** * Writes bytes to ZIP entry. */ public void write( byte[] b, int offset, int length ) throws IOException { if ( entry.getMethod() == DEFLATED ) { if ( length > 0 ) { if ( !def.finished() ) { def.setInput( b, offset, length ); while ( !def.needsInput() ) { deflate(); } } } } else { writeOut( b, offset, length ); written += length; } crc.update( b, offset, length ); } /** * Writes a single byte to ZIP entry. *

*

Delegates to the three arg method.

* * @since 1.14 */ public void write( int b ) throws IOException { byte[] buf = new byte[1]; buf[ 0 ] = (byte) ( b & 0xff ); write( buf, 0, 1 ); } /** * Closes this output stream and releases any system resources * associated with the stream. * * @throws IOException if an I/O error occurs. * @since 1.14 */ public void close() throws IOException { finish(); if ( raf != null ) { raf.close(); } if ( out != null ) { out.close(); } } /** * Flushes this output stream and forces any buffered output bytes * to be written out to the stream. * * @throws IOException if an I/O error occurs. * @since 1.14 */ public void flush() throws IOException { if ( out != null ) { out.flush(); } } /* * Various ZIP constants */ /** * local file header signature * * @since 1.1 */ protected static final ZipLong LFH_SIG = new ZipLong( 0X04034B50L ); /** * data descriptor signature * * @since 1.1 */ protected static final ZipLong DD_SIG = new ZipLong( 0X08074B50L ); /** * central file header signature * * @since 1.1 */ protected static final ZipLong CFH_SIG = new ZipLong( 0X02014B50L ); /** * end of central dir signature * * @since 1.1 */ protected static final ZipLong EOCD_SIG = new ZipLong( 0X06054B50L ); /** * Writes next block of compressed data to the output stream. 
* * @since 1.14 */ protected final void deflate() throws IOException { int len = def.deflate( buf, 0, buf.length ); if ( len > 0 ) { writeOut( buf, 0, len ); } } /** * Writes the local file header entry * * @since 1.1 */ protected void writeLocalFileHeader( ZipEntry ze ) throws IOException { offsets.put( ze, new ZipLong( written ) ); writeOut( LFH_SIG.getBytes() ); written += 4; // version needed to extract // general purpose bit flag if ( ze.getMethod() == DEFLATED && raf == null ) { // requires version 2 as we are going to store length info // in the data descriptor writeOut( ( new ZipShort( 20 ) ).getBytes() ); // bit3 set to signal, we use a data descriptor writeOut( ( new ZipShort( 8 ) ).getBytes() ); } else { writeOut( ( new ZipShort( 10 ) ).getBytes() ); writeOut( ZERO ); } written += 4; // compression method writeOut( ( new ZipShort( ze.getMethod() ) ).getBytes() ); written += 2; // last mod. time and date writeOut( toDosTime( new Date( ze.getTime() ) ).getBytes() ); written += 4; // CRC // compressed length // uncompressed length localDataStart = written; if ( ze.getMethod() == DEFLATED || raf != null ) { writeOut( LZERO ); writeOut( LZERO ); writeOut( LZERO ); } else { writeOut( ( new ZipLong( ze.getCrc() ) ).getBytes() ); writeOut( ( new ZipLong( ze.getSize() ) ).getBytes() ); writeOut( ( new ZipLong( ze.getSize() ) ).getBytes() ); } written += 12; // file name length byte[] name = getBytes( ze.getName() ); writeOut( ( new ZipShort( name.length ) ).getBytes() ); written += 2; // extra field length byte[] extra = ze.getLocalFileDataExtra(); writeOut( ( new ZipShort( extra.length ) ).getBytes() ); written += 2; // file name writeOut( name ); written += name.length; // extra field writeOut( extra ); written += extra.length; dataStart = written; } /** * Writes the data descriptor entry * * @since 1.1 */ protected void writeDataDescriptor( ZipEntry ze ) throws IOException { if ( ze.getMethod() != DEFLATED || raf != null ) { return; } writeOut( 
DD_SIG.getBytes() ); writeOut( ( new ZipLong( entry.getCrc() ) ).getBytes() ); writeOut( ( new ZipLong( entry.getCompressedSize() ) ).getBytes() ); writeOut( ( new ZipLong( entry.getSize() ) ).getBytes() ); written += 16; } /** * Writes the central file header entry * * @since 1.1 */ protected void writeCentralFileHeader( ZipEntry ze ) throws IOException { writeOut( CFH_SIG.getBytes() ); written += 4; // version made by writeOut( ( new ZipShort( ( ze.getPlatform() << 8 ) | 20 ) ).getBytes() ); written += 2; // version needed to extract // general purpose bit flag if ( ze.getMethod() == DEFLATED && raf == null ) { // requires version 2 as we are going to store length info // in the data descriptor writeOut( ( new ZipShort( 20 ) ).getBytes() ); // bit3 set to signal, we use a data descriptor writeOut( ( new ZipShort( 8 ) ).getBytes() ); } else { writeOut( ( new ZipShort( 10 ) ).getBytes() ); writeOut( ZERO ); } written += 4; // compression method writeOut( ( new ZipShort( ze.getMethod() ) ).getBytes() ); written += 2; // last mod. 
time and date writeOut( toDosTime( new Date( ze.getTime() ) ).getBytes() ); written += 4; // CRC // compressed length // uncompressed length writeOut( ( new ZipLong( ze.getCrc() ) ).getBytes() ); writeOut( ( new ZipLong( ze.getCompressedSize() ) ).getBytes() ); writeOut( ( new ZipLong( ze.getSize() ) ).getBytes() ); written += 12; // file name length byte[] name = getBytes( ze.getName() ); writeOut( ( new ZipShort( name.length ) ).getBytes() ); written += 2; // extra field length byte[] extra = ze.getCentralDirectoryExtra(); writeOut( ( new ZipShort( extra.length ) ).getBytes() ); written += 2; // file comment length String comm = ze.getComment(); if ( comm == null ) { comm = ""; } byte[] comment = getBytes( comm ); writeOut( ( new ZipShort( comment.length ) ).getBytes() ); written += 2; // disk number start writeOut( ZERO ); written += 2; // internal file attributes writeOut( ( new ZipShort( ze.getInternalAttributes() ) ).getBytes() ); written += 2; // external file attributes writeOut( ( new ZipLong( ze.getExternalAttributes() ) ).getBytes() ); written += 4; // relative offset of LFH writeOut( ( (ZipLong) offsets.get( ze ) ).getBytes() ); written += 4; // file name writeOut( name ); written += name.length; // extra field writeOut( extra ); written += extra.length; // file comment writeOut( comment ); written += comment.length; } /** * Writes the "End of central dir record" * * @since 1.1 */ protected void writeCentralDirectoryEnd() throws IOException { writeOut( EOCD_SIG.getBytes() ); // disk numbers writeOut( ZERO ); writeOut( ZERO ); // number of entries byte[] num = ( new ZipShort( entries.size() ) ).getBytes(); writeOut( num ); writeOut( num ); // length and location of CD writeOut( cdLength.getBytes() ); writeOut( cdOffset.getBytes() ); // ZIP file comment byte[] data = getBytes( comment ); writeOut( ( new ZipShort( data.length ) ).getBytes() ); writeOut( data ); } /** * Smallest date/time ZIP can handle. 
* * @since 1.1 */ private static final ZipLong DOS_TIME_MIN = new ZipLong( 0x00002100L ); /** * Convert a Date object to a DOS date/time field. *

*

Stolen from InfoZip's fileio.c

* * @since 1.1 */ protected static ZipLong toDosTime( Date time ) { int year = time.getYear() + 1900; int month = time.getMonth() + 1; if ( year < 1980 ) { return DOS_TIME_MIN; } long value = ( ( year - 1980 ) << 25 ) | ( month << 21 ) | ( time.getDate() << 16 ) | ( time.getHours() << 11 ) | ( time.getMinutes() << 5 ) | ( time.getSeconds() >> 1 ); byte[] result = new byte[4]; result[ 0 ] = (byte) ( ( value & 0xFF ) ); result[ 1 ] = (byte) ( ( value & 0xFF00 ) >> 8 ); result[ 2 ] = (byte) ( ( value & 0xFF0000 ) >> 16 ); result[ 3 ] = (byte) ( ( value & 0xFF000000L ) >> 24 ); return new ZipLong( result ); } /** * Retrieve the bytes for the given String in the encoding set for * this Stream. * * @since 1.3 */ protected byte[] getBytes( String name ) throws ZipException { if ( encoding == null ) { return name.getBytes(); } else { try { return name.getBytes( encoding ); } catch ( UnsupportedEncodingException uee ) { throw new ZipException( uee.getMessage() ); } } } /** * Write bytes to output or random access file * * @since 1.14 */ protected final void writeOut( byte [] data ) throws IOException { writeOut( data, 0, data.length ); } /** * Write bytes to output or random access file * * @since 1.14 */ protected final void writeOut( byte [] data, int offset, int length ) throws IOException { if ( raf != null ) { raf.write( data, offset, length ); } else { out.write( data, offset, length ); } } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zip/ZipResource.java000066400000000000000000000041411145404360500336760ustar00rootroot00000000000000package org.codehaus.plexus.archiver.zip; import org.codehaus.plexus.archiver.UnixStat; import org.codehaus.plexus.components.io.attributes.PlexusIoResourceAttributes; import org.codehaus.plexus.components.io.attributes.SimpleResourceAttributes; import org.codehaus.plexus.components.io.resources.AbstractPlexusIoResourceWithAttributes; import org.codehaus.plexus.components.io.resources.PlexusIoResource; 
import java.io.IOException; import java.io.InputStream; import java.net.URL; public class ZipResource extends AbstractPlexusIoResourceWithAttributes { private final ZipFile zipFile; private final ZipEntry entry; private PlexusIoResourceAttributes attributes; public ZipResource( ZipFile zipFile, ZipEntry entry ) { this.zipFile = zipFile; this.entry = entry; final boolean dir = entry.isDirectory(); setName( entry.getName() ); setFile( !dir ); setDirectory( dir ); setExisting( true ); setFile( !dir ); long l = entry.getLastModificationTime(); setLastModified( l == -1 ? PlexusIoResource.UNKNOWN_MODIFICATION_DATE : l ); setSize( dir ? PlexusIoResource.UNKNOWN_RESOURCE_SIZE : entry.getSize() ); } public synchronized PlexusIoResourceAttributes getAttributes() { int mode = entry.getUnixMode(); if ( ( mode & UnixStat.FILE_FLAG ) == UnixStat.FILE_FLAG ) { mode = mode & ~UnixStat.FILE_FLAG; } else { mode = mode & ~UnixStat.DIR_FLAG; } if ( attributes == null ) { attributes = new SimpleResourceAttributes(); attributes.setOctalMode( mode ); } return attributes; } public synchronized void setAttributes( PlexusIoResourceAttributes attributes ) { this.attributes = attributes; } public URL getURL() throws IOException { return null; } public InputStream getContents() throws IOException { return zipFile.getInputStream( entry ); } } plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zip/ZipShort.java000066400000000000000000000046271145404360500332170ustar00rootroot00000000000000package org.codehaus.plexus.archiver.zip; /* * Copyright 2001-2002,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /** * Utility class that represents a two byte integer with conversion * rules for the big endian byte order of ZIP files. * * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.ZipShort v1.10 */ public final class ZipShort implements Cloneable { private int value; /** * Create instance from a number. * * @since 1.1 */ public ZipShort( int value ) { this.value = value; } /** * Create instance from bytes. * * @since 1.1 */ public ZipShort( byte[] bytes ) { this( bytes, 0 ); } /** * Create instance from the two bytes starting at offset. * * @since 1.1 */ public ZipShort( byte[] bytes, int offset ) { value = ( bytes[ offset + 1 ] << 8 ) & 0xFF00; value += ( bytes[ offset ] & 0xFF ); } /** * Get value as two bytes in big endian byte order. * * @since 1.1 */ public byte[] getBytes() { byte[] result = new byte[2]; result[ 0 ] = (byte) ( value & 0xFF ); result[ 1 ] = (byte) ( ( value & 0xFF00 ) >> 8 ); return result; } /** * Get value as Java int. * * @since 1.1 */ public int getValue() { return value; } /** * Override to make two instances with same value equal. * * @since 1.1 */ public boolean equals( Object o ) { if ( o == null || !( o instanceof ZipShort ) ) { return false; } return value == ( (ZipShort) o ).getValue(); } /** * Override to make two instances with same value equal. 
* * @since 1.1 */ public int hashCode() { return value; } } ZipUnArchiver.java000066400000000000000000000017371145404360500341060ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; import java.io.File; /** * * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Emmanuel Venisse * @version $Revision$ $Date$ */ public class ZipUnArchiver extends AbstractZipUnArchiver { public ZipUnArchiver() { } public ZipUnArchiver( File sourceFile ) { super( sourceFile ); } } plexus-archiver-plexus-archiver-1.2/src/main/resources/000077500000000000000000000000001145404360500233435ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/resources/META-INF/000077500000000000000000000000001145404360500245035ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/resources/META-INF/plexus/000077500000000000000000000000001145404360500260235ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/resources/META-INF/plexus/components.xml000066400000000000000000000425531145404360500307430ustar00rootroot00000000000000 org.codehaus.plexus.archiver.Archiver bzip2 org.codehaus.plexus.archiver.bzip2.BZip2Archiver per-lookup org.codehaus.plexus.archiver.Archiver dir org.codehaus.plexus.archiver.dir.DirectoryArchiver per-lookup org.codehaus.plexus.archiver.Archiver ear 
org.codehaus.plexus.archiver.ear.EarArchiver per-lookup org.codehaus.plexus.archiver.Archiver gzip org.codehaus.plexus.archiver.gzip.GZipArchiver per-lookup org.codehaus.plexus.archiver.Archiver jar org.codehaus.plexus.archiver.jar.JarArchiver per-lookup org.codehaus.plexus.archiver.Archiver tar org.codehaus.plexus.archiver.tar.TarArchiver per-lookup org.codehaus.plexus.archiver.Archiver war org.codehaus.plexus.archiver.war.WarArchiver per-lookup org.codehaus.plexus.archiver.Archiver zip org.codehaus.plexus.archiver.zip.ZipArchiver per-lookup org.codehaus.plexus.archiver.Archiver rar org.codehaus.plexus.archiver.jar.JarArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver bzip2 org.codehaus.plexus.archiver.bzip2.BZip2UnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver gzip org.codehaus.plexus.archiver.gzip.GZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver tar org.codehaus.plexus.archiver.tar.TarUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver zip org.codehaus.plexus.archiver.zip.ZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver jar org.codehaus.plexus.archiver.zip.ZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver war org.codehaus.plexus.archiver.zip.ZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver ear org.codehaus.plexus.archiver.zip.ZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver swc org.codehaus.plexus.archiver.zip.ZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver nar org.codehaus.plexus.archiver.zip.ZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver esb org.codehaus.plexus.archiver.zip.ZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver sar org.codehaus.plexus.archiver.zip.ZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver car org.codehaus.plexus.archiver.zip.ZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver par org.codehaus.plexus.archiver.zip.ZipUnArchiver per-lookup 
org.codehaus.plexus.archiver.UnArchiver rar org.codehaus.plexus.archiver.zip.ZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver tgz org.codehaus.plexus.archiver.tar.TarGZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver tar.gz org.codehaus.plexus.archiver.tar.TarGZipUnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver tbz2 org.codehaus.plexus.archiver.tar.TarBZip2UnArchiver per-lookup org.codehaus.plexus.archiver.UnArchiver tar.bz2 org.codehaus.plexus.archiver.tar.TarBZip2UnArchiver per-lookup org.codehaus.plexus.archiver.manager.ArchiverManager org.codehaus.plexus.archiver.manager.DefaultArchiverManager default org.codehaus.plexus.components.io.fileselectors.FileSelector jar-security org.codehaus.plexus.archiver.filters.JarSecurityFileSelector singleton org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection bzip2 org.codehaus.plexus.archiver.bzip2.PlexusIoBzip2ResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection bz2 org.codehaus.plexus.archiver.bzip2.PlexusIoBzip2ResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection gzip org.codehaus.plexus.archiver.gzip.PlexusIoGzipResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection gz org.codehaus.plexus.archiver.gzip.PlexusIoGzipResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection tar org.codehaus.plexus.archiver.tar.PlexusIoTarFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection jar org.codehaus.plexus.components.io.resources.PlexusIoZipFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection war org.codehaus.plexus.components.io.resources.PlexusIoZipFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection ear 
org.codehaus.plexus.components.io.resources.PlexusIoZipFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection swc org.codehaus.plexus.components.io.resources.PlexusIoZipFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection nar org.codehaus.plexus.components.io.resources.PlexusIoZipFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection esb org.codehaus.plexus.components.io.resources.PlexusIoZipFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection sar org.codehaus.plexus.components.io.resources.PlexusIoZipFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection car org.codehaus.plexus.components.io.resources.PlexusIoZipFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection Par org.codehaus.plexus.components.io.resources.PlexusIoZipFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection rar org.codehaus.plexus.components.io.resources.PlexusIoZipFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection tgz org.codehaus.plexus.archiver.tar.PlexusIoTarGZipFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection tar.gz org.codehaus.plexus.archiver.tar.PlexusIoTarGZipFileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection tbz2 org.codehaus.plexus.archiver.tar.PlexusIoTarBZip2FileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection tar.bz2 org.codehaus.plexus.archiver.tar.PlexusIoTarBZip2FileResourceCollection per-lookup org.codehaus.plexus.components.io.resources.PlexusIoResourceCollection zip 
org.codehaus.plexus.archiver.zip.PlexusIoZipFileResourceCollection per-lookup plexus-archiver-plexus-archiver-1.2/src/main/resources/org/000077500000000000000000000000001145404360500241325ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/resources/org/codehaus/000077500000000000000000000000001145404360500257255ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/resources/org/codehaus/plexus/000077500000000000000000000000001145404360500272455ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/resources/org/codehaus/plexus/archiver/000077500000000000000000000000001145404360500310505ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/resources/org/codehaus/plexus/archiver/jar/000077500000000000000000000000001145404360500316245ustar00rootroot00000000000000defaultManifest.mf000066400000000000000000000000711145404360500352020ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/main/resources/org/codehaus/plexus/archiver/jarManifest-Version: 1.0 Archiver-Version: Plexus Archiver plexus-archiver-plexus-archiver-1.2/src/site/000077500000000000000000000000001145404360500213515ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/site/site.xml000066400000000000000000000004171145404360500230410ustar00rootroot00000000000000 plexus-archiver-plexus-archiver-1.2/src/test/000077500000000000000000000000001145404360500213645ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/dotfiles/000077500000000000000000000000001145404360500231755ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/dotfiles/.plxarc000066400000000000000000000000411145404360500244620ustar00rootroot00000000000000licenses licenses:META-INF/maven 
plexus-archiver-plexus-archiver-1.2/src/test/dotfiles/licenses/000077500000000000000000000000001145404360500250025ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/dotfiles/licenses/LICENSE.txt000066400000000000000000000261361145404360500266350ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. plexus-archiver-plexus-archiver-1.2/src/test/dotfiles/licenses/NOTICE.txt000066400000000000000000000001451145404360500265240ustar00rootroot00000000000000This product includes software developed by The Apache Software Foundation (http://www.apache.org/). 
plexus-archiver-plexus-archiver-1.2/src/test/jars/000077500000000000000000000000001145404360500223235ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/jars/test.jar000066400000000000000000000020561145404360500240030ustar00rootroot00000000000000PKBVG5 META-INF/þÊPKPKBVG5META-INF/MANIFEST.MFóMÌËLK-.Ñ K-*ÎÌϳR0Ô3àår.JM,IMÑuª ˜èÅX*h8ä¤*8çç”–¤é(xæ%ëiòrñrPKR‚¡WFFPK õUG5 resources/PK VG5resources/artifactId/PK VG5resources/artifactId/directory/PK VG5.resources/artifactId/directory/test.propertiesËKÌMµMÎ(Ê,æPKÉuFî PKVG5$resources/artifactId/test.propertiesËKÌMµÍJ,ÎÏãPK»1‡ PKBVG5 META-INF/þÊPKBVG5R‚¡WFF=META-INF/MANIFEST.MFPK õUG5 Åresources/PK VG5íresources/artifactId/PK VG5 resources/artifactId/directory/PK VG5ÉuFî .]resources/artifactId/directory/test.propertiesPKVG5»1‡ $Æresources/artifactId/test.propertiesPKó%plexus-archiver-plexus-archiver-1.2/src/test/jars/test.rar000077500000000000000000000020561145404360500240160ustar00rootroot00000000000000PKBVG5 META-INF/þÊPKPKBVG5META-INF/MANIFEST.MFóMÌËLK-.Ñ K-*ÎÌϳR0Ô3àår.JM,IMÑuª ˜èÅX*h8ä¤*8çç”–¤é(xæ%ëiòrñrPKR‚¡WFFPK õUG5 resources/PK VG5resources/artifactId/PK VG5resources/artifactId/directory/PK VG5.resources/artifactId/directory/test.propertiesËKÌMµMÎ(Ê,æPKÉuFî PKVG5$resources/artifactId/test.propertiesËKÌMµÍJ,ÎÏãPK»1‡ PKBVG5 META-INF/þÊPKBVG5R‚¡WFF=META-INF/MANIFEST.MFPK õUG5 Åresources/PK VG5íresources/artifactId/PK VG5 resources/artifactId/directory/PK VG5ÉuFî .]resources/artifactId/directory/test.propertiesPKVG5»1‡ 
$Æresources/artifactId/test.propertiesPKó%plexus-archiver-plexus-archiver-1.2/src/test/java/000077500000000000000000000000001145404360500223055ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/000077500000000000000000000000001145404360500230745ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/000077500000000000000000000000001145404360500246675ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/000077500000000000000000000000001145404360500262075ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/000077500000000000000000000000001145404360500300125ustar00rootroot00000000000000BasePlexusArchiverTest.java000066400000000000000000000063301145404360500351770ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiverpackage org.codehaus.plexus.archiver; /* * The MIT License * * Copyright (c) 2004, The Codehaus * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ import java.io.File; import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.util.FileUtils; /** * Base abstract class that all the test-cases for different archivers * extend so that they can use its helpful methids. * * @version $Id$ */ public abstract class BasePlexusArchiverTest extends PlexusTestCase { /** * Ensure that when a new file is created at the specified location that the timestamp of * that file will be greater than the one specified as a reference. * * Warning: Runs in a busy loop creating a file until the output file is newer than the reference timestamp. * This should be better than sleeping for a race condition time out value. * * @param outputFile the file to be created * @param timestampReference the created file will have a newer timestamp than this reference timestamp. * @throws Exception failures */ protected void waitUntilNewTimestamp( File outputFile, long timestampReference ) throws Exception { File tmpFile = File.createTempFile( "ZipArchiverTest.waitUntilNewTimestamp", null ); // slurp the file into a temp file and then copy the temp back over the top until it is newer. FileUtils.copyFile( outputFile, tmpFile ); FileUtils.copyFile( tmpFile, outputFile ); while ( timestampReference >= outputFile.lastModified() ) { FileUtils.copyFile( tmpFile, outputFile ); Thread.yield(); } tmpFile.delete(); } /** * Base method for all the Archivers to create an archiver. 
* * @param format * @return * @throws Exception */ protected Archiver createArchiver(String format) throws Exception { final File pomFile = new File("pom.xml"); final File rarFile = new File( "target/output/pom.xml."+format ); Archiver archiver = (Archiver) lookup( Archiver.ROLE, format ); archiver.setDestFile( rarFile ); archiver.addFile( pomFile, "pom.xml" ); return archiver; } } DotDirectiveArchiveFinalizerTest.java000066400000000000000000000034241145404360500371740ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiverpackage org.codehaus.plexus.archiver; import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.archiver.jar.JarArchiver; import java.io.File; import java.util.jar.JarFile; /** @author Jason van Zyl */ public class DotDirectiveArchiveFinalizerTest extends PlexusTestCase { public void testDotDirectiveArchiveFinalizer() throws Exception { DotDirectiveArchiveFinalizer ddaf = new DotDirectiveArchiveFinalizer( new File( getBasedir(), "src/test/dotfiles" ) ); JarArchiver archiver = new JarArchiver(); File jarFile = new File( getBasedir(), "target/dotfiles.jar" ); archiver.setDestFile( jarFile ); archiver.addArchiveFinalizer( ddaf ); archiver.createArchive(); JarFile jar = new JarFile( jarFile ); assertNotNull( jar.getEntry( "LICENSE.txt" ) ); assertNotNull( jar.getEntry( "NOTICE.txt" ) ); assertNotNull( jar.getEntry( "META-INF/maven/LICENSE.txt" ) ); assertNotNull( jar.getEntry( "META-INF/maven/NOTICE.txt" ) ); } public void testDefaultDotDirectiveBehaviour() throws Exception { File dotFileDirectory = new File( getBasedir(), "src/test/dotfiles" ); JarArchiver archiver = new JarArchiver(); archiver.setDotFileDirectory( dotFileDirectory ); File jarFile = new File( getBasedir(), "target/default-dotfiles.jar" ); archiver.setDestFile( jarFile ); archiver.createArchive(); JarFile jar = new JarFile( jarFile ); assertNotNull( jar.getEntry( "LICENSE.txt" ) ); assertNotNull( jar.getEntry( 
"NOTICE.txt" ) ); assertNotNull( jar.getEntry( "META-INF/maven/LICENSE.txt" ) ); assertNotNull( jar.getEntry( "META-INF/maven/NOTICE.txt" ) ); } } DuplicateFilesTest.java000066400000000000000000000110101145404360500343240ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiverpackage org.codehaus.plexus.archiver; import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.archiver.zip.AbstractZipArchiver; import org.codehaus.plexus.archiver.zip.ZipFile; import org.codehaus.plexus.archiver.zip.ZipEntry; import org.codehaus.plexus.archiver.tar.TarInputStream; import org.codehaus.plexus.logging.Logger; import org.codehaus.plexus.util.FileUtils; import java.io.File; import java.io.FileReader; import java.io.BufferedReader; import java.io.BufferedInputStream; import java.io.FileInputStream; import java.util.Enumeration; /** * @author Erik Engstrom */ public class DuplicateFilesTest extends PlexusTestCase { private static final File file1 = getTestFile( "src/test/resources/group-writable/foo.txt" ); private static final File file2 = getTestFile( "src/test/resources/world-writable/foo.txt" ); private static final File destination = getTestFile( "target/output/duplicateFiles" ); public void setUp() throws Exception { super.setUp(); getContainer().getLoggerManager().setThreshold( Logger.LEVEL_DEBUG ); } public void testZipArchiver() throws Exception { Archiver archiver = (Archiver) lookup( Archiver.ROLE, "zip" ); archiver.setDuplicateBehavior( Archiver.DUPLICATES_SKIP ); File archive = createArchive( archiver, "zip" ); ZipFile zf = new ZipFile( archive ); Enumeration e = zf.getEntries(); int entryCount = 0; while ( e.hasMoreElements() ) { ZipEntry entry = (ZipEntry) e.nextElement(); System.out.println( entry.getName() ); entryCount++; } // Zip file should have 2 entries, 1 for the directory and one for foo.txt assertEquals( 2, entryCount ); testArchive( archive, "zip" ); } public void testDirArchiver() throws 
Exception { Archiver archiver = (Archiver) lookup( Archiver.ROLE, "dir" ); createArchive( archiver, "dir" ); testFinalFile( "target/output/duplicateFiles.dir/duplicateFiles/foo.txt" ); } public void testTarArchiver() throws Exception { Archiver archiver = (Archiver) lookup( Archiver.ROLE, "tar" ); archiver.setDuplicateBehavior( Archiver.DUPLICATES_SKIP ); File archive = createArchive( archiver, "tar" ); TarInputStream tis; tis = new TarInputStream( new BufferedInputStream( new FileInputStream( archive ) ) ); int entryCount = 0; while ( ( tis.getNextEntry() ) != null ) { entryCount++; } assertEquals( 1, entryCount ); testArchive( archive, "tar" ); } private File createArchive( Archiver archiver, String outputFileExt ) throws Exception { archiver.addFile( file1, "duplicateFiles/foo.txt" ); archiver.addFile( file2, "duplicateFiles/foo.txt" ); // delete it if it exists to ensure it is actually empty if ( destination.exists() ) { destination.delete(); } File archive = getTestFile( "target/output/duplicateFiles." 
+ outputFileExt ); if ( archive.exists() ) { if ( archive.isDirectory() ) { FileUtils.deleteDirectory( archive ); } else { archive.delete(); } } archiver.setDestFile( archive ); archiver.createArchive(); return archive; } private void testArchive( File archive, String role ) throws Exception { // Check the content of the archive by extracting it UnArchiver unArchiver = (UnArchiver) lookup( UnArchiver.ROLE, role ); unArchiver.setSourceFile( archive ); unArchiver.setDestDirectory( getTestFile( "target/output/" ) ); unArchiver.extract(); assertTrue( destination.exists() ); assertTrue( destination.isDirectory() ); testFinalFile( "target/output/duplicateFiles/foo.txt" ); } private void testFinalFile( String path ) throws Exception { File outputFile = getTestFile( path ); assertTrue( outputFile.exists() ); BufferedReader reader = new BufferedReader( new FileReader( outputFile ) ); String firstLine = reader.readLine(); reader.close(); reader = new BufferedReader( new FileReader( file2 ) ); String expectedFirstLine = reader.readLine(); reader.close(); assertEquals( expectedFirstLine, firstLine ); } } EmptyDirectoryTest.java000066400000000000000000000062571145404360500344330ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiverpackage org.codehaus.plexus.archiver; /* * The MIT License * * Copyright (c) 2004, The Codehaus * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. 
* * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ import org.codehaus.plexus.PlexusTestCase; import java.io.File; /** * @author Daniel Krisher * @version $Id$ */ public class EmptyDirectoryTest extends PlexusTestCase { public void testZipArchiver() throws Exception { testEmptyDirectory( "zip" ); } public void testJarArchiver() throws Exception { // No JAR UnArchiver implementation :( // testEmptyDirectory( "jar" ); } public void testTarArchiver() throws Exception { testEmptyDirectory( "tar" ); } // ---------------------------------------------------------------------- // // ---------------------------------------------------------------------- private void testEmptyDirectory( String role ) throws Exception { Archiver archiver = (Archiver) lookup( Archiver.ROLE, role ); // Should default to true... 
assertTrue( archiver.getIncludeEmptyDirs() ); // create an empty directory to store in the zip archive File emptyDir = getTestFile( "target/output/emptyTest/TmpEmptyDir" ); // delete it if it exists to ensure it is actually empty if ( emptyDir.exists() ) { emptyDir.delete(); } emptyDir.mkdirs(); archiver.addDirectory( emptyDir.getParentFile() ); File archive = getTestFile( "target/output/emptyDirArchive.zip" ); if ( archive.exists() ) { archive.delete(); } archiver.setDestFile( archive ); archiver.createArchive(); // delete the empty dir, we will extract it from the archive emptyDir.delete(); // Check the content of the archive by extracting it UnArchiver unArchiver = (UnArchiver) lookup( UnArchiver.ROLE, role ); unArchiver.setSourceFile( archive ); unArchiver.setDestDirectory( getTestFile( "target/output/emptyTest" ) ); unArchiver.extract(); assertTrue( emptyDir.exists() ); assertTrue( emptyDir.isDirectory() ); } } plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/bzip2/000077500000000000000000000000001145404360500310405ustar00rootroot00000000000000BZip2ArchiverTest.java000066400000000000000000000116251145404360500351030ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/bzip2package org.codehaus.plexus.archiver.bzip2; /* * The MIT License * * Copyright (c) 2004, The Codehaus * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. 
* * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.util.Arrays; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.BasePlexusArchiverTest; import org.codehaus.plexus.archiver.zip.ZipArchiver; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.IOUtil; /** * @author Emmanuel Venisse * @version $Id$ */ public class BZip2ArchiverTest extends BasePlexusArchiverTest { public void testCreateArchive() throws Exception { ZipArchiver zipArchiver = (ZipArchiver) lookup( Archiver.ROLE, "zip" ); zipArchiver.addDirectory( getTestFile( "src" ) ); zipArchiver.setDestFile( getTestFile( "target/output/archiveForbz2.zip" ) ); zipArchiver.createArchive(); BZip2Archiver archiver = (BZip2Archiver) lookup( Archiver.ROLE, "bzip2" ); String[] inputFiles = new String[ 1 ]; inputFiles[ 0 ] = "archiveForbz2.zip"; archiver.addDirectory( getTestFile( "target/output" ), inputFiles, null ); archiver.setDestFile( getTestFile( "target/output/archive.bz2" ) ); archiver.createArchive(); } public void testCreateResourceCollection() throws Exception { final File pomFile = new File("pom.xml"); final File bz2File = new File( "target/output/pom.xml.bz2" ); BZip2Archiver bzip2Archiver = (BZip2Archiver) lookup( Archiver.ROLE, "bzip2" ); bzip2Archiver.setDestFile( bz2File ); bzip2Archiver.addFile( pomFile, "pom.xml" ); FileUtils.removePath( bz2File.getPath() ); 
bzip2Archiver.createArchive(); System.out.println( "Created: " + bz2File.getAbsolutePath() ); final File zipFile = new File( "target/output/pom.zip" ); ZipArchiver zipArchiver = (ZipArchiver) lookup( Archiver.ROLE, "zip" ); zipArchiver.setDestFile( zipFile ); zipArchiver.addArchivedFileSet( bz2File, "prfx/" ); FileUtils.removePath( zipFile.getPath() ); zipArchiver.createArchive(); final ZipFile juZipFile = new ZipFile( zipFile ); final ZipEntry zipEntry = juZipFile.getEntry( "prfx/target/output/pom.xml" ); final InputStream archivePom = juZipFile.getInputStream( zipEntry ); final InputStream pom = new FileInputStream( pomFile ); assertTrue( Arrays.equals( IOUtil.toByteArray( pom ), IOUtil.toByteArray( archivePom ) ) ); archivePom.close(); pom.close(); juZipFile.close(); } /** * Tests the .bzip2 archiver is forced set to true, and after that * tests the behavior when the forced is set to false. * * @throws Exception */ public void testBz2IsForcedBehaviour() throws Exception { BZip2Archiver bZip2Archiver = (BZip2Archiver) createArchiver( "bzip2" ); assertTrue( bZip2Archiver.isSupportingForced() ); bZip2Archiver.createArchive(); final long creationTime = bZip2Archiver.getDestFile().lastModified(); waitUntilNewTimestamp( bZip2Archiver.getDestFile(), creationTime ); bZip2Archiver = (BZip2Archiver) createArchiver( "bzip2" ); bZip2Archiver.setForced( true ); bZip2Archiver.createArchive(); final long firstRunTime = bZip2Archiver.getDestFile().lastModified(); assertFalse( creationTime==firstRunTime ); bZip2Archiver = (BZip2Archiver) createArchiver( "bzip2" ); bZip2Archiver.setForced( false ); bZip2Archiver.createArchive(); final long secondRunTime = bZip2Archiver.getDestFile().lastModified(); assertEquals( firstRunTime,secondRunTime ); } } 
plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/gzip/000077500000000000000000000000001145404360500307635ustar00rootroot00000000000000GZipArchiverTest.java000066400000000000000000000114331145404360500347460ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/gzippackage org.codehaus.plexus.archiver.gzip; /* * The MIT License * * Copyright (c) 2004, The Codehaus * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.util.Arrays; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.BasePlexusArchiverTest; import org.codehaus.plexus.archiver.zip.ZipArchiver; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.IOUtil; /** * @author Emmanuel Venisse * @version $Id$ */ public class GZipArchiverTest extends BasePlexusArchiverTest { public void testCreateArchive() throws Exception { ZipArchiver zipArchiver = (ZipArchiver) lookup( Archiver.ROLE, "zip" ); zipArchiver.addDirectory( getTestFile( "src" ) ); zipArchiver.setDestFile( getTestFile( "target/output/archiveForGzip.zip" ) ); zipArchiver.createArchive(); GZipArchiver archiver = (GZipArchiver) lookup( Archiver.ROLE, "gzip" ); String[] inputFiles = new String[ 1 ]; inputFiles[ 0 ] = "archiveForGzip.zip"; archiver.addDirectory( getTestFile( "target/output" ), inputFiles, null ); archiver.setDestFile( getTestFile( "target/output/archive.gzip" ) ); archiver.createArchive(); } public void testCreateResourceCollection() throws Exception { final File pomFile = new File("pom.xml"); final File gzFile = new File( "target/output/pom.xml.gz" ); GZipArchiver gzipArchiver = (GZipArchiver) lookup( Archiver.ROLE, "gzip" ); gzipArchiver.setDestFile( gzFile ); gzipArchiver.addFile( pomFile, "pom.xml" ); FileUtils.removePath( gzFile.getPath() ); gzipArchiver.createArchive(); final File zipFile = new File( "target/output/pom.zip" ); ZipArchiver zipArchiver = (ZipArchiver) lookup( Archiver.ROLE, "zip" ); zipArchiver.setDestFile( zipFile ); zipArchiver.addArchivedFileSet( gzFile, "prfx/" ); FileUtils.removePath( zipFile.getPath() ); zipArchiver.createArchive(); final ZipFile juZipFile = new ZipFile( zipFile ); final ZipEntry zipEntry = juZipFile.getEntry( "prfx/target/output/pom.xml" ); final InputStream archivePom = juZipFile.getInputStream( 
zipEntry ); final InputStream pom = new FileInputStream( pomFile ); assertTrue( Arrays.equals( IOUtil.toByteArray( pom ), IOUtil.toByteArray( archivePom ) ) ); archivePom.close(); pom.close(); juZipFile.close(); } /** * Tests the .gzip archiver is forced set to true, and after that * tests the behavior when the forced is set to false. * * @throws Exception */ public void testTarGzIsForcedBehaviour() throws Exception { GZipArchiver gZipArchiver = (GZipArchiver) createArchiver( "gzip" ); assertTrue( gZipArchiver.isSupportingForced() ); gZipArchiver.createArchive(); final long creationTime = gZipArchiver.getDestFile().lastModified(); waitUntilNewTimestamp( gZipArchiver.getDestFile(), creationTime ); gZipArchiver = (GZipArchiver) createArchiver( "gzip" ); gZipArchiver.setForced( true ); gZipArchiver.createArchive(); final long firstRunTime = gZipArchiver.getDestFile().lastModified(); assertFalse( creationTime==firstRunTime ); gZipArchiver = (GZipArchiver) createArchiver( "gzip" ); gZipArchiver.setForced( false ); gZipArchiver.createArchive(); final long secondRunTime = gZipArchiver.getDestFile().lastModified(); assertEquals( firstRunTime,secondRunTime ); } } plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/jar/000077500000000000000000000000001145404360500305665ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/jar/IndexTest.java000066400000000000000000000132061145404360500333420ustar00rootroot00000000000000package org.codehaus.plexus.archiver.jar; /* * Copyright 2006 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import java.io.BufferedInputStream; import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.zip.ZipEntry; import org.codehaus.plexus.archiver.zip.ZipFile; /** * @author Richard van der Hoff * @version $Id$ */ public class IndexTest extends PlexusTestCase { public void testCreateArchiveWithIndexedJars() throws Exception { /* create a dummy jar */ JarArchiver archiver1 = (JarArchiver) lookup( Archiver.ROLE, "jar" ); archiver1.addFile( getTestFile( "src/test/resources/manifests/manifest1.mf" ), "one.txt" ); archiver1.setDestFile( getTestFile( "target/output/archive1.jar" ) ); archiver1.createArchive(); /* now create another jar, with an index, and whose manifest includes a Class-Path entry for the first jar. 
*/ Manifest m = new Manifest(); Manifest.Attribute classpathAttr = new Manifest.Attribute( "Class-Path", "archive1.jar" ); m.addConfiguredAttribute( classpathAttr ); JarArchiver archiver2 = (JarArchiver) lookup( Archiver.ROLE, "jar" ); archiver2.addFile( getTestFile( "src/test/resources/manifests/manifest2.mf" ), "two.txt" ); archiver2.setIndex(true); archiver2.addConfiguredIndexJars(archiver1.getDestFile()); archiver2.setDestFile( getTestFile( "target/output/archive2.jar" ) ); archiver2.addConfiguredManifest(m); archiver2.createArchive(); // read the index file back and check it looks like it ought to ZipFile zf = new ZipFile( archiver2.getDestFile() ); ZipEntry indexEntry = zf.getEntry("META-INF/INDEX.LIST"); assertNotNull(indexEntry); BufferedInputStream bis = new BufferedInputStream(zf.getInputStream(indexEntry)); byte buf[] = new byte[1024]; int i = bis.read(buf); String res = new String(buf,0,i); assertEquals("JarIndex-Version: 1.0\n\narchive2.jar\ntwo.txt\n\narchive1.jar\none.txt\n\n", res.replaceAll("\r\n", "\n")); } /** * this is pretty much a duplicate of testCreateArchiveWithIndexedJars(), but adds some extra * tests for files in META-INF */ public void testCreateArchiveWithIndexedJarsAndMetaInf() throws Exception { /* create a dummy jar */ JarArchiver archiver1 = (JarArchiver) lookup( Archiver.ROLE, "jar" ); archiver1.addFile( getTestFile( "src/test/resources/manifests/manifest1.mf" ), "one.txt" ); // add a file in the META-INF directory, as this previously didn't make it into the index archiver1.addFile( getTestFile( "src/test/resources/manifests/manifest2.mf" ), "META-INF/foo" ); archiver1.setDestFile( getTestFile( "target/output/archive1.jar" ) ); archiver1.createArchive(); /* create another dummy jar, with an index but nothing else in META-INF. Also checks non-leaf files. 
*/ JarArchiver archiver3 = (JarArchiver) lookup( Archiver.ROLE, "jar" ); archiver3.addFile( getTestFile( "src/test/resources/manifests/manifest1.mf" ), "org/apache/maven/one.txt" ); archiver3.addFile( getTestFile( "src/test/resources/manifests/manifest2.mf" ), "META-INF/INDEX.LIST" ); archiver3.setDestFile( getTestFile( "target/output/archive3.jar" ) ); archiver3.createArchive(); /* now create another jar, with an index, and whose manifest includes a Class-Path entry for the first two jars. */ Manifest m = new Manifest(); Manifest.Attribute classpathAttr = new Manifest.Attribute( "Class-Path", "archive1.jar archive3.jar" ); m.addConfiguredAttribute( classpathAttr ); JarArchiver archiver2 = (JarArchiver) lookup( Archiver.ROLE, "jar" ); archiver2.addFile( getTestFile( "src/test/resources/manifests/manifest2.mf" ), "two.txt" ); archiver2.setIndex(true); archiver2.addConfiguredIndexJars(archiver1.getDestFile()); archiver2.addConfiguredIndexJars(archiver3.getDestFile()); archiver2.setDestFile( getTestFile( "target/output/archive2.jar" ) ); archiver2.addConfiguredManifest(m); archiver2.createArchive(); // read the index file back and check it looks like it ought to ZipFile zf = new ZipFile( archiver2.getDestFile() ); ZipEntry indexEntry = zf.getEntry("META-INF/INDEX.LIST"); assertNotNull(indexEntry); BufferedInputStream bis = new BufferedInputStream(zf.getInputStream(indexEntry)); byte buf[] = new byte[1024]; int i = bis.read(buf); String res = new String(buf,0,i); //System.out.println(res); StringBuffer expected = new StringBuffer(); expected.append("JarIndex-Version: 1.0\n\n"); expected.append("archive2.jar\ntwo.txt\n\n"); expected.append("archive1.jar\nMETA-INF\none.txt\n\n"); expected.append("archive3.jar\norg\norg/apache\norg/apache/maven\n\n"); assertEquals(expected.toString(), res.replaceAll("\r\n", "\n")); } } 
JarArchiverTest.java000066400000000000000000000015171145404360500344160ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/jarpackage org.codehaus.plexus.archiver.jar; import org.codehaus.plexus.archiver.ArchiverException; import java.io.File; import java.io.IOException; import junit.framework.TestCase; public class JarArchiverTest extends TestCase { public void testCreateManifestOnlyJar() throws IOException, ManifestException, ArchiverException { File jarFile = File.createTempFile( "JarArchiverTest.", ".jar" ); jarFile.deleteOnExit(); JarArchiver archiver = new JarArchiver(); archiver.setDestFile( jarFile ); Manifest manifest = new Manifest(); Manifest.Attribute attribute = new Manifest.Attribute( "Main-Class", getClass().getName() ); manifest.addConfiguredAttribute( attribute ); archiver.addConfiguredManifest( manifest ); archiver.createArchive(); } } plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/jar/ManifestTest.java000066400000000000000000000162251145404360500340450ustar00rootroot00000000000000package org.codehaus.plexus.archiver.jar; /* * Copyright 2001,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ import org.codehaus.plexus.PlexusTestCase; import java.io.FileReader; import java.io.IOException; import java.util.Enumeration; import java.io.StringWriter; import java.io.PrintWriter; /** * @author Emmanuel Venisse * @version $Id$ */ public class ManifestTest extends PlexusTestCase { public void testManifestReader1() throws Exception { Manifest manifest = getManifest( "src/test/resources/manifests/manifest1.mf" ); String version = manifest.getManifestVersion(); assertEquals( "Manifest was not created with correct version - ", "1.0", version ); } public void testManifestReader2() throws Exception { try { getManifest( "src/test/resources/manifests/manifest2.mf" ); fail( "Manifest isn't well formed. It must be generate an exception." ); } catch ( ManifestException me ) { if ( me.getMessage().indexOf( "is not valid as it does not contain a name and a value separated by ': '" ) == -1 ) { fail( "Manifest isn't well formed. It must generate an exception." ); } } } public void testManifestReader3() throws Exception { try { getManifest( "src/test/resources/manifests/manifest3.mf" ); fail( "Manifest isn't well formed. It must be generate an exception." ); } catch ( ManifestException me ) { if ( me.getMessage().indexOf( "is not valid as it does not contain a name and a value separated by ': '" ) == -1 ) { fail( "Manifest isn't well formed. It must generate an exception." 
); } } } public void testManifestReader4() throws Exception { Manifest manifest = getManifest( "src/test/resources/manifests/manifest4.mf" ); Enumeration warnings = manifest.getWarnings(); assertTrue( warnings.hasMoreElements() ); String warn = (String) warnings.nextElement(); assertFalse( warnings.hasMoreElements() ); boolean hasWarning = warn.indexOf( "\"Name\" attributes should not occur in the main section" ) != -1; assertEquals( "Expected warning about Name in main section", true, hasWarning ); } public void testManifestReader5() throws Exception { try { getManifest( "src/test/resources/manifests/manifest5.mf" ); fail(); } catch ( ManifestException me ) { boolean hasWarning = me.getMessage().indexOf( "Manifest sections should start with a \"Name\" attribute" ) != -1; assertEquals( "Expected warning about section not starting with Name: attribute", true, hasWarning ); } } public void testManifestReader6() throws Exception { Manifest manifest = getManifest( "src/test/resources/manifests/manifest6.mf" ); Enumeration warnings = manifest.getWarnings(); assertTrue( warnings.hasMoreElements() ); String warn = (String) warnings.nextElement(); assertFalse( warnings.hasMoreElements() ); boolean hasWarning = warn.indexOf( "Manifest attributes should not start with \"From\"" ) != -1; assertEquals( "Expected warning about From: attribute", true, hasWarning ); } public void testGetDefaultManifest() throws Exception { Manifest mf = Manifest.getDefaultManifest(); assertNotNull( mf ); } public void testAttributeLongLineWrite() throws Exception { StringWriter writer = new StringWriter(); Manifest.Attribute attr = new Manifest.Attribute(); String longLineOfChars = "123456789 123456789 123456789 123456789 123456789 123456789 123456789 " + "123456789 123456789 123456789 "; attr.setName( "test" ); attr.setValue( longLineOfChars ); attr.write( new PrintWriter( writer ) ); writer.flush(); assertEquals( "should be multiline", "test: 123456789 123456789 123456789 123456789 123456789 
123456789 1234" + Manifest.EOL + " 56789 123456789 123456789 123456789 " + Manifest.EOL, writer.toString() ); } public void testAttributeMultiLineValue() throws Exception { checkMultiLineAttribute( "123456789" + Manifest.EOL + "123456789", "123456789" + Manifest.EOL + " 123456789" + Manifest.EOL ); } public void testAttributeDifferentLineEndings() throws Exception { checkMultiLineAttribute( "\tA\rB\n\t C\r\n \tD\n\r", "\tA" + Manifest.EOL + " B" + Manifest.EOL + " \t C" + Manifest.EOL + " \tD" + Manifest.EOL ); } public void checkMultiLineAttribute( String in, String expected ) throws Exception { StringWriter writer = new StringWriter(); Manifest.Attribute attr = new Manifest.Attribute(); attr.setName( "test" ); attr.setValue( in ); attr.write( new PrintWriter( writer ) ); writer.flush(); // Print the string with whitespace replaced with special codes // so in case of failure you can see what went wrong. System.err.println( "String: " + dumpString( writer.toString() ) ); assertEquals( "should be indented multiline", "test: " + expected, writer.toString() ); } private static String dumpString( String in ) { String out = ""; char [] chars = in.toCharArray(); for ( int i = 0; i < chars.length; i ++ ) { switch ( chars[i] ) { case '\t': out+="\\t"; break; case '\r': out+="\\r"; break; case '\n': out+="\\n"; break; case ' ': out+="\\s"; break; default: out+= chars[i]; break; } } return out; } /** * Reads a Manifest file. */ private Manifest getManifest( String filename ) throws IOException, ManifestException { FileReader r = new FileReader( getTestFile( filename ) ); try { return new Manifest( r ); } finally { r.close(); } } } SecurityFilterTest.java000066400000000000000000000057101145404360500351720ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/jarpackage org.codehaus.plexus.archiver.jar; /* * Copyright 2007 The Codehaus Foundation. 
* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ import java.io.File; import java.util.ArrayList; import java.util.List; import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.filters.JarSecurityFileFilter; import org.codehaus.plexus.archiver.zip.ZipEntry; import org.codehaus.plexus.archiver.zip.ZipFile; /** * @author Mike Cumings * @version $Id$ * @since 14 Jun 07 */ public class SecurityFilterTest extends PlexusTestCase { public void testSecurityFilters() throws Exception { File dummyContent = getTestFile( "src/test/resources/jar-security/dummy.txt" ); String[] unFilteredFiles = new String[] { "META-INF/BOB.txt", "META-INF/harry.xml", }; String[] filteredFiles = new String[] { "META-INF/FOO.DSA", "META-INF/BAR.dsa", "META-INF/BAZ.RSA", "META-INF/BOO.rsa", "META-INF/SIG.SF", "META-INF/FILE.sf" }; // Load up our filter ist List filters = new ArrayList(); filters.add( new JarSecurityFileFilter() ); //filters.add( new JarSecurityFileSelector() ); // Create our test jar with fake security files JarArchiver archiver = (JarArchiver) lookup( Archiver.ROLE, "jar" ); archiver.setArchiveFilters( filters ); for ( int i = 0; i < filteredFiles.length; i++ ) { archiver.addFile( dummyContent, filteredFiles[i] ); } for ( int i = 0; i < unFilteredFiles.length; i++ ) { archiver.addFile( dummyContent, unFilteredFiles[i] ); } archiver.setDestFile( getTestFile( "target/jar-security/test-archive.jar" ) ); archiver.createArchive(); // 
Verify that the fake files were filtered out of the created jar and that // the legitimate files were not ZipFile zf = new ZipFile( archiver.getDestFile() ); for ( int i = 0; i < filteredFiles.length; i++ ) { ZipEntry entry = zf.getEntry( filteredFiles[i] ); assertNull( "Entry was not filtered out: " + filteredFiles[i], entry ); } for ( int i = 0; i < unFilteredFiles.length; i++ ) { ZipEntry entry = zf.getEntry( unFilteredFiles[i] ); assertNotNull( "Entry was filtered out: " + unFilteredFiles[i], entry ); } } } plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/manager/000077500000000000000000000000001145404360500314245ustar00rootroot00000000000000ArchiverManagerTest.java000066400000000000000000000134031145404360500361070ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/managerpackage org.codehaus.plexus.archiver.manager; /* * The MIT License * * Copyright (c) 2004, The Codehaus * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.UnArchiver; import java.io.File; /** * @author Dan T. Tran * @version $Id: ArchiverManagerTest.java$ */ public class ArchiverManagerTest extends PlexusTestCase { public void testLookupArchiver() throws Exception { ArchiverManager manager = (ArchiverManager) lookup( ArchiverManager.ROLE ); Archiver archiver = manager.getArchiver( "jar" ); assertNotNull( archiver ); } public void testReuseArchiver() throws Exception { ArchiverManager manager = (ArchiverManager) lookup( ArchiverManager.ROLE ); Archiver archiver = manager.getArchiver( "jar" ); assertNotNull( archiver ); archiver.addDirectory( new File( getBasedir() ) ); Archiver newArchiver = manager.getArchiver( "jar" ); assertNotNull( newArchiver ); assertFalse( newArchiver.equals( archiver ) ); assertTrue( !newArchiver.getResources().hasNext() ); } public void testLookupUnArchiver() throws Exception { ArchiverManager manager = (ArchiverManager) lookup( ArchiverManager.ROLE ); UnArchiver unarchiver = manager.getUnArchiver( "zip" ); assertNotNull( unarchiver ); } public void testLookupUnknownArchiver() throws Exception { ArchiverManager manager = (ArchiverManager) lookup( ArchiverManager.ROLE ); try { manager.getArchiver( "Unknown" ); fail(); } catch ( NoSuchArchiverException e ) { } } public void testLookupUnknownUnArchiver() throws Exception { ArchiverManager manager = (ArchiverManager) lookup( ArchiverManager.ROLE ); try { manager.getUnArchiver( "Unknown" ); fail(); } catch ( NoSuchArchiverException e ) { } } public void testLookupUnArchiverUsingFile() throws Exception { ArchiverManager manager = (ArchiverManager) lookup( 
ArchiverManager.ROLE ); UnArchiver unarchiver = manager.getUnArchiver( new File( "test.tar.gz" ) ); assertNotNull( unarchiver ); unarchiver = manager.getUnArchiver( new File( "test.tar.bz2" ) ); assertNotNull( unarchiver ); unarchiver = manager.getUnArchiver( new File( "test.tgz" ) ); assertNotNull( unarchiver ); unarchiver = manager.getUnArchiver( new File( "test.tbz2" ) ); assertNotNull( unarchiver ); unarchiver = manager.getUnArchiver( new File( "test.bzip2" ) ); assertNotNull( unarchiver ); unarchiver = manager.getUnArchiver( new File( "test.tar" ) ); assertNotNull( unarchiver ); } public void testLookupArchiverUsingFile() throws Exception { ArchiverManager manager = (ArchiverManager) lookup( ArchiverManager.ROLE ); Archiver archiver = manager.getArchiver( new File( "test.gzip" ) ); assertNotNull( archiver ); archiver = manager.getArchiver( new File( "test.bzip2" ) ); assertNotNull( archiver ); archiver = manager.getArchiver( new File( "test.tar" ) ); assertNotNull( archiver ); } public void testUnspportedLookupArchiverUsingFile() throws Exception { ArchiverManager manager = (ArchiverManager) lookup( ArchiverManager.ROLE ); try { manager.getArchiver( new File( "test.tbz2" ) ); //until we support this type, this must fail fail ( "Please remove this test." ); } catch ( NoSuchArchiverException e ) { } try { manager.getArchiver( new File( "test.tgz" ) ); //until we support this type, this must fail fail ( "Please remove this test." ); } catch ( NoSuchArchiverException e ) { } try { manager.getArchiver( new File( "test.tar.gz" ) ); //until we support this type, this must fail fail ( "Please remove this test." ); } catch ( NoSuchArchiverException e ) { } try { manager.getArchiver( new File( "test.tar.bz2" ) ); //until we support this type, this must fail fail ( "Please remove this test." 
); } catch ( NoSuchArchiverException e ) { } } } plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/rar/000077500000000000000000000000001145404360500305765ustar00rootroot00000000000000RarArchiverTest.java000066400000000000000000000103511145404360500344320ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/rarpackage org.codehaus.plexus.archiver.rar; import java.io.File; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.BasePlexusArchiverTest; import org.codehaus.plexus.archiver.UnArchiver; import org.codehaus.plexus.util.FileUtils; /* * Copyright 2007 The Codehaus Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ /** * @author olamy * @since 13 mars 07 * @version $Id$ */ public class RarArchiverTest extends BasePlexusArchiverTest { public File getTargetRarFolfer() { return new File( getBasedir(), "/target/rartest/" ); } protected void setUp() throws Exception { super.setUp(); // clean output directory and re create it if ( getTargetRarFolfer().exists() ) { FileUtils.deleteDirectory( getTargetRarFolfer() ); } } public void testArchive() throws Exception { Archiver archiver = (Archiver) lookup( Archiver.ROLE, "rar" ); archiver.setDestFile( new File( getTargetRarFolfer(), "test.rar" ) ); //archiver.addDirectory( , "manifests" ); archiver.addFile( getTestFile( "src/test/resources/manifests/manifest1.mf" ), "manifests/manifest1.mf" ); archiver.createArchive(); assertTrue( new File( getTargetRarFolfer(), "test.rar" ).exists() ); UnArchiver unArchiver = (UnArchiver) lookup( UnArchiver.ROLE, "rar" ); unArchiver.setSourceFile( new File( getTargetRarFolfer(), "test.rar" ) ); unArchiver.setDestDirectory( getTargetRarFolfer() ); unArchiver.extract(); File manifestsDir = new File( getTargetRarFolfer(), "/manifests" ); assertTrue( manifestsDir.exists() ); File manifestsFile = new File( getTargetRarFolfer(), "/manifests/manifest1.mf" ); assertTrue( manifestsFile.exists() ); } public void atestUnarchive() throws Exception { UnArchiver unArchiver = (UnArchiver) lookup( UnArchiver.ROLE, "rar" ); File rarFile = new File( getBasedir() + "/src/test/jars/test.rar" ); assertTrue( rarFile.exists() ); unArchiver.setSourceFile( rarFile ); unArchiver.setDestDirectory( getTargetRarFolfer() ); getTargetRarFolfer().mkdir(); unArchiver.extract(); File dirExtract = new File( getTargetRarFolfer(), "META-INF" ); assertTrue( dirExtract.exists() ); assertTrue( dirExtract.isDirectory() ); } /** * Tests the .rar archiver is forced set to true, and after that * tests the behavior when the forced is set to false. 
* * @throws Exception */ public void testRarIsForcedBehaviour() throws Exception { Archiver rarArvhiver = createArchiver( "rar" ); assertTrue( rarArvhiver.isSupportingForced() ); rarArvhiver.createArchive(); final long creationTime = rarArvhiver.getDestFile().lastModified(); rarArvhiver = createArchiver( "rar" ); assertTrue( rarArvhiver.isSupportingForced() ); //Default should be true rarArvhiver.setForced( true ); waitUntilNewTimestamp( rarArvhiver.getDestFile(), creationTime ); rarArvhiver.createArchive(); final long firstRunTime = rarArvhiver.getDestFile().lastModified(); assertFalse( creationTime==firstRunTime ); //waitUntilNewTimestamp( rarArvhiver.getDestFile(), firstRunTime ); rarArvhiver = createArchiver( "rar" ); rarArvhiver.setForced( false ); rarArvhiver.createArchive(); final long secondRunTime = rarArvhiver.getDestFile().lastModified(); assertEquals( secondRunTime,firstRunTime ); } } plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/tar/000077500000000000000000000000001145404360500306005ustar00rootroot00000000000000TarArchiverTest.java000066400000000000000000000336231145404360500344450ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; /* * The MIT License * * Copyright (c) 2004, The Codehaus * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. 
* * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.UnixStat; import org.codehaus.plexus.archiver.bzip2.BZip2Compressor; import org.codehaus.plexus.archiver.gzip.GZipCompressor; import org.codehaus.plexus.archiver.util.ArchiveEntryUtils; import org.codehaus.plexus.archiver.util.Compressor; import org.codehaus.plexus.archiver.zip.ArchiveFileComparator; import org.codehaus.plexus.components.io.attributes.PlexusIoResourceAttributeUtils; import org.codehaus.plexus.components.io.attributes.PlexusIoResourceAttributes; import org.codehaus.plexus.components.io.resources.PlexusIoFileResource; import org.codehaus.plexus.logging.Logger; import org.codehaus.plexus.logging.console.ConsoleLogger; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.Os; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileWriter; import java.io.IOException; import java.util.Enumeration; import java.util.LinkedHashMap; import java.util.Map; /** * @author Emmanuel Venisse * @version $Id$ */ public class TarArchiverTest extends PlexusTestCase { private Logger logger; public void setUp() throws Exception { super.setUp(); logger = new ConsoleLogger( Logger.LEVEL_DEBUG, "test" ); } public void testCreateArchiveWithDetectedModes() throws Exception { String[] executablePaths = { 
"path/to/executable", "path/to/executable.bat" }; String[] confPaths = { "path/to/etc/file", "path/to/etc/file2" }; String[] logPaths = { "path/to/logs/log.txt" }; int exeMode = 0777; int confMode = 0600; int logMode = 0640; if ( Os.isFamily( Os.FAMILY_WINDOWS ) ) { StackTraceElement e = new Throwable().getStackTrace()[0]; System.out.println( "Cannot execute test: " + e.getMethodName() + " on " + System.getProperty( "os.name" ) ); return; } File tmpDir = null; try { tmpDir = File.createTempFile( "tbz2-with-chmod.", ".dir" ); tmpDir.delete(); tmpDir.mkdirs(); for ( int i = 0; i < executablePaths.length; i++ ) { writeFile( tmpDir, executablePaths[i], exeMode ); } for ( int i = 0; i < confPaths.length; i++ ) { writeFile( tmpDir, confPaths[i], confMode ); } for ( int i = 0; i < logPaths.length; i++ ) { writeFile( tmpDir, logPaths[i], logMode ); } { Map attributesByPath = PlexusIoResourceAttributeUtils.getFileAttributesByPath( tmpDir ); for ( int i = 0; i < executablePaths.length; i++ ) { String path = executablePaths[i]; PlexusIoResourceAttributes attrs = (PlexusIoResourceAttributes) attributesByPath.get( path ); if ( attrs == null ) { attrs = (PlexusIoResourceAttributes) attributesByPath.get( new File( tmpDir, path ).getAbsolutePath() ); } assertNotNull( attrs ); assertEquals( "Wrong mode for: " + path + "; expected: " + exeMode, exeMode, attrs.getOctalMode() ); } for ( int i = 0; i < confPaths.length; i++ ) { String path = confPaths[i]; PlexusIoResourceAttributes attrs = (PlexusIoResourceAttributes) attributesByPath.get( path ); if ( attrs == null ) { attrs = (PlexusIoResourceAttributes) attributesByPath.get( new File( tmpDir, path ).getAbsolutePath() ); } assertNotNull( attrs ); assertEquals( "Wrong mode for: " + path + "; expected: " + confMode, confMode, attrs.getOctalMode() ); } for ( int i = 0; i < logPaths.length; i++ ) { String path = logPaths[i]; PlexusIoResourceAttributes attrs = (PlexusIoResourceAttributes) attributesByPath.get( path ); if ( attrs == null ) 
{ attrs = (PlexusIoResourceAttributes) attributesByPath.get( new File( tmpDir, path ).getAbsolutePath() ); } assertNotNull( attrs ); assertEquals( "Wrong mode for: " + path + "; expected: " + logMode, logMode, attrs.getOctalMode() ); } } File tarFile = getTestFile( "target/output/tar-with-modes.tar" ); TarArchiver archiver = (TarArchiver) lookup( Archiver.ROLE, "tar" ); archiver.setDestFile( tarFile ); archiver.addDirectory( tmpDir ); archiver.createArchive(); assertTrue( tarFile.exists() ); File tarFile2 = getTestFile( "target/output/tar-with-modes-L2.tar" ); archiver = (TarArchiver) lookup( Archiver.ROLE, "tar" ); archiver.setDestFile( tarFile2 ); archiver.addArchivedFileSet( tarFile ); archiver.createArchive(); TarFile tf = new TarFile( tarFile2 ); Map entriesByPath = new LinkedHashMap(); for( Enumeration e = tf.getEntries(); e.hasMoreElements(); ) { TarEntry te = (TarEntry) e.nextElement(); entriesByPath.put( te.getName(), te ); } for ( int i = 0; i < executablePaths.length; i++ ) { String path = executablePaths[i]; TarEntry te = (TarEntry) entriesByPath.get( path ); int mode = te.getMode() & UnixStat.PERM_MASK; assertEquals( "Wrong mode for: " + path + "; expected: " + exeMode, exeMode, mode ); } for ( int i = 0; i < confPaths.length; i++ ) { String path = confPaths[i]; TarEntry te = (TarEntry) entriesByPath.get( path ); int mode = te.getMode() & UnixStat.PERM_MASK; assertEquals( "Wrong mode for: " + path + "; expected: " + confMode, confMode, mode ); } for ( int i = 0; i < logPaths.length; i++ ) { String path = logPaths[i]; TarEntry te = (TarEntry) entriesByPath.get( path ); int mode = te.getMode() & UnixStat.PERM_MASK; assertEquals( "Wrong mode for: " + path + "; expected: " + logMode, logMode, mode ); } } finally { if ( tmpDir != null && tmpDir.exists() ) { try { FileUtils.forceDelete( tmpDir ); } catch ( IOException e ) { e.printStackTrace(); } } } } private void writeFile( File dir, String fname, int mode ) throws IOException, ArchiverException { File 
file = new File( dir, fname ); FileWriter writer = null; try { if ( file.getParentFile() != null ) { file.getParentFile().mkdirs(); } writer = new FileWriter( file ); writer.write( "This is a test file." ); } finally { IOUtil.close( writer ); } ArchiveEntryUtils.chmod( file, mode, logger, false ); } public void testCreateArchive() throws Exception { TarArchiver archiver = (TarArchiver) lookup( Archiver.ROLE, "tar" ); archiver.setDirectoryMode( 0500 ); archiver.getOptions().setDirMode( 0500 ); archiver.setFileMode( 0400 ); archiver.getOptions().setMode( 0400 ); archiver.addDirectory( getTestFile( "src" ) ); archiver.setFileMode( 0640 ); archiver.getOptions().setMode( 0640 ); archiver.addFile( getTestFile( "src/test/resources/manifests/manifest1.mf" ), "one.txt" ); archiver.addFile( getTestFile( "src/test/resources/manifests/manifest2.mf" ), "two.txt", 0664 ); archiver.setDestFile( getTestFile( "target/output/archive.tar" ) ); archiver.createArchive(); TarInputStream tis; tis = new TarInputStream( new BufferedInputStream( new FileInputStream( archiver.getDestFile() ) ) ); TarEntry te; while ( ( te = tis.getNextEntry() ) != null ) { if ( te.isDirectory() ) { assertEquals( 0500, te.getMode() & UnixStat.PERM_MASK ); } else { if ( te.getName().equals( "one.txt" ) ) { assertEquals( 0640, te.getMode() & UnixStat.PERM_MASK ); } else if ( te.getName().equals( "two.txt" ) ) { assertEquals( 0664, te.getMode() & UnixStat.PERM_MASK ); } else { assertEquals( 0400, te.getMode() & UnixStat.PERM_MASK ); } } } } private class TarHandler { File createTarFile() throws Exception { final File srcDir = new File("src"); final File tarFile = new File( "target/output/src.tar" ); TarArchiver tarArchiver = (TarArchiver) lookup( Archiver.ROLE, "tar" ); tarArchiver.setDestFile( tarFile ); tarArchiver.addDirectory( srcDir, null, FileUtils.getDefaultExcludes() ); FileUtils.removePath( tarFile.getPath() ); tarArchiver.createArchive(); return tarFile; } File createTarfile2( File tarFile ) throws 
Exception { final File tarFile2 = new File( "target/output/src2.tar" ); TarArchiver tarArchiver2 = (TarArchiver) lookup( Archiver.ROLE, "tar" ); tarArchiver2.setDestFile( tarFile2 ); tarArchiver2.addArchivedFileSet( tarFile, "prfx/" ); FileUtils.removePath( tarFile2.getPath() ); tarArchiver2.createArchive(); return tarFile2; } TarFile newTarFile( File tarFile ) { return new TarFile( tarFile ); } } private class GZipTarHandler extends TarHandler { File createTarFile() throws Exception { File file = super.createTarFile(); File compressedFile = new File( file.getPath() + ".gz" ); Compressor compressor = new GZipCompressor(); compressor.setSource( new PlexusIoFileResource( file ) ); compressor.setDestFile( compressedFile ); compressor.compress(); compressor.close(); return compressedFile; } TarFile newTarFile( File tarFile ) { return new GZipTarFile( tarFile ); } } private class BZip2TarHandler extends TarHandler { File createTarFile() throws Exception { File file = super.createTarFile(); File compressedFile = new File( file.getPath() + ".bz2" ); Compressor compressor = new BZip2Compressor(); compressor.setSource( new PlexusIoFileResource( file ) ); compressor.setDestFile( compressedFile ); compressor.compress(); compressor.close(); return compressedFile; } TarFile newTarFile( File tarFile ) { return new BZip2TarFile( tarFile ); } } public void testUncompressedResourceCollection() throws Exception { testCreateResourceCollection( new TarHandler() ); } public void testGzipCompressedResourceCollection() throws Exception { testCreateResourceCollection( new GZipTarHandler() ); } public void testBzip2CompressedResourceCollection() throws Exception { testCreateResourceCollection( new BZip2TarHandler() ); } private void testCreateResourceCollection( TarHandler tarHandler ) throws Exception { final File tarFile = tarHandler.createTarFile(); final File tarFile2 = tarHandler.createTarfile2( tarFile ); final TarFile cmp1 = tarHandler.newTarFile( tarFile ); final TarFile cmp2 = new 
TarFile( tarFile2 ); ArchiveFileComparator.assertEquals( cmp1, cmp2, "prfx/" ); cmp1.close(); cmp2.close(); } } TarBZip2UnArchiverTest.java000066400000000000000000000062711145404360500356160ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; /* * The MIT License * * Copyright (c) 2004, The Codehaus * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ import java.io.File; import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.UnArchiver; import org.codehaus.plexus.archiver.bzip2.BZip2Archiver; /** * @author Dan Tran * @version $Id$ */ public class TarBZip2UnArchiverTest extends PlexusTestCase { public void testExtract() throws Exception { TarArchiver tarArchiver = (TarArchiver) lookup( Archiver.ROLE, "tar" ); String fileName1 = "TarBZip2UnArchiverTest1.txt"; String fileName2 = "TarBZip2UnArchiverTest2.txt"; File file1InTar = getTestFile( "target/output/" + fileName1 ); File file2InTar = getTestFile( "target/output/" + fileName2 ); file1InTar.delete(); file2InTar.delete(); File testBZip2File = getTestFile( "target/output/archive.tar.bz2" ); tarArchiver.addFile( getTestFile( "src/test/resources/manifests/manifest1.mf" ), fileName1 ); tarArchiver.addFile( getTestFile( "src/test/resources/manifests/manifest2.mf" ), fileName2, 0664 ); tarArchiver.setDestFile( getTestFile( "target/output/archive.tar" ) ); tarArchiver.createArchive(); BZip2Archiver gzipArchiver = (BZip2Archiver) lookup( Archiver.ROLE, "bzip2" ); gzipArchiver.setDestFile( testBZip2File ); gzipArchiver.addFile( getTestFile( "target/output/archive.tar" ), "dontcare" ); gzipArchiver.createArchive(); TarBZip2UnArchiver tarBZip2UnArchiver = (TarBZip2UnArchiver) lookup( UnArchiver.ROLE, "tbz2" ); tarBZip2UnArchiver.setDestDirectory( getTestFile( "target/output" ) ); tarBZip2UnArchiver.setSourceFile( testBZip2File ); tarBZip2UnArchiver.extract(); assertTrue( file1InTar.exists() ); assertTrue( file2InTar.exists() ); //makesure we place the source file back assertEquals( testBZip2File, tarBZip2UnArchiver.getSourceFile() ); } public void testLookup() throws Exception { lookup( UnArchiver.ROLE, "tar.bz2" ); } } 
plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/tar/TarEntryTest.java000066400000000000000000000020461145404360500340550ustar00rootroot00000000000000package org.codehaus.plexus.archiver.tar; /* * Copyright 2003-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import junit.framework.TestCase; /** * @version $Revision$ $Date$ * from org.apache.ant.tools.tar.TarEntryTest v1.4 * @since Ant 1.6 */ public class TarEntryTest extends TestCase { /** * demonstrates bug 18105 on OSes with os.name shorter than 7. 
*/ public void testFileConstructor() { new TarEntry( new java.io.File( "/foo" ) ); } } TarFileAttributesTest.java000066400000000000000000000234741145404360500356330ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.UnArchiver; import org.codehaus.plexus.archiver.util.DefaultArchivedFileSet; import org.codehaus.plexus.components.io.attributes.PlexusIoResourceAttributeUtils; import org.codehaus.plexus.components.io.attributes.PlexusIoResourceAttributes; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.Os; public class TarFileAttributesTest extends PlexusTestCase { private final List toDelete = new ArrayList(); public void setUp() throws Exception { super.setUp(); System.out.println( "Octal 0660 is decimal " + 0660 ); System.out.println( "Octal 0644 is decimal " + 0644 ); System.out.println( "Octal 0440 is decimal " + 0440 ); } public void tearDown() throws Exception { super.tearDown(); if ( !toDelete.isEmpty() ) { for ( Iterator it = toDelete.iterator(); it.hasNext(); ) { File f = (File) it.next(); System.out.println( "Deleting: " + f ); if ( f.isDirectory() ) { try { FileUtils.deleteDirectory( f ); } catch ( IOException e ) { System.out.println( "Error deleting test directory: " + f ); } } else { f.delete(); f.deleteOnExit(); } } } } private void printTestHeader() { StackTraceElement e = new Throwable().getStackTrace()[1]; System.out.println( "\n\nRunning: " + e.getMethodName() + "\n\n" ); } public void testUseAttributesFromTarArchiveInputInTarArchiverOutput() throws Exception { printTestHeader(); if ( checkForWindows() ) { 
System.out.println( "This test cannot run on windows. Aborting." ); return; } File tempFile = File.createTempFile( "tar-file-attributes.", ".tmp" ); toDelete.add( tempFile ); FileWriter writer = null; try { writer = new FileWriter( tempFile ); writer.write( "This is a test file." ); } finally { IOUtil.close( writer ); } int result = Runtime.getRuntime().exec( "chmod 440 " + tempFile.getAbsolutePath() ).waitFor(); assertEquals( 0, result ); TarArchiver tarArchiver = (TarArchiver) lookup( Archiver.ROLE, "tar" ); File tempTarFile = File.createTempFile( "tar-file.", ".tar" ); toDelete.add( tempTarFile ); tarArchiver.setDestFile( tempTarFile ); tarArchiver.addFile( tempFile, tempFile.getName(), 0660 ); tarArchiver.createArchive(); tarArchiver = null; TarArchiver tarArchiver2 = (TarArchiver) lookup( Archiver.ROLE, "tar" ); File tempTarFile2 = File.createTempFile( "tar-file.", ".tar" ); toDelete.add( tempTarFile2 ); tarArchiver2.setDestFile( tempTarFile2 ); DefaultArchivedFileSet afs = new DefaultArchivedFileSet(); afs.setArchive( tempTarFile ); System.out.println( "Adding tar archive to new archiver: " + tempTarFile ); tarArchiver2.addArchivedFileSet( afs ); tarArchiver2.createArchive(); // Cut from here, and feed it into a new tar archiver...then unarchive THAT. TarUnArchiver tarUnArchiver = (TarUnArchiver) lookup( UnArchiver.ROLE, "tar" ); File tempTarDir = File.createTempFile( "tar-test.", ".dir" ); tempTarDir.delete(); tempTarDir.mkdirs(); toDelete.add( tempTarDir ); tarUnArchiver.setDestDirectory( tempTarDir ); tarUnArchiver.setSourceFile( tempTarFile2 ); tarUnArchiver.extract(); PlexusIoResourceAttributes fileAttributes = PlexusIoResourceAttributeUtils.getFileAttributes( new File( tempTarDir, tempFile.getName() ) ); assertEquals( 0660, fileAttributes.getOctalMode() ); } public void testUseDetectedFileAttributes() throws Exception { printTestHeader(); if ( checkForWindows() ) { System.out.println( "This test cannot run on windows. Aborting." 
); return; } File tempFile = File.createTempFile( "tar-file-attributes.", ".tmp" ); toDelete.add( tempFile ); FileWriter writer = null; try { writer = new FileWriter( tempFile ); writer.write( "This is a test file." ); } finally { IOUtil.close( writer ); } int result = Runtime.getRuntime().exec( "chmod 440 " + tempFile.getAbsolutePath() ).waitFor(); assertEquals( 0, result ); PlexusIoResourceAttributes fileAttributes = PlexusIoResourceAttributeUtils.getFileAttributes( tempFile ); assertEquals( 0440, fileAttributes.getOctalMode() ); TarArchiver tarArchiver = (TarArchiver) lookup( Archiver.ROLE, "tar" ); File tempTarFile = File.createTempFile( "tar-file.", ".tar" ); toDelete.add( tempTarFile ); tarArchiver.setDestFile( tempTarFile ); tarArchiver.addFile( tempFile, tempFile.getName() ); tarArchiver.createArchive(); TarUnArchiver tarUnArchiver = (TarUnArchiver) lookup( UnArchiver.ROLE, "tar" ); File tempTarDir = File.createTempFile( "tar-test.", ".dir" ); tempTarDir.delete(); tempTarDir.mkdirs(); toDelete.add( tempTarDir ); tarUnArchiver.setDestDirectory( tempTarDir ); tarUnArchiver.setSourceFile( tempTarFile ); tarUnArchiver.extract(); fileAttributes = PlexusIoResourceAttributeUtils.getFileAttributes( new File( tempTarDir, tempFile.getName() ) ); assertEquals( 0440, fileAttributes.getOctalMode() ); } private boolean checkForWindows() { if ( Os.isFamily( Os.FAMILY_WINDOWS ) ) { return true; } return false; } public void testOverrideDetectedFileAttributes() throws Exception { printTestHeader(); if ( checkForWindows() ) { System.out.println( "This test cannot run on windows. Aborting." ); return; } File tempFile = File.createTempFile( "tar-file-attributes.", ".tmp" ); toDelete.add( tempFile ); FileWriter writer = null; try { writer = new FileWriter( tempFile ); writer.write( "This is a test file." 
); } finally { IOUtil.close( writer ); } int result = Runtime.getRuntime().exec( "chmod 440 " + tempFile.getAbsolutePath() ).waitFor(); assertEquals( 0, result ); TarArchiver tarArchiver = (TarArchiver) lookup( Archiver.ROLE, "tar" ); File tempTarFile = File.createTempFile( "tar-file.", ".tar" ); toDelete.add( tempTarFile ); tarArchiver.setDestFile( tempTarFile ); tarArchiver.addFile( tempFile, tempFile.getName(), 0660 ); tarArchiver.createArchive(); TarUnArchiver tarUnArchiver = (TarUnArchiver) lookup( UnArchiver.ROLE, "tar" ); File tempTarDir = File.createTempFile( "tar-test.", ".dir" ); tempTarDir.delete(); tempTarDir.mkdirs(); toDelete.add( tempTarDir ); tarUnArchiver.setDestDirectory( tempTarDir ); tarUnArchiver.setSourceFile( tempTarFile ); tarUnArchiver.extract(); PlexusIoResourceAttributes fileAttributes = PlexusIoResourceAttributeUtils.getFileAttributes( new File( tempTarDir, tempFile.getName() ) ); assertEquals( 0660, fileAttributes.getOctalMode() ); } public void testOverrideDetectedFileAttributesUsingFileMode() throws Exception { printTestHeader(); if ( checkForWindows() ) { System.out.println( "This test cannot run on windows. Aborting." ); return; } File tempFile = File.createTempFile( "tar-file-attributes.", ".tmp" ); toDelete.add( tempFile ); FileWriter writer = null; try { writer = new FileWriter( tempFile ); writer.write( "This is a test file." 
); } finally { IOUtil.close( writer ); } int result = Runtime.getRuntime().exec( "chmod 440 " + tempFile.getAbsolutePath() ).waitFor(); assertEquals( 0, result ); TarArchiver tarArchiver = (TarArchiver) lookup( Archiver.ROLE, "tar" ); File tempTarFile = File.createTempFile( "tar-file.", ".tar" ); toDelete.add( tempTarFile ); tarArchiver.setDestFile( tempTarFile ); tarArchiver.setFileMode( 0660 ); tarArchiver.addFile( tempFile, tempFile.getName() ); tarArchiver.createArchive(); TarUnArchiver tarUnArchiver = (TarUnArchiver) lookup( UnArchiver.ROLE, "tar" ); File tempTarDir = File.createTempFile( "tar-test.", ".dir" ); tempTarDir.delete(); tempTarDir.mkdirs(); toDelete.add( tempTarDir ); tarUnArchiver.setDestDirectory( tempTarDir ); tarUnArchiver.setSourceFile( tempTarFile ); tarUnArchiver.extract(); PlexusIoResourceAttributes fileAttributes = PlexusIoResourceAttributeUtils.getFileAttributes( new File( tempTarDir, tempFile.getName() ) ); assertEquals( 0660, fileAttributes.getOctalMode() ); } } plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/tar/TarFileTest.java000066400000000000000000000071321145404360500336340ustar00rootroot00000000000000package org.codehaus.plexus.archiver.tar; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; import java.util.Enumeration; import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.bzip2.BZip2Compressor; import org.codehaus.plexus.archiver.gzip.GZipCompressor; import org.codehaus.plexus.archiver.util.Compressor; import org.codehaus.plexus.components.io.resources.PlexusIoFileResource; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.IOUtil; /** * Test case for {@link TarFile}. 
*/ public class TarFileTest extends PlexusTestCase { private interface TarFileCreator { TarFile newTarFile( File file ) throws IOException; } /** * Test for the uncompressed tar file, {@link TarFile}. */ public void testTarFile() throws Exception { testTarFile( null, null, new TarFileCreator(){ public TarFile newTarFile( File file ) throws IOException { return new TarFile( file ); } } ); } /** * Test for the gzip compressed tar file, {@link GZipTarFile}. */ public void testGZipTarFile() throws Exception { final GZipCompressor compressor = new GZipCompressor(); testTarFile( compressor, ".gz", new TarFileCreator(){ public TarFile newTarFile( File file ) throws IOException { return new GZipTarFile( file ); } } ); } /** * Test for the bzip2 compressed tar file, {@link BZip2TarFile}. */ public void testBZip2TarFile() throws Exception { final BZip2Compressor compressor = new BZip2Compressor(); testTarFile( compressor, ".bz2", new TarFileCreator(){ public TarFile newTarFile( File file ) throws IOException { return new BZip2TarFile( file ); } } ); } private void testTarFile( Compressor compressor, String extension, TarFileCreator tarFileCreator ) throws Exception { File file = new File( "target/output/TarFileTest.tar" ); final Archiver archiver = (Archiver) lookup( Archiver.ROLE, "tar" ); archiver.setDestFile( file ); archiver.addDirectory( new File( "src" ) ); FileUtils.removePath( file.getPath() ); archiver.createArchive(); if ( compressor != null ) { final File compressedFile = new File( file.getPath() + extension ); compressor.setSource( new PlexusIoFileResource( file ) ); compressor.setDestFile( compressedFile ); compressor.compress(); compressor.close(); file = compressedFile; } final TarFile tarFile = tarFileCreator.newTarFile( file ); for ( Enumeration en = tarFile.getEntries(); en.hasMoreElements(); ) { final TarEntry te = (TarEntry) en.nextElement(); if ( te.isDirectory() ) { continue; } final File teFile = new File( "src", te.getName() ); final InputStream 
teStream = tarFile.getInputStream( te ); final InputStream fileStream = new FileInputStream( teFile ); assertTrue( Arrays.equals( IOUtil.toByteArray( teStream ), IOUtil.toByteArray( fileStream ) ) ); teStream.close(); fileStream.close(); } tarFile.close(); } } TarGZipUnArchiverTest.java000066400000000000000000000062421145404360500355370ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/tarpackage org.codehaus.plexus.archiver.tar; /* * The MIT License * * Copyright (c) 2004, The Codehaus * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ import java.io.File; import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.UnArchiver; import org.codehaus.plexus.archiver.gzip.GZipArchiver; /** * @author Dan Tran * @version $Id$ */ public class TarGZipUnArchiverTest extends PlexusTestCase { public void testExtract() throws Exception { TarArchiver tarArchiver = (TarArchiver) lookup( Archiver.ROLE, "tar" ); String fileName1 = "TarGZipUnArchiverTest1.txt"; String fileName2 = "TarGZipUnArchiverTest2.txt"; File file1InTar = getTestFile( "target/output/" + fileName1 ); File file2InTar = getTestFile( "target/output/" + fileName2 ); file1InTar.delete(); file2InTar.delete(); tarArchiver.addFile( getTestFile( "src/test/resources/manifests/manifest1.mf" ), fileName1 ); tarArchiver.addFile( getTestFile( "src/test/resources/manifests/manifest2.mf" ), fileName2, 0664 ); tarArchiver.setDestFile( getTestFile( "target/output/archive.tar" ) ); tarArchiver.createArchive(); GZipArchiver gzipArchiver = (GZipArchiver) lookup( Archiver.ROLE, "gzip" ); File testGZipFile = getTestFile( "target/output/archive.tar.gz" ); gzipArchiver.setDestFile( testGZipFile ); gzipArchiver.addFile( getTestFile( "target/output/archive.tar" ), "dontcare" ); gzipArchiver.createArchive(); TarGZipUnArchiver tarGZipUnArchiver = (TarGZipUnArchiver) lookup( UnArchiver.ROLE, "tgz" ); tarGZipUnArchiver.setDestDirectory( getTestFile( "target/output" ) ); tarGZipUnArchiver.setSourceFile( testGZipFile ); tarGZipUnArchiver.extract(); assertTrue( file1InTar.exists() ); assertTrue( file2InTar.exists() ); //make sure we place the source file back assertEquals( testGZipFile, tarGZipUnArchiver.getSourceFile() ); } public void testLookup() throws Exception { lookup( UnArchiver.ROLE, "tar.gz" ); } } TarRoundTripTest.java000066400000000000000000000040261145404360500346230ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/tarpackage 
org.codehaus.plexus.archiver.tar; /* * Copyright 2003-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import junit.framework.TestCase; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; /** * @version $Id$ * from org.apache.ant.tools.tar.TarRoundTripTest v1.6 */ public class TarRoundTripTest extends TestCase { private static final String LONG_NAME = "this/path/name/contains/more/than/one/hundred/characters/in/order/" + "to/test/the/GNU/long/file/name/capability/round/tripped"; /** * test round-tripping long (GNU) entries */ public void testLongRoundTripping() throws IOException { TarEntry original = new TarEntry( LONG_NAME ); assertEquals( "over 100 chars", true, LONG_NAME.length() > 100 ); assertEquals( "original name", LONG_NAME, original.getName() ); ByteArrayOutputStream buff = new ByteArrayOutputStream(); TarOutputStream tos = new TarOutputStream( buff ); tos.setLongFileMode( TarOutputStream.LONGFILE_GNU ); tos.putNextEntry( original ); tos.closeEntry(); tos.close(); TarInputStream tis = new TarInputStream( new ByteArrayInputStream( buff.toByteArray() ) ); TarEntry tripped = tis.getNextEntry(); assertEquals( "round-tripped name", LONG_NAME, tripped.getName() ); assertNull( "no more entries", tis.getNextEntry() ); tis.close(); } } 
plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/util/000077500000000000000000000000001145404360500307675ustar00rootroot00000000000000EnumeratedAttributeTest.java000066400000000000000000000056731145404360500364030ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/utilpackage org.codehaus.plexus.archiver.util; /* * Copyright 2000-2001,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import junit.framework.TestCase; import org.codehaus.plexus.archiver.ArchiverException; /** * JUnit 3 testcases for org.apache.tools.ant.EnumeratedAttribute. 
*/ public class EnumeratedAttributeTest extends TestCase { private static String[] expected = {"a", "b", "c"}; public EnumeratedAttributeTest( String name ) { super( name ); } public void testContains() { EnumeratedAttribute t1 = new TestNormal(); for ( int i = 0; i < expected.length; i++ ) { assertTrue( expected[ i ] + " is in TestNormal", t1.containsValue( expected[ i ] ) ); assertTrue( expected[ i ].toUpperCase() + " is in TestNormal", !t1.containsValue( expected[ i ].toUpperCase() ) ); } assertTrue( "TestNormal doesn\'t have \"d\" attribute", !t1.containsValue( "d" ) ); assertTrue( "TestNull doesn\'t have \"d\" attribute and doesn\'t die", !( new TestNull() ).containsValue( "d" ) ); } public void testExceptions() { EnumeratedAttribute t1 = new TestNormal(); for ( int i = 0; i < expected.length; i++ ) { try { t1.setValue( expected[ i ] ); } catch ( ArchiverException ae ) { fail( "unexpected exception for value " + expected[ i ] ); } } try { t1.setValue( "d" ); fail( "expected exception for value \"d\"" ); } catch ( ArchiverException ae ) { } try { ( new TestNull() ).setValue( "d" ); fail( "expected exception for value \"d\" in TestNull" ); } catch ( ArchiverException ae ) { } catch ( Throwable other ) { fail( "unexpected death of TestNull: " + other.getMessage() ); } } public static class TestNormal extends EnumeratedAttribute { public String[] getValues() { return expected; } } public static class TestNull extends EnumeratedAttribute { public String[] getValues() { return null; } } } FilePermissionUtilsTest.java000066400000000000000000000053571145404360500363760ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/utilpackage org.codehaus.plexus.archiver.util; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. 
The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.File; import java.lang.reflect.Method; import junit.framework.TestCase; import org.codehaus.plexus.logging.Logger; import org.codehaus.plexus.logging.console.ConsoleLogger; /** * @author Olivier Lamy * */ public class FilePermissionUtilsTest extends TestCase { Logger getLogger() { return new ConsoleLogger( Logger.LEVEL_DEBUG, "foo" ); } public void testOnlyWritableOnlyUser() throws Exception { FilePermission fp = FilePermissionUtils.getFilePermissionFromMode( "200", getLogger() ); assertTrue( fp.isWritable() ); assertTrue( fp.isOwnerOnlyWritable() ); assertFalse( fp.isExecutable() ); assertTrue( fp.isOwnerOnlyExecutable() ); assertFalse( fp.isReadable() ); } public void testExecAndRead() { FilePermission fp = FilePermissionUtils.getFilePermissionFromMode( "500", getLogger() ); assertFalse( fp.isWritable() ); assertTrue( fp.isOwnerOnlyWritable() ); assertTrue( fp.isExecutable() ); assertTrue( fp.isOwnerOnlyExecutable() ); assertTrue( fp.isReadable() ); } public void testAllUser() { FilePermission fp = FilePermissionUtils.getFilePermissionFromMode( "700", getLogger() ); assertTrue( fp.isWritable() ); assertTrue( fp.isOwnerOnlyWritable() ); assertTrue( fp.isExecutable() ); assertTrue( fp.isOwnerOnlyExecutable() ); assertTrue( fp.isReadable() ); } public void testAllAllUser() { FilePermission fp = FilePermissionUtils.getFilePermissionFromMode( "707", getLogger() ); assertTrue( fp.isWritable() ); 
assertFalse( fp.isOwnerOnlyWritable() ); assertTrue( fp.isExecutable() ); assertFalse( fp.isOwnerOnlyExecutable() ); assertTrue( fp.isReadable() ); assertFalse( fp.isOwnerOnlyReadable() ); } } plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/zip/000077500000000000000000000000001145404360500306145ustar00rootroot00000000000000ArchiveFileComparator.java000066400000000000000000000053361145404360500356200ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; import java.util.HashMap; import java.util.Map; import org.codehaus.plexus.archiver.ArchiveFile; import org.codehaus.plexus.archiver.ArchiveFile.Entry; import org.codehaus.plexus.util.IOUtil; import junit.framework.Assert; /** * A utility class, which allows to compare archive files. */ public class ArchiveFileComparator { /** * Creates a map with the archive files contents. The map keys * are the names of file entries in the archive file. The map * values are the respective archive entries. */ private static Map getFileEntries( ArchiveFile file ) throws IOException { final Map map = new HashMap(); for ( java.util.Enumeration en = file.getEntries(); en.hasMoreElements(); ) { Entry ze = (Entry) en.nextElement(); if ( ze.isDirectory() ) { continue; } if ( map.put( ze.getName(), ze ) != null ) { Assert.fail( "Multiple archive file entries named " + ze.getName() + " found." ); } } return map; } /** * Called to compare the given archive entries. 
*/ private static void assertEquals( ArchiveFile file1, Entry entry1, ArchiveFile file2, Entry entry2 ) throws Exception { Assert.assertEquals( entry1.isDirectory(), entry2.isDirectory() ); Assert.assertEquals( entry1.getLastModificationTime(), entry2.getLastModificationTime() ); final InputStream is1 = file1.getInputStream( entry1 ); final InputStream is2 = file2.getInputStream( entry2 ); final byte[] bytes1 = IOUtil.toByteArray( is1 ); final byte[] bytes2 = IOUtil.toByteArray( is2 ); Assert.assertTrue( Arrays.equals( bytes1, bytes2 ) ); is1.close(); is2.close(); } /** * Called to compare the given archive files. */ public static void assertEquals( ArchiveFile file1, ArchiveFile file2, String prefix ) throws Exception { final Map map1 = getFileEntries( file1 ); final Map map2 = getFileEntries( file2 ); for ( java.util.Iterator iter = map1.keySet().iterator(); iter.hasNext(); ) { final String name1 = (String) iter.next(); final String name2 = prefix == null ? name1 : (prefix + name1); Entry ze1 = (Entry) map1.get( name1 ); Entry ze2 = (Entry) map2.remove( name2 ); Assert.assertNotNull( ze2 ); assertEquals( file1, ze1, file2, ze2 ); } Assert.assertTrue( map2.isEmpty() ); } } AsiExtraFieldTest.java000066400000000000000000000135031145404360500347260ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; /* * Copyright 2001,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. * */ import junit.framework.TestCase; import org.codehaus.plexus.archiver.UnixStat; /** * JUnit 3 testcases for org.apache.tools.zip.AsiExtraField. * * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.AsiExtraFieldTest v1.8 */ public class AsiExtraFieldTest extends TestCase implements UnixStat { public AsiExtraFieldTest( String name ) { super( name ); } /** * Test file mode magic. */ public void testModes() { AsiExtraField a = new AsiExtraField(); a.setMode( 0123 ); assertEquals( "plain file", 0100123, a.getMode() ); a.setDirectory( true ); assertEquals( "directory", 040123, a.getMode() ); a.setLinkedFile( "test" ); assertEquals( "symbolic link", 0120123, a.getMode() ); } /** * Test content. */ public void testContent() { AsiExtraField a = new AsiExtraField(); a.setMode( 0123 ); a.setUserId( 5 ); a.setGroupId( 6 ); byte[] b = a.getLocalFileDataData(); // CRC manually calculated, sorry byte[] expect = {(byte) 0xC6, 0x02, 0x78, (byte) 0xB6, // CRC 0123, (byte) 0x80, // mode 0, 0, 0, 0, // link length 5, 0, 6, 0}; // uid, gid assertEquals( "no link", expect.length, b.length ); for ( int i = 0; i < expect.length; i++ ) { assertEquals( "no link, byte " + i, expect[ i ], b[ i ] ); } a.setLinkedFile( "test" ); expect = new byte[]{0x75, (byte) 0x8E, 0x41, (byte) 0xFD, // CRC 0123, (byte) 0xA0, // mode 4, 0, 0, 0, // link length 5, 0, 6, 0, // uid, gid (byte) 't', (byte) 'e', (byte) 's', (byte) 't'}; b = a.getLocalFileDataData(); assertEquals( "no link", expect.length, b.length ); for ( int i = 0; i < expect.length; i++ ) { assertEquals( "no link, byte " + i, expect[ i ], b[ i ] ); } } /** * Test reparse */ public void testReparse() throws Exception { // CRC manually calculated, sorry byte[] data = {(byte) 0xC6, 0x02, 0x78, (byte) 0xB6, // CRC 0123, (byte) 0x80, // mode 0, 0, 0, 0, // link length 5, 0, 6, 0}; // uid, gid AsiExtraField a = new AsiExtraField(); 
a.parseFromLocalFileData( data, 0, data.length ); assertEquals( "length plain file", data.length, a.getLocalFileDataLength().getValue() ); assertTrue( "plain file, no link", !a.isLink() ); assertTrue( "plain file, no dir", !a.isDirectory() ); assertEquals( "mode plain file", FILE_FLAG | 0123, a.getMode() ); assertEquals( "uid plain file", 5, a.getUserId() ); assertEquals( "gid plain file", 6, a.getGroupId() ); data = new byte[]{0x75, (byte) 0x8E, 0x41, (byte) 0xFD, // CRC 0123, (byte) 0xA0, // mode 4, 0, 0, 0, // link length 5, 0, 6, 0, // uid, gid (byte) 't', (byte) 'e', (byte) 's', (byte) 't'}; a = new AsiExtraField(); a.parseFromLocalFileData( data, 0, data.length ); assertEquals( "length link", data.length, a.getLocalFileDataLength().getValue() ); assertTrue( "link, is link", a.isLink() ); assertTrue( "link, no dir", !a.isDirectory() ); assertEquals( "mode link", LINK_FLAG | 0123, a.getMode() ); assertEquals( "uid link", 5, a.getUserId() ); assertEquals( "gid link", 6, a.getGroupId() ); assertEquals( "test", a.getLinkedFile() ); data = new byte[]{(byte) 0x8E, 0x01, (byte) 0xBF, (byte) 0x0E, // CRC 0123, (byte) 0x40, // mode 0, 0, 0, 0, // link 5, 0, 6, 0}; // uid, gid a = new AsiExtraField(); a.parseFromLocalFileData( data, 0, data.length ); assertEquals( "length dir", data.length, a.getLocalFileDataLength().getValue() ); assertTrue( "dir, no link", !a.isLink() ); assertTrue( "dir, is dir", a.isDirectory() ); assertEquals( "mode dir", DIR_FLAG | 0123, a.getMode() ); assertEquals( "uid dir", 5, a.getUserId() ); assertEquals( "gid dir", 6, a.getGroupId() ); data = new byte[]{0, 0, 0, 0, // bad CRC 0123, (byte) 0x40, // mode 0, 0, 0, 0, // link 5, 0, 6, 0}; // uid, gid a = new AsiExtraField(); try { a.parseFromLocalFileData( data, 0, data.length ); fail( "should raise bad CRC exception" ); } catch ( Exception e ) { assertEquals( "bad CRC checksum 0 instead of ebf018e", e.getMessage() ); } } } 
ExtraFieldUtilsTest.java000066400000000000000000000107221145404360500353120ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; /* * Copyright 2001,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import junit.framework.TestCase; import org.codehaus.plexus.archiver.UnixStat; /** * JUnit 3 testcases for org.apache.tools.zip.ExtraFieldUtils. * * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.ExtraFieldUtilsTest v1.8 */ public class ExtraFieldUtilsTest extends TestCase implements UnixStat { public ExtraFieldUtilsTest( String name ) { super( name ); } private AsiExtraField a; private UnrecognizedExtraField dummy; private byte[] data; private byte[] aLocal; public void setUp() { a = new AsiExtraField(); a.setMode( 0755 ); a.setDirectory( true ); dummy = new UnrecognizedExtraField(); dummy.setHeaderId( new ZipShort( 1 ) ); dummy.setLocalFileDataData( new byte[0] ); dummy.setCentralDirectoryData( new byte[]{0} ); aLocal = a.getLocalFileDataData(); byte[] dummyLocal = dummy.getLocalFileDataData(); data = new byte[4 + aLocal.length + 4 + dummyLocal.length]; System.arraycopy( a.getHeaderId().getBytes(), 0, data, 0, 2 ); System.arraycopy( a.getLocalFileDataLength().getBytes(), 0, data, 2, 2 ); System.arraycopy( aLocal, 0, data, 4, aLocal.length ); System.arraycopy( dummy.getHeaderId().getBytes(), 0, data, 4 + aLocal.length, 2 ); 
System.arraycopy( dummy.getLocalFileDataLength().getBytes(), 0, data, 4 + aLocal.length + 2, 2 ); System.arraycopy( dummyLocal, 0, data, 4 + aLocal.length + 4, dummyLocal.length ); } /** * test parser. */ public void testParse() throws Exception { ZipExtraField[] ze = ExtraFieldUtils.parse( data ); assertEquals( "number of fields", 2, ze.length ); assertTrue( "type field 1", ze[ 0 ] instanceof AsiExtraField ); assertEquals( "mode field 1", 040755, ( (AsiExtraField) ze[ 0 ] ).getMode() ); assertTrue( "type field 2", ze[ 1 ] instanceof UnrecognizedExtraField ); assertEquals( "data length field 2", 0, ze[ 1 ].getLocalFileDataLength().getValue() ); byte[] data2 = new byte[data.length - 1]; System.arraycopy( data, 0, data2, 0, data2.length ); try { ExtraFieldUtils.parse( data2 ); fail( "data should be invalid" ); } catch ( Exception e ) { assertEquals( "message", "data starting at " + ( 4 + aLocal.length ) + " is in unknown format", e.getMessage() ); } } /** * Test merge methods */ public void testMerge() { byte[] local = ExtraFieldUtils.mergeLocalFileDataData( new ZipExtraField[]{a, dummy} ); assertEquals( "local length", data.length, local.length ); for ( int i = 0; i < local.length; i++ ) { assertEquals( "local byte " + i, data[ i ], local[ i ] ); } byte[] dummyCentral = dummy.getCentralDirectoryData(); byte[] data2 = new byte[4 + aLocal.length + 4 + dummyCentral.length]; System.arraycopy( data, 0, data2, 0, 4 + aLocal.length + 2 ); System.arraycopy( dummy.getCentralDirectoryLength().getBytes(), 0, data2, 4 + aLocal.length + 2, 2 ); System.arraycopy( dummyCentral, 0, data2, 4 + aLocal.length + 4, dummyCentral.length ); byte[] central = ExtraFieldUtils.mergeCentralDirectoryData( new ZipExtraField[]{a, dummy} ); assertEquals( "central length", data2.length, central.length ); for ( int i = 0; i < central.length; i++ ) { assertEquals( "central byte " + i, data2[ i ], central[ i ] ); } } } 
ZipArchiverTest.java000066400000000000000000000324461145404360500344770ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; /* * The MIT License * * Copyright (c) 2004, The Codehaus * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is furnished to do * so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ import org.codehaus.plexus.archiver.Archiver; import org.codehaus.plexus.archiver.ArchiverException; import org.codehaus.plexus.archiver.BasePlexusArchiverTest; import org.codehaus.plexus.archiver.UnixStat; import org.codehaus.plexus.archiver.util.ArchiveEntryUtils; import org.codehaus.plexus.components.io.attributes.PlexusIoResourceAttributeUtils; import org.codehaus.plexus.components.io.attributes.PlexusIoResourceAttributes; import org.codehaus.plexus.logging.Logger; import org.codehaus.plexus.logging.console.ConsoleLogger; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.Os; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.Enumeration; import java.util.Map; /** * @author Emmanuel Venisse * @version $Id$ */ public class ZipArchiverTest extends BasePlexusArchiverTest { private Logger logger; public void setUp() throws Exception { super.setUp(); logger = new ConsoleLogger( Logger.LEVEL_DEBUG, "test" ); } public void testCreateArchiveWithDetectedModes() throws Exception { String[] executablePaths = { "path/to/executable", "path/to/executable.bat" }; String[] confPaths = { "path/to/etc/file", "path/to/etc/file2" }; String[] logPaths = { "path/to/logs/log.txt" }; int exeMode = 0777; int confMode = 0600; int logMode = 0640; if ( Os.isFamily( Os.FAMILY_WINDOWS ) ) { StackTraceElement e = new Throwable().getStackTrace()[0]; System.out.println( "Cannot execute test: " + e.getMethodName() + " on " + System.getProperty( "os.name" ) ); return; } File tmpDir = null; try { tmpDir = File.createTempFile( "zip-with-chmod.", ".dir" ); tmpDir.delete(); tmpDir.mkdirs(); for ( int i = 0; i < executablePaths.length; i++ ) { writeFile( tmpDir, executablePaths[i], exeMode ); } for ( int i = 0; i < confPaths.length; i++ ) { writeFile( tmpDir, confPaths[i], confMode ); } for ( int i = 0; i < logPaths.length; i++ ) { writeFile( tmpDir, logPaths[i], logMode ); } { Map 
attributesByPath = PlexusIoResourceAttributeUtils.getFileAttributesByPath( tmpDir ); for ( int i = 0; i < executablePaths.length; i++ ) { String path = executablePaths[i]; PlexusIoResourceAttributes attrs = (PlexusIoResourceAttributes) attributesByPath.get( path ); if ( attrs == null ) { attrs = (PlexusIoResourceAttributes) attributesByPath.get( new File( tmpDir, path ).getAbsolutePath() ); } assertNotNull( attrs ); assertEquals( "Wrong mode for: " + path + "; expected: " + exeMode, exeMode, attrs.getOctalMode() ); } for ( int i = 0; i < confPaths.length; i++ ) { String path = confPaths[i]; PlexusIoResourceAttributes attrs = (PlexusIoResourceAttributes) attributesByPath.get( path ); if ( attrs == null ) { attrs = (PlexusIoResourceAttributes) attributesByPath.get( new File( tmpDir, path ).getAbsolutePath() ); } assertNotNull( attrs ); assertEquals( "Wrong mode for: " + path + "; expected: " + confMode, confMode, attrs.getOctalMode() ); } for ( int i = 0; i < logPaths.length; i++ ) { String path = logPaths[i]; PlexusIoResourceAttributes attrs = (PlexusIoResourceAttributes) attributesByPath.get( path ); if ( attrs == null ) { attrs = (PlexusIoResourceAttributes) attributesByPath.get( new File( tmpDir, path ).getAbsolutePath() ); } assertNotNull( attrs ); assertEquals( "Wrong mode for: " + path + "; expected: " + logMode, logMode, attrs.getOctalMode() ); } } File zipFile = getTestFile( "target/output/zip-with-modes.zip" ); ZipArchiver archiver = (ZipArchiver) lookup( Archiver.ROLE, "zip" ); archiver.setDestFile( zipFile ); archiver.addDirectory( tmpDir ); archiver.createArchive(); assertTrue( zipFile.exists() ); File zipFile2 = getTestFile( "target/output/zip-with-modes-L2.zip" ); archiver = (ZipArchiver) lookup( Archiver.ROLE, "zip" ); archiver.setDestFile( zipFile2 ); archiver.addArchivedFileSet( zipFile ); archiver.createArchive(); ZipFile zf = new ZipFile( zipFile2 ); for ( int i = 0; i < executablePaths.length; i++ ) { String path = executablePaths[i]; ZipEntry ze 
= zf.getEntry( path ); int mode = ze.getUnixMode() & UnixStat.PERM_MASK; assertEquals( "Wrong mode for: " + path + "; expected: " + exeMode, exeMode, mode ); } for ( int i = 0; i < confPaths.length; i++ ) { String path = confPaths[i]; ZipEntry ze = zf.getEntry( path ); int mode = ze.getUnixMode() & UnixStat.PERM_MASK; assertEquals( "Wrong mode for: " + path + "; expected: " + confMode, confMode, mode ); } for ( int i = 0; i < logPaths.length; i++ ) { String path = logPaths[i]; ZipEntry ze = zf.getEntry( path ); int mode = ze.getUnixMode() & UnixStat.PERM_MASK; assertEquals( "Wrong mode for: " + path + "; expected: " + logMode, logMode, mode ); } } finally { if ( tmpDir != null && tmpDir.exists() ) { try { FileUtils.forceDelete( tmpDir ); } catch ( IOException e ) { e.printStackTrace(); } } } } private void writeFile( File dir, String fname, int mode ) throws IOException, ArchiverException { File file = new File( dir, fname ); FileWriter writer = null; try { if ( file.getParentFile() != null ) { file.getParentFile().mkdirs(); } writer = new FileWriter( file ); writer.write( "This is a test file." 
); } finally { IOUtil.close( writer ); } ArchiveEntryUtils.chmod( file, mode, logger, false ); } public void testCreateArchive() throws Exception { ZipArchiver archiver = newArchiver( "archive1.zip" ); createArchive(archiver); } private ZipArchiver newArchiver( String name ) throws Exception { ZipArchiver archiver = (ZipArchiver) lookup( Archiver.ROLE, "zip" ); archiver.setFileMode( 0640 ); archiver.addFile( getTestFile( "src/test/resources/manifests/manifest1.mf" ), "one.txt" ); archiver.addFile( getTestFile( "src/test/resources/manifests/manifest2.mf" ), "two.txt", 0664 ); // reset default file mode for files included from now on archiver.setFileMode( 0400 ); archiver.setDirectoryMode( 0777 ); archiver.addDirectory( getTestFile( "src/test/resources/world-writable/" ), "worldwritable/" ); archiver.setDirectoryMode( 0070 ); archiver.addDirectory( getTestFile( "src/test/resources/group-writable/" ), "groupwritable/" ); archiver.setDirectoryMode( 0500 ); archiver.setFileMode( 0400 ); archiver.addDirectory( getTestFile( "src" ) ); archiver.setDestFile( getTestFile( "target/output/" + name ) ); return archiver; } private void createArchive( ZipArchiver archiver ) throws ArchiverException, IOException { archiver.createArchive(); ZipFile zf = new ZipFile( archiver.getDestFile() ); Enumeration e = zf.getEntries(); while ( e.hasMoreElements() ) { ZipEntry ze = (ZipEntry) e.nextElement(); if ( ze.isDirectory() ) { if ( ze.getName().startsWith( "worldwritable" ) ) { assertEquals( 0777, UnixStat.PERM_MASK & ze.getUnixMode() ); } else if ( ze.getName().startsWith( "groupwritable" ) ) { assertEquals( 0070, UnixStat.PERM_MASK & ze.getUnixMode() ); } else { assertEquals( 0500, UnixStat.PERM_MASK & ze.getUnixMode() ); } } else { if ( ze.getName().equals( "one.txt" ) ) { assertEquals( 0640, UnixStat.PERM_MASK & ze.getUnixMode() ); } else if ( ze.getName().equals( "two.txt" ) ) { assertEquals( 0664, UnixStat.PERM_MASK & ze.getUnixMode() ); } else { assertEquals( 0400, 
UnixStat.PERM_MASK & ze.getUnixMode() ); } } } } public void testForced() throws Exception { ZipArchiver archiver = newArchiver( "archive2.zip" ); assertTrue( archiver.isForced() ); File f = archiver.getDestFile(); if ( f.exists() ) { FileUtils.fileDelete( f.getPath() ); } assertFalse( f.exists() ); createArchive( archiver ); long l1 = f.lastModified(); assertTrue( f.exists() ); archiver = newArchiver( "archive2.zip" ); waitUntilNewTimestamp( archiver.getDestFile(), l1 ); createArchive( archiver ); long l2 = f.lastModified(); assertTrue( f.exists() ); assertTrue( l2 > l1 ); archiver = newArchiver( "archive2.zip" ); assertTrue( archiver.isSupportingForced() ); archiver.setForced( false ); assertFalse( archiver.isForced() ); createArchive( archiver ); long l3 = f.lastModified(); assertTrue( f.exists() ); assertEquals(l2, l3); } public void testCreateResourceCollection() throws Exception { final File srcDir = new File("src"); final File zipFile = new File( "target/output/src.zip" ); ZipArchiver zipArchiver = (ZipArchiver) lookup( Archiver.ROLE, "zip" ); zipArchiver.setDestFile( zipFile ); zipArchiver.addDirectory( srcDir, null, FileUtils.getDefaultExcludes() ); FileUtils.removePath( zipFile.getPath() ); zipArchiver.createArchive(); final File zipFile2 = new File( "target/output/src2.zip" ); ZipArchiver zipArchiver2 = (ZipArchiver) lookup( Archiver.ROLE, "zip" ); zipArchiver2.setDestFile( zipFile2 ); zipArchiver2.addArchivedFileSet( zipFile, "prfx/" ); FileUtils.removePath( zipFile2.getPath() ); zipArchiver2.createArchive(); final ZipFile cmp1 = new ZipFile( zipFile ); final ZipFile cmp2 = new ZipFile( zipFile2 ); ArchiveFileComparator.assertEquals( cmp1, cmp2, "prfx/" ); cmp1.close(); cmp2.close(); } } plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/zip/ZipEntryTest.java000066400000000000000000000101701145404360500341020ustar00rootroot00000000000000package org.codehaus.plexus.archiver.zip; /* * Copyright 2001-2002,2004 The Apache 
Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import junit.framework.TestCase; /** * JUnit 3 testcases for org.apache.tools.zip.ZipEntry. * * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.ZipEntryTest v1.9 */ public class ZipEntryTest extends TestCase { public ZipEntryTest( String name ) { super( name ); } /** * test handling of extra fields * * @since 1.1 */ public void testExtraFields() { AsiExtraField a = new AsiExtraField(); a.setDirectory( true ); a.setMode( 0755 ); UnrecognizedExtraField u = new UnrecognizedExtraField(); u.setHeaderId( new ZipShort( 1 ) ); u.setLocalFileDataData( new byte[0] ); ZipEntry ze = new ZipEntry( "test/" ); ze.setExtraFields( new ZipExtraField[]{a, u} ); byte[] data1 = ze.getExtra(); ZipExtraField[] result = ze.getExtraFields(); assertEquals( "first pass", 2, result.length ); assertSame( a, result[ 0 ] ); assertSame( u, result[ 1 ] ); UnrecognizedExtraField u2 = new UnrecognizedExtraField(); u2.setHeaderId( new ZipShort( 1 ) ); u2.setLocalFileDataData( new byte[]{1} ); ze.addExtraField( u2 ); byte[] data2 = ze.getExtra(); result = ze.getExtraFields(); assertEquals( "second pass", 2, result.length ); assertSame( a, result[ 0 ] ); assertSame( u2, result[ 1 ] ); assertEquals( "length second pass", data1.length + 1, data2.length ); UnrecognizedExtraField u3 = new UnrecognizedExtraField(); u3.setHeaderId( new ZipShort( 2 ) ); u3.setLocalFileDataData( new byte[]{1} ); ze.addExtraField( u3 ); result = 
ze.getExtraFields(); assertEquals( "third pass", 3, result.length ); ze.removeExtraField( new ZipShort( 1 ) ); byte[] data3 = ze.getExtra(); result = ze.getExtraFields(); assertEquals( "fourth pass", 2, result.length ); assertSame( a, result[ 0 ] ); assertSame( u3, result[ 1 ] ); assertEquals( "length fourth pass", data2.length, data3.length ); try { ze.removeExtraField( new ZipShort( 1 ) ); fail( "should be no such element" ); } catch ( java.util.NoSuchElementException nse ) { } } public void testUnixMode() { ZipEntry ze = new ZipEntry( "foo" ); assertEquals( 0, ze.getPlatform() ); ze.setUnixMode( 0755 ); assertEquals( 3, ze.getPlatform() ); assertEquals( 0755, ( ze.getExternalAttributes() >> 16 ) & 0xFFFF ); assertEquals( 0, ze.getExternalAttributes() & 0xFFFF ); ze.setUnixMode( 0444 ); assertEquals( 3, ze.getPlatform() ); assertEquals( 0444, ( ze.getExternalAttributes() >> 16 ) & 0xFFFF ); assertEquals( 1, ze.getExternalAttributes() & 0xFFFF ); ze = new ZipEntry( "foo/" ); assertEquals( 0, ze.getPlatform() ); ze.setUnixMode( 0777 ); assertEquals( 3, ze.getPlatform() ); assertEquals( 0777, ( ze.getExternalAttributes() >> 16 ) & 0xFFFF ); assertEquals( 0x10, ze.getExternalAttributes() & 0xFFFF ); ze.setUnixMode( 0577 ); assertEquals( 3, ze.getPlatform() ); assertEquals( 0577, ( ze.getExternalAttributes() >> 16 ) & 0xFFFF ); assertEquals( 0x11, ze.getExternalAttributes() & 0xFFFF ); } } plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/zip/ZipLongTest.java000066400000000000000000000050521145404360500337030ustar00rootroot00000000000000package org.codehaus.plexus.archiver.zip; /* * Copyright 2001,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import junit.framework.TestCase; /** * JUnit 3 testcases for org.apache.tools.zip.ZipLong. * * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.ZipLongTest v1.8 */ public class ZipLongTest extends TestCase { public ZipLongTest( String name ) { super( name ); } /** * Test conversion to bytes. */ public void testToBytes() { ZipLong zl = new ZipLong( 0x12345678 ); byte[] result = zl.getBytes(); assertEquals( "length getBytes", 4, result.length ); assertEquals( "first byte getBytes", 0x78, result[ 0 ] ); assertEquals( "second byte getBytes", 0x56, result[ 1 ] ); assertEquals( "third byte getBytes", 0x34, result[ 2 ] ); assertEquals( "fourth byte getBytes", 0x12, result[ 3 ] ); } /** * Test conversion from bytes. */ public void testFromBytes() { byte[] val = new byte[]{0x78, 0x56, 0x34, 0x12}; ZipLong zl = new ZipLong( val ); assertEquals( "value from bytes", 0x12345678, zl.getValue() ); } /** * Test the contract of the equals method. */ public void testEquals() { ZipLong zl = new ZipLong( 0x12345678 ); ZipLong zl2 = new ZipLong( 0x12345678 ); ZipLong zl3 = new ZipLong( 0x87654321 ); assertTrue( "reflexive", zl.equals( zl ) ); assertTrue( "works", zl.equals( zl2 ) ); assertTrue( "works, part two", !zl.equals( zl3 ) ); assertTrue( "symmetric", zl2.equals( zl ) ); assertNotNull( "null handling", zl ); assertTrue( "non ZipLong handling", !zl.equals( new Integer( 0x1234 ) ) ); } /** * Test sign handling. 
*/ public void testSign() { ZipLong zl = new ZipLong( new byte[]{(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF} ); assertEquals( 0x00000000FFFFFFFFl, zl.getValue() ); } } plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/zip/ZipShortTest.java000066400000000000000000000045631145404360500341110ustar00rootroot00000000000000package org.codehaus.plexus.archiver.zip; /* * Copyright 2001,2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import junit.framework.TestCase; /** * JUnit 3 testcases for org.apache.tools.zip.ZipShort. * * @version $Revision$ $Date$ * from org.apache.ant.tools.zip.ZipShortTest v1.8 */ public class ZipShortTest extends TestCase { public ZipShortTest( String name ) { super( name ); } /** * Test conversion to bytes. */ public void testToBytes() { ZipShort zs = new ZipShort( 0x1234 ); byte[] result = zs.getBytes(); assertEquals( "length getBytes", 2, result.length ); assertEquals( "first byte getBytes", 0x34, result[ 0 ] ); assertEquals( "second byte getBytes", 0x12, result[ 1 ] ); } /** * Test conversion from bytes. */ public void testFromBytes() { byte[] val = new byte[]{0x34, 0x12}; ZipShort zs = new ZipShort( val ); assertEquals( "value from bytes", 0x1234, zs.getValue() ); } /** * Test the contract of the equals method. 
*/ public void testEquals() { ZipShort zs = new ZipShort( 0x1234 ); ZipShort zs2 = new ZipShort( 0x1234 ); ZipShort zs3 = new ZipShort( 0x5678 ); assertTrue( "reflexive", zs.equals( zs ) ); assertTrue( "works", zs.equals( zs2 ) ); assertTrue( "works, part two", !zs.equals( zs3 ) ); assertTrue( "symmetric", zs2.equals( zs ) ); assertNotNull( "null handling", zs ); assertTrue( "non ZipShort handling", !zs.equals( new Integer( 0x1234 ) ) ); } /** * Test sign handling. */ public void testSign() { ZipShort zs = new ZipShort( new byte[]{(byte) 0xFF, (byte) 0xFF} ); assertEquals( 0x0000FFFF, zs.getValue() ); } } ZipUnArchiverTest.java000066400000000000000000000047571145404360500350060ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/java/org/codehaus/plexus/archiver/zippackage org.codehaus.plexus.archiver.zip; import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.components.io.fileselectors.FileSelector; import org.codehaus.plexus.components.io.fileselectors.IncludeExcludeFileSelector; import org.codehaus.plexus.util.FileUtils; import java.io.File; /** * @author Jason van Zyl */ public class ZipUnArchiverTest extends PlexusTestCase { private void runUnarchiver( String path, FileSelector[] selectors, boolean[] results ) throws Exception { String s = "target/zip-unarchiver-tests"; File testJar = new File( getBasedir(), "src/test/jars/test.jar" ); File outputDirectory = new File( getBasedir(), s ); ZipUnArchiver zu = new ZipUnArchiver( testJar ); zu.setFileSelectors( selectors ); FileUtils.deleteDirectory( outputDirectory ); zu.extract( path, outputDirectory ); File f0 = new File( getBasedir(), s + "/resources/artifactId/test.properties" ); assertEquals( results[0], f0.exists() ); File f1 = new File( getBasedir(), s + "/resources/artifactId/directory/test.properties" ); assertEquals( results[1], f1.exists() ); File f2 = new File( getBasedir(), s + "/META-INF/MANIFEST.MF" ); assertEquals( results[2], f2.exists() ); } public void 
testExtractingADirectoryFromAJarFile() throws Exception { runUnarchiver( "resources/artifactId", null, new boolean[]{ true, true, false } ); runUnarchiver( "", null, new boolean[]{ true, true, true } ); } public void testSelectors() throws Exception { IncludeExcludeFileSelector fileSelector = new IncludeExcludeFileSelector(); runUnarchiver( "", new FileSelector[]{ fileSelector }, new boolean[]{ true, true, true } ); fileSelector.setExcludes( new String[]{ "**/test.properties" } ); runUnarchiver( "", new FileSelector[]{ fileSelector }, new boolean[]{ false, false, true } ); fileSelector.setIncludes( new String[]{ "**/test.properties" } ); fileSelector.setExcludes( null ); runUnarchiver( "", new FileSelector[]{ fileSelector }, new boolean[]{ true, true, false } ); fileSelector.setExcludes( new String[]{ "resources/artifactId/directory/test.properties" } ); runUnarchiver( "", new FileSelector[]{ fileSelector }, new boolean[]{ true, false, false } ); } } plexus-archiver-plexus-archiver-1.2/src/test/resources/000077500000000000000000000000001145404360500233765ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/resources/group-writable/000077500000000000000000000000001145404360500263415ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/resources/group-writable/bar.txt000066400000000000000000000000001145404360500276340ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/resources/group-writable/foo.txt000066400000000000000000000000001145404360500276530ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/resources/jar-security/000077500000000000000000000000001145404360500260175ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/resources/jar-security/dummy.txt000066400000000000000000000001111145404360500277040ustar00rootroot00000000000000This file is used as fake content for the security file filtering tests. 
plexus-archiver-plexus-archiver-1.2/src/test/resources/manifests/000077500000000000000000000000001145404360500253675ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/resources/manifests/manifest1.mf000066400000000000000000000000271145404360500276010ustar00rootroot00000000000000Manifest-Version: 1.0 plexus-archiver-plexus-archiver-1.2/src/test/resources/manifests/manifest2.mf000066400000000000000000000000731145404360500276030ustar00rootroot00000000000000Manifest-Version: 1.0 Header-without-colon maybe mistyped plexus-archiver-plexus-archiver-1.2/src/test/resources/manifests/manifest3.mf000066400000000000000000000000761145404360500276070ustar00rootroot00000000000000Manifest-Version: 1.0 Can't start with a continuation line plexus-archiver-plexus-archiver-1.2/src/test/resources/manifests/manifest4.mf000066400000000000000000000000441145404360500276030ustar00rootroot00000000000000Manifest-Version: 1.0 Name: test5 plexus-archiver-plexus-archiver-1.2/src/test/resources/manifests/manifest5.mf000066400000000000000000000000671145404360500276110ustar00rootroot00000000000000Manifest-Version: 1.0 Test: test6 Class-Path: fubar plexus-archiver-plexus-archiver-1.2/src/test/resources/manifests/manifest6.mf000066400000000000000000000000651145404360500276100ustar00rootroot00000000000000Manifest-Version: 1.0 Class-Path: fubar From: Jack plexus-archiver-plexus-archiver-1.2/src/test/resources/world-writable/000077500000000000000000000000001145404360500263345ustar00rootroot00000000000000plexus-archiver-plexus-archiver-1.2/src/test/resources/world-writable/foo.txt000066400000000000000000000000001145404360500276460ustar00rootroot00000000000000