You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@archiva.apache.org by jo...@apache.org on 2007/02/23 20:05:37 UTC
svn commit: r511053 [2/6] - in /maven/archiva/trunk: ./ archiva-applet/
archiva-cli/ archiva-cli/src/main/java/org/apache/maven/archiva/cli/
archiva-common/ archiva-common/src/ archiva-common/src/main/
archiva-common/src/main/java/ archiva-common/src/m...
Modified: maven/archiva/trunk/archiva-converter/src/main/java/org/apache/maven/archiva/converter/DefaultRepositoryConverter.java
URL: http://svn.apache.org/viewvc/maven/archiva/trunk/archiva-converter/src/main/java/org/apache/maven/archiva/converter/DefaultRepositoryConverter.java?view=diff&rev=511053&r1=511052&r2=511053
==============================================================================
--- maven/archiva/trunk/archiva-converter/src/main/java/org/apache/maven/archiva/converter/DefaultRepositoryConverter.java (original)
+++ maven/archiva/trunk/archiva-converter/src/main/java/org/apache/maven/archiva/converter/DefaultRepositoryConverter.java Fri Feb 23 11:05:21 2007
@@ -22,7 +22,6 @@
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.maven.archiva.converter.transaction.FileTransaction;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.handler.manager.ArtifactHandlerManager;
@@ -38,7 +37,6 @@
import org.apache.maven.model.DistributionManagement;
import org.apache.maven.model.Model;
import org.apache.maven.model.Relocation;
-import org.apache.maven.model.converter.ArtifactPomRewriter;
import org.apache.maven.model.converter.ModelConverter;
import org.apache.maven.model.converter.PomTranslationException;
import org.apache.maven.model.io.xpp3.MavenXpp3Writer;
@@ -46,6 +44,7 @@
import org.codehaus.plexus.digest.Digester;
import org.codehaus.plexus.digest.DigesterException;
import org.codehaus.plexus.i18n.I18N;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import java.io.File;
@@ -54,6 +53,7 @@
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
+import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
@@ -67,6 +67,7 @@
* @plexus.component role="org.apache.maven.archiva.converter.RepositoryConverter" role-hint="default"
*/
public class DefaultRepositoryConverter
+ extends AbstractLogEnabled
implements RepositoryConverter
{
/**
@@ -84,11 +85,6 @@
/**
* @plexus.requirement
*/
- private ArtifactPomRewriter rewriter;
-
- /**
- * @plexus.requirement
- */
private ModelConverter translator;
/**
@@ -111,7 +107,9 @@
*/
private I18N i18n;
- public void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter )
+ private List listeners = new ArrayList();
+
+ public void convert( Artifact artifact, ArtifactRepository targetRepository )
throws RepositoryConversionException
{
if ( artifact.getRepository().getUrl().equals( targetRepository.getUrl() ) )
@@ -119,20 +117,19 @@
throw new RepositoryConversionException( getI18NString( "exception.repositories.match" ) );
}
- if ( validateMetadata( artifact, reporter ) )
+ if ( validateMetadata( artifact ) )
{
FileTransaction transaction = new FileTransaction();
- if ( copyPom( artifact, targetRepository, reporter, transaction ) )
+ if ( copyPom( artifact, targetRepository, transaction ) )
{
- if ( copyArtifact( artifact, targetRepository, reporter, transaction ) )
+ if ( copyArtifact( artifact, targetRepository, transaction ) )
{
Metadata metadata = createBaseMetadata( artifact );
Versioning versioning = new Versioning();
versioning.addVersion( artifact.getBaseVersion() );
metadata.setVersioning( versioning );
- updateMetadata( new ArtifactRepositoryMetadata( artifact ), targetRepository, metadata,
- transaction );
+ updateMetadata( new ArtifactRepositoryMetadata( artifact ), targetRepository, metadata, transaction );
metadata = createBaseMetadata( artifact );
metadata.setVersion( artifact.getBaseVersion() );
@@ -173,8 +170,8 @@
Metadata newMetadata, FileTransaction transaction )
throws RepositoryConversionException
{
- File file = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File file = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
Metadata metadata;
boolean changed;
@@ -244,7 +241,7 @@
return metadata;
}
- private boolean validateMetadata( Artifact artifact, ReportingDatabase reporter )
+ private boolean validateMetadata( Artifact artifact )
throws RepositoryConversionException
{
ArtifactRepository repository = artifact.getRepository();
@@ -252,12 +249,11 @@
boolean result = true;
RepositoryMetadata repositoryMetadata = new ArtifactRepositoryMetadata( artifact );
- File file =
- new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
+ File file = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( repositoryMetadata ) );
if ( file.exists() )
{
Metadata metadata = readMetadata( file );
- result = validateMetadata( metadata, repositoryMetadata, artifact, reporter );
+ result = validateMetadata( metadata, repositoryMetadata, artifact );
}
repositoryMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
@@ -265,14 +261,13 @@
if ( file.exists() )
{
Metadata metadata = readMetadata( file );
- result = result && validateMetadata( metadata, repositoryMetadata, artifact, reporter );
+ result = result && validateMetadata( metadata, repositoryMetadata, artifact );
}
return result;
}
- private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact,
- ReportingDatabase reporter )
+ private boolean validateMetadata( Metadata metadata, RepositoryMetadata repositoryMetadata, Artifact artifact )
{
String groupIdKey;
String artifactIdKey = null;
@@ -302,14 +297,14 @@
if ( metadata.getGroupId() == null || !metadata.getGroupId().equals( artifact.getGroupId() ) )
{
- addFailure( reporter, artifact, groupIdKey );
+ addFailure( artifact, groupIdKey );
result = false;
}
if ( !repositoryMetadata.storedInGroupDirectory() )
{
if ( metadata.getGroupId() == null || !metadata.getArtifactId().equals( artifact.getArtifactId() ) )
{
- addFailure( reporter, artifact, artifactIdKey );
+ addFailure( artifact, artifactIdKey );
result = false;
}
if ( !repositoryMetadata.storedInArtifactVersionDirectory() )
@@ -319,8 +314,7 @@
boolean foundVersion = false;
if ( metadata.getVersioning() != null )
{
- for ( Iterator i = metadata.getVersioning().getVersions().iterator();
- i.hasNext() && !foundVersion; )
+ for ( Iterator i = metadata.getVersioning().getVersions().iterator(); i.hasNext() && !foundVersion; )
{
String version = (String) i.next();
if ( version.equals( artifact.getBaseVersion() ) )
@@ -332,7 +326,7 @@
if ( !foundVersion )
{
- addFailure( reporter, artifact, versionsKey );
+ addFailure( artifact, versionsKey );
result = false;
}
}
@@ -341,7 +335,7 @@
// snapshot metadata
if ( !artifact.getBaseVersion().equals( metadata.getVersion() ) )
{
- addFailure( reporter, artifact, versionKey );
+ addFailure( artifact, versionKey );
result = false;
}
@@ -364,7 +358,7 @@
if ( !correct )
{
- addFailure( reporter, artifact, snapshotKey );
+ addFailure( artifact, snapshotKey );
result = false;
}
}
@@ -374,30 +368,30 @@
return result;
}
- private void addFailure( ReportingDatabase reporter, Artifact artifact, String key )
+ private void addFailure( Artifact artifact, String key )
{
- addFailureWithReason( reporter, artifact, getI18NString( key ) );
-
+ addFailureWithReason( artifact, getI18NString( key ) );
}
- private static void addWarning( ReportingDatabase reporter, Artifact artifact, String message )
+ private void addWarning( Artifact artifact, String message )
{
// TODO: should we be able to identify/fix these?
- reporter.addWarning( artifact, null, null, message );
+ // TODO: write archiva-artifact-repair module
+ triggerConversionEvent( new ConversionEvent( artifact.getRepository(), ConversionEvent.WARNING, artifact,
+ message ) );
}
- private static void addFailureWithReason( ReportingDatabase reporter, Artifact artifact, String reason )
+ private void addFailureWithReason( Artifact artifact, String reason )
{
// TODO: should we be able to identify/fix these?
- reporter.addFailure( artifact, null, null, reason );
+ triggerConversionEvent( new ConversionEvent( artifact.getRepository(), ConversionEvent.ERROR, artifact, reason ) );
}
- private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
- FileTransaction transaction )
+ private boolean copyPom( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction )
throws RepositoryConversionException
{
- Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(),
- artifact.getVersion() );
+ Artifact pom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact
+ .getVersion() );
pom.setBaseVersion( artifact.getBaseVersion() );
ArtifactRepository repository = artifact.getRepository();
File file = new File( repository.getBasedir(), repository.pathOf( pom ) );
@@ -411,7 +405,7 @@
boolean checksumsValid = false;
try
{
- if ( testChecksums( artifact, file, reporter ) )
+ if ( testChecksums( artifact, file ) )
{
checksumsValid = true;
}
@@ -457,16 +451,15 @@
if ( doRelocation( artifact, v3Model, targetRepository, transaction ) )
{
- Artifact relocatedPom = artifactFactory.createProjectArtifact( artifact.getGroupId(),
- artifact.getArtifactId(),
- artifact.getVersion() );
+ Artifact relocatedPom = artifactFactory.createProjectArtifact( artifact.getGroupId(), artifact
+ .getArtifactId(), artifact.getVersion() );
targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( relocatedPom ) );
}
Model v4Model = translator.translate( v3Model );
- translator.validateV4Basics( v4Model, v3Model.getGroupId(), v3Model.getArtifactId(),
- v3Model.getVersion(), v3Model.getPackage() );
+ translator.validateV4Basics( v4Model, v3Model.getGroupId(), v3Model.getArtifactId(), v3Model
+ .getVersion(), v3Model.getPackage() );
writer = new StringWriter();
MavenXpp3Writer Xpp3Writer = new MavenXpp3Writer();
@@ -479,13 +472,12 @@
for ( Iterator i = warnings.iterator(); i.hasNext(); )
{
String message = (String) i.next();
- addWarning( reporter, artifact, message );
+ addWarning( artifact, message );
}
}
catch ( XmlPullParserException e )
{
- addFailureWithReason( reporter, artifact,
- getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
+ addFailureWithReason( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
result = false;
}
catch ( IOException e )
@@ -494,8 +486,7 @@
}
catch ( PomTranslationException e )
{
- addFailureWithReason( reporter, artifact,
- getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
+ addFailureWithReason( artifact, getI18NString( "failure.invalid.source.pom", e.getMessage() ) );
result = false;
}
finally
@@ -506,7 +497,7 @@
}
else
{
- addWarning( reporter, artifact, getI18NString( "warning.missing.pom" ) );
+ addWarning( artifact, getI18NString( "warning.missing.pom" ) );
}
return result;
}
@@ -516,8 +507,8 @@
throws IOException
{
Properties properties = v3Model.getProperties();
- if ( properties.containsKey( "relocated.groupId" ) || properties.containsKey( "relocated.artifactId" ) ||
- properties.containsKey( "relocated.version" ) )
+ if ( properties.containsKey( "relocated.groupId" ) || properties.containsKey( "relocated.artifactId" )
+ || properties.containsKey( "relocated.version" ) )
{
String newGroupId = properties.getProperty( "relocated.groupId", v3Model.getGroupId() );
properties.remove( "relocated.groupId" );
@@ -600,7 +591,7 @@
return i18n.getString( getClass().getName(), Locale.getDefault(), key );
}
- private boolean testChecksums( Artifact artifact, File file, ReportingDatabase reporter )
+ private boolean testChecksums( Artifact artifact, File file )
throws IOException
{
boolean result = true;
@@ -609,7 +600,7 @@
{
Digester digester = (Digester) it.next();
result &= verifyChecksum( file, file.getName() + "." + getDigesterFileExtension( digester ), digester,
- reporter, artifact, "failure.incorrect." + getDigesterFileExtension( digester ) );
+ artifact, "failure.incorrect." + getDigesterFileExtension( digester ) );
}
return result;
}
@@ -623,8 +614,7 @@
return digester.getAlgorithm().toLowerCase().replaceAll( "-", "" );
}
- private boolean verifyChecksum( File file, String fileName, Digester digester, ReportingDatabase reporter,
- Artifact artifact, String key )
+ private boolean verifyChecksum( File file, String fileName, Digester digester, Artifact artifact, String key )
throws IOException
{
boolean result = true;
@@ -639,15 +629,14 @@
}
catch ( DigesterException e )
{
- addFailure( reporter, artifact, key );
+ addFailure( artifact, key );
result = false;
}
}
return result;
}
- private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter,
- FileTransaction transaction )
+ private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, FileTransaction transaction )
throws RepositoryConversionException
{
File sourceFile = artifact.getFile();
@@ -668,7 +657,7 @@
matching = FileUtils.contentEquals( sourceFile, targetFile );
if ( !matching )
{
- addFailure( reporter, artifact, "failure.target.already.exists" );
+ addFailure( artifact, "failure.target.already.exists" );
result = false;
}
}
@@ -676,7 +665,7 @@
{
if ( force || !matching )
{
- if ( testChecksums( artifact, sourceFile, reporter ) )
+ if ( testChecksums( artifact, sourceFile ) )
{
transaction.copyFile( sourceFile, targetFile, digesters );
}
@@ -694,7 +683,7 @@
return result;
}
- public void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter )
+ public void convert( List artifacts, ArtifactRepository targetRepository )
throws RepositoryConversionException
{
for ( Iterator i = artifacts.iterator(); i.hasNext(); )
@@ -703,20 +692,49 @@
try
{
- convert( artifact, targetRepository, reporter );
+ convert( artifact, targetRepository );
}
catch ( RepositoryConversionException e )
{
- // Need to add:
- // artifact
- // processor
- // problem
- // reason
- //TODO: this doesn't really provide any real facility for a decent error message, having
- // the stack trace would be useful. I also have no idea what a processor is currently or
- // how to get hold of it here.
+ triggerConversionEvent( new ConversionEvent( targetRepository, ConversionEvent.ERROR, artifact, e ) );
+ }
+ }
+ }
+
+ /**
+ * Add a listener to the conversion process.
+ *
+ * @param listener the listener to add.
+ */
+ public void addConversionListener( ConversionListener listener )
+ {
+ listeners.add( listener );
+ }
+
+ /**
+ * Remove a listener from the conversion process.
+ *
+ * @param listener the listener to remove.
+ */
+ public void removeConversionListener( ConversionListener listener )
+ {
+ listeners.remove( listener );
+ }
- reporter.addFailure( artifact, "", e.getLocalizedMessage(), e.getCause().getLocalizedMessage() );
+ private void triggerConversionEvent( ConversionEvent event )
+ {
+ Iterator it = listeners.iterator();
+ while ( it.hasNext() )
+ {
+ ConversionListener listener = (ConversionListener) it.next();
+
+ try
+ {
+ listener.conversionEvent( event );
+ }
+ catch ( Throwable t )
+ {
+ getLogger().warn( "ConversionEvent resulted in exception from listener: " + t.getMessage(), t );
}
}
}
Modified: maven/archiva/trunk/archiva-converter/src/main/java/org/apache/maven/archiva/converter/RepositoryConverter.java
URL: http://svn.apache.org/viewvc/maven/archiva/trunk/archiva-converter/src/main/java/org/apache/maven/archiva/converter/RepositoryConverter.java?view=diff&rev=511053&r1=511052&r2=511053
==============================================================================
--- maven/archiva/trunk/archiva-converter/src/main/java/org/apache/maven/archiva/converter/RepositoryConverter.java (original)
+++ maven/archiva/trunk/archiva-converter/src/main/java/org/apache/maven/archiva/converter/RepositoryConverter.java Fri Feb 23 11:05:21 2007
@@ -19,7 +19,6 @@
* under the License.
*/
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
@@ -39,9 +38,8 @@
*
* @param artifact the artifact to convert
* @param targetRepository the target repository
- * @param reporter reporter to track the results of the conversion
*/
- void convert( Artifact artifact, ArtifactRepository targetRepository, ReportingDatabase reporter )
+ void convert( Artifact artifact, ArtifactRepository targetRepository )
throws RepositoryConversionException;
/**
@@ -49,8 +47,21 @@
*
* @param artifacts the set of artifacts to convert
* @param targetRepository the target repository
- * @param reporter reporter to track the results of the conversions
*/
- void convert( List artifacts, ArtifactRepository targetRepository, ReportingDatabase reporter )
+ void convert( List artifacts, ArtifactRepository targetRepository )
throws RepositoryConversionException;
+
+ /**
+ * Add a listener to the conversion process.
+ *
+ * @param listener the listener to add.
+ */
+ void addConversionListener( ConversionListener listener );
+
+ /**
+ * Remove a listener from the conversion process.
+ *
+ * @param listener the listener to remove.
+ */
+ void removeConversionListener( ConversionListener listener );
}
Modified: maven/archiva/trunk/archiva-converter/src/test/java/org/apache/maven/archiva/converter/RepositoryConverterTest.java
URL: http://svn.apache.org/viewvc/maven/archiva/trunk/archiva-converter/src/test/java/org/apache/maven/archiva/converter/RepositoryConverterTest.java?view=diff&rev=511053&r1=511052&r2=511053
==============================================================================
--- maven/archiva/trunk/archiva-converter/src/test/java/org/apache/maven/archiva/converter/RepositoryConverterTest.java (original)
+++ maven/archiva/trunk/archiva-converter/src/test/java/org/apache/maven/archiva/converter/RepositoryConverterTest.java Fri Feb 23 11:05:21 2007
@@ -20,10 +20,6 @@
*/
import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.reporting.database.ReportingDatabase;
-import org.apache.maven.archiva.reporting.group.ReportGroup;
-import org.apache.maven.archiva.reporting.model.ArtifactResults;
-import org.apache.maven.archiva.reporting.model.Result;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.metadata.ArtifactMetadata;
@@ -42,6 +38,8 @@
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;
import java.util.regex.Matcher;
/**
@@ -64,8 +62,6 @@
private ArtifactFactory artifactFactory;
- private ReportingDatabase reportingDatabase;
-
private static final int SLEEP_MILLIS = 100;
private I18N i18n;
@@ -80,25 +76,28 @@
ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "legacy" );
File sourceBase = getTestFile( "src/test/source-repository" );
- sourceRepository =
- factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, null );
+ sourceRepository = factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null,
+ null );
layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
File targetBase = getTestFile( "target/test-target-repository" );
copyDirectoryStructure( getTestFile( "src/test/target-repository" ), targetBase );
- targetRepository =
- factory.createArtifactRepository( "target", targetBase.toURL().toString(), layout, null, null );
+ targetRepository = factory.createArtifactRepository( "target", targetBase.toURL().toString(), layout, null,
+ null );
repositoryConverter = (RepositoryConverter) lookup( RepositoryConverter.ROLE, "default" );
artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
i18n = (I18N) lookup( I18N.ROLE );
+ }
- ReportGroup reportGroup = (ReportGroup) lookup( ReportGroup.ROLE, "health" );
- reportingDatabase = new ReportingDatabase( reportGroup );
+ protected void tearDown()
+ throws Exception
+ {
+ super.tearDown();
}
private void copyDirectoryStructure( File sourceDirectory, File destinationDirectory )
@@ -135,8 +134,8 @@
{
if ( !destination.exists() && !destination.mkdirs() )
{
- throw new IOException(
- "Could not create destination directory '" + destination.getAbsolutePath() + "'." );
+ throw new IOException( "Could not create destination directory '"
+ + destination.getAbsolutePath() + "'." );
}
copyDirectoryStructure( file, destination );
}
@@ -155,20 +154,23 @@
Artifact artifact = createArtifact( "test", "v4artifact", "1.0.0" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( versionMetadata ) );
versionMetadataFile.delete();
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
artifactFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkSuccess(listener);
assertTrue( "Check artifact created", artifactFile.exists() );
assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
@@ -200,17 +202,20 @@
Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( versionMetadata ) );
versionMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkSuccess(listener);
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
@@ -241,22 +246,21 @@
{
Artifact artifact = createArtifact( "test", "relocated-v3artifact", "1.0.0" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( versionMetadata ) );
versionMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
//checkSuccess(); --> commented until MNG-2100 is fixed
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check if relocated artifact created", artifactFile.exists() );
- assertTrue( "Check if relocated artifact matches",
- FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
+ assertTrue( "Check if relocated artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
Artifact pomArtifact = createArtifact( "relocated-test", "relocated-v3artifact", "1.0.0", "1.0.0", "pom" );
File pomFile = getTestFile( "src/test/expected-files/" + targetRepository.pathOf( pomArtifact ) );
File testFile = getTestFile( "target/test-target-repository/" + targetRepository.pathOf( pomArtifact ) );
@@ -276,19 +280,20 @@
Artifact artifact = createArtifact( "test", "v3-warnings-artifact", "1.0.0" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( versionMetadata ) );
versionMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
- assertEquals( "check number of warnings", 2, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 2 );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
@@ -311,17 +316,20 @@
Artifact artifact = createArtifact( "test", "v4artifact", version );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File snapshotMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
+ File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
snapshotMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 0 );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
@@ -354,17 +362,20 @@
Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-SNAPSHOT" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File snapshotMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
+ File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
snapshotMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 0 );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
@@ -410,11 +421,10 @@
public void testMavenOnePluginConversion()
throws Exception
{
- Artifact artifact =
- createArtifact( "org.apache.maven.plugins", "maven-foo-plugin", "1.0", "1.0", "maven-plugin" );
- artifact.setFile(
- new File( getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ Artifact artifact = createArtifact( "org.apache.maven.plugins", "maven-foo-plugin", "1.0", "1.0",
+ "maven-plugin" );
+ artifact.setFile( new File( getBasedir(), "src/test/source-repository/test/plugins/maven-foo-plugin-1.0.jar" ) );
+ repositoryConverter.convert( artifact, targetRepository );
// There is a warning but I can't figure out how to look at it. Eyeballing the results it appears
// the plugin is being coverted correctly.
//checkSuccess();
@@ -424,14 +434,14 @@
assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
/*
- The POM isn't needed for Maven 1.x plugins but the raw conversion for
+ The POM isn't needed for Maven 1.x plugins but the raw conversion for
- artifact = createPomArtifact( artifact );
- File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile = getTestFile( "src/test/expected-files/maven-foo-plugin-1.0.pom" );
- assertTrue( "Check POM created", pomFile.exists() );
- compareFiles( expectedPomFile, pomFile );
- */
+ artifact = createPomArtifact( artifact );
+ File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
+ File expectedPomFile = getTestFile( "src/test/expected-files/maven-foo-plugin-1.0.pom" );
+ assertTrue( "Check POM created", pomFile.exists() );
+ compareFiles( expectedPomFile, pomFile );
+ */
}
public void testV3TimestampedSnapshotPomConvert()
@@ -441,17 +451,20 @@
Artifact artifact = createArtifact( "test", "v3artifact", "1.0.0-20060105.130101-3" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
artifactMetadataFile.delete();
ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File snapshotMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
+ File snapshotMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
snapshotMetadataFile.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 0 );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
@@ -483,11 +496,13 @@
// test that a POM is not created when there was none at the source
Artifact artifact = createArtifact( "test", "noPomArtifact", "1.0.0" );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
- assertEquals( "check warnings", 1, reportingDatabase.getNumWarnings() );
- assertEquals( "check warning message", getI18nString( "warning.missing.pom" ), getWarning().getReason() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 1 );
+
+ assertHasWarningReason( listener, getI18nString( "warning.missing.pom" ) );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
@@ -510,15 +525,19 @@
File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
file.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
- assertEquals( "check failure message", getI18nString( "failure.incorrect.md5" ), getFailure().getReason() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 1, 0 );
+
+ assertHasErrorReason( listener, getI18nString( "failure.incorrect.md5" ) );
assertFalse( "Check artifact not created", file.exists() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
@@ -531,15 +550,19 @@
File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
file.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
- assertEquals( "check failure message", getI18nString( "failure.incorrect.sha1" ), getFailure().getReason() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 1, 0 );
+
+ assertHasErrorReason( listener, getI18nString( "failure.incorrect.sha1" ) );
assertFalse( "Check artifact not created", file.exists() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
@@ -568,8 +591,11 @@
// Need to guarantee last modified is not equal
Thread.sleep( SLEEP_MILLIS );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 0 );
compareFiles( sourceFile, targetFile );
compareFiles( sourcePomFile, targetPomFile );
@@ -604,17 +630,20 @@
// Need to guarantee last modified is not equal
Thread.sleep( SLEEP_MILLIS );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
- assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
- getFailure().getReason() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 1, 0 );
+
+ assertHasErrorReason( listener, getI18nString( "failure.target.already.exists" ) );
assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
@@ -641,8 +670,11 @@
sourceFile.setLastModified( dateFormat.parse( "2006-01-01" ).getTime() );
sourcePomFile.setLastModified( dateFormat.parse( "2006-02-02" ).getTime() );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 0 );
compareFiles( sourceFile, targetFile );
compareFiles( sourcePomFile, targetPomFile );
@@ -651,8 +683,8 @@
assertFalse( "Check modified", origTime == targetPomFile.lastModified() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertTrue( "Check metadata created", metadataFile.exists() );
}
@@ -671,8 +703,11 @@
File targetFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
File targetPomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( pomArtifact ) );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 0, 0 );
assertTrue( "Check source file exists", sourceFile.exists() );
assertTrue( "Check source POM exists", sourcePomFile.exists() );
@@ -681,8 +716,8 @@
assertFalse( "Check target POM doesn't exist", targetPomFile.exists() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
@@ -713,17 +748,20 @@
// Need to guarantee last modified is not equal
Thread.sleep( SLEEP_MILLIS );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
- assertEquals( "Check failure message", getI18nString( "failure.target.already.exists" ),
- getFailure().getReason() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkFailure(listener);
+
+ assertHasErrorReason( listener, getI18nString( "failure.target.already.exists" ) );
assertEquals( "Check unmodified", origTime, targetFile.lastModified() );
assertEquals( "Check unmodified", origPomTime, targetPomFile.lastModified() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
@@ -734,20 +772,38 @@
Artifact artifact = createArtifact( "test", "rollback-created-artifact", "1.0.0" );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
FileUtils.deleteDirectory( artifactMetadataFile.getParentFile() );
ArtifactMetadata versionMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
- File versionMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( versionMetadata ) );
+ File versionMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( versionMetadata ) );
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkCounts( listener, 1, 0 );
+
+ List messages = (List) listener.getErrors().get( listener.toKey( artifact ));
+ assertNotNull("Should have error messages.", messages);
+
+ boolean found = false;
String pattern = "^" + getI18nString( "failure.invalid.source.pom" ).replaceFirst( "\\{0\\}", ".*" ) + "$";
- assertTrue( "Check failure message", getFailure().getReason().matches( pattern ) );
+ for ( Iterator it = messages.iterator(); it.hasNext(); )
+ {
+ String reason = (String) it.next();
+ if( reason.matches( pattern ) )
+ {
+ found = true;
+ break;
+ }
+ }
+
+ assertTrue( "Check failure message.", found );
assertFalse( "check artifact rolled back", artifactFile.exists() );
assertFalse( "check metadata rolled back", artifactMetadataFile.exists() );
@@ -763,10 +819,12 @@
artifacts.add( createArtifact( "test", "artifact-one", "1.0.0" ) );
artifacts.add( createArtifact( "test", "artifact-two", "1.0.0" ) );
artifacts.add( createArtifact( "test", "artifact-three", "1.0.0" ) );
- repositoryConverter.convert( artifacts, targetRepository, reportingDatabase );
- assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
- assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifacts, targetRepository );
+ checkCounts( listener, 0, 0 );
for ( Iterator i = artifacts.iterator(); i.hasNext(); )
{
@@ -778,8 +836,8 @@
artifact = createPomArtifact( artifact );
File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
- File expectedPomFile =
- getTestFile( "src/test/expected-files/converted-" + artifact.getArtifactId() + ".pom" );
+ File expectedPomFile = getTestFile( "src/test/expected-files/converted-" + artifact.getArtifactId()
+ + ".pom" );
assertTrue( "Check POM created", pomFile.exists() );
compareFiles( expectedPomFile, pomFile );
@@ -797,16 +855,19 @@
File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
file.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
- assertEquals( "check failure message", getI18nString( "failure.incorrect.artifactMetadata.versions" ),
- getFailure().getReason() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkFailure(listener);
+
+ assertHasErrorReason( listener, getI18nString( "failure.incorrect.artifactMetadata.versions" ) );
assertFalse( "Check artifact not created", file.exists() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
@@ -821,16 +882,19 @@
File file = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
file.delete();
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkFailure();
- assertEquals( "check failure message", getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ),
- getFailure().getReason() );
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkFailure(listener);
+
+ assertHasErrorReason( listener, getI18nString( "failure.incorrect.snapshotMetadata.snapshot" ) );
assertFalse( "Check artifact not created", file.exists() );
ArtifactRepositoryMetadata metadata = new ArtifactRepositoryMetadata( artifact );
- File metadataFile =
- new File( targetRepository.getBasedir(), targetRepository.pathOfRemoteRepositoryMetadata( metadata ) );
+ File metadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( metadata ) );
assertFalse( "Check metadata not created", metadataFile.exists() );
}
@@ -840,9 +904,11 @@
// test artifact level metadata is merged when it already exists on successful conversion
Artifact artifact = createArtifact( "test", "newversion-artifact", "1.0.1" );
-
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
- checkSuccess();
+ MockConversionListener listener = new MockConversionListener();
+
+ repositoryConverter.addConversionListener( listener );
+ repositoryConverter.convert( artifact, targetRepository );
+ checkSuccess(listener);
File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
assertTrue( "Check artifact created", artifactFile.exists() );
@@ -856,8 +922,8 @@
compareFiles( sourcePomFile, pomFile );
ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
- File artifactMetadataFile = new File( targetRepository.getBasedir(),
- targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
+ File artifactMetadataFile = new File( targetRepository.getBasedir(), targetRepository
+ .pathOfRemoteRepositoryMetadata( artifactMetadata ) );
assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
File expectedMetadataFile = getTestFile( "src/test/expected-files/newversion-artifact-metadata.xml" );
@@ -872,14 +938,14 @@
ArtifactRepositoryFactory factory = (ArtifactRepositoryFactory) lookup( ArtifactRepositoryFactory.ROLE );
- sourceRepository = factory.createArtifactRepository( "source", targetRepository.getUrl(),
- targetRepository.getLayout(), null, null );
+ sourceRepository = factory.createArtifactRepository( "source", targetRepository.getUrl(), targetRepository
+ .getLayout(), null, null );
Artifact artifact = createArtifact( "test", "repository-artifact", "1.0" );
try
{
- repositoryConverter.convert( artifact, targetRepository, reportingDatabase );
+ repositoryConverter.convert( artifact, targetRepository );
fail( "Should have failed trying to convert within the same repository" );
}
catch ( RepositoryConversionException e )
@@ -905,8 +971,7 @@
return createArtifact( groupId, artifactId, baseVersion, version, "jar" );
}
- private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version,
- String type )
+ private Artifact createArtifact( String groupId, String artifactId, String baseVersion, String version, String type )
{
Artifact artifact = artifactFactory.createArtifact( groupId, artifactId, version, null, type );
artifact.setBaseVersion( baseVersion );
@@ -917,8 +982,8 @@
private Artifact createPomArtifact( Artifact artifact )
{
- return createArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion(),
- artifact.getVersion(), "pom" );
+ return createArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getBaseVersion(), artifact
+ .getVersion(), "pom" );
}
private static void compareFiles( File expectedPomFile, File pomFile )
@@ -926,8 +991,7 @@
{
String expectedContent = normalizeString( FileUtils.readFileToString( expectedPomFile, null ) );
String targetContent = normalizeString( FileUtils.readFileToString( pomFile, null ) );
- assertEquals( "Check file match between " + expectedPomFile + " and " + pomFile, expectedContent,
- targetContent );
+ assertEquals( "Check file match between " + expectedPomFile + " and " + pomFile, expectedContent, targetContent );
}
private static String normalizeString( String path )
@@ -935,35 +999,79 @@
return path.trim().replaceAll( "\r\n", "\n" ).replace( '\r', '\n' ).replaceAll( "<\\?xml .+\\?>", "" );
}
- private void checkSuccess()
+ private void checkSuccess(MockConversionListener listener)
+ {
+ checkCounts( listener, 0, 0 );
+ }
+
+ private void checkFailure(MockConversionListener listener)
{
- assertEquals( "check no errors", 0, reportingDatabase.getNumFailures() );
- assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ checkCounts( listener, 1, 0 );
}
- private void checkFailure()
+ private void checkCounts( MockConversionListener listener, int failures, int warnings )
{
- assertEquals( "check num errors", 1, reportingDatabase.getNumFailures() );
- assertEquals( "check no warnings", 0, reportingDatabase.getNumWarnings() );
- assertEquals( "check no notices", 0, reportingDatabase.getNumNotices() );
+ int actualFailures = listener.getErrorMessageCount();
+ int actualWarnings = listener.getWarningMessageCount();
+
+ if ( ( failures != actualFailures ) || ( warnings != actualWarnings ) )
+ {
+ fail( "Check Results Counts expected:<" + failures + "," + warnings + "> but was:<" + actualFailures + ","
+ + actualWarnings + ">" );
+ }
}
private String getI18nString( String key )
{
return i18n.getString( repositoryConverter.getClass().getName(), Locale.getDefault(), key );
}
-
- private Result getFailure()
+
+ private void assertHasWarningReason( MockConversionListener listener, String reason )
{
- ArtifactResults artifact = (ArtifactResults) reportingDatabase.getArtifactIterator().next();
- return (Result) artifact.getFailures().get( 0 );
+ assertHasMessage( listener.getWarnings(), "warning", reason );
}
- private Result getWarning()
+ private void assertHasErrorReason( MockConversionListener listener, String reason )
+ {
+ assertHasMessage( listener.getErrors(), "error", reason );
+ }
+
+ private void assertHasMessage( Map map, String type, String message )
{
- ArtifactResults artifact = (ArtifactResults) reportingDatabase.getArtifactIterator().next();
- return (Result) artifact.getWarnings().get( 0 );
+ if ( ( map == null ) || ( map.isEmpty() ) )
+ {
+ fail( "No " + type + "s captured, expected " + type + " <" + message + ">" );
+ }
+
+ // Attempt to find the message ...
+ for ( Iterator it = map.values().iterator(); it.hasNext(); )
+ {
+ List msgList = (List) it.next();
+
+ if ( msgList.contains( message ) )
+ {
+ // Found it!
+ return;
+ }
+ }
+
+ // Didn't find it! whoops ...
+ for ( Iterator it = map.entrySet().iterator(); it.hasNext(); )
+ {
+ Map.Entry entry = (Entry) it.next();
+ String key = (String) entry.getKey();
+ List msgList = (List) entry.getValue();
+
+ System.err.println( " Artifact: " + key );
+
+ for ( Iterator itMsgs = msgList.iterator(); itMsgs.hasNext(); )
+ {
+ String msg = (String) itMsgs.next();
+ System.err.println( " " + msg );
+ }
+ }
+
+ fail( "Unable to find " + type + " reason <" + message + "> in any artifact." );
}
private void createModernSourceRepository()
@@ -974,7 +1082,7 @@
ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
File sourceBase = getTestFile( "src/test/source-modern-repository" );
- sourceRepository =
- factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null, null );
+ sourceRepository = factory.createArtifactRepository( "source", sourceBase.toURL().toString(), layout, null,
+ null );
}
}
Modified: maven/archiva/trunk/archiva-converter/src/test/resources/org/apache/maven/archiva/converter/RepositoryConverterTest.xml
URL: http://svn.apache.org/viewvc/maven/archiva/trunk/archiva-converter/src/test/resources/org/apache/maven/archiva/converter/RepositoryConverterTest.xml?view=diff&rev=511053&r1=511052&r2=511053
==============================================================================
--- maven/archiva/trunk/archiva-converter/src/test/resources/org/apache/maven/archiva/converter/RepositoryConverterTest.xml (original)
+++ maven/archiva/trunk/archiva-converter/src/test/resources/org/apache/maven/archiva/converter/RepositoryConverterTest.xml Fri Feb 23 11:05:21 2007
@@ -16,10 +16,11 @@
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
- -->
+-->
<component-set>
<components>
+
<component>
<role>org.apache.maven.archiva.converter.RepositoryConverter</role>
<implementation>org.apache.maven.archiva.converter.DefaultRepositoryConverter</implementation>
@@ -37,10 +38,6 @@
<field-name>artifactFactory</field-name>
</requirement>
<requirement>
- <role>org.apache.maven.model.converter.ArtifactPomRewriter</role>
- <field-name>rewriter</field-name>
- </requirement>
- <requirement>
<role>org.codehaus.plexus.i18n.I18N</role>
<field-name>i18n</field-name>
</requirement>
@@ -63,14 +60,66 @@
<field-name>artifactFactory</field-name>
</requirement>
<requirement>
- <role>org.apache.maven.model.converter.ArtifactPomRewriter</role>
- <field-name>rewriter</field-name>
- </requirement>
- <requirement>
<role>org.codehaus.plexus.i18n.I18N</role>
<field-name>i18n</field-name>
</requirement>
</requirements>
+ </component>
+
+
+ <component>
+ <role>org.codehaus.plexus.jdo.JdoFactory</role>
+ <role-hint>archiva</role-hint>
+ <implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
+
+ <configuration>
+ <!-- Database Configuration -->
+ <driverName>org.hsqldb.jdbcDriver</driverName>
+ <url>jdbc:hsqldb:mem:TESTDB</url>
+ <userName>sa</userName>
+ <password></password>
+
+ <!-- JPOX and JDO configuration -->
+ <persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
+ <otherProperties>
+ <property>
+ <name>javax.jdo.PersistenceManagerFactoryClass</name>
+ <value>org.jpox.PersistenceManagerFactoryImpl</value>
+ </property>
+ <property>
+ <name>org.jpox.autoCreateSchema</name>
+ <value>true</value>
+ </property>
+ <property>
+ <name>org.jpox.validateTables</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.validateConstraints</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.validateColumns</name>
+ <value>false</value>
+ </property>
+ <property>
+ <name>org.jpox.autoStartMechanism</name>
+ <value>None</value>
+ </property>
+ <property>
+ <name>org.jpox.transactionIsolation</name>
+ <value>READ_UNCOMMITTED</value>
+ </property>
+ <property>
+ <name>org.jpox.poid.transactionIsolation</name>
+ <value>READ_UNCOMMITTED</value>
+ </property>
+ <property>
+ <name>org.jpox.rdbms.dateTimezone</name>
+ <value>JDK_DEFAULT_TIMEZONE</value>
+ </property>
+ </otherProperties>
+ </configuration>
</component>
</components>
</component-set>
Propchange: maven/archiva/trunk/archiva-core/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Fri Feb 23 11:05:21 2007
@@ -1,5 +1,15 @@
-*.iml
target
+*~
+.*.swp
+*.log
+*.patch
+*.diff
+*.ipr
+*.iws
+*.iml
.classpath
.project
+.m2eclipse
.settings
+.wtpmodules
+cobertura.ser
Modified: maven/archiva/trunk/archiva-core/pom.xml
URL: http://svn.apache.org/viewvc/maven/archiva/trunk/archiva-core/pom.xml?view=diff&rev=511053&r1=511052&r2=511053
==============================================================================
--- maven/archiva/trunk/archiva-core/pom.xml (original)
+++ maven/archiva/trunk/archiva-core/pom.xml Fri Feb 23 11:05:21 2007
@@ -35,10 +35,6 @@
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
- <artifactId>archiva-converter</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-discoverer</artifactId>
</dependency>
<dependency>
@@ -78,6 +74,13 @@
<version>1.0-alpha-1</version>
<scope>test</scope>
</dependency>
+ <!-- TEST DEPS -->
+ <dependency>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ <version>1.7.3.3</version>
+ <scope>test</scope>
+ </dependency>
<!-- needed for PlexusTestCase -->
<dependency>
<groupId>org.codehaus.plexus</groupId>
@@ -88,16 +91,22 @@
<build>
<plugins>
<plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>cobertura-maven-plugin</artifactId>
- <!-- TODO! add unit tests -->
- <configuration>
- <instrumentation>
- <excludes>
- <exclude>**/**</exclude>
- </excludes>
- </instrumentation>
- </configuration>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>merge</id>
+ <goals>
+ <goal>merge-descriptors</goal>
+ </goals>
+ <configuration>
+ <descriptors>
+ <descriptor>${basedir}/src/main/resources/META-INF/plexus/components.xml</descriptor>
+ <descriptor>${project.build.directory}/generated-resources/plexus/META-INF/plexus/components.xml</descriptor>
+ </descriptors>
+ </configuration>
+ </execution>
+ </executions>
</plugin>
</plugins>
</build>
Modified: maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/repositories/ActiveManagedRepositories.java
URL: http://svn.apache.org/viewvc/maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/repositories/ActiveManagedRepositories.java?view=diff&rev=511053&r1=511052&r2=511053
==============================================================================
--- maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/repositories/ActiveManagedRepositories.java (original)
+++ maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/repositories/ActiveManagedRepositories.java Fri Feb 23 11:05:21 2007
@@ -19,7 +19,7 @@
* under the License.
*/
-import org.apache.maven.archiva.artifact.ManagedArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
@@ -31,7 +31,7 @@
/**
* ActiveManagedRepositories
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public interface ActiveManagedRepositories
@@ -44,12 +44,27 @@
* @param id the ID of the repository.
* @return the ArtifactRepository associated with the provided ID, or null if none found.
*/
- ArtifactRepository getArtifactRepository( String id );
+ public ArtifactRepository getArtifactRepository( String id );
- List getAllArtifactRepositories();
+ /**
+ * Get the List of active managed repositories as a List of {@link ArtifactRepository} objects.
+ *
+ * @return the list of ArtifactRepository objects.
+ */
+ public List /*<ArtifactRepository>*/getAllArtifactRepositories();
RepositoryConfiguration getRepositoryConfiguration( String id );
+ /**
+ * Providing only a groupId, artifactId, and version, return the MavenProject that
+ * is found, in any managed repository.
+ *
+ * @param groupId the groupId to search for
+ * @param artifactId the artifactId to search for
+ * @param version the version to search for
+ * @return the MavenProject from the provided parameters.
+ * @throws ProjectBuildingException if there was a problem building the maven project object.
+ */
MavenProject findProject( String groupId, String artifactId, String version )
throws ProjectBuildingException;
@@ -59,4 +74,20 @@
ManagedArtifact findArtifact( String groupId, String artifactId, String version, String type );
ManagedArtifact findArtifact( Artifact artifact );
+
+ /**
+ * Obtain the last data refresh timestamp for all Managed Repositories.
+ *
+ * @return the last data refresh timestamp.
+ */
+ long getLastDataRefreshTime();
+
+ /**
+ * Tests to see if there needs to be a data refresh performed.
+ *
+ * The only valid scenario is if 1 or more repositories have not had their data refreshed ever.
+ *
+ * @return true if there needs to be a data refresh.
+ */
+ boolean needsDataRefresh();
}
Modified: maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositories.java
URL: http://svn.apache.org/viewvc/maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositories.java?view=diff&rev=511053&r1=511052&r2=511053
==============================================================================
--- maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositories.java (original)
+++ maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/repositories/DefaultActiveManagedRepositories.java Fri Feb 23 11:05:21 2007
@@ -20,14 +20,15 @@
*/
import org.apache.commons.lang.StringUtils;
-import org.apache.maven.archiva.artifact.ManagedArtifact;
-import org.apache.maven.archiva.artifact.ManagedArtifactTypes;
-import org.apache.maven.archiva.artifact.ManagedEjbArtifact;
-import org.apache.maven.archiva.artifact.ManagedJavaArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedArtifactTypes;
+import org.apache.maven.archiva.common.artifact.managed.ManagedEjbArtifact;
+import org.apache.maven.archiva.common.artifact.managed.ManagedJavaArtifact;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.archiva.discoverer.DiscovererStatistics;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
@@ -49,7 +50,7 @@
/**
* DefaultActiveManagedRepositories
*
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
* @plexus.component role="org.apache.maven.archiva.repositories.ActiveManagedRepositories"
*/
@@ -224,6 +225,7 @@
repositories = repositoryFactory.createRepositories( this.configuration );
localRepository = repositoryFactory.createLocalRepository( this.configuration );
+
}
private ManagedArtifact createManagedArtifact( ArtifactRepository repository, Artifact artifact, File f )
@@ -283,13 +285,49 @@
{
if ( propertyName.startsWith( "repositories" ) || propertyName.startsWith( "localRepository" ) )
{
- getLogger().debug( "Triggering managed repository configuration change with " + propertyName + " set to " +
- propertyValue );
+ getLogger().debug(
+ "Triggering managed repository configuration change with " + propertyName + " set to "
+ + propertyValue );
configureSelf( archivaConfiguration.getConfiguration() );
}
else
{
getLogger().debug( "Not triggering managed repository configuration change with " + propertyName );
}
+ }
+
+ public long getLastDataRefreshTime()
+ {
+ long lastDataRefreshTime = 0;
+
+ for ( Iterator i = getAllArtifactRepositories().iterator(); i.hasNext(); )
+ {
+ ArtifactRepository repository = (ArtifactRepository) i.next();
+
+ DiscovererStatistics stats = new DiscovererStatistics( repository );
+ if ( stats.getTimestampFinished() > lastDataRefreshTime )
+ {
+ lastDataRefreshTime = stats.getTimestampFinished();
+ }
+ }
+
+ return lastDataRefreshTime;
+ }
+
+ public boolean needsDataRefresh()
+ {
+ for ( Iterator i = getAllArtifactRepositories().iterator(); i.hasNext(); )
+ {
+ ArtifactRepository repository = (ArtifactRepository) i.next();
+
+ DiscovererStatistics stats = new DiscovererStatistics( repository );
+ if ( stats.getTimestampFinished() <= 0 )
+ {
+ // Found a repository that has NEVER had its data walked.
+ return true;
+ }
+ }
+
+ return false;
}
}
Modified: maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java
URL: http://svn.apache.org/viewvc/maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java?view=diff&rev=511053&r1=511052&r2=511053
==============================================================================
--- maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java (original)
+++ maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/DefaultRepositoryTaskScheduler.java Fri Feb 23 11:05:21 2007
@@ -21,11 +21,8 @@
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndex;
-import org.apache.maven.archiva.indexer.RepositoryArtifactIndexFactory;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.scheduler.executors.IndexerTaskExecutor;
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
+import org.apache.maven.archiva.repositories.ActiveManagedRepositories;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
import org.apache.maven.archiva.scheduler.task.RepositoryTask;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
@@ -42,7 +39,6 @@
import org.quartz.JobDetail;
import org.quartz.SchedulerException;
-import java.io.File;
import java.text.ParseException;
/**
@@ -61,28 +57,23 @@
private Scheduler scheduler;
/**
- * @plexus.requirement role-hint="indexer"
+ * @plexus.requirement role-hint="data-refresh"
*/
- private TaskQueue indexerQueue;
-
- /**
- * @plexus.requirement role="org.codehaus.plexus.taskqueue.execution.TaskExecutor" role-hint="indexer"
- */
- private IndexerTaskExecutor indexerTaskExecutor;
+ private TaskQueue datarefreshQueue;
/**
* @plexus.requirement
*/
private ArchivaConfiguration archivaConfiguration;
-
+
/**
* @plexus.requirement
*/
- private RepositoryArtifactIndexFactory indexFactory;
+ private ActiveManagedRepositories activeRepositories;
private static final String DISCOVERER_GROUP = "DISCOVERER";
- private static final String INDEXER_JOB = "indexerTask";
+ private static final String DATA_REFRESH_JOB = "dataRefreshTask";
public void start()
throws StartingException
@@ -92,11 +83,11 @@
try
{
- scheduleJobs( configuration.getIndexPath(), configuration.getIndexerCronExpression() );
+ scheduleJobs( configuration.getDataRefreshCronExpression() );
}
catch ( ParseException e )
{
- throw new StartingException( "Invalid configuration: " + configuration.getIndexerCronExpression(), e );
+ throw new StartingException( "Invalid configuration: " + configuration.getDataRefreshCronExpression(), e );
}
catch ( SchedulerException e )
{
@@ -104,29 +95,22 @@
}
}
- private void scheduleJobs( String indexPath, String indexerCronExpression )
+ private void scheduleJobs( String indexerCronExpression )
throws ParseException, SchedulerException
{
- if ( indexPath != null )
- {
- JobDetail jobDetail = createJobDetail( INDEXER_JOB );
+ JobDetail jobDetail = createJobDetail( DATA_REFRESH_JOB );
- getLogger().info( "Scheduling indexer: " + indexerCronExpression );
- CronTrigger trigger = new CronTrigger( INDEXER_JOB + "Trigger", DISCOVERER_GROUP, indexerCronExpression );
- scheduler.scheduleJob( jobDetail, trigger );
+ getLogger().info( "Scheduling data-refresh: " + indexerCronExpression );
+ CronTrigger trigger = new CronTrigger( DATA_REFRESH_JOB + "Trigger", DISCOVERER_GROUP, indexerCronExpression );
+ scheduler.scheduleJob( jobDetail, trigger );
- try
- {
- queueNowIfNeeded();
- }
- catch ( org.codehaus.plexus.taskqueue.execution.TaskExecutionException e )
- {
- getLogger().error( "Error executing task first time, continuing anyway: " + e.getMessage(), e );
- }
+ try
+ {
+ queueNowIfNeeded();
}
- else
+ catch ( org.codehaus.plexus.taskqueue.execution.TaskExecutionException e )
{
- getLogger().info( "Not scheduling indexer - index path is not configured" );
+ getLogger().error( "Error executing task first time, continuing anyway: " + e.getMessage(), e );
}
}
@@ -135,7 +119,7 @@
JobDetail jobDetail = new JobDetail( jobName, DISCOVERER_GROUP, RepositoryTaskJob.class );
JobDataMap dataMap = new JobDataMap();
- dataMap.put( RepositoryTaskJob.TASK_QUEUE, indexerQueue );
+ dataMap.put( RepositoryTaskJob.TASK_QUEUE, datarefreshQueue );
dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, RepositoryTask.QUEUE_POLICY_SKIP );
jobDetail.setJobDataMap( dataMap );
@@ -147,7 +131,7 @@
{
try
{
- scheduler.unscheduleJob( INDEXER_JOB, DISCOVERER_GROUP );
+ scheduler.unscheduleJob( DATA_REFRESH_JOB, DISCOVERER_GROUP );
}
catch ( SchedulerException e )
{
@@ -163,7 +147,7 @@
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
- if ( "indexPath".equals( propertyName ) || "indexerCronExpression".equals( propertyName ) )
+ if ( "dataRefreshCronExpression".equals( propertyName ) )
{
getLogger().debug( "Restarting task scheduler with new configuration after property change: " +
propertyName + " to " + propertyValue );
@@ -179,7 +163,7 @@
try
{
Configuration configuration = archivaConfiguration.getConfiguration();
- scheduleJobs( configuration.getIndexPath(), configuration.getIndexerCronExpression() );
+ scheduleJobs( configuration.getDataRefreshCronExpression() );
}
catch ( ParseException e )
{
@@ -199,43 +183,27 @@
}
}
- public void runIndexer()
- throws org.apache.maven.archiva.scheduler.TaskExecutionException
+ public void runDataRefresh()
+ throws TaskExecutionException
{
- IndexerTask task = new IndexerTask();
- task.setJobName( "INDEX_INIT" );
+ DataRefreshTask task = new DataRefreshTask();
+ task.setJobName( "DATA_REFRESH_INIT" );
try
{
- indexerQueue.put( task );
+ datarefreshQueue.put( task );
}
catch ( TaskQueueException e )
{
- throw new org.apache.maven.archiva.scheduler.TaskExecutionException( e.getMessage(), e );
+ throw new TaskExecutionException( e.getMessage(), e );
}
}
public void queueNowIfNeeded()
- throws org.codehaus.plexus.taskqueue.execution.TaskExecutionException
+ throws TaskExecutionException
{
- Configuration configuration = archivaConfiguration.getConfiguration();
-
- File indexPath = new File( configuration.getIndexPath() );
-
- try
+ if ( activeRepositories.needsDataRefresh() )
{
- RepositoryArtifactIndex artifactIndex = indexFactory.createStandardIndex( indexPath );
- if ( !artifactIndex.exists() )
- {
- runIndexer();
- }
- }
- catch ( RepositoryIndexException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
- }
- catch ( org.apache.maven.archiva.scheduler.TaskExecutionException e )
- {
- throw new TaskExecutionException( e.getMessage(), e );
+ runDataRefresh();
}
}
Modified: maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java
URL: http://svn.apache.org/viewvc/maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java?view=diff&rev=511053&r1=511052&r2=511053
==============================================================================
--- maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java (original)
+++ maven/archiva/trunk/archiva-core/src/main/java/org/apache/maven/archiva/scheduler/RepositoryTaskJob.java Fri Feb 23 11:05:21 2007
@@ -19,7 +19,7 @@
* under the License.
*/
-import org.apache.maven.archiva.scheduler.task.IndexerTask;
+import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
import org.apache.maven.archiva.scheduler.task.RepositoryTask;
import org.codehaus.plexus.scheduler.AbstractJob;
import org.codehaus.plexus.taskqueue.TaskQueue;
@@ -53,27 +53,27 @@
JobDataMap dataMap = context.getJobDetail().getJobDataMap();
setJobDataMap( dataMap );
- TaskQueue indexerQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
+ TaskQueue taskQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
String queuePolicy = dataMap.get( TASK_QUEUE_POLICY ).toString();
- RepositoryTask task = new IndexerTask();
+ RepositoryTask task = new DataRefreshTask();
task.setJobName( context.getJobDetail().getName() );
try
{
- if ( indexerQueue.getQueueSnapshot().size() == 0 )
+ if ( taskQueue.getQueueSnapshot().size() == 0 )
{
- indexerQueue.put( task );
+ taskQueue.put( task );
}
else
{
if ( RepositoryTask.QUEUE_POLICY_WAIT.equals( queuePolicy ) )
{
- indexerQueue.put( task );
+ taskQueue.put( task );
}
else if ( RepositoryTask.QUEUE_POLICY_SKIP.equals( queuePolicy ) )
{
- //do not queue anymore, policy is to skip
+ // do not queue anymore, policy is to skip
}
}
}