Package org.apache.maven.archiva.configuration

Examples of org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration


    // The excerpt begins mid-class; a standard JUnit setUp() signature is assumed here.
    protected void setUp()
        throws Exception
    {
        super.setUp();

        File repoDir = getTestFile( "src/test/repositories/legacy-repository" );

        // Build a repository configuration pointing at the legacy-format test repository
        ManagedRepositoryConfiguration repository = createRepository( "testRepo", "Unit Test Repo", repoDir );
        repository.setLayout( "legacy" );

        // Look up the ManagedRepositoryContent implementation registered for the "legacy" layout
        repoContent = (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class, "legacy" );
        repoContent.setRepository( repository );
    }
View Full Code Here
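
The createRepository( id, name, dir ) helper used in this setUp is not part of the excerpt. A minimal sketch of what such a helper presumably does with ManagedRepositoryConfiguration (the body below is an assumption, not the project's code):

    import java.io.File;
    import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;

    public class RepositoryFixtures
    {
        // Hypothetical helper: builds a minimal repository configuration for a test.
        static ManagedRepositoryConfiguration createRepository( String id, String name, File location )
        {
            ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
            repository.setId( id );
            repository.setName( name );
            repository.setLocation( location.getAbsolutePath() );
            return repository;
        }
    }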


   
    public String doUpload()
    {
        try
        {
            ManagedRepositoryConfiguration repoConfig =
                configuration.getConfiguration().findManagedRepositoryById( repositoryId );

            ArtifactReference artifactReference = new ArtifactReference();
            artifactReference.setArtifactId( artifactId );
            artifactReference.setGroupId( groupId );
            artifactReference.setVersion( version );
            artifactReference.setClassifier( classifier );
            artifactReference.setType( packaging );

            ManagedRepositoryContent repository = repositoryFactory.getManagedRepositoryContent( repositoryId );

            String artifactPath = repository.toPath( artifactReference );

            // Split the artifact path into its parent directory and file name
            int lastIndex = artifactPath.lastIndexOf( '/' );

            File targetPath = new File( repoConfig.getLocation(), artifactPath.substring( 0, lastIndex ) );

            Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
            int newBuildNumber = -1;
            String timestamp = null;
           
            File metadataFile = getMetadata( targetPath.getAbsolutePath() );
            ArchivaRepositoryMetadata metadata = getMetadata( metadataFile );

            // For snapshots, compute a UTC timestamp and the next build number from the existing metadata
            if ( VersionUtil.isSnapshot( version ) )
            {
                TimeZone timezone = TimeZone.getTimeZone( "UTC" );
                DateFormat fmt = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
                fmt.setTimeZone( timezone );
                timestamp = fmt.format( lastUpdatedTimestamp );
                if ( metadata.getSnapshotVersion() != null )
                {
                    newBuildNumber = metadata.getSnapshotVersion().getBuildNumber() + 1;
                }
                else
                {
                    metadata.setSnapshotVersion( new SnapshotVersion() );
                    newBuildNumber = 1;
                }
            }

            if ( !targetPath.exists() )
            {
                targetPath.mkdirs();
            }

            String filename = artifactPath.substring( lastIndex + 1 );
            // Timestamped snapshots: replace the SNAPSHOT token with <timestamp>-<buildNumber>
            if ( VersionUtil.isSnapshot( version ) )
            {
                filename = filename.replaceAll( "SNAPSHOT", timestamp + "-" + newBuildNumber );
            }

            try
            {
                copyFile( artifactFile, targetPath, filename );
                queueRepositoryTask( repository.getId(), repository.toFile( artifactReference ) );
                //consumers.executeConsumers( repoConfig, repository.toFile( artifactReference ) );
            }
            catch ( IOException ie )
            {
                addActionError( "Error encountered while uploading file: " + ie.getMessage() );
                return ERROR;
            }

            // Derive the POM file name from the artifact file name (strip the classifier, swap the extension)
            String pomFilename = filename;
            if( classifier != null && !"".equals( classifier ) )
            {
                pomFilename = StringUtils.remove( pomFilename, "-" + classifier );
            }
            pomFilename = FilenameUtils.removeExtension( pomFilename ) + ".pom";
               
            if ( generatePom )
            {
                try
                {
                    File generatedPomFile = createPom( targetPath, pomFilename );
                    queueRepositoryTask( repoConfig.getId(), generatedPomFile );
                    //consumers.executeConsumers( repoConfig, generatedPomFile );
                }
                catch ( IOException ie )
                {
                    addActionError( "Error encountered while writing pom file: " + ie.getMessage() );
                    return ERROR;
                }
                catch ( ProjectModelException pe )
                {
                    addActionError( "Error encountered while generating pom file: " + pe.getMessage() );
                    return ERROR;
                }
            }
           
            if ( pomFile != null && pomFile.length() > 0 )
            {
                try
                {                   
                    copyFile( pomFile, targetPath, pomFilename );
                    queueRepositoryTask( repoConfig.getId(), new File( targetPath, pomFilename ) );
                    //consumers.executeConsumers( repoConfig, new File( targetPath, pomFilename ) );
                }
                catch ( IOException ie )
                {
                    addActionError( "Error encountered while uploading pom file: " + ie.getMessage() );
View Full Code Here
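
The snapshot-naming step in doUpload is easy to exercise on its own. A self-contained sketch using the same UTC yyyyMMdd.HHmmss format; the artifact name and build number are illustrative, since the action derives the build number from the repository metadata:

    import java.text.DateFormat;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class SnapshotFilename
    {
        public static void main( String[] args )
        {
            // Same format and time zone as the upload action: UTC yyyyMMdd.HHmmss
            DateFormat fmt = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
            fmt.setTimeZone( TimeZone.getTimeZone( "UTC" ) );
            String timestamp = fmt.format( new Date() );

            int newBuildNumber = 1; // illustrative; taken from the metadata in the action
            String filename = "example-artifact-1.0-SNAPSHOT.jar";

            // Produces e.g. example-artifact-1.0-20180101.120000-1.jar
            System.out.println( filename.replaceAll( "SNAPSHOT", timestamp + "-" + newBuildNumber ) );
        }
    }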

        super.setUp();
       
        indexingExecutor = new ArchivaIndexingTaskExecutor();
        indexingExecutor.initialize();   
       
        repositoryConfig = new ManagedRepositoryConfiguration();
        repositoryConfig.setId( "test-repo" );
        repositoryConfig.setLocation( getBasedir() + "/target/test-classes/test-repo" );
        repositoryConfig.setLayout( "default" );
        repositoryConfig.setName( "Test Repository" );
        repositoryConfig.setScanned( true );
View Full Code Here

        if ( StringUtils.isBlank( repoTask.getRepositoryId() ) )
        {
            throw new TaskExecutionException( "Unable to execute RepositoryTask with blank repository Id." );
        }

        // Resolve the managed repository configuration referenced by the task
        ManagedRepositoryConfiguration arepo = archivaConfiguration.getConfiguration().findManagedRepositoryById( repoTask.getRepositoryId() );
       
        // execute consumers on resource file if set
        if( repoTask.getResourceFile() != null )
        {
            log.debug( "Executing task from queue with job name: " + repoTask.getName() );
            consumers.executeConsumers( arepo, repoTask.getResourceFile() );
        }
        else
        {
            log.info( "Executing task from queue with job name: " + repoTask.getName() );
           
            // otherwise, execute consumers on whole repository
            try
            {  
                if ( arepo == null )
                {
                    throw new TaskExecutionException( "Unable to execute RepositoryTask with invalid repository id: " + repoTask.getRepositoryId() );
                }
   
                long sinceWhen = RepositoryScanner.FRESH_SCAN;
   
                List<RepositoryContentStatistics> results = (List<RepositoryContentStatistics>) dao.query( new MostRecentRepositoryScanStatistics( arepo.getId() ) );
   
                if ( CollectionUtils.isNotEmpty( results ) )
                {
                    RepositoryContentStatistics lastStats = results.get( 0 );
                    sinceWhen = lastStats.getWhenGathered().getTime() + lastStats.getDuration();
View Full Code Here
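
The cutoff computed above is simply the previous scan's start time plus its duration. A minimal sketch, not Archiva API, with the statistics values passed in as plain arguments:

    import java.util.Date;

    public class ScanCutoff
    {
        // Stand-in for RepositoryScanner.FRESH_SCAN (scan everything)
        public static final long FRESH_SCAN = 0;

        // whenGathered and durationMillis would come from the most recent
        // RepositoryContentStatistics entry for the repository.
        public static long sinceWhen( Date whenGathered, long durationMillis )
        {
            if ( whenGathered == null )
            {
                // No previous scan recorded: fall back to a full scan
                return FRESH_SCAN;
            }
            return whenGathered.getTime() + durationMillis;
        }
    }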

    {
        synchronized( indexerEngine )
        {
            ArtifactIndexingTask indexingTask = ( ArtifactIndexingTask ) task;
           
            ManagedRepositoryConfiguration repository =
                archivaConfiguration.getConfiguration().findManagedRepositoryById( indexingTask.getRepositoryId() );
   
            String indexDir = repository.getIndexDir();
            File managedRepository = new File( repository.getLocation() );
           
            // Use the configured index directory if one is set; otherwise default to ".indexer" under the repository root
            File indexDirectory = null;
            if ( indexDir != null && !"".equals( indexDir ) )
            {
                indexDirectory = new File( repository.getIndexDir() );
            }
            else
            {
                indexDirectory = new File( managedRepository, ".indexer" );
            }
           
            IndexingContext context = null;
            try
            {
                context =
                    new DefaultIndexingContext( repository.getId(), repository.getId(), managedRepository,
                                            indexDirectory, null, null, NexusIndexer.FULL_INDEX, false );
                context.setSearchable( repository.isScanned() );
               
                File artifactFile = indexingTask.getResourceFile();               
                ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile );
               
                if( ac != null )
View Full Code Here

        value.setRepositoryGroups( repositoryGroups );
        java.util.List managedRepositories = new java.util.ArrayList();
        List managedRepositoriesSubsets = registry.getSubsetList( prefix + "managedRepositories.managedRepository" );
        for ( Iterator i = managedRepositoriesSubsets.iterator(); i.hasNext(); )
        {
            ManagedRepositoryConfiguration v = readManagedRepositoryConfiguration( "", (Registry) i.next() );
            managedRepositories.add( v );
        }
        value.setManagedRepositories( managedRepositories );
        java.util.List remoteRepositories = new java.util.ArrayList();
        List remoteRepositoriesSubsets = registry.getSubsetList( prefix + "remoteRepositories.remoteRepository" );
View Full Code Here

        return value;
    }
   
    private ManagedRepositoryConfiguration readManagedRepositoryConfiguration( String prefix, Registry registry )
    {
        ManagedRepositoryConfiguration value = new ManagedRepositoryConfiguration();

        String location = registry.getString( prefix + "location", value.getLocation() );
        value.setLocation( location );
        boolean releases = registry.getBoolean( prefix + "releases", value.isReleases() );
        value.setReleases( releases );
        boolean snapshots = registry.getBoolean( prefix + "snapshots", value.isSnapshots() );
        value.setSnapshots( snapshots );
        boolean scanned = registry.getBoolean( prefix + "scanned", value.isScanned() );
        value.setScanned( scanned );
        String indexDir = registry.getString( prefix + "indexDir", value.getIndexDir() );
        value.setIndexDir( indexDir );
        String refreshCronExpression = registry.getString( prefix + "refreshCronExpression", value.getRefreshCronExpression() );
        value.setRefreshCronExpression( refreshCronExpression );
        int retentionCount = registry.getInt( prefix + "retentionCount", value.getRetentionCount() );
        value.setRetentionCount( retentionCount );
        int daysOlder = registry.getInt( prefix + "daysOlder", value.getDaysOlder() );
        value.setDaysOlder( daysOlder );
        boolean deleteReleasedSnapshots = registry.getBoolean( prefix + "deleteReleasedSnapshots", value.isDeleteReleasedSnapshots() );
        value.setDeleteReleasedSnapshots( deleteReleasedSnapshots );
        String id = registry.getString( prefix + "id", value.getId() );
        value.setId( id );
        String name = registry.getString( prefix + "name", value.getName() );
        value.setName( name );
        String layout = registry.getString( prefix + "layout", value.getLayout() );
        value.setLayout( layout );

        return value;
    }
View Full Code Here
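
The generated reader follows a single pattern: read each key under the prefix and fall back to the value already held by the bean. A minimal sketch of that pattern with a plain Map standing in for the Registry (this is not the plexus-registry API):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;

    public class ReadWithDefaults
    {
        public static void main( String[] args )
        {
            Map<String, String> registry = new HashMap<String, String>();
            registry.put( "id", "internal" );
            registry.put( "location", "/var/archiva/repositories/internal" );
            // "layout" is deliberately absent to show the fallback to the bean's default.

            ManagedRepositoryConfiguration value = new ManagedRepositoryConfiguration();
            value.setId( getString( registry, "id", value.getId() ) );
            value.setLocation( getString( registry, "location", value.getLocation() ) );
            value.setLayout( getString( registry, "layout", value.getLayout() ) );

            System.out.println( value.getId() + " -> " + value.getLocation() + " (" + value.getLayout() + ")" );
        }

        private static String getString( Map<String, String> registry, String key, String defaultValue )
        {
            String v = registry.get( key );
            return v != null ? v : defaultValue;
        }
    }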

        KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
            KnownRepositoryContentConsumer.class, "repo-purge-consumer-by-retention-count" );

        populateDbForRetentionCountTest();

        ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration();
        repoConfiguration.setDaysOlder( 0 ); // force days older off to allow retention count purge to execute.
        repoConfiguration.setRetentionCount( TEST_RETENTION_COUNT );
        addRepoToConfiguration( "retention-count", repoConfiguration );

        repoPurgeConsumer.beginScan( repoConfiguration );

        String repoRoot = prepareTestRepo();
View Full Code Here

        populateDbForDaysOldTest();

        KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
            KnownRepositoryContentConsumer.class, "repo-purge-consumer-by-days-old" );

        ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration();
        repoConfiguration.setDaysOlder( TEST_DAYS_OLDER );
        addRepoToConfiguration( "days-old", repoConfiguration );

        repoPurgeConsumer.beginScan( repoConfiguration );

        String repoRoot = prepareTestRepo();
View Full Code Here

        KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
            KnownRepositoryContentConsumer.class, "repo-purge-consumer-by-retention-count" );

        populateDbForReleasedSnapshotsTest();

        ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration();
        repoConfiguration.setDeleteReleasedSnapshots( false ); // Set to NOT delete released snapshots.
        addRepoToConfiguration( "retention-count", repoConfiguration );

        repoPurgeConsumer.beginScan( repoConfiguration );

        String repoRoot = prepareTestRepo();
View Full Code Here
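
All three purge tests start from a getRepoConfiguration() helper that is not shown in the excerpts. A hedged sketch of what such a helper plausibly builds; the id, name, location and purge values are illustrative, not taken from the tests:

    import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;

    public class RepoPurgeFixtures
    {
        static ManagedRepositoryConfiguration getRepoConfiguration()
        {
            ManagedRepositoryConfiguration repoConfiguration = new ManagedRepositoryConfiguration();
            repoConfiguration.setId( "test-repo" );
            repoConfiguration.setName( "Test Repository" );
            repoConfiguration.setLocation( "target/test-repo" );
            repoConfiguration.setLayout( "default" );

            // Purge-related knobs exercised by the consumers above; each test overrides
            // them for its scenario (days-old, retention count, delete released snapshots).
            repoConfiguration.setDaysOlder( 30 );
            repoConfiguration.setRetentionCount( 2 );
            repoConfiguration.setDeleteReleasedSnapshots( true );
            return repoConfiguration;
        }
    }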
