<version>${slf4j.version}</version>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-indexer</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-indexer</artifactId>
<version>${archivaVersion}</version>
</dependency>
<dependency>
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-indexer</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-indexer</artifactId>
<version>${archiva.version}</version>
<scope>test</scope>
</dependency>
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-indexer</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-indexer</artifactId>
<scope>test</scope>
</dependency>
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-indexer</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-indexer</artifactId>
<scope>test</scope>
</dependency>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <parent>
- <artifactId>archiva-base</artifactId>
- <groupId>org.apache.archiva</groupId>
- <version>3.0.0-SNAPSHOT</version>
- </parent>
- <modelVersion>4.0.0</modelVersion>
-
- <artifactId>archiva-maven2-common</artifactId>
- <name>Archiva Base :: Maven2 Common</name>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-proxy-api</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-provider-api</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-file</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>commons-lang</groupId>
- <artifactId>commons-lang</artifactId>
- </dependency>
- </dependencies>
-
-
-</project>
\ No newline at end of file
+++ /dev/null
-package org.apache.archiva.proxy.maven;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.wagon.events.TransferEvent;
-import org.apache.maven.wagon.events.TransferListener;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M1
- */
-public class DebugTransferListener
- implements TransferListener
-{
- private Logger log = LoggerFactory.getLogger( getClass() );
-
- @Override
- public void transferInitiated( TransferEvent transferEvent )
- {
- log.debug( "transferInitiated for resource {} on repository url {}", transferEvent.getResource().getName(),
- transferEvent.getWagon().getRepository().getUrl() );
- }
-
- @Override
- public void transferStarted( TransferEvent transferEvent )
- {
- log.debug( "transferStarted for resource {} on repository url {}", transferEvent.getResource().getName(),
- transferEvent.getWagon().getRepository().getUrl() );
- }
-
- @Override
- public void transferProgress( TransferEvent transferEvent, byte[] bytes, int i )
- {
- log.debug( "transferProgress for resource {} on repository url {}", transferEvent.getResource().getName(),
- transferEvent.getWagon().getRepository().getUrl() );
- }
-
- @Override
- public void transferCompleted( TransferEvent transferEvent )
- {
- log.debug( "transferCompleted for resource {} on repository url {}", transferEvent.getResource().getName(),
- transferEvent.getWagon().getRepository().getUrl() );
- }
-
- @Override
- public void transferError( TransferEvent transferEvent )
- {
- log.debug( "transferError for resource {} on repository url {}", transferEvent.getResource().getName(),
- transferEvent.getWagon().getRepository().getUrl(), transferEvent.getException() );
- }
-
- @Override
- public void debug( String s )
- {
- log.debug( "wagon debug {}", s );
- }
-}
+++ /dev/null
-package org.apache.archiva.proxy.maven;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.wagon.Wagon;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.BeansException;
-import org.springframework.context.ApplicationContext;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import java.lang.reflect.Method;
-import java.util.Map;
-import java.util.Properties;
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M1
- */
-@Service ("wagonFactory")
-public class DefaultWagonFactory
- implements WagonFactory
-{
-
- private ApplicationContext applicationContext;
-
- private Logger logger = LoggerFactory.getLogger( getClass() );
-
- private DebugTransferListener debugTransferListener = new DebugTransferListener();
-
- @Inject
- public DefaultWagonFactory( ApplicationContext applicationContext )
- {
- this.applicationContext = applicationContext;
- }
-
- @Override
- public Wagon getWagon( WagonFactoryRequest wagonFactoryRequest )
- throws WagonFactoryException
- {
- try
- {
- String protocol = StringUtils.startsWith( wagonFactoryRequest.getProtocol(), "wagon#" )
- ? wagonFactoryRequest.getProtocol()
- : "wagon#" + wagonFactoryRequest.getProtocol();
-
- // if it's a ntlm proxy we have to lookup the wagon light which support thats
- // wagon http client doesn't support that
- if ( wagonFactoryRequest.getNetworkProxy() != null && wagonFactoryRequest.getNetworkProxy().isUseNtlm() )
- {
- protocol = protocol + "-ntlm";
- }
-
- Wagon wagon = applicationContext.getBean( protocol, Wagon.class );
- wagon.addTransferListener( debugTransferListener );
- configureUserAgent( wagon, wagonFactoryRequest );
- return wagon;
- }
- catch ( BeansException e )
- {
- throw new WagonFactoryException( e.getMessage(), e );
- }
- }
-
- protected void configureUserAgent( Wagon wagon, WagonFactoryRequest wagonFactoryRequest )
- {
- try
- {
- Class<? extends Wagon> clazz = wagon.getClass();
- Method getHttpHeaders = clazz.getMethod( "getHttpHeaders" );
-
- Properties headers = (Properties) getHttpHeaders.invoke( wagon );
- if ( headers == null )
- {
- headers = new Properties();
- }
-
- headers.put( "User-Agent", wagonFactoryRequest.getUserAgent() );
-
- if ( !wagonFactoryRequest.getHeaders().isEmpty() )
- {
- for ( Map.Entry<String, String> entry : wagonFactoryRequest.getHeaders().entrySet() )
- {
- headers.put( entry.getKey(), entry.getValue() );
- }
- }
-
- Method setHttpHeaders = clazz.getMethod( "setHttpHeaders", new Class[]{ Properties.class } );
- setHttpHeaders.invoke( wagon, headers );
-
- logger.debug( "http headers set to: {}", headers );
- }
- catch ( Exception e )
- {
- logger.warn( "fail to configure User-Agent: {}", e.getMessage(), e );
- }
- }
-}
+++ /dev/null
-package org.apache.archiva.proxy.maven;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.wagon.Wagon;
-
-/**
- * Create a Wagon instance for the given protocol.
- */
-public interface WagonFactory
-{
- /**
- * Create a new Wagon instance for the given protocol.
- *
- * @param wagonFactoryRequest
- *
- * @return the Wagon instance
- */
- Wagon getWagon( WagonFactoryRequest wagonFactoryRequest )
- throws WagonFactoryException;
-}
+++ /dev/null
-package org.apache.archiva.proxy.maven;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M1
- */
-public class WagonFactoryException
- extends Exception
-{
- public WagonFactoryException( String message, Throwable e )
- {
- super( message, e );
- }
-}
+++ /dev/null
-package org.apache.archiva.proxy.maven;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.proxy.model.NetworkProxy;
-import org.apache.commons.lang.StringUtils;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M4
- */
-public class WagonFactoryRequest
-{
-
- public static final String USER_AGENT_SYSTEM_PROPERTY = "archiva.userAgent";
-
- private static String DEFAULT_USER_AGENT = "Java-Archiva";
-
- /**
- * the protocol to find the Wagon for, which must be prefixed with <code>wagon#</code>, for example
- * <code>wagon#http</code>. <b>to have a wagon supporting ntlm add -ntlm</b>
- */
- private String protocol;
-
- private Map<String, String> headers = new HashMap<>();
-
- private String userAgent = DEFAULT_USER_AGENT;
-
- static {
- if ( StringUtils.isNotBlank( System.getProperty( USER_AGENT_SYSTEM_PROPERTY))) {
- DEFAULT_USER_AGENT=System.getProperty(USER_AGENT_SYSTEM_PROPERTY);
- }
- }
-
- private NetworkProxy networkProxy;
-
- public WagonFactoryRequest()
- {
- // no op
- }
-
- public WagonFactoryRequest( String protocol, Map<String, String> headers )
- {
- this.protocol = protocol;
- this.headers = headers;
- }
-
- public String getProtocol()
- {
- return protocol;
- }
-
- public void setProtocol( String protocol )
- {
- this.protocol = protocol;
- }
-
- public WagonFactoryRequest protocol( String protocol )
- {
- this.protocol = protocol;
- return this;
- }
-
- public Map<String, String> getHeaders()
- {
- if ( this.headers == null )
- {
- this.headers = new HashMap<>();
- }
- return headers;
- }
-
- public void setHeaders( Map<String, String> headers )
- {
- this.headers = headers;
- }
-
- public WagonFactoryRequest headers( Map<String, String> headers )
- {
- this.headers = headers;
- return this;
- }
-
- public String getUserAgent()
- {
- return userAgent;
- }
-
- public void setUserAgent( String userAgent )
- {
- this.userAgent = userAgent;
- }
-
- public WagonFactoryRequest userAgent( String userAgent )
- {
- this.userAgent = userAgent;
- return this;
- }
-
- public NetworkProxy getNetworkProxy()
- {
- return networkProxy;
- }
-
- public void setNetworkProxy( NetworkProxy networkProxy )
- {
- this.networkProxy = networkProxy;
- }
-
- public WagonFactoryRequest networkProxy( NetworkProxy networkProxy )
- {
- this.networkProxy = networkProxy;
- return this;
- }
-
- @Override
- public boolean equals( Object o )
- {
- if ( this == o )
- {
- return true;
- }
- if ( !( o instanceof WagonFactoryRequest ) )
- {
- return false;
- }
-
- WagonFactoryRequest that = (WagonFactoryRequest) o;
-
- if ( protocol != null ? !protocol.equals( that.protocol ) : that.protocol != null )
- {
- return false;
- }
- if ( userAgent != null ? !userAgent.equals( that.userAgent ) : that.userAgent != null )
- {
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode()
- {
- int result = protocol != null ? protocol.hashCode() : 0;
- result = 31 * result + ( userAgent != null ? userAgent.hashCode() : 0 );
- return result;
- }
-
- @Override
- public String toString()
- {
- return "WagonFactoryRequest{" +
- "protocol='" + protocol + '\'' +
- ", headers=" + headers +
- ", userAgent='" + userAgent + '\'' +
- ", networkProxy=" + networkProxy +
- '}';
- }
-}
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <parent>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-base</artifactId>
- <version>3.0.0-SNAPSHOT</version>
- </parent>
- <modelVersion>4.0.0</modelVersion>
- <artifactId>archiva-maven2-indexer</artifactId>
- <name>Archiva Base :: Maven2 Indexer</name>
-
- <properties>
- <site.staging.base>${project.parent.parent.basedir}</site.staging.base>
- </properties>
-
- <dependencies>
-
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-repository-admin-api</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-common</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-repository-layer</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-proxy</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-common</artifactId>
- </dependency>
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-context</artifactId>
- </dependency>
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-context-support</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-utils</artifactId>
- </dependency>
- <dependency>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- </dependency>
- <!--
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-digest</artifactId>
- <exclusions>
- <exclusion>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-component-api</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- -->
- <dependency>
- <groupId>commons-lang</groupId>
- <artifactId>commons-lang</artifactId>
- </dependency>
- <dependency>
- <groupId>commons-io</groupId>
- <artifactId>commons-io</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven.indexer</groupId>
- <artifactId>indexer-reader</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven.indexer</groupId>
- <artifactId>indexer-core</artifactId>
- <classifier>shaded-lucene</classifier>
- <exclusions>
- <exclusion>
- <groupId>org.apache.lucene</groupId>
- <artifactId>lucene-queryparser</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.apache.lucene</groupId>
- <artifactId>lucene-analyzers-common</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-http</artifactId>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>org.eclipse.sisu</groupId>
- <artifactId>org.eclipse.sisu.plexus</artifactId>
- </dependency>
- <dependency>
- <groupId>com.google.inject</groupId>
- <artifactId>guice</artifactId>
- <classifier>no_aop</classifier>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-plexus-bridge</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-scheduler-repository</artifactId>
- </dependency>
- <dependency>
- <groupId>javax.inject</groupId>
- <artifactId>javax.inject</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-mock</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-repository-admin-default</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-test-utils</artifactId>
- <version>${project.version}</version>
- <scope>test</scope>
- <exclusions>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-simple</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.derby</groupId>
- <artifactId>derby</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva.redback</groupId>
- <artifactId>redback-keys-memory</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva.redback</groupId>
- <artifactId>redback-rbac-cached</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva.redback</groupId>
- <artifactId>redback-rbac-memory</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva.redback</groupId>
- <artifactId>redback-users-memory</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva.redback</groupId>
- <artifactId>redback-common-test-resources</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-file</artifactId>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-http-lightweight</artifactId>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>xerces</groupId>
- <artifactId>xercesImpl</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.hsqldb</groupId>
- <artifactId>hsqldb</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-configuration</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.maven</groupId>
- <artifactId>maven-core</artifactId>
- <exclusions>
- <exclusion>
- <groupId>org.sonatype.sisu</groupId>
- <artifactId>sisu-guava</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.sonatype.sisu</groupId>
- <artifactId>sisu-inject</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.sonatype.sisu</groupId>
- <artifactId>sisu-guice</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
-
- <dependency>
- <groupId>org.apache.logging.log4j</groupId>
- <artifactId>log4j-slf4j-impl</artifactId>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.logging.log4j</groupId>
- <artifactId>log4j-jcl</artifactId>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.ow2.asm</groupId>
- <artifactId>asm</artifactId>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
- <build>
- <pluginManagement>
- <plugins>
- <plugin>
- <groupId>org.apache.rat</groupId>
- <artifactId>apache-rat-plugin</artifactId>
- <configuration>
- <excludes>
- <exclude>src/test/maven-search-test-repo*/**</exclude>
- <exclude>src/test/repo-release*/**</exclude>
- </excludes>
- </configuration>
- </plugin>
- </plugins>
- </pluginManagement>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <configuration>
- <systemPropertyVariables>
- <appserver.base>${project.build.directory}/appserver-base</appserver.base>
- <plexus.home>${project.build.directory}/appserver-base</plexus.home>
- <derby.system.home>${project.build.directory}/appserver-base</derby.system.home>
- <redback.jdbc.url>${redbackTestJdbcUrl}</redback.jdbc.url>
- <redback.jdbc.driver.name>${redbackTestJdbcDriver}</redback.jdbc.driver.name>
- <archiva.repositorySessionFactory.id>mock</archiva.repositorySessionFactory.id>
- <openjpa.Log>${openjpa.Log}</openjpa.Log>
- </systemPropertyVariables>
- </configuration>
- </plugin>
- </plugins>
- </build>
-</project>
+++ /dev/null
-package org.apache.archiva.indexer.maven;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.updater.IndexUpdateSideEffect;
-import org.apache.maven.index_shaded.lucene.store.Directory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-/**
- * Not doing much but required at least one implementation
- *
- * @since 3.0.0
- */
-@Service("archivaIndexUpdater")
-public class DefaultIndexUpdateSideEffect
- implements IndexUpdateSideEffect
-{
- private static final Logger LOGGER = LoggerFactory.getLogger( DefaultIndexUpdateSideEffect.class );
-
- @Override
- public void updateIndex( Directory directory, IndexingContext indexingContext, boolean b )
- {
- LOGGER.info( "updating index: {} with directory: {}", //
- indexingContext.getId(), //
- directory.toString() );
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.repository.Repository;
-import org.apache.maven.index.context.IndexingContext;
-
-import java.io.IOException;
-import java.net.URI;
-import java.nio.file.Files;
-import java.nio.file.NoSuchFileException;
-import java.sql.Date;
-import java.time.ZonedDateTime;
-import java.util.Set;
-
-/**
- * Maven implementation of index context
- */
-public class MavenIndexContext implements ArchivaIndexingContext {
-
- private IndexingContext delegate;
- private Repository repository;
-
- MavenIndexContext(Repository repository, IndexingContext delegate) {
- this.delegate = delegate;
- this.repository = repository;
-
- }
-
- @Override
- public String getId() {
- return delegate.getId();
- }
-
- @Override
- public Repository getRepository() {
- return repository;
- }
-
- @Override
- public URI getPath() {
- return delegate.getIndexDirectoryFile().toURI();
- }
-
- @Override
- public boolean isEmpty() throws IOException {
- return Files.list(delegate.getIndexDirectoryFile().toPath()).count()==0;
- }
-
- @Override
- public void commit() throws IOException {
- delegate.commit();
- }
-
- @Override
- public void rollback() throws IOException {
- delegate.rollback();
- }
-
- @Override
- public void optimize() throws IOException {
- delegate.optimize();
- }
-
- @Override
- public void close(boolean deleteFiles) throws IOException {
- try {
- delegate.close(deleteFiles);
- } catch (NoSuchFileException e) {
- // Ignore missing directory
- }
- }
-
- @Override
- public void close() throws IOException {
- try {
- delegate.close(false);
- } catch (NoSuchFileException e) {
- // Ignore missing directory
- }
- }
-
- @Override
- public void purge() throws IOException {
- delegate.purge();
- }
-
- @Override
- public boolean supports(Class<?> clazz) {
- return IndexingContext.class.equals(clazz);
- }
-
- @SuppressWarnings( "unchecked" )
- @Override
- public <T> T getBaseContext(Class<T> clazz) throws UnsupportedOperationException {
- if (IndexingContext.class.equals(clazz)) {
- return (T) delegate;
- } else {
- throw new UnsupportedOperationException("The class "+clazz+" is not supported by the maven indexer");
- }
- }
-
- @Override
- public Set<String> getGroups() throws IOException {
- return delegate.getAllGroups();
- }
-
- @Override
- public void updateTimestamp(boolean save) throws IOException {
- delegate.updateTimestamp(save);
- }
-
- @Override
- public void updateTimestamp(boolean save, ZonedDateTime time) throws IOException {
- delegate.updateTimestamp(save, Date.from(time.toInstant()));
- }
-
-
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.admin.model.RepositoryAdminException;
-import org.apache.archiva.common.utils.FileUtils;
-import org.apache.archiva.common.utils.PathUtil;
-import org.apache.archiva.configuration.ArchivaConfiguration;
-import org.apache.archiva.indexer.ArchivaIndexManager;
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.indexer.IndexCreationFailedException;
-import org.apache.archiva.indexer.IndexUpdateFailedException;
-import org.apache.archiva.indexer.UnsupportedBaseContextException;
-import org.apache.archiva.proxy.ProxyRegistry;
-import org.apache.archiva.proxy.maven.WagonFactory;
-import org.apache.archiva.proxy.maven.WagonFactoryException;
-import org.apache.archiva.proxy.maven.WagonFactoryRequest;
-import org.apache.archiva.proxy.model.NetworkProxy;
-import org.apache.archiva.repository.EditableRepository;
-import org.apache.archiva.repository.ManagedRepository;
-import org.apache.archiva.repository.PasswordCredentials;
-import org.apache.archiva.repository.RemoteRepository;
-import org.apache.archiva.repository.Repository;
-import org.apache.archiva.repository.RepositoryType;
-import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.repository.features.RemoteIndexFeature;
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.index.ArtifactContext;
-import org.apache.maven.index.ArtifactContextProducer;
-import org.apache.maven.index.DefaultScannerListener;
-import org.apache.maven.index.Indexer;
-import org.apache.maven.index.IndexerEngine;
-import org.apache.maven.index.Scanner;
-import org.apache.maven.index.ScanningRequest;
-import org.apache.maven.index.ScanningResult;
-import org.apache.maven.index.context.IndexCreator;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.packer.IndexPacker;
-import org.apache.maven.index.packer.IndexPackingRequest;
-import org.apache.maven.index.updater.IndexUpdateRequest;
-import org.apache.maven.index.updater.IndexUpdater;
-import org.apache.maven.index.updater.ResourceFetcher;
-import org.apache.maven.index_shaded.lucene.index.IndexFormatTooOldException;
-import org.apache.maven.wagon.ConnectionException;
-import org.apache.maven.wagon.ResourceDoesNotExistException;
-import org.apache.maven.wagon.StreamWagon;
-import org.apache.maven.wagon.TransferFailedException;
-import org.apache.maven.wagon.Wagon;
-import org.apache.maven.wagon.authentication.AuthenticationException;
-import org.apache.maven.wagon.authentication.AuthenticationInfo;
-import org.apache.maven.wagon.authorization.AuthorizationException;
-import org.apache.maven.wagon.events.TransferEvent;
-import org.apache.maven.wagon.events.TransferListener;
-import org.apache.maven.wagon.proxy.ProxyInfo;
-import org.apache.maven.wagon.shared.http.AbstractHttpClientWagon;
-import org.apache.maven.wagon.shared.http.HttpConfiguration;
-import org.apache.maven.wagon.shared.http.HttpMethodConfiguration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.MalformedURLException;
-import java.net.URI;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentSkipListSet;
-import java.util.stream.Collectors;
-
-/**
- * Maven implementation of index manager.
- * The index manager is a singleton, so we try to make sure, that index operations are not running
- * parallel by synchronizing on the index path.
- * A update operation waits for parallel running methods to finish before starting, but after a certain
- * time of retries a IndexUpdateFailedException is thrown.
- */
-@Service( "archivaIndexManager#maven" )
-public class MavenIndexManager implements ArchivaIndexManager {
-
- private static final Logger log = LoggerFactory.getLogger( MavenIndexManager.class );
-
- @Inject
- private Indexer indexer;
-
- @Inject
- private IndexerEngine indexerEngine;
-
- @Inject
- private List<? extends IndexCreator> indexCreators;
-
- @Inject
- private IndexPacker indexPacker;
-
- @Inject
- private Scanner scanner;
-
- @Inject
- private ArchivaConfiguration archivaConfiguration;
-
- @Inject
- private WagonFactory wagonFactory;
-
- @Inject
- private IndexUpdater indexUpdater;
-
- @Inject
- private ArtifactContextProducer artifactContextProducer;
-
- @Inject
- private ProxyRegistry proxyRegistry;
-
-
- public static final String DEFAULT_INDEXER_DIR = ".indexer";
- public static final String DEFAULT_PACKED_INDEX_DIR = ".index";
-
- private ConcurrentSkipListSet<Path> activeContexts = new ConcurrentSkipListSet<>( );
-
- private static final int WAIT_TIME = 100;
- private static final int MAX_WAIT = 10;
-
-
- public static IndexingContext getMvnContext( ArchivaIndexingContext context ) throws UnsupportedBaseContextException
- {
- if ( !context.supports( IndexingContext.class ) )
- {
- log.error( "The provided archiva index context does not support the maven IndexingContext" );
- throw new UnsupportedBaseContextException( "The context does not support the Maven IndexingContext" );
- }
- return context.getBaseContext( IndexingContext.class );
- }
-
- private Path getIndexPath( ArchivaIndexingContext ctx )
- {
- return PathUtil.getPathFromUri( ctx.getPath( ) );
- }
-
- @FunctionalInterface
- interface IndexUpdateConsumer
- {
-
- void accept( IndexingContext indexingContext ) throws IndexUpdateFailedException;
- }
-
- /*
- * This method is used to do some actions around the update execution code. And to make sure, that no other
- * method is running on the same index.
- */
- private void executeUpdateFunction( ArchivaIndexingContext context, IndexUpdateConsumer function ) throws IndexUpdateFailedException
- {
- IndexingContext indexingContext = null;
- try
- {
- indexingContext = getMvnContext( context );
- }
- catch ( UnsupportedBaseContextException e )
- {
- throw new IndexUpdateFailedException( "Maven index is not supported by this context", e );
- }
- final Path ctxPath = getIndexPath( context );
- int loop = MAX_WAIT;
- boolean active = false;
- while ( loop-- > 0 && !active )
- {
- active = activeContexts.add( ctxPath );
- try
- {
- Thread.currentThread( ).sleep( WAIT_TIME );
- }
- catch ( InterruptedException e )
- {
- // Ignore this
- }
- }
- if ( active )
- {
- try
- {
- function.accept( indexingContext );
- }
- finally
- {
- activeContexts.remove( ctxPath );
- }
- }
- else
- {
- throw new IndexUpdateFailedException( "Timeout while waiting for index release on context " + context.getId( ) );
- }
- }
-
- @Override
- public void pack( final ArchivaIndexingContext context ) throws IndexUpdateFailedException
- {
- executeUpdateFunction( context, indexingContext -> {
- try
- {
- IndexPackingRequest request = new IndexPackingRequest( indexingContext,
- indexingContext.acquireIndexSearcher( ).getIndexReader( ),
- indexingContext.getIndexDirectoryFile( ) );
- indexPacker.packIndex( request );
- indexingContext.updateTimestamp( true );
- }
- catch ( IOException e )
- {
- log.error( "IOException while packing index of context " + context.getId( ) + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ) );
- throw new IndexUpdateFailedException( "IOException during update of " + context.getId( ), e );
- }
- }
- );
-
- }
-
- @Override
- public void scan(final ArchivaIndexingContext context) throws IndexUpdateFailedException
- {
- executeUpdateFunction( context, indexingContext -> {
- DefaultScannerListener listener = new DefaultScannerListener( indexingContext, indexerEngine, true, null );
- ScanningRequest request = new ScanningRequest( indexingContext, listener );
- ScanningResult result = scanner.scan( request );
- if ( result.hasExceptions( ) )
- {
- log.error( "Exceptions occured during index scan of " + context.getId( ) );
- result.getExceptions( ).stream( ).map( e -> e.getMessage( ) ).distinct( ).limit( 5 ).forEach(
- s -> log.error( "Message: " + s )
- );
- }
-
- } );
- }
-
- @Override
- public void update(final ArchivaIndexingContext context, final boolean fullUpdate) throws IndexUpdateFailedException
- {
- log.info( "start download remote index for remote repository {}", context.getRepository( ).getId( ) );
- URI remoteUpdateUri;
- if ( !( context.getRepository( ) instanceof RemoteRepository ) || !(context.getRepository().supportsFeature(RemoteIndexFeature.class)) )
- {
- throw new IndexUpdateFailedException( "The context is not associated to a remote repository with remote index " + context.getId( ) );
- } else {
- RemoteIndexFeature rif = context.getRepository().getFeature(RemoteIndexFeature.class).get();
- remoteUpdateUri = context.getRepository().getLocation().resolve(rif.getIndexUri());
- }
- final RemoteRepository remoteRepository = (RemoteRepository) context.getRepository( );
-
- executeUpdateFunction( context,
- indexingContext -> {
- try
- {
- // create a temp directory to download files
- Path tempIndexDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".tmpIndex" );
- Path indexCacheDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".indexCache" );
- Files.createDirectories( indexCacheDirectory );
- if ( Files.exists( tempIndexDirectory ) )
- {
- org.apache.archiva.common.utils.FileUtils.deleteDirectory( tempIndexDirectory );
- }
- Files.createDirectories( tempIndexDirectory );
- tempIndexDirectory.toFile( ).deleteOnExit( );
- String baseIndexUrl = indexingContext.getIndexUpdateUrl( );
-
- String wagonProtocol = remoteUpdateUri.toURL( ).getProtocol( );
-
- NetworkProxy networkProxy = null;
- if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
- {
- RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
- if ( StringUtils.isNotBlank( rif.getProxyId( ) ) )
- {
- networkProxy = proxyRegistry.getNetworkProxy( rif.getProxyId( ) );
- if ( networkProxy == null )
- {
- log.warn(
- "your remote repository is configured to download remote index trought a proxy we cannot find id:{}",
- rif.getProxyId( ) );
- }
- }
-
- final StreamWagon wagon = (StreamWagon) wagonFactory.getWagon(
- new WagonFactoryRequest( wagonProtocol, remoteRepository.getExtraHeaders( ) ).networkProxy(
- networkProxy )
- );
- int readTimeout = (int) rif.getDownloadTimeout( ).toMillis( ) * 1000;
- wagon.setReadTimeout( readTimeout );
- wagon.setTimeout( (int) remoteRepository.getTimeout( ).toMillis( ) * 1000 );
-
- if ( wagon instanceof AbstractHttpClientWagon )
- {
- HttpConfiguration httpConfiguration = new HttpConfiguration( );
- HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration( );
- httpMethodConfiguration.setUsePreemptive( true );
- httpMethodConfiguration.setReadTimeout( readTimeout );
- httpConfiguration.setGet( httpMethodConfiguration );
- AbstractHttpClientWagon.class.cast( wagon ).setHttpConfiguration( httpConfiguration );
- }
-
- wagon.addTransferListener( new DownloadListener( ) );
- ProxyInfo proxyInfo = null;
- if ( networkProxy != null )
- {
- proxyInfo = new ProxyInfo( );
- proxyInfo.setType( networkProxy.getProtocol( ) );
- proxyInfo.setHost( networkProxy.getHost( ) );
- proxyInfo.setPort( networkProxy.getPort( ) );
- proxyInfo.setUserName( networkProxy.getUsername( ) );
- proxyInfo.setPassword( networkProxy.getPassword( ) );
- }
- AuthenticationInfo authenticationInfo = null;
- if ( remoteRepository.getLoginCredentials( ) != null && ( remoteRepository.getLoginCredentials( ) instanceof PasswordCredentials ) )
- {
- PasswordCredentials creds = (PasswordCredentials) remoteRepository.getLoginCredentials( );
- authenticationInfo = new AuthenticationInfo( );
- authenticationInfo.setUserName( creds.getUsername( ) );
- authenticationInfo.setPassword( new String( creds.getPassword( ) ) );
- }
- wagon.connect( new org.apache.maven.wagon.repository.Repository( remoteRepository.getId( ), baseIndexUrl ), authenticationInfo,
- proxyInfo );
-
- Path indexDirectory = indexingContext.getIndexDirectoryFile( ).toPath( );
- if ( !Files.exists( indexDirectory ) )
- {
- Files.createDirectories( indexDirectory );
- }
-
- ResourceFetcher resourceFetcher =
- new WagonResourceFetcher( log, tempIndexDirectory, wagon, remoteRepository );
- IndexUpdateRequest request = new IndexUpdateRequest( indexingContext, resourceFetcher );
- request.setForceFullUpdate( fullUpdate );
- request.setLocalIndexCacheDir( indexCacheDirectory.toFile( ) );
-
- indexUpdater.fetchAndUpdateIndex( request );
-
- indexingContext.updateTimestamp( true );
- }
-
- }
- catch ( AuthenticationException e )
- {
- log.error( "Could not login to the remote proxy for updating index of {}", remoteRepository.getId( ), e );
- throw new IndexUpdateFailedException( "Login in to proxy failed while updating remote repository " + remoteRepository.getId( ), e );
- }
- catch ( ConnectionException e )
- {
- log.error( "Connection error during index update for remote repository {}", remoteRepository.getId( ), e );
- throw new IndexUpdateFailedException( "Connection error during index update for remote repository " + remoteRepository.getId( ), e );
- }
- catch ( MalformedURLException e )
- {
- log.error( "URL for remote index update of remote repository {} is not correct {}", remoteRepository.getId( ), remoteUpdateUri, e );
- throw new IndexUpdateFailedException( "URL for remote index update of repository is not correct " + remoteUpdateUri, e );
- }
- catch ( IOException e )
- {
- log.error( "IOException during index update of remote repository {}: {}", remoteRepository.getId( ), e.getMessage( ), e );
- throw new IndexUpdateFailedException( "IOException during index update of remote repository " + remoteRepository.getId( )
- + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
- }
- catch ( WagonFactoryException e )
- {
- log.error( "Wagon for remote index download of {} could not be created: {}", remoteRepository.getId( ), e.getMessage( ), e );
- throw new IndexUpdateFailedException( "Error while updating the remote index of " + remoteRepository.getId( ), e );
- }
- } );
-
- }
-
- @Override
- public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
- {
- final URI ctxUri = context.getPath();
- executeUpdateFunction(context, indexingContext -> {
- Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
- try {
- indexer.addArtifactsToIndex(artifacts, indexingContext);
- } catch (IOException e) {
- log.error("IOException while adding artifact {}", e.getMessage(), e);
- throw new IndexUpdateFailedException("Error occured while adding artifact to index of "+context.getId()
- + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""));
- }
- });
- }
-
- @Override
- public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
- {
- final URI ctxUri = context.getPath();
- executeUpdateFunction(context, indexingContext -> {
- Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
- try {
- indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
- } catch (IOException e) {
- log.error("IOException while removing artifact {}", e.getMessage(), e);
- throw new IndexUpdateFailedException("Error occured while removing artifact from index of "+context.getId()
- + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""));
- }
- });
-
- }
-
- @Override
- public boolean supportsRepository( RepositoryType type )
- {
- return type == RepositoryType.MAVEN;
- }
-
- @Override
- public ArchivaIndexingContext createContext( Repository repository ) throws IndexCreationFailedException
- {
- log.debug("Creating context for repo {}, type: {}", repository.getId(), repository.getType());
- if ( repository.getType( ) != RepositoryType.MAVEN )
- {
- throw new UnsupportedRepositoryTypeException( repository.getType( ) );
- }
- IndexingContext mvnCtx = null;
- try
- {
- if ( repository instanceof RemoteRepository )
- {
- mvnCtx = createRemoteContext( (RemoteRepository) repository );
- }
- else if ( repository instanceof ManagedRepository )
- {
- mvnCtx = createManagedContext( (ManagedRepository) repository );
- }
- }
- catch ( IOException e )
- {
- log.error( "IOException during context creation " + e.getMessage( ), e );
- throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
- + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
- }
- MavenIndexContext context = new MavenIndexContext( repository, mvnCtx );
-
- return context;
- }
-
- @Override
- public ArchivaIndexingContext reset(ArchivaIndexingContext context) throws IndexUpdateFailedException {
- ArchivaIndexingContext ctx;
- executeUpdateFunction(context, indexingContext -> {
- try {
- indexingContext.close(true);
- } catch (IOException e) {
- log.warn("Index close failed");
- }
- try {
- FileUtils.deleteDirectory(Paths.get(context.getPath()));
- } catch (IOException e) {
- throw new IndexUpdateFailedException("Could not delete index files");
- }
- });
- try {
- Repository repo = context.getRepository();
- ctx = createContext(context.getRepository());
- if (repo instanceof EditableRepository) {
- ((EditableRepository)repo).setIndexingContext(ctx);
- }
- } catch (IndexCreationFailedException e) {
- throw new IndexUpdateFailedException("Could not create index");
- }
- return ctx;
- }
-
- @Override
- public ArchivaIndexingContext move(ArchivaIndexingContext context, Repository repo) throws IndexCreationFailedException {
- if (context==null) {
- return null;
- }
- if (context.supports(IndexingContext.class)) {
- try {
- Path newPath = getIndexPath(repo);
- IndexingContext ctx = context.getBaseContext(IndexingContext.class);
- Path oldPath = ctx.getIndexDirectoryFile().toPath();
- if (oldPath.equals(newPath)) {
- // Nothing to do, if path does not change
- return context;
- }
- if (!Files.exists(oldPath)) {
- return createContext(repo);
- } else if (context.isEmpty()) {
- context.close();
- return createContext(repo);
- } else {
- context.close(false);
- Files.move(oldPath, newPath);
- return createContext(repo);
- }
- } catch (IOException e) {
- log.error("IOException while moving index directory {}", e.getMessage(), e);
- throw new IndexCreationFailedException("Could not recreated the index.", e);
- } catch (UnsupportedBaseContextException e) {
- throw new IndexCreationFailedException("The given context, is not a maven context.");
- }
- } else {
- throw new IndexCreationFailedException("Bad context type. This is not a maven context.");
- }
- }
-
- @Override
- public void updateLocalIndexPath(Repository repo) {
- if (repo.supportsFeature(IndexCreationFeature.class)) {
- IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
- try {
- icf.setLocalIndexPath(getIndexPath(repo));
- icf.setLocalPackedIndexPath(getPackedIndexPath(repo));
- } catch (IOException e) {
- log.error("Could not set local index path for {}. New URI: {}", repo.getId(), icf.getIndexPath());
- }
- }
- }
-
- private Path getIndexPath(Repository repo) throws IOException {
- IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
- Path repoDir = repo.getLocalPath();
- URI indexDir = icf.getIndexPath();
- Path indexDirectory = null;
- if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
- {
-
- indexDirectory = PathUtil.getPathFromUri( indexDir );
- // not absolute so create it in repository directory
- if ( !indexDirectory.isAbsolute( ) )
- {
- indexDirectory = repoDir.resolve( indexDirectory );
- }
- }
- else
- {
- indexDirectory = repoDir.resolve( DEFAULT_INDEXER_DIR );
- }
-
- if ( !Files.exists( indexDirectory ) )
- {
- Files.createDirectories( indexDirectory );
- }
- return indexDirectory;
- }
-
- private Path getPackedIndexPath(Repository repo) throws IOException {
- IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
- Path repoDir = repo.getLocalPath();
- URI indexDir = icf.getPackedIndexPath();
- Path indexDirectory = null;
- if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
- {
-
- indexDirectory = PathUtil.getPathFromUri( indexDir );
- // not absolute so create it in repository directory
- if ( !indexDirectory.isAbsolute( ) )
- {
- indexDirectory = repoDir.resolve( indexDirectory );
- }
- }
- else
- {
- indexDirectory = repoDir.resolve( DEFAULT_PACKED_INDEX_DIR );
- }
-
- if ( !Files.exists( indexDirectory ) )
- {
- Files.createDirectories( indexDirectory );
- }
- return indexDirectory;
- }
-
- private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
- {
- Path appServerBase = archivaConfiguration.getAppServerBaseDir( );
-
- String contextKey = "remote-" + remoteRepository.getId( );
-
-
- // create remote repository path
- Path repoDir = remoteRepository.getLocalPath();
- if ( !Files.exists( repoDir ) )
- {
- Files.createDirectories( repoDir );
- }
-
- Path indexDirectory = null;
-
- // is there configured indexDirectory ?
- if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
- {
- RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
- indexDirectory = getIndexPath(remoteRepository);
- String remoteIndexUrl = calculateIndexRemoteUrl( remoteRepository.getLocation( ), rif );
- try
- {
-
- return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
- }
- catch ( IndexFormatTooOldException e )
- {
- // existing index with an old lucene format so we need to delete it!!!
- // delete it first then recreate it.
- log.warn( "the index of repository {} is too old we have to delete and recreate it", //
- remoteRepository.getId( ) );
- org.apache.archiva.common.utils.FileUtils.deleteDirectory( indexDirectory );
- return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
-
- }
- }
- else
- {
- throw new IOException( "No remote index defined" );
- }
- }
-
- private IndexingContext getIndexingContext( Repository repository, String contextKey, Path repoDir, Path indexDirectory, String indexUrl ) throws IOException
- {
- return indexer.createIndexingContext( contextKey, repository.getId( ), repoDir.toFile( ), indexDirectory.toFile( ),
- repository.getLocation( ) == null ? null : repository.getLocation( ).toString( ),
- indexUrl,
- true, false,
- indexCreators );
- }
-
- private IndexingContext createManagedContext( ManagedRepository repository ) throws IOException
- {
-
- IndexingContext context;
- // take care first about repository location as can be relative
- Path repositoryDirectory = repository.getLocalPath();
-
- if ( !Files.exists( repositoryDirectory ) )
- {
- try
- {
- Files.createDirectories( repositoryDirectory );
- }
- catch ( IOException e )
- {
- log.error( "Could not create directory {}", repositoryDirectory );
- }
- }
-
- Path indexDirectory = null;
-
- if ( repository.supportsFeature( IndexCreationFeature.class ) )
- {
- indexDirectory = getIndexPath(repository);
-
- String indexUrl = repositoryDirectory.toUri( ).toURL( ).toExternalForm( );
- try
- {
- context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
- context.setSearchable( repository.isScanned( ) );
- }
- catch ( IndexFormatTooOldException e )
- {
- // existing index with an old lucene format so we need to delete it!!!
- // delete it first then recreate it.
- log.warn( "the index of repository {} is too old we have to delete and recreate it", //
- repository.getId( ) );
- org.apache.archiva.common.utils.FileUtils.deleteDirectory( indexDirectory );
- context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
- context.setSearchable( repository.isScanned( ) );
- }
- return context;
- }
- else
- {
- throw new IOException( "No repository index defined" );
- }
- }
-
- private String calculateIndexRemoteUrl( URI baseUri, RemoteIndexFeature rif )
- {
- if ( rif.getIndexUri( ) == null )
- {
- return baseUri.resolve( DEFAULT_INDEXER_DIR ).toString( );
- }
- else
- {
- return baseUri.resolve( rif.getIndexUri( ) ).toString( );
- }
- }
-
- private static final class DownloadListener
- implements TransferListener
- {
- private Logger log = LoggerFactory.getLogger( getClass( ) );
-
- private String resourceName;
-
- private long startTime;
-
- private int totalLength = 0;
-
- @Override
- public void transferInitiated( TransferEvent transferEvent )
- {
- startTime = System.currentTimeMillis( );
- resourceName = transferEvent.getResource( ).getName( );
- log.debug( "initiate transfer of {}", resourceName );
- }
-
- @Override
- public void transferStarted( TransferEvent transferEvent )
- {
- this.totalLength = 0;
- resourceName = transferEvent.getResource( ).getName( );
- log.info( "start transfer of {}", transferEvent.getResource( ).getName( ) );
- }
-
- @Override
- public void transferProgress( TransferEvent transferEvent, byte[] buffer, int length )
- {
- log.debug( "transfer of {} : {}/{}", transferEvent.getResource( ).getName( ), buffer.length, length );
- this.totalLength += length;
- }
-
- @Override
- public void transferCompleted( TransferEvent transferEvent )
- {
- resourceName = transferEvent.getResource( ).getName( );
- long endTime = System.currentTimeMillis( );
- log.info( "end of transfer file {} {} kb: {}s", transferEvent.getResource( ).getName( ),
- this.totalLength / 1024, ( endTime - startTime ) / 1000 );
- }
-
- @Override
- public void transferError( TransferEvent transferEvent )
- {
- log.info( "error of transfer file {}: {}", transferEvent.getResource( ).getName( ),
- transferEvent.getException( ).getMessage( ), transferEvent.getException( ) );
- }
-
- @Override
- public void debug( String message )
- {
- log.debug( "transfer debug {}", message );
- }
- }
-
- private static class WagonResourceFetcher
- implements ResourceFetcher
- {
-
- Logger log;
-
- Path tempIndexDirectory;
-
- Wagon wagon;
-
- RemoteRepository remoteRepository;
-
- private WagonResourceFetcher( Logger log, Path tempIndexDirectory, Wagon wagon,
- RemoteRepository remoteRepository )
- {
- this.log = log;
- this.tempIndexDirectory = tempIndexDirectory;
- this.wagon = wagon;
- this.remoteRepository = remoteRepository;
- }
-
- @Override
- public void connect( String id, String url )
- throws IOException
- {
- //no op
- }
-
- @Override
- public void disconnect( )
- throws IOException
- {
- // no op
- }
-
- @Override
- public InputStream retrieve( String name )
- throws IOException, FileNotFoundException
- {
- try
- {
- log.info( "index update retrieve file, name:{}", name );
- Path file = tempIndexDirectory.resolve( name );
- Files.deleteIfExists( file );
- file.toFile( ).deleteOnExit( );
- wagon.get( addParameters( name, remoteRepository ), file.toFile( ) );
- return Files.newInputStream( file );
- }
- catch ( AuthorizationException | TransferFailedException e )
- {
- throw new IOException( e.getMessage( ), e );
- }
- catch ( ResourceDoesNotExistException e )
- {
- FileNotFoundException fnfe = new FileNotFoundException( e.getMessage( ) );
- fnfe.initCause( e );
- throw fnfe;
- }
- }
-
- // FIXME remove crappy copy/paste
- protected String addParameters( String path, RemoteRepository remoteRepository )
- {
- if ( remoteRepository.getExtraParameters( ).isEmpty( ) )
- {
- return path;
- }
-
- boolean question = false;
-
- StringBuilder res = new StringBuilder( path == null ? "" : path );
-
- for ( Map.Entry<String, String> entry : remoteRepository.getExtraParameters( ).entrySet( ) )
- {
- if ( !question )
- {
- res.append( '?' ).append( entry.getKey( ) ).append( '=' ).append( entry.getValue( ) );
- }
- }
-
- return res.toString( );
- }
-
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven.merger;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.common.utils.FileUtils;
-import org.apache.archiva.indexer.UnsupportedBaseContextException;
-import org.apache.archiva.indexer.merger.IndexMerger;
-import org.apache.archiva.indexer.merger.IndexMergerException;
-import org.apache.archiva.indexer.merger.IndexMergerRequest;
-import org.apache.archiva.indexer.merger.TemporaryGroupIndex;
-import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.repository.RepositoryType;
-import org.apache.commons.lang.time.StopWatch;
-import org.apache.maven.index.Indexer;
-import org.apache.maven.index.context.ContextMemberProvider;
-import org.apache.maven.index.context.IndexCreator;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.context.StaticContextMemberProvider;
-import org.apache.maven.index.packer.IndexPacker;
-import org.apache.maven.index.packer.IndexPackingRequest;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.scheduling.annotation.Async;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Collection;
-import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.stream.Collectors;
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M2
- */
-@Service("indexMerger#default")
-public class DefaultIndexMerger
- implements IndexMerger
-{
-
- @Inject
- RepositoryRegistry repositoryRegistry;
-
- private Logger log = LoggerFactory.getLogger( getClass() );
-
-
- private final IndexPacker indexPacker;
-
- private Indexer indexer;
-
- private final List<IndexCreator> indexCreators;
-
- private List<TemporaryGroupIndex> temporaryGroupIndexes = new CopyOnWriteArrayList<>();
-
- private List<IndexingContext> temporaryContextes = new CopyOnWriteArrayList<>( );
-
- private List<String> runningGroups = new CopyOnWriteArrayList<>();
-
- @Inject
- public DefaultIndexMerger( Indexer indexer, IndexPacker indexPacker, List<IndexCreator> indexCreators )
- {
- this.indexer = indexer;
- this.indexPacker = indexPacker;
- this.indexCreators = indexCreators;
- }
-
- @Override
- public IndexingContext buildMergedIndex( IndexMergerRequest indexMergerRequest )
- throws IndexMergerException
- {
- String groupId = indexMergerRequest.getGroupId();
-
- if ( runningGroups.contains( groupId ) )
- {
- log.info( "skip build merge remote indexes for id: '{}' as already running", groupId );
- return null;
- }
-
- runningGroups.add( groupId );
-
- StopWatch stopWatch = new StopWatch();
- stopWatch.reset();
- stopWatch.start();
-
- Path mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory();
-
- String tempRepoId = mergedIndexDirectory.getFileName().toString();
-
- try
- {
- Path indexLocation = mergedIndexDirectory.resolve( indexMergerRequest.getMergedIndexPath() );
-
- List<IndexingContext> members = indexMergerRequest.getRepositoriesIds( ).stream( ).map( id ->
- repositoryRegistry.getRepository( id ) ).filter( repo -> repo.getType().equals( RepositoryType.MAVEN ) )
- .map( repo -> {
- try
- {
- return repo.getIndexingContext().getBaseContext( IndexingContext.class );
- }
- catch ( UnsupportedBaseContextException e )
- {
- return null;
- // Ignore
- }
- } ).filter( Objects::nonNull ).collect( Collectors.toList() );
- ContextMemberProvider memberProvider = new StaticContextMemberProvider(members);
- IndexingContext mergedCtx = indexer.createMergedIndexingContext( tempRepoId, tempRepoId, mergedIndexDirectory.toFile(),
- indexLocation.toFile(), true, memberProvider);
- mergedCtx.optimize();
-
- if ( indexMergerRequest.isPackIndex() )
- {
- IndexPackingRequest request = new IndexPackingRequest( mergedCtx, //
- mergedCtx.acquireIndexSearcher().getIndexReader(), //
- indexLocation.toFile() );
- indexPacker.packIndex( request );
- }
-
- if ( indexMergerRequest.isTemporary() )
- {
- temporaryGroupIndexes.add( new TemporaryGroupIndex( mergedIndexDirectory, tempRepoId, groupId,
- indexMergerRequest.getMergedIndexTtl() ) );
- temporaryContextes.add(mergedCtx);
- }
- stopWatch.stop();
- log.info( "merged index for repos {} in {} s", indexMergerRequest.getRepositoriesIds(),
- stopWatch.getTime() );
- return mergedCtx;
- }
- catch ( IOException e)
- {
- throw new IndexMergerException( e.getMessage(), e );
- }
- finally
- {
- runningGroups.remove( groupId );
- }
- }
-
- @Async
- @Override
- public void cleanTemporaryGroupIndex( TemporaryGroupIndex temporaryGroupIndex )
- {
- if ( temporaryGroupIndex == null )
- {
- return;
- }
-
- try
- {
-
- Optional<IndexingContext> ctxOpt = temporaryContextes.stream( ).filter( ctx -> ctx.getId( ).equals( temporaryGroupIndex.getIndexId( ) ) ).findFirst( );
- if (ctxOpt.isPresent()) {
- IndexingContext ctx = ctxOpt.get();
- indexer.closeIndexingContext( ctx, true );
- temporaryGroupIndexes.remove( temporaryGroupIndex );
- temporaryContextes.remove( ctx );
- Path directory = temporaryGroupIndex.getDirectory();
- if ( directory != null && Files.exists(directory) )
- {
- FileUtils.deleteDirectory( directory );
- }
- }
- }
- catch ( IOException e )
- {
- log.warn( "fail to delete temporary group index {}", temporaryGroupIndex.getIndexId(), e );
- }
- }
-
- @Override
- public Collection<TemporaryGroupIndex> getTemporaryGroupIndexes()
- {
- return this.temporaryGroupIndexes;
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven.search;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.admin.model.RepositoryAdminException;
-import org.apache.archiva.admin.model.beans.ProxyConnector;
-import org.apache.archiva.admin.model.proxyconnector.ProxyConnectorAdmin;
-import org.apache.archiva.indexer.UnsupportedBaseContextException;
-import org.apache.archiva.indexer.search.ArtifactInfoFilter;
-import org.apache.archiva.indexer.search.NoClassifierArtifactInfoFilter;
-import org.apache.archiva.indexer.search.RepositorySearch;
-import org.apache.archiva.indexer.search.RepositorySearchException;
-import org.apache.archiva.indexer.search.SearchFields;
-import org.apache.archiva.indexer.search.SearchResultHit;
-import org.apache.archiva.indexer.search.SearchResultLimits;
-import org.apache.archiva.indexer.search.SearchResults;
-import org.apache.archiva.indexer.util.SearchUtil;
-import org.apache.archiva.model.ArchivaArtifactModel;
-import org.apache.archiva.repository.RemoteRepository;
-import org.apache.archiva.repository.Repository;
-import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.repository.RepositoryType;
-import org.apache.commons.lang.StringUtils;
-import org.apache.maven.index.ArtifactInfo;
-import org.apache.maven.index.FlatSearchRequest;
-import org.apache.maven.index.FlatSearchResponse;
-import org.apache.maven.index.Indexer;
-import org.apache.maven.index.MAVEN;
-import org.apache.maven.index.OSGI;
-import org.apache.maven.index.QueryCreator;
-import org.apache.maven.index.SearchType;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.expr.SearchExpression;
-import org.apache.maven.index.expr.SearchTyped;
-import org.apache.maven.index.expr.SourcedSearchExpression;
-import org.apache.maven.index.expr.UserInputSearchExpression;
-import org.apache.maven.index_shaded.lucene.search.BooleanClause;
-import org.apache.maven.index_shaded.lucene.search.BooleanClause.Occur;
-import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * RepositorySearch implementation which uses the Maven Indexer for searching.
- */
-@Service( "repositorySearch#maven" )
-public class MavenRepositorySearch
- implements RepositorySearch
-{
- private Logger log = LoggerFactory.getLogger( getClass() );
-
- private Indexer indexer;
-
- private QueryCreator queryCreator;
-
-
- RepositoryRegistry repositoryRegistry;
-
- private ProxyConnectorAdmin proxyConnectorAdmin;
-
- protected MavenRepositorySearch()
- {
- // for test purpose
- }
-
- @Inject
- public MavenRepositorySearch( Indexer nexusIndexer, RepositoryRegistry repositoryRegistry,
-
- ProxyConnectorAdmin proxyConnectorAdmin, QueryCreator queryCreator )
- {
- this.indexer = nexusIndexer;
- this.queryCreator = queryCreator;
- this.repositoryRegistry = repositoryRegistry;
- this.proxyConnectorAdmin = proxyConnectorAdmin;
- }
-
- /**
- * @see RepositorySearch#search(String, List, String, SearchResultLimits, List)
- */
- @Override
- public SearchResults search(String principal, List<String> selectedRepos, String term, SearchResultLimits limits,
- List<String> previousSearchTerms )
- throws RepositorySearchException
- {
- List<String> indexingContextIds = addIndexingContexts( selectedRepos );
-
- // since upgrade to nexus 2.0.0, query has changed from g:[QUERIED TERM]* to g:*[QUERIED TERM]*
- // resulting to more wildcard searches so we need to increase max clause count
- BooleanQuery.setMaxClauseCount( Integer.MAX_VALUE );
- BooleanQuery.Builder qb = new BooleanQuery.Builder();
-
- if ( previousSearchTerms == null || previousSearchTerms.isEmpty() )
- {
- constructQuery( term, qb );
- }
- else
- {
- for ( String previousTerm : previousSearchTerms )
- {
- BooleanQuery.Builder iQuery = new BooleanQuery.Builder();
- constructQuery( previousTerm, iQuery );
-
- qb.add( iQuery.build(), BooleanClause.Occur.MUST );
- }
-
- BooleanQuery.Builder iQuery = new BooleanQuery.Builder();
- constructQuery( term, iQuery );
- qb.add( iQuery.build(), BooleanClause.Occur.MUST );
- }
-
- // we retun only artifacts without classifier in quick search, olamy cannot find a way to say with this field empty
- // FIXME cannot find a way currently to setup this in constructQuery !!!
- return search( limits, qb.build(), indexingContextIds, NoClassifierArtifactInfoFilter.LIST, selectedRepos, true );
-
- }
-
- /**
- * @see RepositorySearch#search(String, SearchFields, SearchResultLimits)
- */
- @SuppressWarnings( "deprecation" )
- @Override
- public SearchResults search( String principal, SearchFields searchFields, SearchResultLimits limits )
- throws RepositorySearchException
- {
- if ( searchFields.getRepositories() == null )
- {
- throw new RepositorySearchException( "Repositories cannot be null." );
- }
-
- List<String> indexingContextIds = addIndexingContexts( searchFields.getRepositories() );
-
- // if no index found in the specified ones return an empty search result instead of doing a search on all index
- // olamy: IMHO doesn't make sense
- if ( !searchFields.getRepositories().isEmpty() && ( indexingContextIds == null
- || indexingContextIds.isEmpty() ) )
- {
- return new SearchResults();
- }
-
- BooleanQuery.Builder qb = new BooleanQuery.Builder();
- if ( StringUtils.isNotBlank( searchFields.getGroupId() ) )
- {
- qb.add( indexer.constructQuery( MAVEN.GROUP_ID, searchFields.isExactSearch() ? new SourcedSearchExpression(
- searchFields.getGroupId() ) : new UserInputSearchExpression( searchFields.getGroupId() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getArtifactId() ) )
- {
- qb.add( indexer.constructQuery( MAVEN.ARTIFACT_ID,
- searchFields.isExactSearch()
- ? new SourcedSearchExpression( searchFields.getArtifactId() )
- : new UserInputSearchExpression( searchFields.getArtifactId() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getVersion() ) )
- {
- qb.add( indexer.constructQuery( MAVEN.VERSION, searchFields.isExactSearch() ? new SourcedSearchExpression(
- searchFields.getVersion() ) : new SourcedSearchExpression( searchFields.getVersion() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getPackaging() ) )
- {
- qb.add( indexer.constructQuery( MAVEN.PACKAGING, searchFields.isExactSearch() ? new SourcedSearchExpression(
- searchFields.getPackaging() ) : new UserInputSearchExpression( searchFields.getPackaging() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getClassName() ) )
- {
- qb.add( indexer.constructQuery( MAVEN.CLASSNAMES,
- new UserInputSearchExpression( searchFields.getClassName() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleSymbolicName() ) )
- {
- qb.add( indexer.constructQuery( OSGI.SYMBOLIC_NAME,
- new UserInputSearchExpression( searchFields.getBundleSymbolicName() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleVersion() ) )
- {
- qb.add( indexer.constructQuery( OSGI.VERSION,
- new UserInputSearchExpression( searchFields.getBundleVersion() ) ),
- BooleanClause.Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleExportPackage() ) )
- {
- qb.add( indexer.constructQuery( OSGI.EXPORT_PACKAGE,
- new UserInputSearchExpression( searchFields.getBundleExportPackage() ) ),
- Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleExportService() ) )
- {
- qb.add( indexer.constructQuery( OSGI.EXPORT_SERVICE,
- new UserInputSearchExpression( searchFields.getBundleExportService() ) ),
- Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleImportPackage() ) )
- {
- qb.add( indexer.constructQuery( OSGI.IMPORT_PACKAGE,
- new UserInputSearchExpression( searchFields.getBundleImportPackage() ) ),
- Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleName() ) )
- {
- qb.add( indexer.constructQuery( OSGI.NAME, new UserInputSearchExpression( searchFields.getBundleName() ) ),
- Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleImportPackage() ) )
- {
- qb.add( indexer.constructQuery( OSGI.IMPORT_PACKAGE,
- new UserInputSearchExpression( searchFields.getBundleImportPackage() ) ),
- Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getBundleRequireBundle() ) )
- {
- qb.add( indexer.constructQuery( OSGI.REQUIRE_BUNDLE,
- new UserInputSearchExpression( searchFields.getBundleRequireBundle() ) ),
- Occur.MUST );
- }
-
- if ( StringUtils.isNotBlank( searchFields.getClassifier() ) )
- {
- qb.add( indexer.constructQuery( MAVEN.CLASSIFIER, searchFields.isExactSearch() ? new SourcedSearchExpression(
- searchFields.getClassifier() ) : new UserInputSearchExpression( searchFields.getClassifier() ) ),
- Occur.MUST );
- }
- else if ( searchFields.isExactSearch() )
- {
- //TODO improvement in case of exact search and no classifier we must query for classifier with null value
- // currently it's done in DefaultSearchService with some filtering
- }
-
- BooleanQuery qu = qb.build();
- if ( qu.clauses() == null || qu.clauses().size() <= 0 )
- {
- throw new RepositorySearchException( "No search fields set." );
- }
- if (qu.clauses()!=null) {
- log.debug("CLAUSES ", qu.clauses());
- for (BooleanClause cl : qu.clauses()) {
- log.debug("Clause ",cl);
- }
- }
-
- return search( limits, qu, indexingContextIds, Collections.<ArtifactInfoFilter>emptyList(),
- searchFields.getRepositories(), searchFields.isIncludePomArtifacts() );
- }
-
- private static class NullSearch
- implements SearchTyped, SearchExpression
- {
- private static final NullSearch INSTANCE = new NullSearch();
-
- @Override
- public String getStringValue()
- {
- return "[[NULL_VALUE]]";
- }
-
- @Override
- public SearchType getSearchType()
- {
- return SearchType.EXACT;
- }
- }
-
- private SearchResults search( SearchResultLimits limits, BooleanQuery q, List<String> indexingContextIds,
- List<? extends ArtifactInfoFilter> filters, List<String> selectedRepos,
- boolean includePoms )
- throws RepositorySearchException
- {
-
- try
- {
- FlatSearchRequest request = new FlatSearchRequest( q );
-
- request.setContexts( getIndexingContexts( indexingContextIds ) );
- if ( limits != null )
- {
- // we apply limits only when first page asked
- if ( limits.getSelectedPage() == 0 )
- {
- request.setCount( limits.getPageSize() * ( Math.max( 1, limits.getSelectedPage() ) ) );
- }
- }
-
- FlatSearchResponse response = indexer.searchFlat( request );
-
- if ( response == null || response.getTotalHitsCount() == 0 )
- {
- SearchResults results = new SearchResults();
- results.setLimits( limits );
- return results;
- }
-
- return convertToSearchResults( response, limits, filters, selectedRepos, includePoms );
- }
- catch ( IOException e )
- {
- throw new RepositorySearchException( e.getMessage(), e );
- }
- catch ( RepositoryAdminException e )
- {
- throw new RepositorySearchException( e.getMessage(), e );
- }
-
- }
-
- private IndexingContext getIndexingContext(String id) {
- String repoId;
- if (StringUtils.startsWith(id, "remote-")) {
- repoId = StringUtils.substringAfter(id, "remote-");
- } else {
- repoId = id;
- }
- Repository repo = repositoryRegistry.getRepository(repoId);
- if (repo==null) {
- return null;
- } else {
- if (repo.getIndexingContext()!=null) {
- try {
- return repo.getIndexingContext().getBaseContext(IndexingContext.class);
- } catch (UnsupportedBaseContextException e) {
- return null;
- }
- } else {
- return null;
- }
- }
- }
-
- private List<IndexingContext> getIndexingContexts( List<String> ids )
- {
- List<IndexingContext> contexts = new ArrayList<>( ids.size() );
-
- for ( String id : ids )
- {
- IndexingContext context = getIndexingContext(id);
- if ( context != null )
- {
- contexts.add( context );
- }
- else
- {
- log.warn( "context with id {} not exists", id );
- }
- }
-
- return contexts;
- }
-
- private void constructQuery( String term, BooleanQuery.Builder q )
- {
- q.add( indexer.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ), Occur.SHOULD );
- q.add( indexer.constructQuery( MAVEN.ARTIFACT_ID, new UserInputSearchExpression( term ) ), Occur.SHOULD );
- q.add( indexer.constructQuery( MAVEN.VERSION, new UserInputSearchExpression( term ) ), Occur.SHOULD );
- q.add( indexer.constructQuery( MAVEN.PACKAGING, new UserInputSearchExpression( term ) ), Occur.SHOULD );
- q.add( indexer.constructQuery( MAVEN.CLASSNAMES, new UserInputSearchExpression( term ) ), Occur.SHOULD );
-
- //Query query =
- // new WildcardQuery( new Term( MAVEN.CLASSNAMES.getFieldName(), "*" ) );
- //q.add( query, Occur.MUST_NOT );
- // olamy IMHO we could set this option as at least one must match
- //q.setMinimumNumberShouldMatch( 1 );
- }
-
-
- /**
- * @param selectedRepos
- * @return indexing contextId used
- */
- private List<String> addIndexingContexts( List<String> selectedRepos )
- {
- Set<String> indexingContextIds = new HashSet<>();
- for ( String repo : selectedRepos )
- {
- try
- {
- Repository rRepo = repositoryRegistry.getRepository(repo);
-
- if ( rRepo != null )
- {
-
- if (rRepo.getType().equals(RepositoryType.MAVEN)) {
- assert rRepo.getIndexingContext() != null;
- IndexingContext context = rRepo.getIndexingContext().getBaseContext(IndexingContext.class);
- if (context.isSearchable()) {
- indexingContextIds.addAll(getRemoteIndexingContextIds(repo));
- indexingContextIds.add(context.getId());
- } else {
- log.warn("indexingContext with id {} not searchable", rRepo.getId());
- }
- }
-
- }
- else
- {
- log.warn( "Repository '{}' not found in configuration.", repo );
- }
- }
- catch ( RepositorySearchException e )
- {
- log.warn( "RepositorySearchException occured while accessing index of repository '{}' : {}", repo,
- e.getMessage() );
- continue;
- } catch (UnsupportedBaseContextException e) {
- log.error("Fatal situation: Maven repository without IndexingContext found.");
- continue;
- }
- }
-
- return new ArrayList<>( indexingContextIds );
- }
-
-
- @Override
- public Set<String> getRemoteIndexingContextIds( String managedRepoId )
- throws RepositorySearchException
- {
- Set<String> ids = new HashSet<>();
-
- List<ProxyConnector> proxyConnectors = null;
- try
- {
- proxyConnectors = proxyConnectorAdmin.getProxyConnectorAsMap().get( managedRepoId );
- }
- catch ( RepositoryAdminException e )
- {
- throw new RepositorySearchException( e.getMessage(), e );
- }
-
- if ( proxyConnectors == null || proxyConnectors.isEmpty() )
- {
- return ids;
- }
-
- for ( ProxyConnector proxyConnector : proxyConnectors )
- {
- String remoteId = "remote-" + proxyConnector.getTargetRepoId();
- RemoteRepository repo = repositoryRegistry.getRemoteRepository(proxyConnector.getTargetRepoId());
- if (repo.getType()==RepositoryType.MAVEN) {
- try {
- IndexingContext context = repo.getIndexingContext() != null ? repo.getIndexingContext().getBaseContext(IndexingContext.class) : null;
- if (context!=null && context.isSearchable()) {
- ids.add(remoteId);
- }
- } catch (UnsupportedBaseContextException e) {
- // Ignore this one
- }
- }
- }
-
- return ids;
- }
-
- @Override
- public Collection<String> getAllGroupIds( String principal, List<String> selectedRepos )
- throws RepositorySearchException
- {
- List<IndexingContext> indexContexts = getIndexingContexts( selectedRepos );
-
- if ( indexContexts == null || indexContexts.isEmpty() )
- {
- return Collections.emptyList();
- }
-
- try
- {
- Set<String> allGroupIds = new HashSet<>();
- for ( IndexingContext indexingContext : indexContexts )
- {
- allGroupIds.addAll( indexingContext.getAllGroups() );
- }
- return allGroupIds;
- }
- catch ( IOException e )
- {
- throw new RepositorySearchException( e.getMessage(), e );
- }
-
- }
-
- private SearchResults convertToSearchResults( FlatSearchResponse response, SearchResultLimits limits,
- List<? extends ArtifactInfoFilter> artifactInfoFilters,
- List<String> selectedRepos, boolean includePoms )
- throws RepositoryAdminException
- {
- SearchResults results = new SearchResults();
- Set<ArtifactInfo> artifactInfos = response.getResults();
-
- for ( ArtifactInfo artifactInfo : artifactInfos )
- {
- if ( StringUtils.equalsIgnoreCase( "pom", artifactInfo.getFileExtension() ) && !includePoms )
- {
- continue;
- }
- String id = SearchUtil.getHitId( artifactInfo.getGroupId(), //
- artifactInfo.getArtifactId(), //
- artifactInfo.getClassifier(), //
- artifactInfo.getPackaging() );
- Map<String, SearchResultHit> hitsMap = results.getHitsMap();
-
-
- if ( !applyArtifactInfoFilters( artifactInfo, artifactInfoFilters, hitsMap ) )
- {
- continue;
- }
-
- SearchResultHit hit = hitsMap.get( id );
- if ( hit != null )
- {
- if ( !hit.getVersions().contains( artifactInfo.getVersion() ) )
- {
- hit.addVersion( artifactInfo.getVersion() );
- }
- }
- else
- {
- hit = new SearchResultHit();
- hit.setArtifactId( artifactInfo.getArtifactId() );
- hit.setGroupId( artifactInfo.getGroupId() );
- hit.setRepositoryId( artifactInfo.getRepository() );
- hit.addVersion( artifactInfo.getVersion() );
- hit.setBundleExportPackage( artifactInfo.getBundleExportPackage() );
- hit.setBundleExportService( artifactInfo.getBundleExportService() );
- hit.setBundleSymbolicName( artifactInfo.getBundleSymbolicName() );
- hit.setBundleVersion( artifactInfo.getBundleVersion() );
- hit.setBundleDescription( artifactInfo.getBundleDescription() );
- hit.setBundleDocUrl( artifactInfo.getBundleDocUrl() );
- hit.setBundleRequireBundle( artifactInfo.getBundleRequireBundle() );
- hit.setBundleImportPackage( artifactInfo.getBundleImportPackage() );
- hit.setBundleLicense( artifactInfo.getBundleLicense() );
- hit.setBundleName( artifactInfo.getBundleName() );
- hit.setContext( artifactInfo.getContext() );
- hit.setGoals( artifactInfo.getGoals() );
- hit.setPrefix( artifactInfo.getPrefix() );
- hit.setPackaging( artifactInfo.getPackaging() );
- hit.setClassifier( artifactInfo.getClassifier() );
- hit.setFileExtension( artifactInfo.getFileExtension() );
- hit.setUrl( getBaseUrl( artifactInfo, selectedRepos ) );
- }
-
- results.addHit( id, hit );
- }
-
- results.setTotalHits( response.getTotalHitsCount() );
- results.setTotalHitsMapSize( results.getHitsMap().values().size() );
- results.setReturnedHitsCount( response.getReturnedHitsCount() );
- results.setLimits( limits );
-
- if ( limits == null || limits.getSelectedPage() == SearchResultLimits.ALL_PAGES )
- {
- return results;
- }
- else
- {
- return paginate( results );
- }
- }
-
- /**
- * calculate baseUrl without the context and base Archiva Url
- *
- * @param artifactInfo
- * @return
- */
- protected String getBaseUrl( ArtifactInfo artifactInfo, List<String> selectedRepos )
- throws RepositoryAdminException
- {
- StringBuilder sb = new StringBuilder();
- if ( StringUtils.startsWith( artifactInfo.getContext(), "remote-" ) )
- {
- // it's a remote index result we search a managed which proxying this remote and on which
- // current user has read karma
- String managedRepoId =
- getManagedRepoId( StringUtils.substringAfter( artifactInfo.getContext(), "remote-" ), selectedRepos );
- if ( managedRepoId != null )
- {
- sb.append( '/' ).append( managedRepoId );
- artifactInfo.setContext( managedRepoId );
- }
- }
- else
- {
- sb.append( '/' ).append( artifactInfo.getContext() );
- }
-
- sb.append( '/' ).append( StringUtils.replaceChars( artifactInfo.getGroupId(), '.', '/' ) );
- sb.append( '/' ).append( artifactInfo.getArtifactId() );
- sb.append( '/' ).append( artifactInfo.getVersion() );
- sb.append( '/' ).append( artifactInfo.getArtifactId() );
- sb.append( '-' ).append( artifactInfo.getVersion() );
- if ( StringUtils.isNotBlank( artifactInfo.getClassifier() ) )
- {
- sb.append( '-' ).append( artifactInfo.getClassifier() );
- }
- // maven-plugin packaging is a jar
- if ( StringUtils.equals( "maven-plugin", artifactInfo.getPackaging() ) )
- {
- sb.append( "jar" );
- }
- else
- {
- sb.append( '.' ).append( artifactInfo.getPackaging() );
- }
-
- return sb.toString();
- }
-
- /**
- * return a managed repo for a remote result
- *
- * @param remoteRepo
- * @param selectedRepos
- * @return
- * @throws RepositoryAdminException
- */
- private String getManagedRepoId( String remoteRepo, List<String> selectedRepos )
- throws RepositoryAdminException
- {
- Map<String, List<ProxyConnector>> proxyConnectorMap = proxyConnectorAdmin.getProxyConnectorAsMap();
- if ( proxyConnectorMap == null || proxyConnectorMap.isEmpty() )
- {
- return null;
- }
- if ( selectedRepos != null && !selectedRepos.isEmpty() )
- {
- for ( Map.Entry<String, List<ProxyConnector>> entry : proxyConnectorMap.entrySet() )
- {
- if ( selectedRepos.contains( entry.getKey() ) )
- {
- for ( ProxyConnector proxyConnector : entry.getValue() )
- {
- if ( StringUtils.equals( remoteRepo, proxyConnector.getTargetRepoId() ) )
- {
- return proxyConnector.getSourceRepoId();
- }
- }
- }
- }
- }
-
- // we don't find in search selected repos so return the first one
- for ( Map.Entry<String, List<ProxyConnector>> entry : proxyConnectorMap.entrySet() )
- {
-
- for ( ProxyConnector proxyConnector : entry.getValue() )
- {
- if ( StringUtils.equals( remoteRepo, proxyConnector.getTargetRepoId() ) )
- {
- return proxyConnector.getSourceRepoId();
- }
- }
-
- }
- return null;
- }
-
- private boolean applyArtifactInfoFilters( ArtifactInfo artifactInfo,
- List<? extends ArtifactInfoFilter> artifactInfoFilters,
- Map<String, SearchResultHit> currentResult )
- {
- if ( artifactInfoFilters == null || artifactInfoFilters.isEmpty() )
- {
- return true;
- }
-
- ArchivaArtifactModel artifact = new ArchivaArtifactModel();
- artifact.setArtifactId( artifactInfo.getArtifactId() );
- artifact.setClassifier( artifactInfo.getClassifier() );
- artifact.setGroupId( artifactInfo.getGroupId() );
- artifact.setRepositoryId( artifactInfo.getRepository() );
- artifact.setVersion( artifactInfo.getVersion() );
- artifact.setChecksumMD5( artifactInfo.getMd5() );
- artifact.setChecksumSHA1( artifactInfo.getSha1() );
- for ( ArtifactInfoFilter filter : artifactInfoFilters )
- {
- if ( !filter.addArtifactInResult( artifact, currentResult ) )
- {
- return false;
- }
- }
- return true;
- }
-
- protected SearchResults paginate( SearchResults results )
- {
- SearchResultLimits limits = results.getLimits();
- SearchResults paginated = new SearchResults();
-
- // ( limits.getPageSize() * ( Math.max( 1, limits.getSelectedPage() ) ) );
-
- int fetchCount = limits.getPageSize();
- int offset = ( limits.getSelectedPage() * limits.getPageSize() );
-
- if ( fetchCount > results.getTotalHits() )
- {
- fetchCount = results.getTotalHits();
- }
-
- // Goto offset.
- if ( offset < results.getTotalHits() )
- {
- // only process if the offset is within the hit count.
- for ( int i = 0; i < fetchCount; i++ )
- {
- // Stop fetching if we are past the total # of available hits.
- if ( offset + i >= results.getHits().size() )
- {
- break;
- }
-
- SearchResultHit hit = results.getHits().get( ( offset + i ) );
- if ( hit != null )
- {
- String id = SearchUtil.getHitId( hit.getGroupId(), hit.getArtifactId(), hit.getClassifier(),
- hit.getPackaging() );
- paginated.addHit( id, hit );
- }
- else
- {
- break;
- }
- }
- }
- paginated.setTotalHits( results.getTotalHits() );
- paginated.setReturnedHitsCount( paginated.getHits().size() );
- paginated.setTotalHitsMapSize( results.getTotalHitsMapSize() );
- paginated.setLimits( limits );
-
- return paginated;
- }
-
-
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.admin.model.beans.RepositoryGroup;
-import org.apache.archiva.scheduler.MergedRemoteIndexesScheduler;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.scheduling.TaskScheduler;
-import org.springframework.scheduling.support.CronTrigger;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import javax.inject.Named;
-import java.nio.file.Path;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ScheduledFuture;
-
-/**
- * @author Olivier Lamy
- * @since 2.0.0
- */
-@Service( "mergedRemoteIndexesScheduler#default" )
-public class DefaultMergedRemoteIndexesScheduler
- implements MergedRemoteIndexesScheduler
-{
-
- private Logger logger = LoggerFactory.getLogger( getClass() );
-
- @Inject
- @Named( value = "taskScheduler#mergeRemoteIndexes" )
- private TaskScheduler taskScheduler;
-
- @Inject
- private IndexMerger indexMerger;
-
- private Map<String, ScheduledFuture> scheduledFutureMap = new ConcurrentHashMap<>();
-
- @Override
- public void schedule( RepositoryGroup repositoryGroup, Path directory )
- {
- if ( StringUtils.isEmpty( repositoryGroup.getCronExpression() ) )
- {
- return;
- }
- CronTrigger cronTrigger = new CronTrigger( repositoryGroup.getCronExpression() );
-
- List<String> repositories = repositoryGroup.getRepositories();
-
- IndexMergerRequest indexMergerRequest =
- new IndexMergerRequest( repositories, true, repositoryGroup.getId(), repositoryGroup.getMergedIndexPath(),
- repositoryGroup.getMergedIndexTtl() ).mergedIndexDirectory( directory );
-
- MergedRemoteIndexesTaskRequest taskRequest =
- new MergedRemoteIndexesTaskRequest( indexMergerRequest, indexMerger );
-
- logger.info( "schedule merge remote index for group {} with cron {}", repositoryGroup.getId(),
- repositoryGroup.getCronExpression() );
-
- ScheduledFuture scheduledFuture =
- taskScheduler.schedule( new MergedRemoteIndexesTask( taskRequest ), cronTrigger );
- scheduledFutureMap.put( repositoryGroup.getId(), scheduledFuture );
- }
-
- @Override
- public void unschedule( RepositoryGroup repositoryGroup )
- {
- ScheduledFuture scheduledFuture = scheduledFutureMap.remove( repositoryGroup.getId() );
- if ( scheduledFuture != null )
- {
- scheduledFuture.cancel( true );
- }
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.index.context.IndexingContext;
-
-import java.util.Collection;
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M2
- */
-public interface IndexMerger
-{
- /**
- * @param indexMergerRequest
- * @return a temporary directory with a merge index (directory marked deleteOnExit)
- * @throws IndexMergerException
- */
- IndexingContext buildMergedIndex( IndexMergerRequest indexMergerRequest )
- throws IndexMergerException;
-
- void cleanTemporaryGroupIndex( TemporaryGroupIndex temporaryGroupIndex );
-
- Collection<TemporaryGroupIndex> getTemporaryGroupIndexes();
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M2
- */
-public class IndexMergerException
- extends Exception
-{
- public IndexMergerException( String message, Throwable t )
- {
- super( message, t );
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.nio.file.Path;
-import java.util.Collection;
-
-/**
- * @author Olivier Lamy
- */
-public class IndexMergerRequest
-{
- /**
- * repositories Ids to merge content
- */
- private Collection<String> repositoriesIds;
-
- /**
- * will generate a downloadable index
- */
- private boolean packIndex;
-
- /**
- * original groupId (repositoryGroup id)
- */
- private String groupId;
-
- private String mergedIndexPath = ".indexer";
-
- private int mergedIndexTtl;
-
- private Path mergedIndexDirectory;
-
- private boolean temporary;
-
- public IndexMergerRequest( Collection<String> repositoriesIds, boolean packIndex, String groupId )
- {
- this.repositoriesIds = repositoriesIds;
- this.packIndex = packIndex;
- this.groupId = groupId;
- }
-
- /**
- * @since 1.4-M4
- */
- public IndexMergerRequest( Collection<String> repositoriesIds, boolean packIndex, String groupId,
- String mergedIndexPath, int mergedIndexTtl )
- {
- this.repositoriesIds = repositoriesIds;
- this.packIndex = packIndex;
- this.groupId = groupId;
- this.mergedIndexPath = mergedIndexPath;
- this.mergedIndexTtl = mergedIndexTtl;
- }
-
- public Collection<String> getRepositoriesIds()
- {
- return repositoriesIds;
- }
-
- public void setRepositoriesIds( Collection<String> repositoriesIds )
- {
- this.repositoriesIds = repositoriesIds;
- }
-
- public boolean isPackIndex()
- {
- return packIndex;
- }
-
- public void setPackIndex( boolean packIndex )
- {
- this.packIndex = packIndex;
- }
-
- public String getGroupId()
- {
- return groupId;
- }
-
- public void setGroupId( String groupId )
- {
- this.groupId = groupId;
- }
-
- public String getMergedIndexPath()
- {
- return mergedIndexPath;
- }
-
- public void setMergedIndexPath( String mergedIndexPath )
- {
- this.mergedIndexPath = mergedIndexPath;
- }
-
- public int getMergedIndexTtl()
- {
- return mergedIndexTtl;
- }
-
- public void setMergedIndexTtl( int mergedIndexTtl )
- {
- this.mergedIndexTtl = mergedIndexTtl;
- }
-
- public Path getMergedIndexDirectory()
- {
- return mergedIndexDirectory;
- }
-
- public void setMergedIndexDirectory( Path mergedIndexDirectory )
- {
- this.mergedIndexDirectory = mergedIndexDirectory;
- }
-
- public IndexMergerRequest mergedIndexDirectory( Path mergedIndexDirectory )
- {
- this.mergedIndexDirectory = mergedIndexDirectory;
- return this;
- }
-
- public boolean isTemporary()
- {
- return temporary;
- }
-
- public void setTemporary( boolean temporary )
- {
- this.temporary = temporary;
- }
-
-
- public IndexMergerRequest temporary( boolean temporary )
- {
- this.temporary = temporary;
- return this;
- }
-
- @Override
- public String toString()
- {
- final StringBuilder sb = new StringBuilder( "IndexMergerRequest{" );
- sb.append( "repositoriesIds=" ).append( repositoriesIds );
- sb.append( ", packIndex=" ).append( packIndex );
- sb.append( ", groupId='" ).append( groupId ).append( '\'' );
- sb.append( ", mergedIndexPath='" ).append( mergedIndexPath ).append( '\'' );
- sb.append( ", mergedIndexTtl=" ).append( mergedIndexTtl );
- sb.append( ", mergedIndexDirectory=" ).append( mergedIndexDirectory );
- sb.append( ", temporary=" ).append( temporary );
- sb.append( '}' );
- return sb.toString();
- }
-
- @Override
- public boolean equals( Object o )
- {
- if ( this == o )
- {
- return true;
- }
- if ( o == null || getClass() != o.getClass() )
- {
- return false;
- }
-
- IndexMergerRequest that = (IndexMergerRequest) o;
-
- return groupId.equals( that.groupId );
- }
-
- @Override
- public int hashCode()
- {
- return groupId.hashCode();
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.index.context.IndexingContext;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author Olivier Lamy
- * @since 2.0.0
- */
-public class MergedRemoteIndexesTask
- implements Runnable
-{
-
- private Logger logger = LoggerFactory.getLogger( getClass() );
-
- private MergedRemoteIndexesTaskRequest mergedRemoteIndexesTaskRequest;
-
- public MergedRemoteIndexesTask( MergedRemoteIndexesTaskRequest mergedRemoteIndexesTaskRequest )
- {
- this.mergedRemoteIndexesTaskRequest = mergedRemoteIndexesTaskRequest;
- }
-
- @Override
- public void run()
- {
- try
- {
- this.execute();
- }
- catch ( IndexMergerException e )
- {
- logger.error( e.getMessage(), e );
- }
- }
-
- public MergedRemoteIndexesTaskResult execute()
- throws IndexMergerException
- {
- IndexMerger indexMerger = mergedRemoteIndexesTaskRequest.getIndexMerger();
-
- IndexingContext indexingContext =
- indexMerger.buildMergedIndex( mergedRemoteIndexesTaskRequest.getIndexMergerRequest() );
-
- return new MergedRemoteIndexesTaskResult( indexingContext );
- }
-
- @Override
- public boolean equals( Object o )
- {
- if ( this == o )
- {
- return true;
- }
- if ( !( o instanceof MergedRemoteIndexesTask ) )
- {
- return false;
- }
-
- MergedRemoteIndexesTask that = (MergedRemoteIndexesTask) o;
-
- return mergedRemoteIndexesTaskRequest.equals( that.mergedRemoteIndexesTaskRequest );
- }
-
- @Override
- public int hashCode()
- {
- return mergedRemoteIndexesTaskRequest.hashCode();
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/**
- * @author Olivier Lamy
- * @since 2.0.0
- */
-public class MergedRemoteIndexesTaskRequest
-{
- private IndexMergerRequest indexMergerRequest;
-
- private IndexMerger indexMerger;
-
- public MergedRemoteIndexesTaskRequest( IndexMergerRequest indexMergerRequest, IndexMerger indexMerger )
- {
- this.indexMergerRequest = indexMergerRequest;
- this.indexMerger = indexMerger;
- }
-
- public IndexMergerRequest getIndexMergerRequest()
- {
- return indexMergerRequest;
- }
-
- public void setIndexMergerRequest( IndexMergerRequest indexMergerRequest )
- {
- this.indexMergerRequest = indexMergerRequest;
- }
-
- public IndexMerger getIndexMerger()
- {
- return indexMerger;
- }
-
- public void setIndexMerger( IndexMerger indexMerger )
- {
- this.indexMerger = indexMerger;
- }
-
- @Override
- public boolean equals( Object o )
- {
- if ( this == o )
- {
- return true;
- }
- if ( o == null || getClass() != o.getClass() )
- {
- return false;
- }
-
- MergedRemoteIndexesTaskRequest that = (MergedRemoteIndexesTaskRequest) o;
-
- return indexMergerRequest.equals( that.indexMergerRequest );
- }
-
- @Override
- public int hashCode()
- {
- return indexMergerRequest.hashCode();
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.index.context.IndexingContext;
-
-/**
- * @author Olivier Lamy
- * @since 2.0.0
- */
-public class MergedRemoteIndexesTaskResult
-{
- private IndexingContext indexingContext;
-
- public MergedRemoteIndexesTaskResult( IndexingContext indexingContext )
- {
- this.indexingContext = indexingContext;
- }
-
- public IndexingContext getIndexingContext()
- {
- return indexingContext;
- }
-
- public void setIndexingContext( IndexingContext indexingContext )
- {
- this.indexingContext = indexingContext;
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.io.Serializable;
-import java.nio.file.Path;
-import java.util.Date;
-
-/**
- * @author Olivier Lamy
- */
-public class TemporaryGroupIndex
- implements Serializable
-{
- private long creationTime = new Date().getTime();
-
- private Path directory;
-
- private String indexId;
-
- private String groupId;
-
- private int mergedIndexTtl;
-
- public TemporaryGroupIndex(Path directory, String indexId, String groupId, int mergedIndexTtl)
- {
- this.directory = directory;
- this.indexId = indexId;
- this.groupId = groupId;
- this.mergedIndexTtl = mergedIndexTtl;
- }
-
- public long getCreationTime()
- {
- return creationTime;
- }
-
- public TemporaryGroupIndex setCreationTime( long creationTime )
- {
- this.creationTime = creationTime;
- return this;
- }
-
- public Path getDirectory()
- {
- return directory;
- }
-
- public TemporaryGroupIndex setDirectory( Path directory )
- {
- this.directory = directory;
- return this;
- }
-
- public String getIndexId()
- {
- return indexId;
- }
-
- public TemporaryGroupIndex setIndexId( String indexId )
- {
- this.indexId = indexId;
- return this;
- }
-
- public String getGroupId()
- {
- return groupId;
- }
-
- public void setGroupId( String groupId )
- {
- this.groupId = groupId;
- }
-
- public int getMergedIndexTtl() {
- return mergedIndexTtl;
- }
-
- public void setMergedIndexTtl(int mergedIndexTtl) {
- this.mergedIndexTtl = mergedIndexTtl;
- }
-
- @Override
- public int hashCode()
- {
- return Long.toString( creationTime ).hashCode();
- }
-
- @Override
- public boolean equals( Object o )
- {
- if ( this == o )
- {
- return true;
- }
- if ( !( o instanceof TemporaryGroupIndex ) )
- {
- return false;
- }
- return this.creationTime == ( (TemporaryGroupIndex) o ).creationTime;
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.merger;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.scheduling.annotation.Scheduled;
-import org.springframework.stereotype.Service;
-
-import javax.inject.Inject;
-import java.util.Date;
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M2
- */
-@Service
-public class TemporaryGroupIndexCleaner
-{
- private Logger log = LoggerFactory.getLogger( getClass() );
-
- @Inject
- private IndexMerger indexMerger;
-
-
- public TemporaryGroupIndexCleaner( )
- {
-
- }
-
- // 900000
- @Scheduled(fixedDelay = 900000)
- public void cleanTemporaryIndex()
- {
-
- indexMerger.getTemporaryGroupIndexes()
- .stream()
- .forEach( temporaryGroupIndex ->
- {
- // cleanup files older than the ttl
- if ( new Date().getTime() - temporaryGroupIndex.getCreationTime() >
- temporaryGroupIndex.getMergedIndexTtl() )
- {
- log.info( "cleanTemporaryIndex for groupId {}", temporaryGroupIndex.getGroupId() );
- indexMerger.cleanTemporaryGroupIndex( temporaryGroupIndex );
-
- }
- }
- );
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.search;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.model.ArchivaArtifactModel;
-import org.apache.commons.lang.StringUtils;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author Olivier Lamy
- */
-public class NoClassifierArtifactInfoFilter
- implements ArtifactInfoFilter
-{
- public static final NoClassifierArtifactInfoFilter INSTANCE = new NoClassifierArtifactInfoFilter();
-
- public static final List<? extends ArtifactInfoFilter> LIST = Arrays.asList( INSTANCE );
-
- @Override
- public boolean addArtifactInResult( ArchivaArtifactModel artifact, Map<String, SearchResultHit> currentResult )
- {
- return StringUtils.isBlank( artifact.getClassifier() );
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.util;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.commons.lang.StringUtils;
-
-/**
- * SearchUtil - utility class for search.
- */
-public class SearchUtil
-{
- public static String getHitId( String groupId, String artifactId, String classifier, String packaging )
- {
- return ( StringUtils.isBlank( groupId ) ? "" : StringUtils.trim( groupId ) ) + ":" //
- + ( StringUtils.isBlank( artifactId ) ? "" : StringUtils.trim( artifactId ) ) + ":" //
- + ( StringUtils.isBlank( classifier ) ? "" : StringUtils.trim( classifier ) ) + ":" //
- + ( StringUtils.isBlank( packaging ) ? "" : StringUtils.trim( packaging ) );
- }
-}
+++ /dev/null
-<?xml version="1.0"?>
-
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-<beans xmlns="http://www.springframework.org/schema/beans"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xmlns:context="http://www.springframework.org/schema/context"
- xsi:schemaLocation="http://www.springframework.org/schema/beans
- http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
- http://www.springframework.org/schema/context
- http://www.springframework.org/schema/context/spring-context-3.0.xsd"
- default-lazy-init="false">
-
- <context:annotation-config/>
- <context:component-scan base-package="org.apache.archiva.indexer.maven,org.apache.maven.index"/>
-
-
- <bean name="taskScheduler#mergeRemoteIndexes"
- class="org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler">
- <property name="poolSize" value="4"/>
- <property name="threadGroupName" value="mergeRemoteIndexes"/>
- </bean>
-
-</beans>
\ No newline at end of file
+++ /dev/null
-package org.apache.archiva.indexer.maven;
-
-/*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements. See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership. The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License. You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied. See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*/
-
-import org.apache.archiva.common.utils.FileUtils;
-import org.apache.archiva.indexer.ArchivaIndexingContext;
-import org.apache.archiva.indexer.IndexCreationFailedException;
-import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.repository.RepositoryType;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.repository.features.RemoteIndexFeature;
-import org.apache.archiva.repository.maven2.MavenManagedRepository;
-import org.apache.archiva.repository.maven2.MavenRemoteRepository;
-import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.apache.maven.index.MAVEN;
-import org.apache.maven.index.QueryCreator;
-import org.apache.maven.index.context.IndexingContext;
-import org.apache.maven.index.expr.UserInputSearchExpression;
-import org.apache.maven.index_shaded.lucene.search.BooleanClause;
-import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
-import org.apache.maven.index_shaded.lucene.search.Query;
-import org.junit.After;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.test.context.ContextConfiguration;
-
-import javax.inject.Inject;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.stream.Collectors;
-
-import static org.junit.Assert.*;
-
-@RunWith( ArchivaSpringJUnit4ClassRunner.class )
-@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
-public class MavenIndexManagerTest {
-
- @Inject
- RepositoryRegistry repositoryRegistry;
-
-
- private Path indexPath;
- private MavenManagedRepository repository;
- private ArchivaIndexingContext ctx;
- private MavenRemoteRepository repositoryRemote;
-
- @Inject
- MavenIndexManager mavenIndexManager;
-
- @Inject
- QueryCreator queryCreator;
-
-
- @After
- public void tearDown() {
- repositoryRegistry.destroy();
- if (ctx!=null) {
- try {
- ctx.close(true);
- } catch (IOException e) {
- //
- }
- }
- if (indexPath!=null && Files.exists(indexPath)) {
- FileUtils.deleteQuietly(indexPath);
- }
-
- }
-
- @Test
- public void pack() throws Exception {
- createTestContext();
- Path destDir = repository.getLocalPath().resolve("org/apache/archiva/archiva-webapp/1.0");
- Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-webapp/1.0");
- org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(),destDir.toFile());
- mavenIndexManager.scan(ctx);
- mavenIndexManager.pack(ctx);
- assertTrue(Files.list(indexPath).filter(path -> {
- try {
- return path.getFileName().toString().endsWith(".gz") && Files.size(path) > 0;
- } catch (IOException e) {
- return false;
- }
- }).findAny().isPresent());
- }
-
- @Test
- public void scan() throws Exception {
- createTestContext();
- Path destDir = repository.getLocalPath().resolve("org/apache/archiva/archiva-webapp/1.0");
- Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-webapp/1.0");
- org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(),destDir.toFile());
- mavenIndexManager.scan(ctx);
-
- IndexingContext mvnCtx = mavenIndexManager.getMvnContext(ctx);
- String term = "org.apache.archiva";
- Query q = new BooleanQuery.Builder().add( queryCreator.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ),
- BooleanClause.Occur.SHOULD ).build();
- assertEquals(4, mvnCtx.acquireIndexSearcher().count(q));
- }
-
- /*
- * Does only a index update via file uri, no HTTP uri
- */
- @Test
- public void update() throws Exception {
- createTestContext();
- mavenIndexManager.pack(ctx);
- ctx.close(false);
- createTestContextForRemote();
- mavenIndexManager.update(ctx, true);
- }
-
- @Test
- public void addArtifactsToIndex() throws Exception {
-
- ArchivaIndexingContext ctx = createTestContext();
- Path destDir = repository.getLocalPath().resolve("org/apache/archiva/archiva-search/1.0");
- Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-search/1.0");
- org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(), destDir.toFile());
- List<URI> uriList = new ArrayList<>();
- uriList.add(destDir.resolve("archiva-search-1.0.jar").toUri());
- uriList.add(destDir.resolve("archiva-search-1.0-sources.jar").toUri());
- mavenIndexManager.addArtifactsToIndex(ctx, uriList);
-
- IndexingContext mvnCtx = mavenIndexManager.getMvnContext(ctx);
- String term = "org.apache.archiva";
- Query q = new BooleanQuery.Builder().add( queryCreator.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ),
- BooleanClause.Occur.SHOULD ).build();
- assertEquals(2, mvnCtx.acquireIndexSearcher().count(q));
- }
-
- @Test
- public void removeArtifactsFromIndex() throws Exception {
- ArchivaIndexingContext ctx = createTestContext();
- Path destDir = repository.getLocalPath().resolve("org/apache/archiva/archiva-search/1.0");
- Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-search/1.0");
- org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(), destDir.toFile());
- List<URI> uriList = new ArrayList<>();
- uriList.add(destDir.resolve("archiva-search-1.0.jar").toUri());
- uriList.add(destDir.resolve("archiva-search-1.0-sources.jar").toUri());
- mavenIndexManager.addArtifactsToIndex(ctx, uriList);
-
- IndexingContext mvnCtx = mavenIndexManager.getMvnContext(ctx);
- String term = "org.apache.archiva";
- Query q = new BooleanQuery.Builder().add( queryCreator.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ),
- BooleanClause.Occur.SHOULD ).build();
- assertEquals(2, mvnCtx.acquireIndexSearcher().count(q));
- uriList.remove(0);
- mavenIndexManager.removeArtifactsFromIndex(ctx, uriList);
- assertEquals(1, mvnCtx.acquireIndexSearcher().count(q));
- }
-
- @Test
- public void supportsRepository() throws Exception {
- assertTrue(mavenIndexManager.supportsRepository(RepositoryType.MAVEN));
- assertFalse(mavenIndexManager.supportsRepository(RepositoryType.NPM));
- }
-
- private ArchivaIndexingContext createTestContext() throws URISyntaxException, IndexCreationFailedException, IOException {
- indexPath = Paths.get("target/repositories/test-repo/.index-test");
- FileUtils.deleteDirectory(indexPath);
- repository = new MavenManagedRepository("test-repo", "Test Repo", Paths.get("target/repositories"));
- repository.setLocation(new URI("test-repo"));
- IndexCreationFeature icf = repository.getFeature(IndexCreationFeature.class).get();
- icf.setIndexPath(new URI(".index-test"));
- ctx = mavenIndexManager.createContext(repository);
- return ctx;
- }
-
- private ArchivaIndexingContext createTestContextForRemote() throws URISyntaxException, IndexCreationFailedException, IOException {
- indexPath = Paths.get("target/repositories/test-repo/.index-test");
- Path repoPath = Paths.get("target/repositories").toAbsolutePath();
- repositoryRemote = new MavenRemoteRepository("test-repo", "Test Repo", repoPath);
- repositoryRemote.setLocation(repoPath.resolve("test-repo").toUri());
- RemoteIndexFeature icf = repositoryRemote.getFeature(RemoteIndexFeature.class).get();
- icf.setIndexUri(new URI(".index-test"));
- ctx = mavenIndexManager.createContext(repositoryRemote);
- return ctx;
- }
-
- @Test
- public void createContext() throws Exception {
- ArchivaIndexingContext ctx = createTestContext();
- assertNotNull(ctx);
- assertEquals(repository, ctx.getRepository());
- assertEquals("test-repo", ctx.getId());
- assertEquals(indexPath.toAbsolutePath(), Paths.get(ctx.getPath()).toAbsolutePath());
- assertTrue(Files.exists(indexPath));
- List<Path> li = Files.list(indexPath).collect(Collectors.toList());
- assertTrue(li.size()>0);
-
- }
-
-}
\ No newline at end of file
+++ /dev/null
-package org.apache.archiva.indexer.maven.search;
-
-/*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements. See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership. The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License. You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied. See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*/
-
-import junit.framework.TestCase;
-import org.apache.archiva.admin.repository.proxyconnector.DefaultProxyConnectorAdmin;
-import org.apache.archiva.common.utils.FileUtils;
-import org.apache.archiva.configuration.ArchivaConfiguration;
-import org.apache.archiva.configuration.Configuration;
-import org.apache.archiva.configuration.ConfigurationListener;
-import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.archiva.indexer.search.SearchResultHit;
-import org.apache.archiva.indexer.search.SearchResults;
-import org.apache.archiva.repository.Repository;
-import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.repository.features.IndexCreationFeature;
-import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.apache.commons.lang.SystemUtils;
-import org.apache.maven.index.ArtifactContext;
-import org.apache.maven.index.ArtifactContextProducer;
-import org.apache.maven.index.ArtifactScanningListener;
-import org.apache.maven.index.DefaultScannerListener;
-import org.apache.maven.index.Indexer;
-import org.apache.maven.index.IndexerEngine;
-import org.apache.maven.index.QueryCreator;
-import org.apache.maven.index.Scanner;
-import org.apache.maven.index.ScanningRequest;
-import org.apache.maven.index.ScanningResult;
-import org.apache.maven.index.context.IndexingContext;
-import org.easymock.EasyMock;
-import org.easymock.IMocksControl;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.runner.RunWith;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.test.context.ContextConfiguration;
-
-import javax.inject.Inject;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.List;
-import java.util.Locale;
-
-/**
- * @author Olivier Lamy
- */
-@RunWith( ArchivaSpringJUnit4ClassRunner.class )
-@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
-public abstract class AbstractMavenRepositorySearch
- extends TestCase
-{
-
- protected Logger log = LoggerFactory.getLogger( getClass() );
-
- public static String TEST_REPO_1 = "maven-search-test-repo";
-
- public static String TEST_REPO_2 = "maven-search-test-repo-2";
-
-
- public static String REPO_RELEASE = "repo-release";
-
- MavenRepositorySearch search;
-
- ArchivaConfiguration archivaConfig;
-
- @Inject
- ArtifactContextProducer artifactContextProducer;
-
- @Inject
- RepositoryRegistry repositoryRegistry;
-
- @Inject
- private IndexerEngine indexerEngine;
-
- IMocksControl archivaConfigControl;
-
- Configuration config;
-
- @Inject
- Indexer indexer;
-
- @Inject
- Scanner scanner;
-
- @Inject
- QueryCreator queryCreator;
-
- @Before
- @Override
- public void setUp()
- throws Exception
- {
- super.setUp();
-
- FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" ) );
- assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" )) );
-
- FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" ) );
- assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" )) );
-
- archivaConfigControl = EasyMock.createControl();
-
- archivaConfig = archivaConfigControl.createMock( ArchivaConfiguration.class );
-
- DefaultProxyConnectorAdmin defaultProxyConnectorAdmin = new DefaultProxyConnectorAdmin();
- defaultProxyConnectorAdmin.setArchivaConfiguration( archivaConfig );
- repositoryRegistry.setArchivaConfiguration( archivaConfig );
-
- search = new MavenRepositorySearch( indexer, repositoryRegistry, defaultProxyConnectorAdmin,
- queryCreator );
-
- assertNotNull( repositoryRegistry );
-
- config = new Configuration();
- config.addManagedRepository( createRepositoryConfig( TEST_REPO_1 ) );
- config.addManagedRepository( createRepositoryConfig( TEST_REPO_2 ) );
- config.addManagedRepository( createRepositoryConfig( REPO_RELEASE ) );
-
- archivaConfig.addListener( EasyMock.anyObject( ConfigurationListener.class ) );
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration() ).andReturn(config).anyTimes();
- archivaConfig.save(EasyMock.anyObject(Configuration.class));
- EasyMock.expectLastCall().anyTimes();
- archivaConfigControl.replay();
- repositoryRegistry.reload();
- archivaConfigControl.reset();
- }
-
- @After
- @Override
- public void tearDown()
- throws Exception
- {
- archivaConfigControl.reset();
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration() ).andReturn(config).anyTimes();
- archivaConfig.save(EasyMock.anyObject(Configuration.class));
- EasyMock.expectLastCall().anyTimes();
- archivaConfigControl.replay();
- repositoryRegistry.removeRepository(TEST_REPO_1);
- repositoryRegistry.removeRepository(TEST_REPO_2);
- repositoryRegistry.removeRepository(REPO_RELEASE);
- repositoryRegistry.destroy();
- FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 ) );
- assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 )) );
-
- FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 ) );
- assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 )) );
-
- super.tearDown();
- }
-
- protected ManagedRepositoryConfiguration createRepositoryConfig( String repository )
- {
- ManagedRepositoryConfiguration repositoryConfig = new ManagedRepositoryConfiguration();
- repositoryConfig.setId( repository );
- repositoryConfig.setLocation( org.apache.archiva.common.utils.FileUtils.getBasedir() + "/target/repos/" + repository );
- Path f = Paths.get( repositoryConfig.getLocation() );
- if ( !Files.exists(f) )
- {
- try
- {
- Files.createDirectories( f );
- }
- catch ( IOException e )
- {
- log.error("Could not create directories for {}", f);
- }
- }
- repositoryConfig.setLayout( "default" );
- repositoryConfig.setName( repository );
- repositoryConfig.setScanned( true );
- repositoryConfig.setSnapshots( false );
- repositoryConfig.setReleases( true );
- repositoryConfig.setIndexDir(".indexer");
-
- return repositoryConfig;
- }
-
- protected void createIndex( String repository, List<Path> filesToBeIndexed, boolean scan) throws Exception {
- createIndex(repository, filesToBeIndexed, scan, null);
- }
-
- protected void createIndex( String repository, List<Path> filesToBeIndexed, boolean scan, Path indexDir)
- throws Exception
- {
- Repository rRepo = repositoryRegistry.getRepository(repository);
- IndexCreationFeature icf = rRepo.getFeature(IndexCreationFeature.class).get();
-
-
- IndexingContext context = rRepo.getIndexingContext().getBaseContext(IndexingContext.class);
-
- if ( context != null )
- {
- context.close(true);
- }
-
- Path repoDir = Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir()).resolve("target").resolve("repos").resolve(repository);
-
- Path indexerDirectory = repoDir.resolve(".indexer" );
-
- if ( Files.exists(indexerDirectory) )
- {
- FileUtils.deleteDirectory( indexerDirectory );
- }
-
- assertFalse( Files.exists(indexerDirectory) );
-
- Path lockFile = repoDir.resolve(".indexer/write.lock" );
- if ( Files.exists(lockFile) )
- {
- Files.delete(lockFile);
- }
-
- assertFalse( Files.exists(lockFile) );
-
- Path repo = Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + repository );
- assertTrue( Files.exists(repo) );
- org.apache.commons.io.FileUtils.copyDirectory(repo.toFile(), repoDir.toFile());
-
- if (indexDir==null) {
- Path indexDirectory =
- Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/index/test-" + Long.toString(System.currentTimeMillis()));
- indexDirectory.toFile().deleteOnExit();
- FileUtils.deleteDirectory(indexDirectory);
- icf.setIndexPath(indexDirectory.toUri());
- } else {
-
- icf.setIndexPath(indexDir.toUri());
- }
- context = rRepo.getIndexingContext().getBaseContext(IndexingContext.class);
-
-
- // minimize datas in memory
-// context.getIndexWriter().setMaxBufferedDocs( -1 );
-// context.getIndexWriter().setRAMBufferSizeMB( 1 );
- for ( Path artifactFile : filesToBeIndexed )
- {
- assertTrue( "file not exists " + artifactFile, Files.exists(artifactFile) );
- ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile.toFile() );
-
- if ( artifactFile.toString().endsWith( ".pom" ) )
- {
- ac.getArtifactInfo().setFileExtension( "pom" );
- ac.getArtifactInfo().setPackaging( "pom" );
- ac.getArtifactInfo().setClassifier( "pom" );
- }
- indexer.addArtifactToIndex( ac, context );
- context.updateTimestamp( true );
- }
-
- if ( scan )
- {
- DefaultScannerListener listener = new DefaultScannerListener( context, indexerEngine, true, new ArtifactScanListener());
- ScanningRequest req = new ScanningRequest(context, listener );
- scanner.scan( req );
- context.commit();
- }
- // force flushing
- context.commit();
- // context.getIndexWriter().commit();
- context.setSearchable( true );
-
- }
-
- static class ArtifactScanListener
- implements ArtifactScanningListener
- {
- protected Logger log = LoggerFactory.getLogger( getClass() );
-
- @Override
- public void scanningStarted( IndexingContext ctx )
- {
- //
- }
-
- @Override
- public void scanningFinished( IndexingContext ctx, ScanningResult result )
- {
- // no op
- }
-
- @Override
- public void artifactError( ArtifactContext ac, Exception e )
- {
- log.debug( "artifactError {}", ac.getArtifact().getPath(), e );
- }
-
- @Override
- public void artifactDiscovered( ArtifactContext ac )
- {
- log.debug( "artifactDiscovered {}:{}", //
- ac.getArtifact() == null ? "" : ac.getArtifact().getPath(), //
- ac.getArtifact() == null ? "" : ac.getArtifactInfo() );
- }
- }
-
- public String niceDisplay( SearchResults searchResults )
- throws Exception
- {
- StringBuilder sb = new StringBuilder();
- for ( SearchResultHit hit : searchResults.getHits() )
- {
- sb.append( hit.toString() ).append( SystemUtils.LINE_SEPARATOR );
- }
- return sb.toString();
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven.search;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.indexer.search.SearchFields;
-import org.apache.archiva.indexer.search.SearchResultHit;
-import org.apache.archiva.indexer.search.SearchResults;
-import org.apache.archiva.repository.RepositoryRegistry;
-import org.easymock.EasyMock;
-import org.junit.After;
-import org.junit.Test;
-
-import javax.inject.Inject;
-import java.nio.file.Path;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Locale;
-
-/**
- * @author Olivier Lamy
- */
-public class MavenRepositorySearchOSGITest
- extends AbstractMavenRepositorySearch
-{
-
- @Inject
- RepositoryRegistry repositoryRegistry;
-
- @After
- @Override
- public void tearDown() throws Exception {
- super.tearDown();
- repositoryRegistry.destroy();
- }
-
- @Test
- public void searchFelixWithSymbolicName()
- throws Exception
- {
-
- createIndex( TEST_REPO_1, Collections.<Path>emptyList(), true );
-
- List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
-
- // search artifactId
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchFields searchFields = new SearchFields();
- searchFields.setBundleSymbolicName( "org.apache.felix.bundlerepository" );
- searchFields.setBundleVersion( "1.6.6" );
- searchFields.setRepositories( selectedRepos );
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 1, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "org.apache.felix", hit.getGroupId() );
- assertEquals( "org.apache.felix.bundlerepository", hit.getArtifactId() );
- assertEquals( "1.6.6", hit.getVersions().get( 0 ) );
-
- assertEquals( "org.apache.felix.bundlerepository;uses:=\"org.osgi.framework\";version=\"2.0\"",
- hit.getBundleExportPackage() );
- assertEquals( "org.apache.felix.bundlerepository.RepositoryAdmin,org.osgi.service.obr.RepositoryAdmin",
- hit.getBundleExportService() );
- assertEquals( "org.apache.felix.bundlerepository", hit.getBundleSymbolicName() );
- assertEquals( "1.6.6", hit.getBundleVersion() );
- }
-
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven.search;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import junit.framework.TestCase;
-import org.apache.archiva.indexer.search.SearchResultHit;
-import org.apache.archiva.indexer.search.SearchResultLimits;
-import org.apache.archiva.indexer.search.SearchResults;
-import org.apache.archiva.indexer.util.SearchUtil;
-import org.apache.archiva.repository.RepositoryRegistry;
-import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.junit.After;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.test.context.ContextConfiguration;
-
-import java.util.Arrays;
-
-/**
- * @author Olivier Lamy
- */
-@RunWith( ArchivaSpringJUnit4ClassRunner.class )
-@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
-public class MavenRepositorySearchPaginateTest
- extends TestCase
-{
-
- @Autowired
- RepositoryRegistry repositoryRegistry;
-
- @After
- public void endTests() {
- assert repositoryRegistry!=null;
- repositoryRegistry.destroy();
- }
-
- @Test
- public void nonPaginatedResult()
- throws Exception
- {
- MavenRepositorySearch search = new MavenRepositorySearch();
-
- SearchResults searchResults = build( 10, new SearchResultLimits( 0 ) );
-
- searchResults = search.paginate( searchResults );
-
- assertEquals( 10, searchResults.getReturnedHitsCount() );
-
- }
-
- @Test
- public void nonPaginatedHugeResult()
- throws Exception
- {
- MavenRepositorySearch search = new MavenRepositorySearch();
-
- SearchResults origSearchResults = build( 63, new SearchResultLimits( 0 ) );
-
- SearchResults searchResults = search.paginate( origSearchResults );
-
- assertEquals( 30, searchResults.getReturnedHitsCount() );
-
- origSearchResults = build( 63, new SearchResultLimits( 1 ) );
-
- searchResults = search.paginate( origSearchResults );
-
- assertEquals( 30, searchResults.getReturnedHitsCount() );
-
- }
-
- @Test
- public void paginatedResult()
- throws Exception
- {
- MavenRepositorySearch search = new MavenRepositorySearch();
-
- SearchResults searchResults = build( 32, new SearchResultLimits( 1 ) );
-
- searchResults = search.paginate( searchResults );
-
- assertEquals( 2, searchResults.getReturnedHitsCount() );
-
- }
-
-
- SearchResults build( int number, SearchResultLimits limits )
- {
- SearchResults searchResults = new SearchResults();
- searchResults.setLimits( limits );
- for ( int i = 0; i < number; i++ )
- {
- SearchResultHit hit = new SearchResultHit();
- hit.setGroupId( "commons-foo" );
- hit.setArtifactId( "commons-bar-" + i );
- hit.setPackaging( "jar" );
- hit.setVersions( Arrays.asList( "1.0" ) );
- String id =
- SearchUtil.getHitId( hit.getGroupId(), hit.getArtifactId(), hit.getClassifier(), hit.getPackaging() );
- searchResults.addHit( id, hit );
- }
-
- searchResults.setTotalHits( number );
- return searchResults;
-
- }
-}
+++ /dev/null
-package org.apache.archiva.indexer.maven.search;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.indexer.search.RepositorySearchException;
-import org.apache.archiva.indexer.search.SearchFields;
-import org.apache.archiva.indexer.search.SearchResultHit;
-import org.apache.archiva.indexer.search.SearchResultLimits;
-import org.apache.archiva.indexer.search.SearchResults;
-import org.apache.archiva.indexer.util.SearchUtil;
-import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
-import org.apache.maven.index_shaded.lucene.index.IndexUpgrader;
-import org.codehaus.plexus.util.FileUtils;
-import org.easymock.EasyMock;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.test.context.ContextConfiguration;
-
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Locale;
-
-
-@RunWith( ArchivaSpringJUnit4ClassRunner.class )
-@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
-public class MavenRepositorySearchTest
- extends AbstractMavenRepositorySearch
-{
-
-
- private void createSimpleIndex( boolean scan )
- throws Exception
- {
- List<Path> files = new ArrayList<>();
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos", TEST_REPO_1,
- "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos", TEST_REPO_1,
- "/org/apache/archiva/archiva-test/1.0/archiva-test-1.0.jar" ));
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos", TEST_REPO_1,
- "org/apache/archiva/archiva-test/2.0/archiva-test-2.0.jar" ));
-
- createIndex( TEST_REPO_1, files, scan );
- }
-
- private void createIndexContainingMoreArtifacts( boolean scan )
- throws Exception
- {
- List<Path> files = new ArrayList<>();
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-test/1.0/archiva-test-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-test/2.0/archiva-test-2.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-webapp/1.0/archiva-webapp-1.0.war" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
- "target/repos/" + TEST_REPO_1 + "/com/artifactid-numeric/1.0/artifactid-numeric-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/com/artifactid-numeric123/1.0/artifactid-numeric123-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
- "target/repos/" + TEST_REPO_1 + "/com/classname-search/1.0/classname-search-1.0.jar" ) );
-
- createIndex( TEST_REPO_1, files, scan );
- }
-
- private void createIndexContainingMultipleArtifactsSameVersion( boolean scan )
- throws Exception
- {
- List<Path> files = new ArrayList<>();
-
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
-
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.pom" ) );
-
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0-sources.jar" ) );
-
- createIndex( TEST_REPO_1, files, scan );
- }
-
- @Test
- public void testQuickSearch()
- throws Exception
- {
- createSimpleIndex( false );
-
- List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
-
- // search artifactId
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
-
- SearchResultHit hit =
- results.getSearchResultHit( SearchUtil.getHitId( "org.apache.archiva", "archiva-search", null, "jar" ) );
- assertNotNull( "hit null in result " + results.getHits(), hit );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-search", hit.getArtifactId() );
- assertEquals( "1.0", hit.getVersions().get( 0 ) );
-
- archivaConfigControl.reset();
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- // search groupId
- archivaConfigControl.replay();
-
- results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( "total hints not 3", 3, results.getTotalHits() );
-
- //TODO: search for class & package names
- }
-
- @Test
- public void testQuickSearchNotWithClassifier()
- throws Exception
- {
- createSimpleIndex( true );
-
- List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
-
- // search artifactId
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
-
- SearchResultHit hit =
- results.getSearchResultHit( SearchUtil.getHitId( "org.apache.archiva", "archiva-search", null, "jar" ) );
- assertNotNull( "hit null in result " + results.getHits(), hit );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-search", hit.getArtifactId() );
- assertEquals( "1.0", hit.getVersions().get( 0 ) );
-
- archivaConfigControl.reset();
-
- // search groupId
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- results = search.search( "user", selectedRepos, "archiva-search", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( "total hints not 3 hits " + results.getHits(), 3, results.getTotalHits() );
-
- //TODO: search for class & package names
- }
-
- @Test
- public void testQuickSearchMultipleArtifactsSameVersion()
- throws Exception
- {
- createIndexContainingMultipleArtifactsSameVersion( false );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- // search artifactId
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 3, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-search", hit.getArtifactId() );
- assertEquals( "1.0", hit.getVersions().get( 0 ) );
-
- //only 1 version of 1.0 is retrieved
- assertEquals( 1, hit.getVersions().size() );
- }
-
- @Test
- public void testMultipleArtifactsSameVersionWithClassifier()
- throws Exception
- {
- createIndexContainingMultipleArtifactsSameVersion( true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- // search artifactId
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.apache.archiva" );
- searchFields.setArtifactId( "archiva-search" );
- searchFields.setClassifier( "sources" );
- searchFields.setRepositories( selectedRepos );
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 1, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-search", hit.getArtifactId() );
- assertEquals( "1.0", hit.getVersions().get( 0 ) );
-
- //only 1 version of 1.0 is retrieved
- assertEquals( 1, hit.getVersions().size() );
- }
-
- // search for existing artifact using multiple keywords
- @Test
- public void testQuickSearchWithMultipleKeywords()
- throws Exception
- {
- createSimpleIndex( false );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "archiva search", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 1, results.getTotalHits() );
- }
-
- @Test
- public void testQuickSearchWithPagination()
- throws Exception
- {
- createSimpleIndex( true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- // page 1
- SearchResultLimits limits = new SearchResultLimits( 0 );
- limits.setPageSize( 1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "org", limits, Collections.emptyList() );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 1, results.getHits().size() );
- assertEquals( "total hits not 9 for page1 " + results, 9, results.getTotalHits() );
- assertEquals( "returned hits not 1 for page1 " + results, 1, results.getReturnedHitsCount() );
- assertEquals( limits, results.getLimits() );
-
- archivaConfigControl.reset();
-
- // page 2
- limits = new SearchResultLimits( 1 );
- limits.setPageSize( 1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- results = search.search( "user", selectedRepos, "org", limits, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
-
- assertEquals( "hits not 1", 1, results.getHits().size() );
- assertEquals( "total hits not 9 for page 2 " + results, 9, results.getTotalHits() );
- assertEquals( "returned hits not 1 for page2 " + results, 1, results.getReturnedHitsCount() );
- assertEquals( limits, results.getLimits() );
- }
-
- @Test
- public void testArtifactFoundInMultipleRepositories()
- throws Exception
- {
- createSimpleIndex( true );
-
- List<Path> files = new ArrayList<>();
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
- + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
- + "/org/apache/archiva/archiva-search/1.1/archiva-search-1.1.jar" ) );
- createIndex( TEST_REPO_2, files, false );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
- selectedRepos.add( TEST_REPO_2 );
-
- config.addManagedRepository( createRepositoryConfig( TEST_REPO_2 ) );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- // wait lucene flush.....
- Thread.sleep( 2000 );
-
- SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
-
- SearchResultHit hit =
- results.getSearchResultHit( SearchUtil.getHitId( "org.apache.archiva", "archiva-search", null, "jar" ) );
- assertNotNull(hit);
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-search", hit.getArtifactId() );
- assertEquals( "not 2 version for hit " + hit + "::" + niceDisplay( results ), 2, hit.getVersions().size() );
- assertTrue( hit.getVersions().contains( "1.0" ) );
- assertTrue( hit.getVersions().contains( "1.1" ) );
-
- archivaConfigControl.reset();
-
- // TODO: [BROWSE] in artifact info from browse, display all the repositories where the artifact is found
- }
-
- @Test
- public void testNoMatchFound()
- throws Exception
- {
- createSimpleIndex( false );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "dfghdfkweriuasndsaie", null, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 0, results.getTotalHits() );
- }
-
- @Test
- public void testNoIndexFound()
- throws Exception
- {
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
- assertNotNull( results );
- assertEquals( 0, results.getTotalHits() );
-
- archivaConfigControl.verify();
- }
-
- @Test
- public void testRepositoryNotFound()
- throws Exception
- {
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( "non-existing-repo" );
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
- assertNotNull( results );
- assertEquals( 0, results.getTotalHits() );
-
- archivaConfigControl.verify();
- }
-
- @Test
- public void testSearchWithinSearchResults()
- throws Exception
- {
- createSimpleIndex( true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- List<String> previousSearchTerms = new ArrayList<>();
- previousSearchTerms.add( "archiva-test" );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", selectedRepos, "1.0", null, previousSearchTerms );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( "total hints not 1", 1, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-test", hit.getArtifactId() );
- assertEquals( "versions not 1", 1, hit.getVersions().size() );
- assertEquals( "1.0", hit.getVersions().get( 0 ) );
- }
-
- // tests for advanced search
- @Test
- public void testAdvancedSearch()
- throws Exception
- {
- List<Path> files = new ArrayList<>();
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
- + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
- + "/org/apache/archiva/archiva-search/1.1/archiva-search-1.1.jar" ) );
- createIndex( TEST_REPO_2, files, false );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_2 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.apache.archiva" );
- searchFields.setVersion( "1.0" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 1, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-search", hit.getArtifactId() );
- assertEquals( "1.0", hit.getVersions().get( 0 ) );
- }
-
- @Test
- public void testAdvancedSearchWithPagination()
- throws Exception
- {
- createIndexContainingMoreArtifacts( false );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.apache.archiva" );
- searchFields.setRepositories( selectedRepos );
-
- // page 1
-
- SearchResultLimits limits = new SearchResultLimits( 0 );
- limits.setPageSize( 1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, limits );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 4, results.getTotalHits() );
- assertEquals( 1, results.getHits().size() );
-
- // page 2
- archivaConfigControl.reset();
-
- limits = new SearchResultLimits( 1 );
- limits.setPageSize( 1 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- results = search.search( "user", searchFields, limits );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 4, results.getTotalHits() );
- assertEquals( 1, results.getHits().size() );
- }
-
- // MRM-981 - artifactIds with numeric characters aren't found in advanced search
- @Test
- public void testAdvancedSearchArtifactIdHasNumericChar()
- throws Exception
- {
- List<Path> files = new ArrayList<>();
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
- "target/repos/" + TEST_REPO_1 + "/com/artifactid-numeric/1.0/artifactid-numeric-1.0.jar" ) );
- files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
- + "/com/artifactid-numeric123/1.0/artifactid-numeric123-1.0.jar" ) );
- createIndex( TEST_REPO_1, files, true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setArtifactId( "artifactid-numeric" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 2, results.getTotalHits() );
- }
-
- @Test
- public void testAdvancedSearchNoRepositoriesConfigured()
- throws Exception
- {
- SearchFields searchFields = new SearchFields();
- searchFields.setArtifactId( "archiva" );
- searchFields.setRepositories( null );
-
- try
- {
- search.search( "user", searchFields, null );
- fail( "A RepositorySearchExcecption should have been thrown." );
- }
- catch ( RepositorySearchException e )
- {
- assertEquals( "Repositories cannot be null.", e.getMessage() );
- }
- }
-
- @Test
- public void testAdvancedSearchSearchFieldsAreNull()
- throws Exception
- {
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setRepositories( selectedRepos );
-
- try
- {
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- fail( "A RepositorySearchExcecption should have been thrown." );
- }
- catch ( RepositorySearchException e )
- {
- assertEquals( "No search fields set.", e.getMessage() );
- }
- }
-
- @Test
- public void testAdvancedSearchSearchFieldsAreBlank()
- throws Exception
- {
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "" );
- searchFields.setArtifactId( "" );
- searchFields.setVersion( "" );
- searchFields.setPackaging( "" );
- searchFields.setClassName( "" );
-
- searchFields.setRepositories( selectedRepos );
-
- try
- {
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
- archivaConfigControl.replay();
-
- search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- fail( "A RepositorySearchException should have been thrown." );
- }
- catch ( RepositorySearchException e )
- {
- assertEquals( "No search fields set.", e.getMessage() );
- }
- }
-
- @Test
- public void testAdvancedSearchAllSearchCriteriaSpecified()
- throws Exception
- {
- createSimpleIndex( true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.apache.archiva" );
- searchFields.setArtifactId( "archiva-test" );
- searchFields.setVersion( "2.0" );
- searchFields.setPackaging( "jar" );
- searchFields.setClassName( "org.apache.archiva.test.App" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
-
- assertEquals( "total hints not 1" + results, 1, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "org.apache.archiva", hit.getGroupId() );
- assertEquals( "archiva-test", hit.getArtifactId() );
- assertEquals( "version not 2.0", "2.0", hit.getVersions().get( 0 ) );
- }
-
- @Test
- public void testAdvancedSearchJarArtifacts()
- throws Exception
- {
- createIndexContainingMoreArtifacts( true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setPackaging( "jar" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( "not 8 but " + results.getTotalHits() + ":" + niceDisplay( results ), 8, results.getTotalHits() );
- }
-
- @Test
- public void testAdvancedSearchWithIncorrectPackaging()
- throws Exception
- {
- createSimpleIndex( true );
-
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.apache.archiva" );
- searchFields.setArtifactId( "archiva-test" );
- searchFields.setVersion( "2.0" );
- searchFields.setPackaging( "war" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 0, results.getTotalHits() );
- }
-
- @Test
- public void testAdvancedSearchClassname()
- throws Exception
- {
- createIndexContainingMoreArtifacts( true );
-
- List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setClassName( "com.classname.search.App" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( "totalHits not 1 results " + results, 1, results.getTotalHits() );
-
- SearchResultHit hit = results.getHits().get( 0 );
- assertEquals( "groupId not com", "com", hit.getGroupId() );
- assertEquals( "arttifactId not classname-search", "classname-search", hit.getArtifactId() );
- assertEquals( " hits.version(0) not 1.0", "1.0", hit.getVersions().get( 0 ) );
- }
-
- @Test
- public void testAdvancedSearchNoIndexFound()
- throws Exception
- {
- List<String> selectedRepos = new ArrayList<>();
- selectedRepos.add( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.apache.archiva" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 0, results.getTotalHits() );
- }
-
- @Test
- public void testAdvancedSearchClassNameInWar()
- throws Exception
- {
- createIndexContainingMoreArtifacts( true );
-
- List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setClassName( "SomeClass" );
- searchFields.setRepositories( selectedRepos );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 1, results.getHits().size() );
- assertEquals( "test-webapp", results.getHits().get( 0 ).getArtifactId() );
- }
-
- @Test
- public void getAllGroupIds()
- throws Exception
- {
- createIndexContainingMoreArtifacts( true );
-
- List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
-
- EasyMock.expect( archivaConfig.getConfiguration() ).andReturn( config ).times( 0, 2 );
-
- archivaConfigControl.replay();
-
- Collection<String> groupIds = search.getAllGroupIds( "user", selectedRepos );
-
- archivaConfigControl.verify();
-
- log.info( "groupIds: {}", groupIds );
-
- assertEquals( 3, groupIds.size() );
- assertTrue( groupIds.contains( "com" ) );
- assertTrue( groupIds.contains( "org.apache.felix" ) );
- assertTrue( groupIds.contains( "org.apache.archiva" ) );
- }
-
- @Test
- public void testSearchWithUnknownRepo()
- throws Exception
- {
- createIndexContainingMoreArtifacts( true );
-
- List<String> selectedRepos = Arrays.asList( "foo" );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setClassName( "SomeClass" );
- searchFields.setRepositories( selectedRepos );
-
- archivaConfigControl.replay();
-
- SearchResults results = search.search( "user", searchFields, null );
-
- archivaConfigControl.verify();
-
- assertNotNull( results );
- assertEquals( 0, results.getHits().size() );
- }
-
- @Test
- public void nolimitedResult()
- throws Exception
- {
-
- Path repo = Paths.get( "target/repo-release" );
- FileUtils.deleteDirectory(repo.toFile());
- Path indexDirectory = repo.resolve(".index" );
- FileUtils.copyDirectoryStructure( Paths.get( "src/test/repo-release" ).toFile(), repo.toFile() );
-
- IndexUpgrader.main( new String[]{ indexDirectory.toAbsolutePath().toString() } );
-
- createIndex(REPO_RELEASE, Collections.emptyList(), false, indexDirectory );
-
-// indexer.addIndexingContext( REPO_RELEASE, REPO_RELEASE, repo.toFile(), indexDirectory.toFile(),
-// repo.toUri().toURL().toExternalForm(),
-// indexDirectory.toUri().toURL().toString(), indexCreators );
-
-
-
- SearchResultLimits limits = new SearchResultLimits( SearchResultLimits.ALL_PAGES );
- limits.setPageSize( 300 );
-
- EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
- EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
-
- archivaConfigControl.replay();
-
- SearchResults searchResults = search.search( null, Arrays.asList( REPO_RELEASE ), //
- "org.example", limits, //
- Collections.emptyList() );
-
- log.info( "results: {}", searchResults.getHits().size() );
-
- assertEquals( 255, searchResults.getHits().size() );
-
- SearchFields searchFields = new SearchFields();
- searchFields.setGroupId( "org.example" );
- searchFields.setRepositories( Arrays.asList( REPO_RELEASE ) );
-
- searchResults = search.search( null, searchFields, limits );
-
- log.info( "results: {}", searchResults.getHits().size() );
-
- assertEquals( 255, searchResults.getHits().size() );
-
- archivaConfigControl.verify();
- }
-}
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-search</artifactId>
- <packaging>jar</packaging>
- <version>1.0</version>
- <name>Archiva Search</name>
- <url>http://archiva.apache.org</url>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>commons-lang</groupId>
- <artifactId>commons-lang</artifactId>
- <version>2.3</version>
- </dependency>
- <dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- <version>1.2.8</version>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-search</artifactId>
- <packaging>jar</packaging>
- <version>1.1</version>
- <name>Archiva Search</name>
- <url>http://archiva.apache.org</url>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>commons-lang</groupId>
- <artifactId>commons-lang</artifactId>
- <version>2.3</version>
- </dependency>
- <dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- <version>1.2.8</version>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>com</groupId>
- <artifactId>artifactid-numeric</artifactId>
- <packaging>jar</packaging>
- <version>1.0</version>
- <name>ArtifactID numeric - NOT</name>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>com</groupId>
- <artifactId>artifactid-numeric123</artifactId>
- <packaging>jar</packaging>
- <version>1.0</version>
- <name>ArtifactID numeric</name>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>com</groupId>
- <artifactId>classname-search</artifactId>
- <packaging>jar</packaging>
- <version>1.0</version>
- <name>classname-search</name>
- <url>http://maven.apache.org</url>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-search</artifactId>
- <packaging>jar</packaging>
- <version>1.0</version>
- <name>Archiva Search</name>
- <url>http://archiva.apache.org</url>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>commons-lang</groupId>
- <artifactId>commons-lang</artifactId>
- <version>2.3</version>
- </dependency>
- <dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- <version>1.2.8</version>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-test</artifactId>
- <packaging>jar</packaging>
- <version>1.0</version>
- <name>archiva-test</name>
- <url>http://maven.apache.org</url>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>commons-lang</groupId>
- <artifactId>commons-lang</artifactId>
- <version>2.3</version>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-test</artifactId>
- <packaging>jar</packaging>
- <version>2.0</version>
- <name>Archiva Test</name>
- <url>http://archiva.apache.org</url>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>commons-lang</groupId>
- <artifactId>commons-lang</artifactId>
- <version>2.3</version>
- </dependency>
- <dependency>
- <groupId>commons-io</groupId>
- <artifactId>commons-io</artifactId>
- <version>1.4</version>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-webapp</artifactId>
- <packaging>war</packaging>
- <version>1.0</version>
- <name>Archiva Webapp</name>
- <url>http://archiva.apache.org</url>
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- <version>1.2.8</version>
- </dependency>
- </dependencies>
-</project>
+++ /dev/null
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied. See the License for the
- specific language governing permissions and limitations
- under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <parent>
- <groupId>org.apache.felix</groupId>
- <artifactId>felix-parent</artifactId>
- <version>2.1</version>
- <relativePath>../pom/pom.xml</relativePath>
- </parent>
- <modelVersion>4.0.0</modelVersion>
- <packaging>bundle</packaging>
- <name>Apache Felix Bundle Repository</name>
- <description>Bundle repository service.</description>
- <artifactId>org.apache.felix.bundlerepository</artifactId>
- <version>1.6.6</version>
- <scm>
- <connection>scm:svn:http://svn.apache.org/repos/asf/felix/releases/org.apache.felix.bundlerepository-1.6.6</connection>
- <developerConnection>scm:svn:https://svn.apache.org/repos/asf/felix/releases/org.apache.felix.bundlerepository-1.6.6</developerConnection>
- <url>http://svn.apache.org/repos/asf/felix/releases/org.apache.felix.bundlerepository-1.6.6</url>
- </scm>
- <dependencies>
- <dependency>
- <groupId>${project.groupId}</groupId>
- <artifactId>org.apache.felix.utils</artifactId>
- <version>1.1.0</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>${project.groupId}</groupId>
- <artifactId>org.osgi.service.obr</artifactId>
- <version>1.0.2</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>${project.groupId}</groupId>
- <artifactId>org.apache.felix.shell</artifactId>
- <version>1.4.1</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>net.sf.kxml</groupId>
- <artifactId>kxml2</artifactId>
- <version>2.3.0</version>
- <optional>true</optional>
- <exclusions>
- <exclusion>
- <groupId>xmlpull</groupId>
- <artifactId>xmlpull</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.osgi</groupId>
- <artifactId>org.osgi.compendium</artifactId>
- <version>4.0.0</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>org.osgi</groupId>
- <artifactId>org.osgi.core</artifactId>
- <version>4.1.0</version>
- </dependency>
- <dependency>
- <groupId>org.codehaus.woodstox</groupId>
- <artifactId>woodstox-core-asl</artifactId>
- <version>4.0.7</version>
- <optional>true</optional>
- </dependency>
- <dependency>
- <groupId>org.easymock</groupId>
- <artifactId>easymock</artifactId>
- <version>2.4</version>
- </dependency>
- </dependencies>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.felix</groupId>
- <artifactId>maven-bundle-plugin</artifactId>
- <version>2.3.4</version>
- <extensions>true</extensions>
- <configuration>
- <instructions>
- <Export-Package>org.apache.felix.bundlerepository;version="2.0"</Export-Package>
- <Private-Package>
- org.kxml2.io,
- org.xmlpull.v1,
- org.apache.felix.bundlerepository.impl.*,
- org.apache.felix.utils.*
- </Private-Package>
- <Import-Package>!javax.xml.parsers,!org.xml.sax,org.osgi.service.log;resolution:=optional,org.osgi.service.obr;resolution:=optional,javax.xml.stream;resolution:=optional,*</Import-Package>
- <DynamicImport-Package>org.apache.felix.shell</DynamicImport-Package>
- <Bundle-Activator>${project.artifactId}.impl.Activator</Bundle-Activator>
- <Bundle-DocURL>http://felix.apache.org/site/apache-felix-osgi-bundle-repository.html</Bundle-DocURL>
- <Bundle-Url>http://felix.apache.org/site/downloads.cgi</Bundle-Url>
- <Bundle-Source>http://felix.apache.org/site/downloads.cgi</Bundle-Source>
- <Bundle-SymbolicName>${project.artifactId}</Bundle-SymbolicName>
- <Bundle-Vendor>The Apache Software Foundation</Bundle-Vendor>
- <Export-Service>org.apache.felix.bundlerepository.RepositoryAdmin,org.osgi.service.obr.RepositoryAdmin</Export-Service>
- <_versionpolicy>[$(version;==;$(@)),$(version;+;$(@)))</_versionpolicy>
- <Include-Resource>META-INF/LICENSE=LICENSE,META-INF/LICENSE.kxml2=LICENSE.kxml2,META-INF/NOTICE=NOTICE,META-INF/DEPENDENCIES=DEPENDENCIES</Include-Resource>
- </instructions>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.rat</groupId>
- <artifactId>apache-rat-plugin</artifactId>
- <configuration>
- <excludeSubProjects>false</excludeSubProjects>
- <useEclipseDefaultExcludes>true</useEclipseDefaultExcludes>
- <useMavenDefaultExcludes>true</useMavenDefaultExcludes>
- <excludes>
- <param>doc/*</param>
- <param>maven-eclipse.xml</param>
- <param>.checkstyle</param>
- <param>.externalToolBuilders/*</param>
- </excludes>
- </configuration>
- </plugin>
- </plugins>
- </build>
-</project>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-
-
-<configuration status="debug">
- <appenders>
- <Console name="console" target="SYSTEM_OUT">
- <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
- </Console>
- </appenders>
- <loggers>
-
- <logger name="org.apache.archiva.repository" level="info"/>
- <logger name="org.apache.archiva.indexer" level="info" />
-
- <logger name="JPOX" level="error"/>
-
-
- <logger name="org.springframework" level="error"/>
-
-
- <root level="info">
- <appender-ref ref="console"/>
- </root>
- </loggers>
-</configuration>
-
-
+++ /dev/null
-<?xml version="1.0"?>
-
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-<beans xmlns="http://www.springframework.org/schema/beans"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xmlns:context="http://www.springframework.org/schema/context" xmlns:tx="http://www.springframework.org/schema/tx"
- xsi:schemaLocation="http://www.springframework.org/schema/beans
- http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
- http://www.springframework.org/schema/context
- http://www.springframework.org/schema/context/spring-context-3.0.xsd http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"
- default-lazy-init="false">
-
- <context:annotation-config/>
- <context:component-scan base-package="org.apache.archiva.indexer.maven,org.apache.archiva.repository,org.apache.archiva.repository.content.maven2" />
-
-
- <bean name="wagon#file" scope="prototype" class="org.apache.maven.wagon.providers.file.FileWagon"/>
-
- <bean name="scheduler" class="org.apache.archiva.redback.components.scheduler.DefaultScheduler">
- <property name="properties">
- <props>
- <prop key="org.quartz.scheduler.instanceName">scheduler1</prop>
- <prop key="org.quartz.threadPool.class">org.quartz.simpl.SimpleThreadPool</prop>
- <prop key="org.quartz.threadPool.threadCount">2</prop>
- <prop key="org.quartz.threadPool.threadPriority">4</prop>
- <prop key="org.quartz.jobStore.class">org.quartz.simpl.RAMJobStore</prop>
- </props>
- </property>
- </bean>
- <alias name="userConfiguration#redback" alias="userConfiguration#default"/>
-
- <!-- ***
- JPA settings
- *** -->
- <bean name="entityManagerFactory" class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean">
- <property name="jpaVendorAdapter" >
- <bean class="org.springframework.orm.jpa.vendor.OpenJpaVendorAdapter" />
- </property>
- <property name="persistenceXmlLocation" value="classpath:META-INF/persistence-hsqldb.xml" />
- <property name="jpaPropertyMap">
- <map>
- <entry key="openjpa.ConnectionURL" value="jdbc:hsqldb:mem:redback_database" />
- <entry key="openjpa.ConnectionDriverName" value="org.hsqldb.jdbcDriver" />
- <entry key="openjpa.ConnectionUserName" value="sa" />
- <entry key="openjpa.ConnectionPassword" value="" />
- <entry key="openjpa.Log" value="${openjpa.Log:DefaultLevel=INFO,Runtime=ERROR,Tool=ERROR,SQL=ERROR,Schema=ERROR,MetaData=ERROR}" />
- <entry key="openjpa.jdbc.SynchronizeMappings" value="buildSchema(ForeignKeys=true)" />
- <entry key="openjpa.jdbc.MappingDefaults"
- value="ForeignKeyDeleteAction=restrict,JoinForeignKeyDeleteAction=restrict"/>
- </map>
- </property>
-
- </bean>
-
- <bean name="transactionManager" class="org.springframework.orm.jpa.JpaTransactionManager" >
- <property name="entityManagerFactory" ref="entityManagerFactory" />
- </bean>
-
- <tx:annotation-driven />
- <!-- ***
- End of JPA settings
- *** -->
-
-</beans>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-base</artifactId>
- <version>3.0.0-SNAPSHOT</version>
- </parent>
-
- <artifactId>archiva-maven2-metadata</artifactId>
-
- <name>Archiva Base :: Maven 2 Metadata</name>
-
-
- <properties>
- <site.staging.base>${project.parent.parent.basedir}</site.staging.base>
- </properties>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-model</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-xml-tools</artifactId>
- </dependency>
- <dependency>
- <groupId>commons-io</groupId>
- <artifactId>commons-io</artifactId>
- </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
-</project>
+++ /dev/null
-package org.apache.archiva.maven2.metadata;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.archiva.model.ArchivaRepositoryMetadata;
-import org.apache.archiva.model.Plugin;
-import org.apache.archiva.model.SnapshotVersion;
-import org.apache.archiva.xml.XMLException;
-import org.apache.archiva.xml.XMLReader;
-import org.apache.commons.lang.math.NumberUtils;
-import org.dom4j.Element;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Date;
-
-/**
- * @author Olivier Lamy
- * @since 1.4-M3
- */
-public class MavenMetadataReader
-{
- /*
- <?xml version="1.0" encoding="UTF-8"?>
- <metadata modelVersion="1.1.0">
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva</artifactId>
- <version>1.4-M3-SNAPSHOT</version>
- <versioning>
- <snapshot>
- <timestamp>20120310.230917</timestamp>
- <buildNumber>2</buildNumber>
- </snapshot>
- <lastUpdated>20120310230917</lastUpdated>
- <snapshotVersions>
- <snapshotVersion>
- <extension>pom</extension>
- <value>1.4-M3-20120310.230917-2</value>
- <updated>20120310230917</updated>
- </snapshotVersion>
- </snapshotVersions>
- </versioning>
- </metadata>
- */
-
- private static final Logger log = LoggerFactory.getLogger( MavenMetadataReader.class );
-
- /**
- * Read and return the {@link org.apache.archiva.model.ArchivaRepositoryMetadata} object from the provided xml file.
- *
- * @param metadataFile the maven-metadata.xml file to read.
- * @return the archiva repository metadata object that represents the provided file contents.
- * @throws XMLException
- */
- public static ArchivaRepositoryMetadata read( Path metadataFile )
- throws XMLException
- {
-
- XMLReader xml = new XMLReader( "metadata", metadataFile );
- // invoke this to remove namespaces, see MRM-1136
- xml.removeNamespaces();
-
- ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata();
-
- metadata.setGroupId( xml.getElementText( "//metadata/groupId" ) );
- metadata.setArtifactId( xml.getElementText( "//metadata/artifactId" ) );
- metadata.setVersion( xml.getElementText( "//metadata/version" ) );
- Date modTime;
- try
- {
- modTime = new Date(Files.getLastModifiedTime( metadataFile ).toMillis( ));
- }
- catch ( IOException e )
- {
- modTime = new Date();
- log.error("Could not read modification time of {}", metadataFile);
- }
- metadata.setFileLastModified( modTime );
- try
- {
- metadata.setFileSize( Files.size( metadataFile ) );
- }
- catch ( IOException e )
- {
- metadata.setFileSize( 0 );
- log.error("Could not read file size of {}", metadataFile);
- }
-
- metadata.setLastUpdated( xml.getElementText( "//metadata/versioning/lastUpdated" ) );
- metadata.setLatestVersion( xml.getElementText( "//metadata/versioning/latest" ) );
- metadata.setReleasedVersion( xml.getElementText( "//metadata/versioning/release" ) );
- metadata.setAvailableVersions( xml.getElementListText( "//metadata/versioning/versions/version" ) );
-
- Element snapshotElem = xml.getElement( "//metadata/versioning/snapshot" );
- if ( snapshotElem != null )
- {
- SnapshotVersion snapshot = new SnapshotVersion();
- snapshot.setTimestamp( snapshotElem.elementTextTrim( "timestamp" ) );
- String tmp = snapshotElem.elementTextTrim( "buildNumber" );
- if ( NumberUtils.isNumber( tmp ) )
- {
- snapshot.setBuildNumber( NumberUtils.toInt( tmp ) );
- }
- metadata.setSnapshotVersion( snapshot );
- }
-
- for ( Element plugin : xml.getElementList( "//metadata/plugins/plugin" ) )
- {
- Plugin p = new Plugin();
- p.setPrefix( plugin.elementTextTrim( "prefix" ) );
- p.setArtifactId( plugin.elementTextTrim( "artifactId" ) );
- p.setName( plugin.elementTextTrim( "name" ) );
- metadata.addPlugin( p );
- }
-
- return metadata;
-
- }
-}
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing,
- ~ software distributed under the License is distributed on an
- ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ~ KIND, either express or implied. See the License for the
- ~ specific language governing permissions and limitations
- ~ under the License.
- -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-base</artifactId>
- <version>3.0.0-SNAPSHOT</version>
- </parent>
-
- <artifactId>archiva-maven2-model</artifactId>
-
- <name>Archiva Base :: Maven 2 Model</name>
-
-
- <properties>
- <site.staging.base>${project.parent.parent.basedir}</site.staging.base>
- </properties>
-
- <dependencies>
- <dependency>
- <groupId>com.fasterxml.jackson.core</groupId>
- <artifactId>jackson-annotations</artifactId>
- </dependency>
- </dependencies>
-
-</project>
+++ /dev/null
-package org.apache.archiva.maven2.model;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import javax.xml.bind.annotation.XmlRootElement;
-import java.io.Serializable;
-import java.util.List;
-
-@XmlRootElement( name = "artifact" )
-public class Artifact
- implements Serializable
-{
- // The (optional) context for this result.
- private String context;
-
- // Basic hit, direct to non-artifact resource.
- private String url;
-
- // Advanced hit, reference to groupId.
- private String groupId;
-
- // Advanced hit, reference to artifactId.
- private String artifactId;
-
- private String repositoryId;
-
- private String version;
-
- /**
- * Plugin goal prefix (only if packaging is "maven-plugin")
- */
- private String prefix;
-
- /**
- * Plugin goals (only if packaging is "maven-plugin")
- */
- private List<String> goals;
-
- /**
- * contains osgi metadata Bundle-Version if available
- *
- * @since 1.4-M1
- */
- private String bundleVersion;
-
- /**
- * contains osgi metadata Bundle-SymbolicName if available
- *
- * @since 1.4-M1
- */
- private String bundleSymbolicName;
-
- /**
- * contains osgi metadata Export-Package if available
- *
- * @since 1.4-M1
- */
- private String bundleExportPackage;
-
- /**
- * contains osgi metadata Export-Service if available
- *
- * @since 1.4-M1
- */
- private String bundleExportService;
-
- /**
- * contains osgi metadata Bundle-Description if available
- *
- * @since 1.4-M1
- */
- private String bundleDescription;
-
- /**
- * contains osgi metadata Bundle-Name if available
- *
- * @since 1.4-M1
- */
- private String bundleName;
-
- /**
- * contains osgi metadata Bundle-License if available
- *
- * @since 1.4-M1
- */
- private String bundleLicense;
-
- /**
- * contains osgi metadata Bundle-DocURL if available
- *
- * @since 1.4-M1
- */
- private String bundleDocUrl;
-
- /**
- * contains osgi metadata Import-Package if available
- *
- * @since 1.4-M1
- */
- private String bundleImportPackage;
-
- /**
- * contains osgi metadata Require-Bundle if available
- *
- * @since 1.4-M1
- */
- private String bundleRequireBundle;
-
- private String classifier;
-
- private String packaging;
-
- /**
- * file extension of the artifact
- *
- * @since 1.4-M2
- */
- private String fileExtension;
-
- /**
- * human readable size : not available for all services
- *
- * @since 1.4-M3
- */
- private String size;
-
- /**
- * @since 1.4-M3
- */
- private String type;
-
-
- /**
- * @since 1.4-M3
- */
- private String path;
-
- /**
- * concat of artifactId+'-'+version+'.'+type
- *
- * @since 1.4-M3
- */
- private String id;
-
- /**
- * @since 1.4-M3
- */
- private String scope;
-
-
- public Artifact()
- {
- // no op
- }
-
- public Artifact( String groupId, String artifactId, String version )
- {
- this.artifactId = artifactId;
- this.groupId = groupId;
- this.version = version;
- }
-
- /**
- * @since 1.4-M3
- */
- public Artifact( String groupId, String artifactId, String version, String scope )
- {
- this( groupId, artifactId, version );
- this.scope = scope;
- }
-
- /**
- * @since 1.4-M3
- */
- public Artifact( String groupId, String artifactId, String version, String scope, String classifier )
- {
- this( groupId, artifactId, version );
- this.scope = scope;
- this.classifier = classifier;
- }
-
- public String getGroupId()
- {
- return groupId;
- }
-
- public String getArtifactId()
- {
- return artifactId;
- }
-
- public String getVersion()
- {
- return version;
- }
-
- public String getRepositoryId()
- {
- return repositoryId;
- }
-
- public void setGroupId( String groupId )
- {
- this.groupId = groupId;
- }
-
- public void setArtifactId( String artifactId )
- {
- this.artifactId = artifactId;
- }
-
- public void setVersion( String version )
- {
- this.version = version;
- }
-
- public void setRepositoryId( String repositoryId )
- {
- this.repositoryId = repositoryId;
- }
-
- public String getContext()
- {
- return context;
- }
-
- public void setContext( String context )
- {
- this.context = context;
- }
-
- public String getUrl()
- {
- return url;
- }
-
- public void setUrl( String url )
- {
- this.url = url;
- }
-
- public String getPrefix()
- {
- return prefix;
- }
-
- public void setPrefix( String prefix )
- {
- this.prefix = prefix;
- }
-
- public List<String> getGoals()
- {
- return goals;
- }
-
- public void setGoals( List<String> goals )
- {
- this.goals = goals;
- }
-
- public String getBundleVersion()
- {
- return bundleVersion;
- }
-
- public void setBundleVersion( String bundleVersion )
- {
- this.bundleVersion = bundleVersion;
- }
-
- public String getBundleSymbolicName()
- {
- return bundleSymbolicName;
- }
-
- public void setBundleSymbolicName( String bundleSymbolicName )
- {
- this.bundleSymbolicName = bundleSymbolicName;
- }
-
- public String getBundleExportPackage()
- {
- return bundleExportPackage;
- }
-
- public void setBundleExportPackage( String bundleExportPackage )
- {
- this.bundleExportPackage = bundleExportPackage;
- }
-
- public String getBundleExportService()
- {
- return bundleExportService;
- }
-
- public void setBundleExportService( String bundleExportService )
- {
- this.bundleExportService = bundleExportService;
- }
-
- public String getBundleDescription()
- {
- return bundleDescription;
- }
-
- public void setBundleDescription( String bundleDescription )
- {
- this.bundleDescription = bundleDescription;
- }
-
- public String getBundleName()
- {
- return bundleName;
- }
-
- public void setBundleName( String bundleName )
- {
- this.bundleName = bundleName;
- }
-
- public String getBundleLicense()
- {
- return bundleLicense;
- }
-
- public void setBundleLicense( String bundleLicense )
- {
- this.bundleLicense = bundleLicense;
- }
-
- public String getBundleDocUrl()
- {
- return bundleDocUrl;
- }
-
- public void setBundleDocUrl( String bundleDocUrl )
- {
- this.bundleDocUrl = bundleDocUrl;
- }
-
- public String getBundleImportPackage()
- {
- return bundleImportPackage;
- }
-
- public void setBundleImportPackage( String bundleImportPackage )
- {
- this.bundleImportPackage = bundleImportPackage;
- }
-
- public String getBundleRequireBundle()
- {
- return bundleRequireBundle;
- }
-
- public void setBundleRequireBundle( String bundleRequireBundle )
- {
- this.bundleRequireBundle = bundleRequireBundle;
- }
-
- public String getClassifier()
- {
- return classifier;
- }
-
- public void setClassifier( String classifier )
- {
- this.classifier = classifier;
- }
-
-
- public String getPackaging()
- {
- return packaging;
- }
-
- public void setPackaging( String packaging )
- {
- this.packaging = packaging;
- }
-
- public String getFileExtension()
- {
- return fileExtension;
- }
-
- public void setFileExtension( String fileExtension )
- {
- this.fileExtension = fileExtension;
- }
-
- public String getSize()
- {
- return size;
- }
-
- public void setSize( String size )
- {
- this.size = size;
- }
-
- public String getType()
- {
- return type;
- }
-
- public void setType( String type )
- {
- this.type = type;
- }
-
- public String getPath()
- {
- return path;
- }
-
- public void setPath( String path )
- {
- this.path = path;
- }
-
- public String getId()
- {
- return id;
- }
-
- public void setId( String id )
- {
- this.id = id;
- }
-
- public String getScope()
- {
- return scope;
- }
-
- public void setScope( String scope )
- {
- this.scope = scope;
- }
-
- @Override
- public String toString()
- {
- final StringBuilder sb = new StringBuilder();
- sb.append( "Artifact" );
- sb.append( "{context='" ).append( context ).append( '\'' );
- sb.append( ", url='" ).append( url ).append( '\'' );
- sb.append( ", groupId='" ).append( groupId ).append( '\'' );
- sb.append( ", artifactId='" ).append( artifactId ).append( '\'' );
- sb.append( ", repositoryId='" ).append( repositoryId ).append( '\'' );
- sb.append( ", version='" ).append( version ).append( '\'' );
- sb.append( ", prefix='" ).append( prefix ).append( '\'' );
- sb.append( ", goals=" ).append( goals );
- sb.append( ", bundleVersion='" ).append( bundleVersion ).append( '\'' );
- sb.append( ", bundleSymbolicName='" ).append( bundleSymbolicName ).append( '\'' );
- sb.append( ", bundleExportPackage='" ).append( bundleExportPackage ).append( '\'' );
- sb.append( ", bundleExportService='" ).append( bundleExportService ).append( '\'' );
- sb.append( ", bundleDescription='" ).append( bundleDescription ).append( '\'' );
- sb.append( ", bundleName='" ).append( bundleName ).append( '\'' );
- sb.append( ", bundleLicense='" ).append( bundleLicense ).append( '\'' );
- sb.append( ", bundleDocUrl='" ).append( bundleDocUrl ).append( '\'' );
- sb.append( ", bundleImportPackage='" ).append( bundleImportPackage ).append( '\'' );
- sb.append( ", bundleRequireBundle='" ).append( bundleRequireBundle ).append( '\'' );
- sb.append( ", classifier='" ).append( classifier ).append( '\'' );
- sb.append( ", packaging='" ).append( packaging ).append( '\'' );
- sb.append( ", fileExtension='" ).append( fileExtension ).append( '\'' );
- sb.append( ", size='" ).append( size ).append( '\'' );
- sb.append( ", type='" ).append( type ).append( '\'' );
- sb.append( ", path='" ).append( path ).append( '\'' );
- sb.append( ", id='" ).append( id ).append( '\'' );
- sb.append( '}' );
- return sb.toString();
- }
-
- @Override
- public boolean equals( Object o )
- {
- if ( this == o )
- {
- return true;
- }
- if ( !( o instanceof Artifact ) )
- {
- return false;
- }
-
- Artifact artifact = (Artifact) o;
-
- if ( !artifactId.equals( artifact.artifactId ) )
- {
- return false;
- }
- if ( bundleDescription != null
- ? !bundleDescription.equals( artifact.bundleDescription )
- : artifact.bundleDescription != null )
- {
- return false;
- }
- if ( bundleDocUrl != null ? !bundleDocUrl.equals( artifact.bundleDocUrl ) : artifact.bundleDocUrl != null )
- {
- return false;
- }
- if ( bundleExportPackage != null
- ? !bundleExportPackage.equals( artifact.bundleExportPackage )
- : artifact.bundleExportPackage != null )
- {
- return false;
- }
- if ( bundleExportService != null
- ? !bundleExportService.equals( artifact.bundleExportService )
- : artifact.bundleExportService != null )
- {
- return false;
- }
- if ( bundleImportPackage != null
- ? !bundleImportPackage.equals( artifact.bundleImportPackage )
- : artifact.bundleImportPackage != null )
- {
- return false;
- }
- if ( bundleLicense != null ? !bundleLicense.equals( artifact.bundleLicense ) : artifact.bundleLicense != null )
- {
- return false;
- }
- if ( bundleName != null ? !bundleName.equals( artifact.bundleName ) : artifact.bundleName != null )
- {
- return false;
- }
- if ( bundleRequireBundle != null
- ? !bundleRequireBundle.equals( artifact.bundleRequireBundle )
- : artifact.bundleRequireBundle != null )
- {
- return false;
- }
- if ( bundleSymbolicName != null
- ? !bundleSymbolicName.equals( artifact.bundleSymbolicName )
- : artifact.bundleSymbolicName != null )
- {
- return false;
- }
- if ( bundleVersion != null ? !bundleVersion.equals( artifact.bundleVersion ) : artifact.bundleVersion != null )
- {
- return false;
- }
- if ( classifier != null ? !classifier.equals( artifact.classifier ) : artifact.classifier != null )
- {
- return false;
- }
- if ( context != null ? !context.equals( artifact.context ) : artifact.context != null )
- {
- return false;
- }
- if ( fileExtension != null ? !fileExtension.equals( artifact.fileExtension ) : artifact.fileExtension != null )
- {
- return false;
- }
- if ( goals != null ? !goals.equals( artifact.goals ) : artifact.goals != null )
- {
- return false;
- }
- if ( !groupId.equals( artifact.groupId ) )
- {
- return false;
- }
- if ( id != null ? !id.equals( artifact.id ) : artifact.id != null )
- {
- return false;
- }
- if ( packaging != null ? !packaging.equals( artifact.packaging ) : artifact.packaging != null )
- {
- return false;
- }
- if ( path != null ? !path.equals( artifact.path ) : artifact.path != null )
- {
- return false;
- }
- if ( prefix != null ? !prefix.equals( artifact.prefix ) : artifact.prefix != null )
- {
- return false;
- }
- if ( repositoryId != null ? !repositoryId.equals( artifact.repositoryId ) : artifact.repositoryId != null )
- {
- return false;
- }
- if ( scope != null ? !scope.equals( artifact.scope ) : artifact.scope != null )
- {
- return false;
- }
- if ( size != null ? !size.equals( artifact.size ) : artifact.size != null )
- {
- return false;
- }
- if ( type != null ? !type.equals( artifact.type ) : artifact.type != null )
- {
- return false;
- }
- if ( url != null ? !url.equals( artifact.url ) : artifact.url != null )
- {
- return false;
- }
- if ( !version.equals( artifact.version ) )
- {
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode()
- {
- int result = context != null ? context.hashCode() : 0;
- result = 31 * result + ( url != null ? url.hashCode() : 0 );
- result = 31 * result + groupId.hashCode();
- result = 31 * result + artifactId.hashCode();
- result = 31 * result + ( repositoryId != null ? repositoryId.hashCode() : 0 );
- result = 31 * result + version.hashCode();
- result = 31 * result + ( prefix != null ? prefix.hashCode() : 0 );
- result = 31 * result + ( goals != null ? goals.hashCode() : 0 );
- result = 31 * result + ( bundleVersion != null ? bundleVersion.hashCode() : 0 );
- result = 31 * result + ( bundleSymbolicName != null ? bundleSymbolicName.hashCode() : 0 );
- result = 31 * result + ( bundleExportPackage != null ? bundleExportPackage.hashCode() : 0 );
- result = 31 * result + ( bundleExportService != null ? bundleExportService.hashCode() : 0 );
- result = 31 * result + ( bundleDescription != null ? bundleDescription.hashCode() : 0 );
- result = 31 * result + ( bundleName != null ? bundleName.hashCode() : 0 );
- result = 31 * result + ( bundleLicense != null ? bundleLicense.hashCode() : 0 );
- result = 31 * result + ( bundleDocUrl != null ? bundleDocUrl.hashCode() : 0 );
- result = 31 * result + ( bundleImportPackage != null ? bundleImportPackage.hashCode() : 0 );
- result = 31 * result + ( bundleRequireBundle != null ? bundleRequireBundle.hashCode() : 0 );
- result = 31 * result + ( classifier != null ? classifier.hashCode() : 0 );
- result = 31 * result + ( packaging != null ? packaging.hashCode() : 0 );
- result = 31 * result + ( fileExtension != null ? fileExtension.hashCode() : 0 );
- result = 31 * result + ( size != null ? size.hashCode() : 0 );
- result = 31 * result + ( type != null ? type.hashCode() : 0 );
- result = 31 * result + ( path != null ? path.hashCode() : 0 );
- result = 31 * result + ( id != null ? id.hashCode() : 0 );
- result = 31 * result + ( scope != null ? scope.hashCode() : 0 );
- return result;
- }
-}
+++ /dev/null
-package org.apache.archiva.maven2.model;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-
-import javax.xml.bind.annotation.XmlRootElement;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * @author Olivier Lamy
- */
-@XmlRootElement( name = "treeEntry" )
-public class TreeEntry
- implements Serializable
-{
-
- private List<TreeEntry> childs = new ArrayList<>();
-
- private Artifact artifact;
-
- @JsonIgnore
- private TreeEntry parent;
-
- public TreeEntry()
- {
- // no op
- }
-
- public TreeEntry( Artifact artifact )
- {
- this.artifact = artifact;
- }
-
-
- public Artifact getArtifact()
- {
- return artifact;
- }
-
- public void setArtifact( Artifact artifact )
- {
- this.artifact = artifact;
- }
-
- public List<TreeEntry> getChilds()
- {
- return childs;
- }
-
- public void setChilds( List<TreeEntry> childs )
- {
- this.childs = childs;
- }
-
- @JsonIgnore
- public TreeEntry getParent()
- {
- return parent;
- }
-
- @JsonIgnore
- public void setParent( TreeEntry parent )
- {
- this.parent = parent;
- }
-
- @Override
- public boolean equals( Object o )
- {
- if ( this == o )
- {
- return true;
- }
- if ( !( o instanceof TreeEntry ) )
- {
- return false;
- }
-
- TreeEntry treeEntry = (TreeEntry) o;
-
- if ( artifact != null ? !artifact.equals( treeEntry.artifact ) : treeEntry.artifact != null )
- {
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode()
- {
- return artifact != null ? artifact.hashCode() : 0;
- }
-}
<dependencies>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-common</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-xml-tools</artifactId>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-metadata</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-metadata</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<module>archiva-plexus-bridge</module>
<module>archiva-policies</module>
<module>archiva-repository-api</module>
- <module>archiva-maven2-common</module>
- <module>archiva-maven2-indexer</module>
<module>archiva-consumers</module>
<module>archiva-repository-layer</module>
<module>archiva-xml-tools</module>
<module>archiva-repository-scanner</module>
<module>archiva-repository-admin</module>
<module>archiva-security-common</module>
- <module>archiva-maven2-metadata</module>
- <module>archiva-maven2-model</module>
</modules>
</project>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven</artifactId>
+ <version>3.0.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+
+ <artifactId>archiva-maven-common</artifactId>
+ <name>Archiva :: Maven :: Common</name>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-proxy-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven.wagon</groupId>
+ <artifactId>wagon-provider-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven.wagon</groupId>
+ <artifactId>wagon-file</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ </dependency>
+ </dependencies>
+
+
+</project>
\ No newline at end of file
--- /dev/null
+package org.apache.archiva.proxy.maven;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.wagon.events.TransferEvent;
+import org.apache.maven.wagon.events.TransferListener;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M1
+ */
+public class DebugTransferListener
+ implements TransferListener
+{
+ private Logger log = LoggerFactory.getLogger( getClass() );
+
+ @Override
+ public void transferInitiated( TransferEvent transferEvent )
+ {
+ log.debug( "transferInitiated for resource {} on repository url {}", transferEvent.getResource().getName(),
+ transferEvent.getWagon().getRepository().getUrl() );
+ }
+
+ @Override
+ public void transferStarted( TransferEvent transferEvent )
+ {
+ log.debug( "transferStarted for resource {} on repository url {}", transferEvent.getResource().getName(),
+ transferEvent.getWagon().getRepository().getUrl() );
+ }
+
+ @Override
+ public void transferProgress( TransferEvent transferEvent, byte[] bytes, int i )
+ {
+ log.debug( "transferProgress for resource {} on repository url {}", transferEvent.getResource().getName(),
+ transferEvent.getWagon().getRepository().getUrl() );
+ }
+
+ @Override
+ public void transferCompleted( TransferEvent transferEvent )
+ {
+ log.debug( "transferCompleted for resource {} on repository url {}", transferEvent.getResource().getName(),
+ transferEvent.getWagon().getRepository().getUrl() );
+ }
+
+ @Override
+ public void transferError( TransferEvent transferEvent )
+ {
+ log.debug( "transferError for resource {} on repository url {}", transferEvent.getResource().getName(),
+ transferEvent.getWagon().getRepository().getUrl(), transferEvent.getException() );
+ }
+
+ @Override
+ public void debug( String s )
+ {
+ log.debug( "wagon debug {}", s );
+ }
+}
--- /dev/null
+package org.apache.archiva.proxy.maven;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.wagon.Wagon;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.BeansException;
+import org.springframework.context.ApplicationContext;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import java.lang.reflect.Method;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M1
+ */
+@Service ("wagonFactory")
+public class DefaultWagonFactory
+ implements WagonFactory
+{
+
+ private ApplicationContext applicationContext;
+
+ private Logger logger = LoggerFactory.getLogger( getClass() );
+
+ private DebugTransferListener debugTransferListener = new DebugTransferListener();
+
+ @Inject
+ public DefaultWagonFactory( ApplicationContext applicationContext )
+ {
+ this.applicationContext = applicationContext;
+ }
+
+ @Override
+ public Wagon getWagon( WagonFactoryRequest wagonFactoryRequest )
+ throws WagonFactoryException
+ {
+ try
+ {
+ String protocol = StringUtils.startsWith( wagonFactoryRequest.getProtocol(), "wagon#" )
+ ? wagonFactoryRequest.getProtocol()
+ : "wagon#" + wagonFactoryRequest.getProtocol();
+
+ // if it's an NTLM proxy we have to look up the lightweight wagon, which supports that;
+ // the wagon http client doesn't support NTLM
+ if ( wagonFactoryRequest.getNetworkProxy() != null && wagonFactoryRequest.getNetworkProxy().isUseNtlm() )
+ {
+ protocol = protocol + "-ntlm";
+ }
+
+ Wagon wagon = applicationContext.getBean( protocol, Wagon.class );
+ wagon.addTransferListener( debugTransferListener );
+ configureUserAgent( wagon, wagonFactoryRequest );
+ return wagon;
+ }
+ catch ( BeansException e )
+ {
+ throw new WagonFactoryException( e.getMessage(), e );
+ }
+ }
+
+ protected void configureUserAgent( Wagon wagon, WagonFactoryRequest wagonFactoryRequest )
+ {
+ try
+ {
+ Class<? extends Wagon> clazz = wagon.getClass();
+ Method getHttpHeaders = clazz.getMethod( "getHttpHeaders" );
+
+ Properties headers = (Properties) getHttpHeaders.invoke( wagon );
+ if ( headers == null )
+ {
+ headers = new Properties();
+ }
+
+ headers.put( "User-Agent", wagonFactoryRequest.getUserAgent() );
+
+ if ( !wagonFactoryRequest.getHeaders().isEmpty() )
+ {
+ for ( Map.Entry<String, String> entry : wagonFactoryRequest.getHeaders().entrySet() )
+ {
+ headers.put( entry.getKey(), entry.getValue() );
+ }
+ }
+
+ Method setHttpHeaders = clazz.getMethod( "setHttpHeaders", new Class[]{ Properties.class } );
+ setHttpHeaders.invoke( wagon, headers );
+
+ logger.debug( "http headers set to: {}", headers );
+ }
+ catch ( Exception e )
+ {
+ logger.warn( "fail to configure User-Agent: {}", e.getMessage(), e );
+ }
+ }
+}
--- /dev/null
+package org.apache.archiva.proxy.maven;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.wagon.Wagon;
+
+/**
+ * Create a Wagon instance for the given protocol.
+ */
+public interface WagonFactory
+{
+ /**
+ * Create a new Wagon instance for the given protocol.
+ *
+ * @param wagonFactoryRequest
+ *
+ * @return the Wagon instance
+ */
+ Wagon getWagon( WagonFactoryRequest wagonFactoryRequest )
+ throws WagonFactoryException;
+}
--- /dev/null
+package org.apache.archiva.proxy.maven;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M1
+ */
+public class WagonFactoryException
+ extends Exception
+{
+ public WagonFactoryException( String message, Throwable e )
+ {
+ super( message, e );
+ }
+}
--- /dev/null
+package org.apache.archiva.proxy.maven;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.proxy.model.NetworkProxy;
+import org.apache.commons.lang.StringUtils;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M4
+ */
+public class WagonFactoryRequest
+{
+
+ public static final String USER_AGENT_SYSTEM_PROPERTY = "archiva.userAgent";
+
+ private static String DEFAULT_USER_AGENT = "Java-Archiva";
+
+ /**
+ * the protocol to find the Wagon for, which must be prefixed with <code>wagon#</code>, for example
+ * <code>wagon#http</code>. <b>To get a wagon that supports NTLM, append -ntlm.</b>
+ */
+ private String protocol;
+
+ private Map<String, String> headers = new HashMap<>();
+
+ private String userAgent = DEFAULT_USER_AGENT;
+
+ static {
+ if ( StringUtils.isNotBlank( System.getProperty( USER_AGENT_SYSTEM_PROPERTY))) {
+ DEFAULT_USER_AGENT=System.getProperty(USER_AGENT_SYSTEM_PROPERTY);
+ }
+ }
+
+ private NetworkProxy networkProxy;
+
+ public WagonFactoryRequest()
+ {
+ // no op
+ }
+
+ public WagonFactoryRequest( String protocol, Map<String, String> headers )
+ {
+ this.protocol = protocol;
+ this.headers = headers;
+ }
+
+ public String getProtocol()
+ {
+ return protocol;
+ }
+
+ public void setProtocol( String protocol )
+ {
+ this.protocol = protocol;
+ }
+
+ public WagonFactoryRequest protocol( String protocol )
+ {
+ this.protocol = protocol;
+ return this;
+ }
+
+ public Map<String, String> getHeaders()
+ {
+ if ( this.headers == null )
+ {
+ this.headers = new HashMap<>();
+ }
+ return headers;
+ }
+
+ public void setHeaders( Map<String, String> headers )
+ {
+ this.headers = headers;
+ }
+
+ public WagonFactoryRequest headers( Map<String, String> headers )
+ {
+ this.headers = headers;
+ return this;
+ }
+
+ public String getUserAgent()
+ {
+ return userAgent;
+ }
+
+ public void setUserAgent( String userAgent )
+ {
+ this.userAgent = userAgent;
+ }
+
+ public WagonFactoryRequest userAgent( String userAgent )
+ {
+ this.userAgent = userAgent;
+ return this;
+ }
+
+ public NetworkProxy getNetworkProxy()
+ {
+ return networkProxy;
+ }
+
+ public void setNetworkProxy( NetworkProxy networkProxy )
+ {
+ this.networkProxy = networkProxy;
+ }
+
+ public WagonFactoryRequest networkProxy( NetworkProxy networkProxy )
+ {
+ this.networkProxy = networkProxy;
+ return this;
+ }
+
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( this == o )
+ {
+ return true;
+ }
+ if ( !( o instanceof WagonFactoryRequest ) )
+ {
+ return false;
+ }
+
+ WagonFactoryRequest that = (WagonFactoryRequest) o;
+
+ if ( protocol != null ? !protocol.equals( that.protocol ) : that.protocol != null )
+ {
+ return false;
+ }
+ if ( userAgent != null ? !userAgent.equals( that.userAgent ) : that.userAgent != null )
+ {
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode()
+ {
+ int result = protocol != null ? protocol.hashCode() : 0;
+ result = 31 * result + ( userAgent != null ? userAgent.hashCode() : 0 );
+ return result;
+ }
+
+ @Override
+ public String toString()
+ {
+ return "WagonFactoryRequest{" +
+ "protocol='" + protocol + '\'' +
+ ", headers=" + headers +
+ ", userAgent='" + userAgent + '\'' +
+ ", networkProxy=" + networkProxy +
+ '}';
+ }
+}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven</artifactId>
+ <version>3.0.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>archiva-maven-indexer</artifactId>
+ <name>Archiva :: Maven :: Indexer</name>
+
+ <properties>
+ <site.staging.base>${project.parent.parent.basedir}</site.staging.base>
+ </properties>
+
+ <dependencies>
+
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-repository-admin-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-common</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-repository-layer</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-proxy</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-common</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-context</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework</groupId>
+ <artifactId>spring-context-support</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-utils</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ </dependency>
+ <!--
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-digest</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-component-api</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ -->
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven.indexer</groupId>
+ <artifactId>indexer-reader</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven.indexer</groupId>
+ <artifactId>indexer-core</artifactId>
+ <classifier>shaded-lucene</classifier>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-queryparser</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-analyzers-common</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven.wagon</groupId>
+ <artifactId>wagon-http</artifactId>
+ <scope>provided</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.eclipse.sisu</groupId>
+ <artifactId>org.eclipse.sisu.plexus</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.google.inject</groupId>
+ <artifactId>guice</artifactId>
+ <classifier>no_aop</classifier>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-plexus-bridge</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-scheduler-repository</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>javax.inject</groupId>
+ <artifactId>javax.inject</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-mock</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-repository-admin-default</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-test-utils</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-simple</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.derby</groupId>
+ <artifactId>derby</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva.redback</groupId>
+ <artifactId>redback-keys-memory</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva.redback</groupId>
+ <artifactId>redback-rbac-cached</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva.redback</groupId>
+ <artifactId>redback-rbac-memory</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva.redback</groupId>
+ <artifactId>redback-users-memory</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva.redback</groupId>
+ <artifactId>redback-common-test-resources</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven.wagon</groupId>
+ <artifactId>wagon-file</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.maven.wagon</groupId>
+ <artifactId>wagon-http-lightweight</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>xerces</groupId>
+ <artifactId>xercesImpl</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-configuration</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.maven</groupId>
+ <artifactId>maven-core</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>org.sonatype.sisu</groupId>
+ <artifactId>sisu-guava</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.sonatype.sisu</groupId>
+ <artifactId>sisu-inject</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.sonatype.sisu</groupId>
+ <artifactId>sisu-guice</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-slf4j-impl</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-jcl</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.ow2.asm</groupId>
+ <artifactId>asm</artifactId>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.rat</groupId>
+ <artifactId>apache-rat-plugin</artifactId>
+ <configuration>
+ <excludes>
+ <exclude>src/test/maven-search-test-repo*/**</exclude>
+ <exclude>src/test/repo-release*/**</exclude>
+ </excludes>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <appserver.base>${project.build.directory}/appserver-base</appserver.base>
+ <plexus.home>${project.build.directory}/appserver-base</plexus.home>
+ <derby.system.home>${project.build.directory}/appserver-base</derby.system.home>
+ <redback.jdbc.url>${redbackTestJdbcUrl}</redback.jdbc.url>
+ <redback.jdbc.driver.name>${redbackTestJdbcDriver}</redback.jdbc.driver.name>
+ <archiva.repositorySessionFactory.id>mock</archiva.repositorySessionFactory.id>
+ <openjpa.Log>${openjpa.Log}</openjpa.Log>
+ </systemPropertyVariables>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+</project>
--- /dev/null
+package org.apache.archiva.indexer.maven;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.updater.IndexUpdateSideEffect;
+import org.apache.maven.index_shaded.lucene.store.Directory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+/**
+ * Not doing much, but at least one implementation of this interface is required
+ *
+ * @since 3.0.0
+ */
+@Service("archivaIndexUpdater")
+public class DefaultIndexUpdateSideEffect
+ implements IndexUpdateSideEffect
+{
+ private static final Logger LOGGER = LoggerFactory.getLogger( DefaultIndexUpdateSideEffect.class );
+
+ @Override
+ public void updateIndex( Directory directory, IndexingContext indexingContext, boolean b )
+ {
+ LOGGER.info( "updating index: {} with directory: {}", //
+ indexingContext.getId(), //
+ directory.toString() );
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.maven;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.repository.Repository;
+import org.apache.maven.index.context.IndexingContext;
+
+import java.io.IOException;
+import java.net.URI;
+import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
+import java.sql.Date;
+import java.time.ZonedDateTime;
+import java.util.Set;
+
+/**
+ * Maven implementation of index context
+ */
+public class MavenIndexContext implements ArchivaIndexingContext {
+
+ private IndexingContext delegate;
+ private Repository repository;
+
+ MavenIndexContext(Repository repository, IndexingContext delegate) {
+ this.delegate = delegate;
+ this.repository = repository;
+
+ }
+
+ @Override
+ public String getId() {
+ return delegate.getId();
+ }
+
+ @Override
+ public Repository getRepository() {
+ return repository;
+ }
+
+ @Override
+ public URI getPath() {
+ return delegate.getIndexDirectoryFile().toURI();
+ }
+
+ @Override
+ public boolean isEmpty() throws IOException {
+ return Files.list(delegate.getIndexDirectoryFile().toPath()).count()==0;
+ }
+
+ @Override
+ public void commit() throws IOException {
+ delegate.commit();
+ }
+
+ @Override
+ public void rollback() throws IOException {
+ delegate.rollback();
+ }
+
+ @Override
+ public void optimize() throws IOException {
+ delegate.optimize();
+ }
+
+ @Override
+ public void close(boolean deleteFiles) throws IOException {
+ try {
+ delegate.close(deleteFiles);
+ } catch (NoSuchFileException e) {
+ // Ignore missing directory
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ try {
+ delegate.close(false);
+ } catch (NoSuchFileException e) {
+ // Ignore missing directory
+ }
+ }
+
+ @Override
+ public void purge() throws IOException {
+ delegate.purge();
+ }
+
+ @Override
+ public boolean supports(Class<?> clazz) {
+ return IndexingContext.class.equals(clazz);
+ }
+
+ @SuppressWarnings( "unchecked" )
+ @Override
+ public <T> T getBaseContext(Class<T> clazz) throws UnsupportedOperationException {
+ if (IndexingContext.class.equals(clazz)) {
+ return (T) delegate;
+ } else {
+ throw new UnsupportedOperationException("The class "+clazz+" is not supported by the maven indexer");
+ }
+ }
+
+ @Override
+ public Set<String> getGroups() throws IOException {
+ return delegate.getAllGroups();
+ }
+
+ @Override
+ public void updateTimestamp(boolean save) throws IOException {
+ delegate.updateTimestamp(save);
+ }
+
+ @Override
+ public void updateTimestamp(boolean save, ZonedDateTime time) throws IOException {
+ delegate.updateTimestamp(save, Date.from(time.toInstant()));
+ }
+
+
+}
--- /dev/null
+package org.apache.archiva.indexer.maven;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.admin.model.RepositoryAdminException;
+import org.apache.archiva.common.utils.FileUtils;
+import org.apache.archiva.common.utils.PathUtil;
+import org.apache.archiva.configuration.ArchivaConfiguration;
+import org.apache.archiva.indexer.ArchivaIndexManager;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.indexer.IndexCreationFailedException;
+import org.apache.archiva.indexer.IndexUpdateFailedException;
+import org.apache.archiva.indexer.UnsupportedBaseContextException;
+import org.apache.archiva.proxy.ProxyRegistry;
+import org.apache.archiva.proxy.maven.WagonFactory;
+import org.apache.archiva.proxy.maven.WagonFactoryException;
+import org.apache.archiva.proxy.maven.WagonFactoryRequest;
+import org.apache.archiva.proxy.model.NetworkProxy;
+import org.apache.archiva.repository.EditableRepository;
+import org.apache.archiva.repository.ManagedRepository;
+import org.apache.archiva.repository.PasswordCredentials;
+import org.apache.archiva.repository.RemoteRepository;
+import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.RepositoryType;
+import org.apache.archiva.repository.UnsupportedRepositoryTypeException;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.features.RemoteIndexFeature;
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.index.ArtifactContext;
+import org.apache.maven.index.ArtifactContextProducer;
+import org.apache.maven.index.DefaultScannerListener;
+import org.apache.maven.index.Indexer;
+import org.apache.maven.index.IndexerEngine;
+import org.apache.maven.index.Scanner;
+import org.apache.maven.index.ScanningRequest;
+import org.apache.maven.index.ScanningResult;
+import org.apache.maven.index.context.IndexCreator;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.packer.IndexPacker;
+import org.apache.maven.index.packer.IndexPackingRequest;
+import org.apache.maven.index.updater.IndexUpdateRequest;
+import org.apache.maven.index.updater.IndexUpdater;
+import org.apache.maven.index.updater.ResourceFetcher;
+import org.apache.maven.index_shaded.lucene.index.IndexFormatTooOldException;
+import org.apache.maven.wagon.ConnectionException;
+import org.apache.maven.wagon.ResourceDoesNotExistException;
+import org.apache.maven.wagon.StreamWagon;
+import org.apache.maven.wagon.TransferFailedException;
+import org.apache.maven.wagon.Wagon;
+import org.apache.maven.wagon.authentication.AuthenticationException;
+import org.apache.maven.wagon.authentication.AuthenticationInfo;
+import org.apache.maven.wagon.authorization.AuthorizationException;
+import org.apache.maven.wagon.events.TransferEvent;
+import org.apache.maven.wagon.events.TransferListener;
+import org.apache.maven.wagon.proxy.ProxyInfo;
+import org.apache.maven.wagon.shared.http.AbstractHttpClientWagon;
+import org.apache.maven.wagon.shared.http.HttpConfiguration;
+import org.apache.maven.wagon.shared.http.HttpMethodConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentSkipListSet;
+import java.util.stream.Collectors;
+
+/**
+ * Maven implementation of index manager.
+ * The index manager is a singleton, so we try to make sure that index operations are not running
+ * in parallel, by synchronizing on the index path.
+ * An update operation waits for parallel running methods to finish before starting, but after a certain
+ * number of retries an IndexUpdateFailedException is thrown.
+ */
+@Service( "archivaIndexManager#maven" )
+public class MavenIndexManager implements ArchivaIndexManager {
+
+ private static final Logger log = LoggerFactory.getLogger( MavenIndexManager.class );
+
+ @Inject
+ private Indexer indexer;
+
+ @Inject
+ private IndexerEngine indexerEngine;
+
+ @Inject
+ private List<? extends IndexCreator> indexCreators;
+
+ @Inject
+ private IndexPacker indexPacker;
+
+ @Inject
+ private Scanner scanner;
+
+ @Inject
+ private ArchivaConfiguration archivaConfiguration;
+
+ @Inject
+ private WagonFactory wagonFactory;
+
+ @Inject
+ private IndexUpdater indexUpdater;
+
+ @Inject
+ private ArtifactContextProducer artifactContextProducer;
+
+ @Inject
+ private ProxyRegistry proxyRegistry;
+
+
+ public static final String DEFAULT_INDEXER_DIR = ".indexer";
+ public static final String DEFAULT_PACKED_INDEX_DIR = ".index";
+
+ private ConcurrentSkipListSet<Path> activeContexts = new ConcurrentSkipListSet<>( );
+
+ private static final int WAIT_TIME = 100;
+ private static final int MAX_WAIT = 10;
+
+
+ public static IndexingContext getMvnContext( ArchivaIndexingContext context ) throws UnsupportedBaseContextException
+ {
+ if ( !context.supports( IndexingContext.class ) )
+ {
+ log.error( "The provided archiva index context does not support the maven IndexingContext" );
+ throw new UnsupportedBaseContextException( "The context does not support the Maven IndexingContext" );
+ }
+ return context.getBaseContext( IndexingContext.class );
+ }
+
+ private Path getIndexPath( ArchivaIndexingContext ctx )
+ {
+ return PathUtil.getPathFromUri( ctx.getPath( ) );
+ }
+
+ @FunctionalInterface
+ interface IndexUpdateConsumer
+ {
+
+ void accept( IndexingContext indexingContext ) throws IndexUpdateFailedException;
+ }
+
+ /*
+ * Runs some actions around the update execution code and makes sure that no other
+ * method is running on the same index.
+ */
+ private void executeUpdateFunction( ArchivaIndexingContext context, IndexUpdateConsumer function ) throws IndexUpdateFailedException
+ {
+ IndexingContext indexingContext = null;
+ try
+ {
+ indexingContext = getMvnContext( context );
+ }
+ catch ( UnsupportedBaseContextException e )
+ {
+ throw new IndexUpdateFailedException( "Maven index is not supported by this context", e );
+ }
+ final Path ctxPath = getIndexPath( context );
+ int loop = MAX_WAIT;
+ boolean active = false;
+ while ( loop-- > 0 && !active )
+ {
+ active = activeContexts.add( ctxPath );
+ try
+ {
+ Thread.currentThread( ).sleep( WAIT_TIME );
+ }
+ catch ( InterruptedException e )
+ {
+ // Ignore this
+ }
+ }
+ if ( active )
+ {
+ try
+ {
+ function.accept( indexingContext );
+ }
+ finally
+ {
+ activeContexts.remove( ctxPath );
+ }
+ }
+ else
+ {
+ throw new IndexUpdateFailedException( "Timeout while waiting for index release on context " + context.getId( ) );
+ }
+ }
+
+ @Override
+ public void pack( final ArchivaIndexingContext context ) throws IndexUpdateFailedException
+ {
+ executeUpdateFunction( context, indexingContext -> {
+ try
+ {
+ IndexPackingRequest request = new IndexPackingRequest( indexingContext,
+ indexingContext.acquireIndexSearcher( ).getIndexReader( ),
+ indexingContext.getIndexDirectoryFile( ) );
+ indexPacker.packIndex( request );
+ indexingContext.updateTimestamp( true );
+ }
+ catch ( IOException e )
+ {
+ log.error( "IOException while packing index of context " + context.getId( ) + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ) );
+ throw new IndexUpdateFailedException( "IOException during update of " + context.getId( ), e );
+ }
+ }
+ );
+
+ }
+
+ @Override
+ public void scan(final ArchivaIndexingContext context) throws IndexUpdateFailedException
+ {
+ executeUpdateFunction( context, indexingContext -> {
+ DefaultScannerListener listener = new DefaultScannerListener( indexingContext, indexerEngine, true, null );
+ ScanningRequest request = new ScanningRequest( indexingContext, listener );
+ ScanningResult result = scanner.scan( request );
+ if ( result.hasExceptions( ) )
+ {
+ log.error( "Exceptions occured during index scan of " + context.getId( ) );
+ result.getExceptions( ).stream( ).map( e -> e.getMessage( ) ).distinct( ).limit( 5 ).forEach(
+ s -> log.error( "Message: " + s )
+ );
+ }
+
+ } );
+ }
+
+ @Override
+ public void update(final ArchivaIndexingContext context, final boolean fullUpdate) throws IndexUpdateFailedException
+ {
+ log.info( "start download remote index for remote repository {}", context.getRepository( ).getId( ) );
+ URI remoteUpdateUri;
+ if ( !( context.getRepository( ) instanceof RemoteRepository ) || !(context.getRepository().supportsFeature(RemoteIndexFeature.class)) )
+ {
+ throw new IndexUpdateFailedException( "The context is not associated to a remote repository with remote index " + context.getId( ) );
+ } else {
+ RemoteIndexFeature rif = context.getRepository().getFeature(RemoteIndexFeature.class).get();
+ remoteUpdateUri = context.getRepository().getLocation().resolve(rif.getIndexUri());
+ }
+ final RemoteRepository remoteRepository = (RemoteRepository) context.getRepository( );
+
+ executeUpdateFunction( context,
+ indexingContext -> {
+ try
+ {
+ // create a temp directory to download files
+ Path tempIndexDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".tmpIndex" );
+ Path indexCacheDirectory = Paths.get( indexingContext.getIndexDirectoryFile( ).getParent( ), ".indexCache" );
+ Files.createDirectories( indexCacheDirectory );
+ if ( Files.exists( tempIndexDirectory ) )
+ {
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( tempIndexDirectory );
+ }
+ Files.createDirectories( tempIndexDirectory );
+ tempIndexDirectory.toFile( ).deleteOnExit( );
+ String baseIndexUrl = indexingContext.getIndexUpdateUrl( );
+
+ String wagonProtocol = remoteUpdateUri.toURL( ).getProtocol( );
+
+ NetworkProxy networkProxy = null;
+ if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
+ {
+ RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
+ if ( StringUtils.isNotBlank( rif.getProxyId( ) ) )
+ {
+ networkProxy = proxyRegistry.getNetworkProxy( rif.getProxyId( ) );
+ if ( networkProxy == null )
+ {
+ log.warn(
+ "your remote repository is configured to download remote index trought a proxy we cannot find id:{}",
+ rif.getProxyId( ) );
+ }
+ }
+
+ final StreamWagon wagon = (StreamWagon) wagonFactory.getWagon(
+ new WagonFactoryRequest( wagonProtocol, remoteRepository.getExtraHeaders( ) ).networkProxy(
+ networkProxy )
+ );
+ int readTimeout = (int) rif.getDownloadTimeout( ).toMillis( ) * 1000;
+ wagon.setReadTimeout( readTimeout );
+ wagon.setTimeout( (int) remoteRepository.getTimeout( ).toMillis( ) * 1000 );
+
+ if ( wagon instanceof AbstractHttpClientWagon )
+ {
+ HttpConfiguration httpConfiguration = new HttpConfiguration( );
+ HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration( );
+ httpMethodConfiguration.setUsePreemptive( true );
+ httpMethodConfiguration.setReadTimeout( readTimeout );
+ httpConfiguration.setGet( httpMethodConfiguration );
+ AbstractHttpClientWagon.class.cast( wagon ).setHttpConfiguration( httpConfiguration );
+ }
+
+ wagon.addTransferListener( new DownloadListener( ) );
+ ProxyInfo proxyInfo = null;
+ if ( networkProxy != null )
+ {
+ proxyInfo = new ProxyInfo( );
+ proxyInfo.setType( networkProxy.getProtocol( ) );
+ proxyInfo.setHost( networkProxy.getHost( ) );
+ proxyInfo.setPort( networkProxy.getPort( ) );
+ proxyInfo.setUserName( networkProxy.getUsername( ) );
+ proxyInfo.setPassword( networkProxy.getPassword( ) );
+ }
+ AuthenticationInfo authenticationInfo = null;
+ if ( remoteRepository.getLoginCredentials( ) != null && ( remoteRepository.getLoginCredentials( ) instanceof PasswordCredentials ) )
+ {
+ PasswordCredentials creds = (PasswordCredentials) remoteRepository.getLoginCredentials( );
+ authenticationInfo = new AuthenticationInfo( );
+ authenticationInfo.setUserName( creds.getUsername( ) );
+ authenticationInfo.setPassword( new String( creds.getPassword( ) ) );
+ }
+ wagon.connect( new org.apache.maven.wagon.repository.Repository( remoteRepository.getId( ), baseIndexUrl ), authenticationInfo,
+ proxyInfo );
+
+ Path indexDirectory = indexingContext.getIndexDirectoryFile( ).toPath( );
+ if ( !Files.exists( indexDirectory ) )
+ {
+ Files.createDirectories( indexDirectory );
+ }
+
+ ResourceFetcher resourceFetcher =
+ new WagonResourceFetcher( log, tempIndexDirectory, wagon, remoteRepository );
+ IndexUpdateRequest request = new IndexUpdateRequest( indexingContext, resourceFetcher );
+ request.setForceFullUpdate( fullUpdate );
+ request.setLocalIndexCacheDir( indexCacheDirectory.toFile( ) );
+
+ indexUpdater.fetchAndUpdateIndex( request );
+
+ indexingContext.updateTimestamp( true );
+ }
+
+ }
+ catch ( AuthenticationException e )
+ {
+ log.error( "Could not login to the remote proxy for updating index of {}", remoteRepository.getId( ), e );
+ throw new IndexUpdateFailedException( "Login in to proxy failed while updating remote repository " + remoteRepository.getId( ), e );
+ }
+ catch ( ConnectionException e )
+ {
+ log.error( "Connection error during index update for remote repository {}", remoteRepository.getId( ), e );
+ throw new IndexUpdateFailedException( "Connection error during index update for remote repository " + remoteRepository.getId( ), e );
+ }
+ catch ( MalformedURLException e )
+ {
+ log.error( "URL for remote index update of remote repository {} is not correct {}", remoteRepository.getId( ), remoteUpdateUri, e );
+ throw new IndexUpdateFailedException( "URL for remote index update of repository is not correct " + remoteUpdateUri, e );
+ }
+ catch ( IOException e )
+ {
+ log.error( "IOException during index update of remote repository {}: {}", remoteRepository.getId( ), e.getMessage( ), e );
+ throw new IndexUpdateFailedException( "IOException during index update of remote repository " + remoteRepository.getId( )
+ + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
+ }
+ catch ( WagonFactoryException e )
+ {
+ log.error( "Wagon for remote index download of {} could not be created: {}", remoteRepository.getId( ), e.getMessage( ), e );
+ throw new IndexUpdateFailedException( "Error while updating the remote index of " + remoteRepository.getId( ), e );
+ }
+ } );
+
+ }
+
+ @Override
+ public void addArtifactsToIndex( final ArchivaIndexingContext context, final Collection<URI> artifactReference ) throws IndexUpdateFailedException
+ {
+ final URI ctxUri = context.getPath();
+ executeUpdateFunction(context, indexingContext -> {
+ Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
+ try {
+ indexer.addArtifactsToIndex(artifacts, indexingContext);
+ } catch (IOException e) {
+ log.error("IOException while adding artifact {}", e.getMessage(), e);
+ throw new IndexUpdateFailedException("Error occured while adding artifact to index of "+context.getId()
+ + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""));
+ }
+ });
+ }
+
+ @Override
+ public void removeArtifactsFromIndex( ArchivaIndexingContext context, Collection<URI> artifactReference ) throws IndexUpdateFailedException
+ {
+ final URI ctxUri = context.getPath();
+ executeUpdateFunction(context, indexingContext -> {
+ Collection<ArtifactContext> artifacts = artifactReference.stream().map(r -> artifactContextProducer.getArtifactContext(indexingContext, Paths.get(ctxUri.resolve(r)).toFile())).collect(Collectors.toList());
+ try {
+ indexer.deleteArtifactsFromIndex(artifacts, indexingContext);
+ } catch (IOException e) {
+ log.error("IOException while removing artifact {}", e.getMessage(), e);
+ throw new IndexUpdateFailedException("Error occured while removing artifact from index of "+context.getId()
+ + (StringUtils.isNotEmpty(e.getMessage()) ? ": "+e.getMessage() : ""));
+ }
+ });
+
+ }
+
+ @Override
+ public boolean supportsRepository( RepositoryType type )
+ {
+ return type == RepositoryType.MAVEN;
+ }
+
/**
 * Creates a new indexing context for the given repository. Remote and managed
 * repositories get their own context flavor; any other repository type is rejected.
 *
 * @param repository the repository to create the context for; must be of type MAVEN
 * @return the new archiva indexing context wrapping the maven context
 * @throws IndexCreationFailedException when the underlying maven context could not be created
 */
@Override
public ArchivaIndexingContext createContext( Repository repository ) throws IndexCreationFailedException
{
    log.debug("Creating context for repo {}, type: {}", repository.getId(), repository.getType());
    if ( repository.getType( ) != RepositoryType.MAVEN )
    {
        throw new UnsupportedRepositoryTypeException( repository.getType( ) );
    }
    IndexingContext mvnCtx = null;
    try
    {
        if ( repository instanceof RemoteRepository )
        {
            mvnCtx = createRemoteContext( (RemoteRepository) repository );
        }
        else if ( repository instanceof ManagedRepository )
        {
            mvnCtx = createManagedContext( (ManagedRepository) repository );
        }
        // NOTE(review): if the repository is neither remote nor managed, mvnCtx stays null
        // and MavenIndexContext below wraps a null base context — confirm this is intended.
    }
    catch ( IOException e )
    {
        log.error( "IOException during context creation " + e.getMessage( ), e );
        throw new IndexCreationFailedException( "Could not create index context for repository " + repository.getId( )
            + ( StringUtils.isNotEmpty( e.getMessage( ) ) ? ": " + e.getMessage( ) : "" ), e );
    }
    MavenIndexContext context = new MavenIndexContext( repository, mvnCtx );

    return context;
}
+
+ @Override
+ public ArchivaIndexingContext reset(ArchivaIndexingContext context) throws IndexUpdateFailedException {
+ ArchivaIndexingContext ctx;
+ executeUpdateFunction(context, indexingContext -> {
+ try {
+ indexingContext.close(true);
+ } catch (IOException e) {
+ log.warn("Index close failed");
+ }
+ try {
+ FileUtils.deleteDirectory(Paths.get(context.getPath()));
+ } catch (IOException e) {
+ throw new IndexUpdateFailedException("Could not delete index files");
+ }
+ });
+ try {
+ Repository repo = context.getRepository();
+ ctx = createContext(context.getRepository());
+ if (repo instanceof EditableRepository) {
+ ((EditableRepository)repo).setIndexingContext(ctx);
+ }
+ } catch (IndexCreationFailedException e) {
+ throw new IndexUpdateFailedException("Could not create index");
+ }
+ return ctx;
+ }
+
+ @Override
+ public ArchivaIndexingContext move(ArchivaIndexingContext context, Repository repo) throws IndexCreationFailedException {
+ if (context==null) {
+ return null;
+ }
+ if (context.supports(IndexingContext.class)) {
+ try {
+ Path newPath = getIndexPath(repo);
+ IndexingContext ctx = context.getBaseContext(IndexingContext.class);
+ Path oldPath = ctx.getIndexDirectoryFile().toPath();
+ if (oldPath.equals(newPath)) {
+ // Nothing to do, if path does not change
+ return context;
+ }
+ if (!Files.exists(oldPath)) {
+ return createContext(repo);
+ } else if (context.isEmpty()) {
+ context.close();
+ return createContext(repo);
+ } else {
+ context.close(false);
+ Files.move(oldPath, newPath);
+ return createContext(repo);
+ }
+ } catch (IOException e) {
+ log.error("IOException while moving index directory {}", e.getMessage(), e);
+ throw new IndexCreationFailedException("Could not recreated the index.", e);
+ } catch (UnsupportedBaseContextException e) {
+ throw new IndexCreationFailedException("The given context, is not a maven context.");
+ }
+ } else {
+ throw new IndexCreationFailedException("Bad context type. This is not a maven context.");
+ }
+ }
+
+ @Override
+ public void updateLocalIndexPath(Repository repo) {
+ if (repo.supportsFeature(IndexCreationFeature.class)) {
+ IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
+ try {
+ icf.setLocalIndexPath(getIndexPath(repo));
+ icf.setLocalPackedIndexPath(getPackedIndexPath(repo));
+ } catch (IOException e) {
+ log.error("Could not set local index path for {}. New URI: {}", repo.getId(), icf.getIndexPath());
+ }
+ }
+ }
+
+ private Path getIndexPath(Repository repo) throws IOException {
+ IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
+ Path repoDir = repo.getLocalPath();
+ URI indexDir = icf.getIndexPath();
+ Path indexDirectory = null;
+ if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
+ {
+
+ indexDirectory = PathUtil.getPathFromUri( indexDir );
+ // not absolute so create it in repository directory
+ if ( !indexDirectory.isAbsolute( ) )
+ {
+ indexDirectory = repoDir.resolve( indexDirectory );
+ }
+ }
+ else
+ {
+ indexDirectory = repoDir.resolve( DEFAULT_INDEXER_DIR );
+ }
+
+ if ( !Files.exists( indexDirectory ) )
+ {
+ Files.createDirectories( indexDirectory );
+ }
+ return indexDirectory;
+ }
+
+ private Path getPackedIndexPath(Repository repo) throws IOException {
+ IndexCreationFeature icf = repo.getFeature(IndexCreationFeature.class).get();
+ Path repoDir = repo.getLocalPath();
+ URI indexDir = icf.getPackedIndexPath();
+ Path indexDirectory = null;
+ if ( ! StringUtils.isEmpty(indexDir.toString( ) ) )
+ {
+
+ indexDirectory = PathUtil.getPathFromUri( indexDir );
+ // not absolute so create it in repository directory
+ if ( !indexDirectory.isAbsolute( ) )
+ {
+ indexDirectory = repoDir.resolve( indexDirectory );
+ }
+ }
+ else
+ {
+ indexDirectory = repoDir.resolve( DEFAULT_PACKED_INDEX_DIR );
+ }
+
+ if ( !Files.exists( indexDirectory ) )
+ {
+ Files.createDirectories( indexDirectory );
+ }
+ return indexDirectory;
+ }
+
+ private IndexingContext createRemoteContext(RemoteRepository remoteRepository ) throws IOException
+ {
+ Path appServerBase = archivaConfiguration.getAppServerBaseDir( );
+
+ String contextKey = "remote-" + remoteRepository.getId( );
+
+
+ // create remote repository path
+ Path repoDir = remoteRepository.getLocalPath();
+ if ( !Files.exists( repoDir ) )
+ {
+ Files.createDirectories( repoDir );
+ }
+
+ Path indexDirectory = null;
+
+ // is there configured indexDirectory ?
+ if ( remoteRepository.supportsFeature( RemoteIndexFeature.class ) )
+ {
+ RemoteIndexFeature rif = remoteRepository.getFeature( RemoteIndexFeature.class ).get( );
+ indexDirectory = getIndexPath(remoteRepository);
+ String remoteIndexUrl = calculateIndexRemoteUrl( remoteRepository.getLocation( ), rif );
+ try
+ {
+
+ return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
+ }
+ catch ( IndexFormatTooOldException e )
+ {
+ // existing index with an old lucene format so we need to delete it!!!
+ // delete it first then recreate it.
+ log.warn( "the index of repository {} is too old we have to delete and recreate it", //
+ remoteRepository.getId( ) );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( indexDirectory );
+ return getIndexingContext( remoteRepository, contextKey, repoDir, indexDirectory, remoteIndexUrl );
+
+ }
+ }
+ else
+ {
+ throw new IOException( "No remote index defined" );
+ }
+ }
+
/**
 * Low-level helper that creates the maven indexing context via the maven {@link Indexer}.
 *
 * @param repository     the repository the context belongs to
 * @param contextKey     unique id for the context
 * @param repoDir        the repository content directory
 * @param indexDirectory the directory where the index is stored
 * @param indexUrl       the URL from which index updates are fetched
 * @throws IOException when the context cannot be created
 */
private IndexingContext getIndexingContext( Repository repository, String contextKey, Path repoDir, Path indexDirectory, String indexUrl ) throws IOException
{
    // boolean flags: presumably searchable=true, reclaimIndex=false — TODO confirm against
    // the Indexer#createIndexingContext API
    return indexer.createIndexingContext( contextKey, repository.getId( ), repoDir.toFile( ), indexDirectory.toFile( ),
        repository.getLocation( ) == null ? null : repository.getLocation( ).toString( ),
        indexUrl,
        true, false,
        indexCreators );
}
+
+ private IndexingContext createManagedContext( ManagedRepository repository ) throws IOException
+ {
+
+ IndexingContext context;
+ // take care first about repository location as can be relative
+ Path repositoryDirectory = repository.getLocalPath();
+
+ if ( !Files.exists( repositoryDirectory ) )
+ {
+ try
+ {
+ Files.createDirectories( repositoryDirectory );
+ }
+ catch ( IOException e )
+ {
+ log.error( "Could not create directory {}", repositoryDirectory );
+ }
+ }
+
+ Path indexDirectory = null;
+
+ if ( repository.supportsFeature( IndexCreationFeature.class ) )
+ {
+ indexDirectory = getIndexPath(repository);
+
+ String indexUrl = repositoryDirectory.toUri( ).toURL( ).toExternalForm( );
+ try
+ {
+ context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
+ context.setSearchable( repository.isScanned( ) );
+ }
+ catch ( IndexFormatTooOldException e )
+ {
+ // existing index with an old lucene format so we need to delete it!!!
+ // delete it first then recreate it.
+ log.warn( "the index of repository {} is too old we have to delete and recreate it", //
+ repository.getId( ) );
+ org.apache.archiva.common.utils.FileUtils.deleteDirectory( indexDirectory );
+ context = getIndexingContext( repository, repository.getId( ), repositoryDirectory, indexDirectory, indexUrl );
+ context.setSearchable( repository.isScanned( ) );
+ }
+ return context;
+ }
+ else
+ {
+ throw new IOException( "No repository index defined" );
+ }
+ }
+
+ private String calculateIndexRemoteUrl( URI baseUri, RemoteIndexFeature rif )
+ {
+ if ( rif.getIndexUri( ) == null )
+ {
+ return baseUri.resolve( DEFAULT_INDEXER_DIR ).toString( );
+ }
+ else
+ {
+ return baseUri.resolve( rif.getIndexUri( ) ).toString( );
+ }
+ }
+
/**
 * Wagon transfer listener that logs the progress and timing of remote index downloads.
 */
private static final class DownloadListener
    implements TransferListener
{
    private Logger log = LoggerFactory.getLogger( getClass( ) );

    // name of the resource currently being transferred (stored for logging)
    private String resourceName;

    // transfer start time in milliseconds, used to compute the duration on completion
    private long startTime;

    // accumulated number of bytes transferred so far
    private int totalLength = 0;

    @Override
    public void transferInitiated( TransferEvent transferEvent )
    {
        startTime = System.currentTimeMillis( );
        resourceName = transferEvent.getResource( ).getName( );
        log.debug( "initiate transfer of {}", resourceName );
    }

    @Override
    public void transferStarted( TransferEvent transferEvent )
    {
        // reset the byte counter for each new transfer
        this.totalLength = 0;
        resourceName = transferEvent.getResource( ).getName( );
        log.info( "start transfer of {}", transferEvent.getResource( ).getName( ) );
    }

    @Override
    public void transferProgress( TransferEvent transferEvent, byte[] buffer, int length )
    {
        log.debug( "transfer of {} : {}/{}", transferEvent.getResource( ).getName( ), buffer.length, length );
        this.totalLength += length;
    }

    @Override
    public void transferCompleted( TransferEvent transferEvent )
    {
        resourceName = transferEvent.getResource( ).getName( );
        long endTime = System.currentTimeMillis( );
        log.info( "end of transfer file {} {} kb: {}s", transferEvent.getResource( ).getName( ),
            this.totalLength / 1024, ( endTime - startTime ) / 1000 );
    }

    @Override
    public void transferError( TransferEvent transferEvent )
    {
        log.info( "error of transfer file {}: {}", transferEvent.getResource( ).getName( ),
            transferEvent.getException( ).getMessage( ), transferEvent.getException( ) );
    }

    @Override
    public void debug( String message )
    {
        log.debug( "transfer debug {}", message );
    }
}
+
+ private static class WagonResourceFetcher
+ implements ResourceFetcher
+ {
+
+ Logger log;
+
+ Path tempIndexDirectory;
+
+ Wagon wagon;
+
+ RemoteRepository remoteRepository;
+
+ private WagonResourceFetcher( Logger log, Path tempIndexDirectory, Wagon wagon,
+ RemoteRepository remoteRepository )
+ {
+ this.log = log;
+ this.tempIndexDirectory = tempIndexDirectory;
+ this.wagon = wagon;
+ this.remoteRepository = remoteRepository;
+ }
+
+ @Override
+ public void connect( String id, String url )
+ throws IOException
+ {
+ //no op
+ }
+
+ @Override
+ public void disconnect( )
+ throws IOException
+ {
+ // no op
+ }
+
+ @Override
+ public InputStream retrieve( String name )
+ throws IOException, FileNotFoundException
+ {
+ try
+ {
+ log.info( "index update retrieve file, name:{}", name );
+ Path file = tempIndexDirectory.resolve( name );
+ Files.deleteIfExists( file );
+ file.toFile( ).deleteOnExit( );
+ wagon.get( addParameters( name, remoteRepository ), file.toFile( ) );
+ return Files.newInputStream( file );
+ }
+ catch ( AuthorizationException | TransferFailedException e )
+ {
+ throw new IOException( e.getMessage( ), e );
+ }
+ catch ( ResourceDoesNotExistException e )
+ {
+ FileNotFoundException fnfe = new FileNotFoundException( e.getMessage( ) );
+ fnfe.initCause( e );
+ throw fnfe;
+ }
+ }
+
+ // FIXME remove crappy copy/paste
+ protected String addParameters( String path, RemoteRepository remoteRepository )
+ {
+ if ( remoteRepository.getExtraParameters( ).isEmpty( ) )
+ {
+ return path;
+ }
+
+ boolean question = false;
+
+ StringBuilder res = new StringBuilder( path == null ? "" : path );
+
+ for ( Map.Entry<String, String> entry : remoteRepository.getExtraParameters( ).entrySet( ) )
+ {
+ if ( !question )
+ {
+ res.append( '?' ).append( entry.getKey( ) ).append( '=' ).append( entry.getValue( ) );
+ }
+ }
+
+ return res.toString( );
+ }
+
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.maven.merger;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.common.utils.FileUtils;
+import org.apache.archiva.indexer.UnsupportedBaseContextException;
+import org.apache.archiva.indexer.merger.IndexMerger;
+import org.apache.archiva.indexer.merger.IndexMergerException;
+import org.apache.archiva.indexer.merger.IndexMergerRequest;
+import org.apache.archiva.indexer.merger.TemporaryGroupIndex;
+import org.apache.archiva.repository.RepositoryRegistry;
+import org.apache.archiva.repository.RepositoryType;
+import org.apache.commons.lang.time.StopWatch;
+import org.apache.maven.index.Indexer;
+import org.apache.maven.index.context.ContextMemberProvider;
+import org.apache.maven.index.context.IndexCreator;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.context.StaticContextMemberProvider;
+import org.apache.maven.index.packer.IndexPacker;
+import org.apache.maven.index.packer.IndexPackingRequest;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.scheduling.annotation.Async;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Collection;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.stream.Collectors;
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M2
+ */
+@Service("indexMerger#default")
+public class DefaultIndexMerger
+ implements IndexMerger
+{
+
+ @Inject
+ RepositoryRegistry repositoryRegistry;
+
+ private Logger log = LoggerFactory.getLogger( getClass() );
+
+
+ private final IndexPacker indexPacker;
+
+ private Indexer indexer;
+
+ private final List<IndexCreator> indexCreators;
+
+ private List<TemporaryGroupIndex> temporaryGroupIndexes = new CopyOnWriteArrayList<>();
+
+ private List<IndexingContext> temporaryContextes = new CopyOnWriteArrayList<>( );
+
+ private List<String> runningGroups = new CopyOnWriteArrayList<>();
+
+ @Inject
+ public DefaultIndexMerger( Indexer indexer, IndexPacker indexPacker, List<IndexCreator> indexCreators )
+ {
+ this.indexer = indexer;
+ this.indexPacker = indexPacker;
+ this.indexCreators = indexCreators;
+ }
+
+ @Override
+ public IndexingContext buildMergedIndex( IndexMergerRequest indexMergerRequest )
+ throws IndexMergerException
+ {
+ String groupId = indexMergerRequest.getGroupId();
+
+ if ( runningGroups.contains( groupId ) )
+ {
+ log.info( "skip build merge remote indexes for id: '{}' as already running", groupId );
+ return null;
+ }
+
+ runningGroups.add( groupId );
+
+ StopWatch stopWatch = new StopWatch();
+ stopWatch.reset();
+ stopWatch.start();
+
+ Path mergedIndexDirectory = indexMergerRequest.getMergedIndexDirectory();
+
+ String tempRepoId = mergedIndexDirectory.getFileName().toString();
+
+ try
+ {
+ Path indexLocation = mergedIndexDirectory.resolve( indexMergerRequest.getMergedIndexPath() );
+
+ List<IndexingContext> members = indexMergerRequest.getRepositoriesIds( ).stream( ).map( id ->
+ repositoryRegistry.getRepository( id ) ).filter( repo -> repo.getType().equals( RepositoryType.MAVEN ) )
+ .map( repo -> {
+ try
+ {
+ return repo.getIndexingContext().getBaseContext( IndexingContext.class );
+ }
+ catch ( UnsupportedBaseContextException e )
+ {
+ return null;
+ // Ignore
+ }
+ } ).filter( Objects::nonNull ).collect( Collectors.toList() );
+ ContextMemberProvider memberProvider = new StaticContextMemberProvider(members);
+ IndexingContext mergedCtx = indexer.createMergedIndexingContext( tempRepoId, tempRepoId, mergedIndexDirectory.toFile(),
+ indexLocation.toFile(), true, memberProvider);
+ mergedCtx.optimize();
+
+ if ( indexMergerRequest.isPackIndex() )
+ {
+ IndexPackingRequest request = new IndexPackingRequest( mergedCtx, //
+ mergedCtx.acquireIndexSearcher().getIndexReader(), //
+ indexLocation.toFile() );
+ indexPacker.packIndex( request );
+ }
+
+ if ( indexMergerRequest.isTemporary() )
+ {
+ temporaryGroupIndexes.add( new TemporaryGroupIndex( mergedIndexDirectory, tempRepoId, groupId,
+ indexMergerRequest.getMergedIndexTtl() ) );
+ temporaryContextes.add(mergedCtx);
+ }
+ stopWatch.stop();
+ log.info( "merged index for repos {} in {} s", indexMergerRequest.getRepositoriesIds(),
+ stopWatch.getTime() );
+ return mergedCtx;
+ }
+ catch ( IOException e)
+ {
+ throw new IndexMergerException( e.getMessage(), e );
+ }
+ finally
+ {
+ runningGroups.remove( groupId );
+ }
+ }
+
+ @Async
+ @Override
+ public void cleanTemporaryGroupIndex( TemporaryGroupIndex temporaryGroupIndex )
+ {
+ if ( temporaryGroupIndex == null )
+ {
+ return;
+ }
+
+ try
+ {
+
+ Optional<IndexingContext> ctxOpt = temporaryContextes.stream( ).filter( ctx -> ctx.getId( ).equals( temporaryGroupIndex.getIndexId( ) ) ).findFirst( );
+ if (ctxOpt.isPresent()) {
+ IndexingContext ctx = ctxOpt.get();
+ indexer.closeIndexingContext( ctx, true );
+ temporaryGroupIndexes.remove( temporaryGroupIndex );
+ temporaryContextes.remove( ctx );
+ Path directory = temporaryGroupIndex.getDirectory();
+ if ( directory != null && Files.exists(directory) )
+ {
+ FileUtils.deleteDirectory( directory );
+ }
+ }
+ }
+ catch ( IOException e )
+ {
+ log.warn( "fail to delete temporary group index {}", temporaryGroupIndex.getIndexId(), e );
+ }
+ }
+
+ @Override
+ public Collection<TemporaryGroupIndex> getTemporaryGroupIndexes()
+ {
+ return this.temporaryGroupIndexes;
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.maven.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.admin.model.RepositoryAdminException;
+import org.apache.archiva.admin.model.beans.ProxyConnector;
+import org.apache.archiva.admin.model.proxyconnector.ProxyConnectorAdmin;
+import org.apache.archiva.indexer.UnsupportedBaseContextException;
+import org.apache.archiva.indexer.search.ArtifactInfoFilter;
+import org.apache.archiva.indexer.search.NoClassifierArtifactInfoFilter;
+import org.apache.archiva.indexer.search.RepositorySearch;
+import org.apache.archiva.indexer.search.RepositorySearchException;
+import org.apache.archiva.indexer.search.SearchFields;
+import org.apache.archiva.indexer.search.SearchResultHit;
+import org.apache.archiva.indexer.search.SearchResultLimits;
+import org.apache.archiva.indexer.search.SearchResults;
+import org.apache.archiva.indexer.util.SearchUtil;
+import org.apache.archiva.model.ArchivaArtifactModel;
+import org.apache.archiva.repository.RemoteRepository;
+import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.RepositoryRegistry;
+import org.apache.archiva.repository.RepositoryType;
+import org.apache.commons.lang.StringUtils;
+import org.apache.maven.index.ArtifactInfo;
+import org.apache.maven.index.FlatSearchRequest;
+import org.apache.maven.index.FlatSearchResponse;
+import org.apache.maven.index.Indexer;
+import org.apache.maven.index.MAVEN;
+import org.apache.maven.index.OSGI;
+import org.apache.maven.index.QueryCreator;
+import org.apache.maven.index.SearchType;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.expr.SearchExpression;
+import org.apache.maven.index.expr.SearchTyped;
+import org.apache.maven.index.expr.SourcedSearchExpression;
+import org.apache.maven.index.expr.UserInputSearchExpression;
+import org.apache.maven.index_shaded.lucene.search.BooleanClause;
+import org.apache.maven.index_shaded.lucene.search.BooleanClause.Occur;
+import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * RepositorySearch implementation which uses the Maven Indexer for searching.
+ */
+@Service( "repositorySearch#maven" )
+public class MavenRepositorySearch
+ implements RepositorySearch
+{
+ private Logger log = LoggerFactory.getLogger( getClass() );
+
+ private Indexer indexer;
+
+ private QueryCreator queryCreator;
+
+
+ RepositoryRegistry repositoryRegistry;
+
+ private ProxyConnectorAdmin proxyConnectorAdmin;
+
/**
 * No-arg constructor; dependencies stay unset. Only intended for test subclassing.
 */
protected MavenRepositorySearch()
{
    // for test purpose
}
+
/**
 * Injection constructor wiring the maven indexer, the repository registry, the proxy
 * connector admin and the query creator used to build index queries.
 */
@Inject
public MavenRepositorySearch( Indexer nexusIndexer, RepositoryRegistry repositoryRegistry,
                              ProxyConnectorAdmin proxyConnectorAdmin, QueryCreator queryCreator )
{
    this.indexer = nexusIndexer;
    this.queryCreator = queryCreator;
    this.repositoryRegistry = repositoryRegistry;
    this.proxyConnectorAdmin = proxyConnectorAdmin;
}
+
/**
 * Quick search over the given repositories. Each previous search term is AND-combined
 * with the current term, so repeated searches successively narrow the result set.
 * Only artifacts without a classifier are returned.
 *
 * @see RepositorySearch#search(String, List, String, SearchResultLimits, List)
 */
@Override
public SearchResults search(String principal, List<String> selectedRepos, String term, SearchResultLimits limits,
                            List<String> previousSearchTerms )
    throws RepositorySearchException
{
    List<String> indexingContextIds = addIndexingContexts( selectedRepos );

    // since upgrade to nexus 2.0.0, query has changed from g:[QUERIED TERM]* to g:*[QUERIED TERM]*
    // resulting to more wildcard searches so we need to increase max clause count
    // NOTE(review): this mutates global Lucene state for the whole JVM — confirm intended.
    BooleanQuery.setMaxClauseCount( Integer.MAX_VALUE );
    BooleanQuery.Builder qb = new BooleanQuery.Builder();

    if ( previousSearchTerms == null || previousSearchTerms.isEmpty() )
    {
        constructQuery( term, qb );
    }
    else
    {
        // AND every previous term (each wrapped in its own sub-query) with the new term
        for ( String previousTerm : previousSearchTerms )
        {
            BooleanQuery.Builder iQuery = new BooleanQuery.Builder();
            constructQuery( previousTerm, iQuery );

            qb.add( iQuery.build(), BooleanClause.Occur.MUST );
        }

        BooleanQuery.Builder iQuery = new BooleanQuery.Builder();
        constructQuery( term, iQuery );
        qb.add( iQuery.build(), BooleanClause.Occur.MUST );
    }

    // we retun only artifacts without classifier in quick search, olamy cannot find a way to say with this field empty
    // FIXME cannot find a way currently to setup this in constructQuery !!!
    return search( limits, qb.build(), indexingContextIds, NoClassifierArtifactInfoFilter.LIST, selectedRepos, true );

}
+
+ /**
+ * Field-based (advanced) search. Every non-blank field of {@code searchFields}
+ * contributes a MUST clause to the Lucene query, so all supplied fields must match.
+ *
+ * @param principal not used for filtering here
+ * @param searchFields fields to match; {@code getRepositories()} must be non-null
+ * @param limits paging limits, may be null
+ * @throws RepositorySearchException if repositories are null, no search field is
+ *         set, or the underlying index search fails
+ * @see RepositorySearch#search(String, SearchFields, SearchResultLimits)
+ */
+ @SuppressWarnings( "deprecation" )
+ @Override
+ public SearchResults search( String principal, SearchFields searchFields, SearchResultLimits limits )
+ throws RepositorySearchException
+ {
+ if ( searchFields.getRepositories() == null )
+ {
+ throw new RepositorySearchException( "Repositories cannot be null." );
+ }
+
+ List<String> indexingContextIds = addIndexingContexts( searchFields.getRepositories() );
+
+ // if no index found in the specified ones return an empty search result instead of doing a search on all index
+ // olamy: IMHO doesn't make sense
+ if ( !searchFields.getRepositories().isEmpty() && ( indexingContextIds == null
+ || indexingContextIds.isEmpty() ) )
+ {
+ return new SearchResults();
+ }
+
+ BooleanQuery.Builder qb = new BooleanQuery.Builder();
+ if ( StringUtils.isNotBlank( searchFields.getGroupId() ) )
+ {
+ qb.add( indexer.constructQuery( MAVEN.GROUP_ID, searchFields.isExactSearch() ? new SourcedSearchExpression(
+ searchFields.getGroupId() ) : new UserInputSearchExpression( searchFields.getGroupId() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getArtifactId() ) )
+ {
+ qb.add( indexer.constructQuery( MAVEN.ARTIFACT_ID,
+ searchFields.isExactSearch()
+ ? new SourcedSearchExpression( searchFields.getArtifactId() )
+ : new UserInputSearchExpression( searchFields.getArtifactId() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getVersion() ) )
+ {
+ // NOTE(review): both ternary branches build a SourcedSearchExpression, so the
+ // version is always matched exactly regardless of isExactSearch() — confirm
+ // this is intended before "fixing" it to UserInputSearchExpression.
+ qb.add( indexer.constructQuery( MAVEN.VERSION, searchFields.isExactSearch() ? new SourcedSearchExpression(
+ searchFields.getVersion() ) : new SourcedSearchExpression( searchFields.getVersion() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getPackaging() ) )
+ {
+ qb.add( indexer.constructQuery( MAVEN.PACKAGING, searchFields.isExactSearch() ? new SourcedSearchExpression(
+ searchFields.getPackaging() ) : new UserInputSearchExpression( searchFields.getPackaging() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getClassName() ) )
+ {
+ qb.add( indexer.constructQuery( MAVEN.CLASSNAMES,
+ new UserInputSearchExpression( searchFields.getClassName() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleSymbolicName() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.SYMBOLIC_NAME,
+ new UserInputSearchExpression( searchFields.getBundleSymbolicName() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleVersion() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.VERSION,
+ new UserInputSearchExpression( searchFields.getBundleVersion() ) ),
+ BooleanClause.Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleExportPackage() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.EXPORT_PACKAGE,
+ new UserInputSearchExpression( searchFields.getBundleExportPackage() ) ),
+ Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleExportService() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.EXPORT_SERVICE,
+ new UserInputSearchExpression( searchFields.getBundleExportService() ) ),
+ Occur.MUST );
+ }
+
+ // FIX: a second, identical bundleImportPackage MUST clause (copy-paste
+ // duplicate) was removed; one clause is sufficient and equivalent.
+ if ( StringUtils.isNotBlank( searchFields.getBundleImportPackage() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.IMPORT_PACKAGE,
+ new UserInputSearchExpression( searchFields.getBundleImportPackage() ) ),
+ Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleName() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.NAME, new UserInputSearchExpression( searchFields.getBundleName() ) ),
+ Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getBundleRequireBundle() ) )
+ {
+ qb.add( indexer.constructQuery( OSGI.REQUIRE_BUNDLE,
+ new UserInputSearchExpression( searchFields.getBundleRequireBundle() ) ),
+ Occur.MUST );
+ }
+
+ if ( StringUtils.isNotBlank( searchFields.getClassifier() ) )
+ {
+ qb.add( indexer.constructQuery( MAVEN.CLASSIFIER, searchFields.isExactSearch() ? new SourcedSearchExpression(
+ searchFields.getClassifier() ) : new UserInputSearchExpression( searchFields.getClassifier() ) ),
+ Occur.MUST );
+ }
+ else if ( searchFields.isExactSearch() )
+ {
+ //TODO improvement in case of exact search and no classifier we must query for classifier with null value
+ // currently it's done in DefaultSearchService with some filtering
+ }
+
+ BooleanQuery qu = qb.build();
+ if ( qu.clauses() == null || qu.clauses().size() <= 0 )
+ {
+ throw new RepositorySearchException( "No search fields set." );
+ }
+ // FIX: SLF4J placeholders ({}) were missing, so the clause values were never
+ // logged; the redundant null re-check after the throw above was also dropped.
+ log.debug( "CLAUSES {}", qu.clauses() );
+ for ( BooleanClause cl : qu.clauses() )
+ {
+ log.debug( "Clause {}", cl );
+ }
+
+ return search( limits, qu, indexingContextIds, Collections.<ArtifactInfoFilter>emptyList(),
+ searchFields.getRepositories(), searchFields.isIncludePomArtifacts() );
+ }
+
+ /**
+ * Search expression representing an explicit null/empty value, matched exactly.
+ * NOTE(review): INSTANCE is not referenced within this chunk — presumably used
+ * by callers needing an exact empty-field match; confirm before removing.
+ */
+ private static class NullSearch
+ implements SearchTyped, SearchExpression
+ {
+ private static final NullSearch INSTANCE = new NullSearch();
+
+ // sentinel literal stored in place of a real field value
+ @Override
+ public String getStringValue()
+ {
+ return "[[NULL_VALUE]]";
+ }
+
+ @Override
+ public SearchType getSearchType()
+ {
+ return SearchType.EXACT;
+ }
+ }
+
+ /**
+ * Executes the prepared Lucene query against the resolved indexing contexts and
+ * converts the flat response into {@link SearchResults}.
+ *
+ * @param limits paging limits; null means no count limit is applied
+ * @param q the boolean query to run
+ * @param indexingContextIds ids produced by {@link #addIndexingContexts(List)}
+ * @param filters per-hit filters applied during result conversion
+ * @param selectedRepos repositories the caller selected (used for URL building)
+ * @param includePoms whether pom artifacts are kept in the results
+ * @throws RepositorySearchException on I/O or repository-admin failures
+ */
+ private SearchResults search( SearchResultLimits limits, BooleanQuery q, List<String> indexingContextIds,
+ List<? extends ArtifactInfoFilter> filters, List<String> selectedRepos,
+ boolean includePoms )
+ throws RepositorySearchException
+ {
+
+ try
+ {
+ FlatSearchRequest request = new FlatSearchRequest( q );
+
+ request.setContexts( getIndexingContexts( indexingContextIds ) );
+ if ( limits != null )
+ {
+ // we apply limits only when first page asked
+ // NOTE(review): with selectedPage == 0 this reduces to limits.getPageSize();
+ // the Math.max(1, ...) only matters for pages > 0, which never reach here
+ if ( limits.getSelectedPage() == 0 )
+ {
+ request.setCount( limits.getPageSize() * ( Math.max( 1, limits.getSelectedPage() ) ) );
+ }
+ }
+
+ FlatSearchResponse response = indexer.searchFlat( request );
+
+ // empty result: return an empty container that still carries the limits
+ if ( response == null || response.getTotalHitsCount() == 0 )
+ {
+ SearchResults results = new SearchResults();
+ results.setLimits( limits );
+ return results;
+ }
+
+ return convertToSearchResults( response, limits, filters, selectedRepos, includePoms );
+ }
+ catch ( IOException e )
+ {
+ throw new RepositorySearchException( e.getMessage(), e );
+ }
+ catch ( RepositoryAdminException e )
+ {
+ throw new RepositorySearchException( e.getMessage(), e );
+ }
+
+ }
+
+ /**
+ * Resolves the Maven {@link IndexingContext} for a context id. Ids prefixed
+ * with "remote-" are mapped back to the plain remote repository id first.
+ * Returns null when the repository is unknown, has no indexing context, or its
+ * context cannot be unwrapped to a Maven {@link IndexingContext}.
+ */
+ private IndexingContext getIndexingContext(String id) {
+ String repoId;
+ if (StringUtils.startsWith(id, "remote-")) {
+ repoId = StringUtils.substringAfter(id, "remote-");
+ } else {
+ repoId = id;
+ }
+ Repository repo = repositoryRegistry.getRepository(repoId);
+ if (repo==null) {
+ return null;
+ } else {
+ if (repo.getIndexingContext()!=null) {
+ try {
+ // unwrap the Archiva context to the underlying maven-indexer context
+ return repo.getIndexingContext().getBaseContext(IndexingContext.class);
+ } catch (UnsupportedBaseContextException e) {
+ // non-Maven base context: nothing searchable here
+ return null;
+ }
+ } else {
+ return null;
+ }
+ }
+ }
+
+ /**
+ * Maps context ids to their {@link IndexingContext}s, skipping (with a warning)
+ * any id that cannot be resolved.
+ */
+ private List<IndexingContext> getIndexingContexts( List<String> ids )
+ {
+ List<IndexingContext> contexts = new ArrayList<>( ids.size() );
+
+ for ( String id : ids )
+ {
+ IndexingContext context = getIndexingContext(id);
+ if ( context != null )
+ {
+ contexts.add( context );
+ }
+ else
+ {
+ log.warn( "context with id {} not exists", id );
+ }
+ }
+
+ return contexts;
+ }
+
+ /**
+ * Adds SHOULD (OR) clauses matching the term against the standard quick-search
+ * fields: groupId, artifactId, version, packaging and class names.
+ */
+ private void constructQuery( String term, BooleanQuery.Builder q )
+ {
+ q.add( indexer.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ), Occur.SHOULD );
+ q.add( indexer.constructQuery( MAVEN.ARTIFACT_ID, new UserInputSearchExpression( term ) ), Occur.SHOULD );
+ q.add( indexer.constructQuery( MAVEN.VERSION, new UserInputSearchExpression( term ) ), Occur.SHOULD );
+ q.add( indexer.constructQuery( MAVEN.PACKAGING, new UserInputSearchExpression( term ) ), Occur.SHOULD );
+ q.add( indexer.constructQuery( MAVEN.CLASSNAMES, new UserInputSearchExpression( term ) ), Occur.SHOULD );
+
+ //Query query =
+ // new WildcardQuery( new Term( MAVEN.CLASSNAMES.getFieldName(), "*" ) );
+ //q.add( query, Occur.MUST_NOT );
+ // olamy IMHO we could set this option as at least one must match
+ //q.setMinimumNumberShouldMatch( 1 );
+ }
+
+
+ /**
+ * Collects the searchable indexing-context ids for the selected repositories,
+ * including the contexts of remote repositories proxied by each of them.
+ * Repositories that are unknown, non-Maven, or not searchable are skipped
+ * with a warning.
+ *
+ * @param selectedRepos repository ids selected for the search
+ * @return deduplicated list of indexing context ids used
+ */
+ private List<String> addIndexingContexts( List<String> selectedRepos )
+ {
+ Set<String> indexingContextIds = new HashSet<>();
+ for ( String repo : selectedRepos )
+ {
+ try
+ {
+ Repository rRepo = repositoryRegistry.getRepository(repo);
+
+ if ( rRepo != null )
+ {
+
+ if (rRepo.getType().equals(RepositoryType.MAVEN)) {
+ // Maven repositories are expected to always carry an indexing context
+ assert rRepo.getIndexingContext() != null;
+ IndexingContext context = rRepo.getIndexingContext().getBaseContext(IndexingContext.class);
+ if (context.isSearchable()) {
+ // also pull in the remote contexts proxied through this repo
+ indexingContextIds.addAll(getRemoteIndexingContextIds(repo));
+ indexingContextIds.add(context.getId());
+ } else {
+ log.warn("indexingContext with id {} not searchable", rRepo.getId());
+ }
+ }
+
+ }
+ else
+ {
+ log.warn( "Repository '{}' not found in configuration.", repo );
+ }
+ }
+ catch ( RepositorySearchException e )
+ {
+ log.warn( "RepositorySearchException occured while accessing index of repository '{}' : {}", repo,
+ e.getMessage() );
+ continue;
+ } catch (UnsupportedBaseContextException e) {
+ log.error("Fatal situation: Maven repository without IndexingContext found.");
+ continue;
+ }
+ }
+
+ return new ArrayList<>( indexingContextIds );
+ }
+
+
+ /**
+ * Returns the ids ("remote-&lt;targetRepoId&gt;") of searchable remote indexing
+ * contexts reachable from the given managed repository via its proxy connectors.
+ *
+ * @param managedRepoId id of the managed repository
+ * @return possibly empty set of remote context ids
+ * @throws RepositorySearchException when proxy-connector lookup fails
+ */
+ @Override
+ public Set<String> getRemoteIndexingContextIds( String managedRepoId )
+ throws RepositorySearchException
+ {
+ Set<String> ids = new HashSet<>();
+
+ List<ProxyConnector> proxyConnectors = null;
+ try
+ {
+ proxyConnectors = proxyConnectorAdmin.getProxyConnectorAsMap().get( managedRepoId );
+ }
+ catch ( RepositoryAdminException e )
+ {
+ throw new RepositorySearchException( e.getMessage(), e );
+ }
+
+ if ( proxyConnectors == null || proxyConnectors.isEmpty() )
+ {
+ return ids;
+ }
+
+ for ( ProxyConnector proxyConnector : proxyConnectors )
+ {
+ String remoteId = "remote-" + proxyConnector.getTargetRepoId();
+ RemoteRepository repo = repositoryRegistry.getRemoteRepository(proxyConnector.getTargetRepoId());
+ // FIX: guard against connectors whose target repository is no longer in the
+ // registry (getIndexingContext(String) null-checks the same lookup elsewhere)
+ if (repo != null && repo.getType()==RepositoryType.MAVEN) {
+ try {
+ IndexingContext context = repo.getIndexingContext() != null ? repo.getIndexingContext().getBaseContext(IndexingContext.class) : null;
+ if (context!=null && context.isSearchable()) {
+ ids.add(remoteId);
+ }
+ } catch (UnsupportedBaseContextException e) {
+ // Ignore this one
+ }
+ }
+ }
+
+ return ids;
+ }
+
+ /**
+ * Aggregates all groupIds known to the indexes of the selected repositories.
+ *
+ * @return deduplicated set of groupIds; empty when no index is available
+ * @throws RepositorySearchException when reading an index fails
+ */
+ @Override
+ public Collection<String> getAllGroupIds( String principal, List<String> selectedRepos )
+ throws RepositorySearchException
+ {
+ List<IndexingContext> indexContexts = getIndexingContexts( selectedRepos );
+
+ if ( indexContexts == null || indexContexts.isEmpty() )
+ {
+ return Collections.emptyList();
+ }
+
+ try
+ {
+ Set<String> allGroupIds = new HashSet<>();
+ for ( IndexingContext indexingContext : indexContexts )
+ {
+ allGroupIds.addAll( indexingContext.getAllGroups() );
+ }
+ return allGroupIds;
+ }
+ catch ( IOException e )
+ {
+ throw new RepositorySearchException( e.getMessage(), e );
+ }
+
+ }
+
+ /**
+ * Converts a flat maven-indexer response into {@link SearchResults}, grouping
+ * artifacts by (groupId, artifactId, classifier, packaging) and collecting
+ * their versions; optionally drops pom artifacts and applies per-hit filters.
+ *
+ * @param includePoms when false, hits with the "pom" file extension are skipped
+ * @throws RepositoryAdminException when building the hit URL fails
+ */
+ private SearchResults convertToSearchResults( FlatSearchResponse response, SearchResultLimits limits,
+ List<? extends ArtifactInfoFilter> artifactInfoFilters,
+ List<String> selectedRepos, boolean includePoms )
+ throws RepositoryAdminException
+ {
+ SearchResults results = new SearchResults();
+ Set<ArtifactInfo> artifactInfos = response.getResults();
+
+ for ( ArtifactInfo artifactInfo : artifactInfos )
+ {
+ if ( StringUtils.equalsIgnoreCase( "pom", artifactInfo.getFileExtension() ) && !includePoms )
+ {
+ continue;
+ }
+ String id = SearchUtil.getHitId( artifactInfo.getGroupId(), //
+ artifactInfo.getArtifactId(), //
+ artifactInfo.getClassifier(), //
+ artifactInfo.getPackaging() );
+ Map<String, SearchResultHit> hitsMap = results.getHitsMap();
+
+
+ if ( !applyArtifactInfoFilters( artifactInfo, artifactInfoFilters, hitsMap ) )
+ {
+ continue;
+ }
+
+ SearchResultHit hit = hitsMap.get( id );
+ if ( hit != null )
+ {
+ // existing hit for this GACP tuple: just record the extra version
+ if ( !hit.getVersions().contains( artifactInfo.getVersion() ) )
+ {
+ hit.addVersion( artifactInfo.getVersion() );
+ }
+ }
+ else
+ {
+ // first occurrence: build a full hit from the index record
+ hit = new SearchResultHit();
+ hit.setArtifactId( artifactInfo.getArtifactId() );
+ hit.setGroupId( artifactInfo.getGroupId() );
+ hit.setRepositoryId( artifactInfo.getRepository() );
+ hit.addVersion( artifactInfo.getVersion() );
+ hit.setBundleExportPackage( artifactInfo.getBundleExportPackage() );
+ hit.setBundleExportService( artifactInfo.getBundleExportService() );
+ hit.setBundleSymbolicName( artifactInfo.getBundleSymbolicName() );
+ hit.setBundleVersion( artifactInfo.getBundleVersion() );
+ hit.setBundleDescription( artifactInfo.getBundleDescription() );
+ hit.setBundleDocUrl( artifactInfo.getBundleDocUrl() );
+ hit.setBundleRequireBundle( artifactInfo.getBundleRequireBundle() );
+ hit.setBundleImportPackage( artifactInfo.getBundleImportPackage() );
+ hit.setBundleLicense( artifactInfo.getBundleLicense() );
+ hit.setBundleName( artifactInfo.getBundleName() );
+ hit.setContext( artifactInfo.getContext() );
+ hit.setGoals( artifactInfo.getGoals() );
+ hit.setPrefix( artifactInfo.getPrefix() );
+ hit.setPackaging( artifactInfo.getPackaging() );
+ hit.setClassifier( artifactInfo.getClassifier() );
+ hit.setFileExtension( artifactInfo.getFileExtension() );
+ hit.setUrl( getBaseUrl( artifactInfo, selectedRepos ) );
+ }
+
+ results.addHit( id, hit );
+ }
+
+ results.setTotalHits( response.getTotalHitsCount() );
+ results.setTotalHitsMapSize( results.getHitsMap().values().size() );
+ results.setReturnedHitsCount( response.getReturnedHitsCount() );
+ results.setLimits( limits );
+
+ if ( limits == null || limits.getSelectedPage() == SearchResultLimits.ALL_PAGES )
+ {
+ return results;
+ }
+ else
+ {
+ return paginate( results );
+ }
+ }
+
+ /**
+ * Calculates the repository-relative download path for a search hit, without
+ * the web context and base Archiva URL:
+ * {@code /<repoId>/<group-as-path>/<artifactId>/<version>/<artifactId>-<version>[-<classifier>].<ext>}
+ * For remote-index hits, the context is rewritten to a managed repository that
+ * proxies the remote.
+ *
+ * @param artifactInfo the index hit; its context may be mutated to a managed repo id
+ * @param selectedRepos repositories the user searched, used to pick the proxying managed repo
+ * @return the relative artifact path
+ * @throws RepositoryAdminException when the proxy-connector lookup fails
+ */
+ protected String getBaseUrl( ArtifactInfo artifactInfo, List<String> selectedRepos )
+ throws RepositoryAdminException
+ {
+ StringBuilder sb = new StringBuilder();
+ if ( StringUtils.startsWith( artifactInfo.getContext(), "remote-" ) )
+ {
+ // it's a remote index result we search a managed which proxying this remote and on which
+ // current user has read karma
+ String managedRepoId =
+ getManagedRepoId( StringUtils.substringAfter( artifactInfo.getContext(), "remote-" ), selectedRepos );
+ if ( managedRepoId != null )
+ {
+ sb.append( '/' ).append( managedRepoId );
+ artifactInfo.setContext( managedRepoId );
+ }
+ }
+ else
+ {
+ sb.append( '/' ).append( artifactInfo.getContext() );
+ }
+
+ sb.append( '/' ).append( StringUtils.replaceChars( artifactInfo.getGroupId(), '.', '/' ) );
+ sb.append( '/' ).append( artifactInfo.getArtifactId() );
+ sb.append( '/' ).append( artifactInfo.getVersion() );
+ sb.append( '/' ).append( artifactInfo.getArtifactId() );
+ sb.append( '-' ).append( artifactInfo.getVersion() );
+ if ( StringUtils.isNotBlank( artifactInfo.getClassifier() ) )
+ {
+ sb.append( '-' ).append( artifactInfo.getClassifier() );
+ }
+ // maven-plugin packaging is a jar
+ if ( StringUtils.equals( "maven-plugin", artifactInfo.getPackaging() ) )
+ {
+ // FIX: the extension separator was missing ("...-1.0jar"); append ".jar"
+ sb.append( ".jar" );
+ }
+ else
+ {
+ sb.append( '.' ).append( artifactInfo.getPackaging() );
+ }
+
+ return sb.toString();
+ }
+
+ /**
+ * Resolves a managed repository that proxies the given remote repository.
+ * Managed repositories among {@code selectedRepos} are preferred; otherwise
+ * the first proxying managed repository found is returned.
+ *
+ * @param remoteRepo the remote repository id (without the "remote-" prefix)
+ * @param selectedRepos repositories selected for the search, may be null/empty
+ * @return the source (managed) repository id, or null when no connector matches
+ * @throws RepositoryAdminException when reading the proxy-connector map fails
+ */
+ private String getManagedRepoId( String remoteRepo, List<String> selectedRepos )
+ throws RepositoryAdminException
+ {
+ Map<String, List<ProxyConnector>> proxyConnectorMap = proxyConnectorAdmin.getProxyConnectorAsMap();
+ if ( proxyConnectorMap == null || proxyConnectorMap.isEmpty() )
+ {
+ return null;
+ }
+ if ( selectedRepos != null && !selectedRepos.isEmpty() )
+ {
+ // first pass: restrict to managed repos the user actually searched
+ for ( Map.Entry<String, List<ProxyConnector>> entry : proxyConnectorMap.entrySet() )
+ {
+ if ( selectedRepos.contains( entry.getKey() ) )
+ {
+ for ( ProxyConnector proxyConnector : entry.getValue() )
+ {
+ if ( StringUtils.equals( remoteRepo, proxyConnector.getTargetRepoId() ) )
+ {
+ return proxyConnector.getSourceRepoId();
+ }
+ }
+ }
+ }
+ }
+
+ // we don't find in search selected repos so return the first one
+ for ( Map.Entry<String, List<ProxyConnector>> entry : proxyConnectorMap.entrySet() )
+ {
+
+ for ( ProxyConnector proxyConnector : entry.getValue() )
+ {
+ if ( StringUtils.equals( remoteRepo, proxyConnector.getTargetRepoId() ) )
+ {
+ return proxyConnector.getSourceRepoId();
+ }
+ }
+
+ }
+ return null;
+ }
+
+ /**
+ * Applies each filter to the hit; a hit is kept only when every filter accepts
+ * it. With no filters, all hits pass.
+ *
+ * @param currentResult the hits accumulated so far, available to the filters
+ * @return true when the artifact should be included in the results
+ */
+ private boolean applyArtifactInfoFilters( ArtifactInfo artifactInfo,
+ List<? extends ArtifactInfoFilter> artifactInfoFilters,
+ Map<String, SearchResultHit> currentResult )
+ {
+ if ( artifactInfoFilters == null || artifactInfoFilters.isEmpty() )
+ {
+ return true;
+ }
+
+ // adapt the index record to the model type the filters operate on
+ ArchivaArtifactModel artifact = new ArchivaArtifactModel();
+ artifact.setArtifactId( artifactInfo.getArtifactId() );
+ artifact.setClassifier( artifactInfo.getClassifier() );
+ artifact.setGroupId( artifactInfo.getGroupId() );
+ artifact.setRepositoryId( artifactInfo.getRepository() );
+ artifact.setVersion( artifactInfo.getVersion() );
+ artifact.setChecksumMD5( artifactInfo.getMd5() );
+ artifact.setChecksumSHA1( artifactInfo.getSha1() );
+ for ( ArtifactInfoFilter filter : artifactInfoFilters )
+ {
+ if ( !filter.addArtifactInResult( artifact, currentResult ) )
+ {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Extracts the page selected in {@code results.getLimits()} from the full hit
+ * list, preserving the total-hit counters of the unpaginated results.
+ *
+ * @param results full results carrying non-null limits with a concrete page
+ * @return a new {@link SearchResults} containing only the selected page
+ */
+ protected SearchResults paginate( SearchResults results )
+ {
+ SearchResultLimits limits = results.getLimits();
+ SearchResults paginated = new SearchResults();
+
+ // ( limits.getPageSize() * ( Math.max( 1, limits.getSelectedPage() ) ) );
+
+ int fetchCount = limits.getPageSize();
+ int offset = ( limits.getSelectedPage() * limits.getPageSize() );
+
+ // never fetch more than the total number of hits
+ if ( fetchCount > results.getTotalHits() )
+ {
+ fetchCount = results.getTotalHits();
+ }
+
+ // Goto offset.
+ if ( offset < results.getTotalHits() )
+ {
+ // only process if the offset is within the hit count.
+ for ( int i = 0; i < fetchCount; i++ )
+ {
+ // Stop fetching if we are past the total # of available hits.
+ if ( offset + i >= results.getHits().size() )
+ {
+ break;
+ }
+
+ SearchResultHit hit = results.getHits().get( ( offset + i ) );
+ if ( hit != null )
+ {
+ String id = SearchUtil.getHitId( hit.getGroupId(), hit.getArtifactId(), hit.getClassifier(),
+ hit.getPackaging() );
+ paginated.addHit( id, hit );
+ }
+ else
+ {
+ break;
+ }
+ }
+ }
+ paginated.setTotalHits( results.getTotalHits() );
+ paginated.setReturnedHitsCount( paginated.getHits().size() );
+ paginated.setTotalHitsMapSize( results.getTotalHitsMapSize() );
+ paginated.setLimits( limits );
+
+ return paginated;
+ }
+
+
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.admin.model.beans.RepositoryGroup;
+import org.apache.archiva.scheduler.MergedRemoteIndexesScheduler;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.scheduling.TaskScheduler;
+import org.springframework.scheduling.support.CronTrigger;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ScheduledFuture;
+
+/**
+ * @author Olivier Lamy
+ * @since 2.0.0
+ */
+@Service( "mergedRemoteIndexesScheduler#default" )
+public class DefaultMergedRemoteIndexesScheduler
+ implements MergedRemoteIndexesScheduler
+{
+
+ private Logger logger = LoggerFactory.getLogger( getClass() );
+
+ @Inject
+ @Named( value = "taskScheduler#mergeRemoteIndexes" )
+ private TaskScheduler taskScheduler;
+
+ @Inject
+ private IndexMerger indexMerger;
+
+ private Map<String, ScheduledFuture> scheduledFutureMap = new ConcurrentHashMap<>();
+
+ @Override
+ public void schedule( RepositoryGroup repositoryGroup, Path directory )
+ {
+ if ( StringUtils.isEmpty( repositoryGroup.getCronExpression() ) )
+ {
+ return;
+ }
+ CronTrigger cronTrigger = new CronTrigger( repositoryGroup.getCronExpression() );
+
+ List<String> repositories = repositoryGroup.getRepositories();
+
+ IndexMergerRequest indexMergerRequest =
+ new IndexMergerRequest( repositories, true, repositoryGroup.getId(), repositoryGroup.getMergedIndexPath(),
+ repositoryGroup.getMergedIndexTtl() ).mergedIndexDirectory( directory );
+
+ MergedRemoteIndexesTaskRequest taskRequest =
+ new MergedRemoteIndexesTaskRequest( indexMergerRequest, indexMerger );
+
+ logger.info( "schedule merge remote index for group {} with cron {}", repositoryGroup.getId(),
+ repositoryGroup.getCronExpression() );
+
+ ScheduledFuture scheduledFuture =
+ taskScheduler.schedule( new MergedRemoteIndexesTask( taskRequest ), cronTrigger );
+ scheduledFutureMap.put( repositoryGroup.getId(), scheduledFuture );
+ }
+
+ @Override
+ public void unschedule( RepositoryGroup repositoryGroup )
+ {
+ ScheduledFuture scheduledFuture = scheduledFutureMap.remove( repositoryGroup.getId() );
+ if ( scheduledFuture != null )
+ {
+ scheduledFuture.cancel( true );
+ }
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.index.context.IndexingContext;
+
+import java.util.Collection;
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M2
+ */
+public interface IndexMerger
+{
+ /**
+ * @param indexMergerRequest
+ * @return a temporary directory with a merge index (directory marked deleteOnExit)
+ * @throws IndexMergerException
+ */
+ IndexingContext buildMergedIndex( IndexMergerRequest indexMergerRequest )
+ throws IndexMergerException;
+
+ void cleanTemporaryGroupIndex( TemporaryGroupIndex temporaryGroupIndex );
+
+ Collection<TemporaryGroupIndex> getTemporaryGroupIndexes();
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M2
+ */
+public class IndexMergerException
+ extends Exception
+{
+ public IndexMergerException( String message, Throwable t )
+ {
+ super( message, t );
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.nio.file.Path;
+import java.util.Collection;
+
+/**
+ * @author Olivier Lamy
+ */
+public class IndexMergerRequest
+{
+ /**
+ * repositories Ids to merge content
+ */
+ private Collection<String> repositoriesIds;
+
+ /**
+ * will generate a downloadable index
+ */
+ private boolean packIndex;
+
+ /**
+ * original groupId (repositoryGroup id)
+ */
+ private String groupId;
+
+ private String mergedIndexPath = ".indexer";
+
+ private int mergedIndexTtl;
+
+ private Path mergedIndexDirectory;
+
+ private boolean temporary;
+
+ public IndexMergerRequest( Collection<String> repositoriesIds, boolean packIndex, String groupId )
+ {
+ this.repositoriesIds = repositoriesIds;
+ this.packIndex = packIndex;
+ this.groupId = groupId;
+ }
+
+ /**
+ * @since 1.4-M4
+ */
+ public IndexMergerRequest( Collection<String> repositoriesIds, boolean packIndex, String groupId,
+ String mergedIndexPath, int mergedIndexTtl )
+ {
+ this.repositoriesIds = repositoriesIds;
+ this.packIndex = packIndex;
+ this.groupId = groupId;
+ this.mergedIndexPath = mergedIndexPath;
+ this.mergedIndexTtl = mergedIndexTtl;
+ }
+
+ public Collection<String> getRepositoriesIds()
+ {
+ return repositoriesIds;
+ }
+
+ public void setRepositoriesIds( Collection<String> repositoriesIds )
+ {
+ this.repositoriesIds = repositoriesIds;
+ }
+
+ public boolean isPackIndex()
+ {
+ return packIndex;
+ }
+
+ public void setPackIndex( boolean packIndex )
+ {
+ this.packIndex = packIndex;
+ }
+
+ public String getGroupId()
+ {
+ return groupId;
+ }
+
+ public void setGroupId( String groupId )
+ {
+ this.groupId = groupId;
+ }
+
+ public String getMergedIndexPath()
+ {
+ return mergedIndexPath;
+ }
+
+ public void setMergedIndexPath( String mergedIndexPath )
+ {
+ this.mergedIndexPath = mergedIndexPath;
+ }
+
+ public int getMergedIndexTtl()
+ {
+ return mergedIndexTtl;
+ }
+
+ public void setMergedIndexTtl( int mergedIndexTtl )
+ {
+ this.mergedIndexTtl = mergedIndexTtl;
+ }
+
+ public Path getMergedIndexDirectory()
+ {
+ return mergedIndexDirectory;
+ }
+
+ public void setMergedIndexDirectory( Path mergedIndexDirectory )
+ {
+ this.mergedIndexDirectory = mergedIndexDirectory;
+ }
+
+ public IndexMergerRequest mergedIndexDirectory( Path mergedIndexDirectory )
+ {
+ this.mergedIndexDirectory = mergedIndexDirectory;
+ return this;
+ }
+
+ public boolean isTemporary()
+ {
+ return temporary;
+ }
+
+ public void setTemporary( boolean temporary )
+ {
+ this.temporary = temporary;
+ }
+
+
+ public IndexMergerRequest temporary( boolean temporary )
+ {
+ this.temporary = temporary;
+ return this;
+ }
+
+ @Override
+ public String toString()
+ {
+ final StringBuilder sb = new StringBuilder( "IndexMergerRequest{" );
+ sb.append( "repositoriesIds=" ).append( repositoriesIds );
+ sb.append( ", packIndex=" ).append( packIndex );
+ sb.append( ", groupId='" ).append( groupId ).append( '\'' );
+ sb.append( ", mergedIndexPath='" ).append( mergedIndexPath ).append( '\'' );
+ sb.append( ", mergedIndexTtl=" ).append( mergedIndexTtl );
+ sb.append( ", mergedIndexDirectory=" ).append( mergedIndexDirectory );
+ sb.append( ", temporary=" ).append( temporary );
+ sb.append( '}' );
+ return sb.toString();
+ }
+
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( this == o )
+ {
+ return true;
+ }
+ if ( o == null || getClass() != o.getClass() )
+ {
+ return false;
+ }
+
+ IndexMergerRequest that = (IndexMergerRequest) o;
+
+ return groupId.equals( that.groupId );
+ }
+
+ @Override
+ public int hashCode()
+ {
+ return groupId.hashCode();
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.index.context.IndexingContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author Olivier Lamy
+ * @since 2.0.0
+ */
+public class MergedRemoteIndexesTask
+ implements Runnable
+{
+
+ private Logger logger = LoggerFactory.getLogger( getClass() );
+
+ private MergedRemoteIndexesTaskRequest mergedRemoteIndexesTaskRequest;
+
+ public MergedRemoteIndexesTask( MergedRemoteIndexesTaskRequest mergedRemoteIndexesTaskRequest )
+ {
+ this.mergedRemoteIndexesTaskRequest = mergedRemoteIndexesTaskRequest;
+ }
+
+ @Override
+ public void run()
+ {
+ try
+ {
+ this.execute();
+ }
+ catch ( IndexMergerException e )
+ {
+ logger.error( e.getMessage(), e );
+ }
+ }
+
+ public MergedRemoteIndexesTaskResult execute()
+ throws IndexMergerException
+ {
+ IndexMerger indexMerger = mergedRemoteIndexesTaskRequest.getIndexMerger();
+
+ IndexingContext indexingContext =
+ indexMerger.buildMergedIndex( mergedRemoteIndexesTaskRequest.getIndexMergerRequest() );
+
+ return new MergedRemoteIndexesTaskResult( indexingContext );
+ }
+
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( this == o )
+ {
+ return true;
+ }
+ if ( !( o instanceof MergedRemoteIndexesTask ) )
+ {
+ return false;
+ }
+
+ MergedRemoteIndexesTask that = (MergedRemoteIndexesTask) o;
+
+ return mergedRemoteIndexesTaskRequest.equals( that.mergedRemoteIndexesTaskRequest );
+ }
+
+ @Override
+ public int hashCode()
+ {
+ return mergedRemoteIndexesTaskRequest.hashCode();
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * @author Olivier Lamy
+ * @since 2.0.0
+ */
+public class MergedRemoteIndexesTaskRequest
+{
+ // The parameters of the merge; this is the sole basis for equals/hashCode.
+ private IndexMergerRequest indexMergerRequest;
+
+ // The merger that will perform the work (deliberately excluded from equality — see equals()).
+ private IndexMerger indexMerger;
+
+ public MergedRemoteIndexesTaskRequest( IndexMergerRequest indexMergerRequest, IndexMerger indexMerger )
+ {
+ this.indexMergerRequest = indexMergerRequest;
+ this.indexMerger = indexMerger;
+ }
+
+ public IndexMergerRequest getIndexMergerRequest()
+ {
+ return indexMergerRequest;
+ }
+
+ public void setIndexMergerRequest( IndexMergerRequest indexMergerRequest )
+ {
+ this.indexMergerRequest = indexMergerRequest;
+ }
+
+ public IndexMerger getIndexMerger()
+ {
+ return indexMerger;
+ }
+
+ public void setIndexMerger( IndexMerger indexMerger )
+ {
+ this.indexMerger = indexMerger;
+ }
+
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( this == o )
+ {
+ return true;
+ }
+ if ( o == null || getClass() != o.getClass() )
+ {
+ return false;
+ }
+
+ MergedRemoteIndexesTaskRequest that = (MergedRemoteIndexesTaskRequest) o;
+
+ // NOTE(review): equality considers only indexMergerRequest and ignores indexMerger —
+ // confirm that two requests with different mergers are meant to be interchangeable.
+ return indexMergerRequest.equals( that.indexMergerRequest );
+ }
+
+ @Override
+ public int hashCode()
+ {
+ return indexMergerRequest.hashCode();
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.index.context.IndexingContext;
+
+/**
+ * @author Olivier Lamy
+ * @since 2.0.0
+ */
+public class MergedRemoteIndexesTaskResult
+{
+ // The merged indexing context produced by MergedRemoteIndexesTask#execute().
+ private IndexingContext indexingContext;
+
+ public MergedRemoteIndexesTaskResult( IndexingContext indexingContext )
+ {
+ this.indexingContext = indexingContext;
+ }
+
+ public IndexingContext getIndexingContext()
+ {
+ return indexingContext;
+ }
+
+ public void setIndexingContext( IndexingContext indexingContext )
+ {
+ this.indexingContext = indexingContext;
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import java.io.Serializable;
+import java.nio.file.Path;
+import java.util.Date;
+
+/**
+ * @author Olivier Lamy
+ */
+public class TemporaryGroupIndex
+ implements Serializable
+{
+ // Creation timestamp in milliseconds since the epoch; also the sole basis
+ // for equals/hashCode (see the notes on those methods below).
+ private long creationTime = new Date().getTime();
+
+ // Directory holding the temporary merged index files.
+ private Path directory;
+
+ // Identifier of the merged index.
+ private String indexId;
+
+ // Identifier of the repository group this index was merged for.
+ private String groupId;
+
+ // Time-to-live of the merged index. NOTE(review): units are not documented here;
+ // TemporaryGroupIndexCleaner compares this value directly against a millisecond
+ // age, which implies milliseconds — TODO confirm.
+ private int mergedIndexTtl;
+
+ public TemporaryGroupIndex(Path directory, String indexId, String groupId, int mergedIndexTtl)
+ {
+ this.directory = directory;
+ this.indexId = indexId;
+ this.groupId = groupId;
+ this.mergedIndexTtl = mergedIndexTtl;
+ }
+
+ public long getCreationTime()
+ {
+ return creationTime;
+ }
+
+ public TemporaryGroupIndex setCreationTime( long creationTime )
+ {
+ this.creationTime = creationTime;
+ return this;
+ }
+
+ public Path getDirectory()
+ {
+ return directory;
+ }
+
+ public TemporaryGroupIndex setDirectory( Path directory )
+ {
+ this.directory = directory;
+ return this;
+ }
+
+ public String getIndexId()
+ {
+ return indexId;
+ }
+
+ public TemporaryGroupIndex setIndexId( String indexId )
+ {
+ this.indexId = indexId;
+ return this;
+ }
+
+ public String getGroupId()
+ {
+ return groupId;
+ }
+
+ public void setGroupId( String groupId )
+ {
+ this.groupId = groupId;
+ }
+
+ public int getMergedIndexTtl() {
+ return mergedIndexTtl;
+ }
+
+ public void setMergedIndexTtl(int mergedIndexTtl) {
+ this.mergedIndexTtl = mergedIndexTtl;
+ }
+
+ @Override
+ public int hashCode()
+ {
+ // NOTE(review): hash (and equality below) depend only on creationTime, so two
+ // instances created in the same millisecond compare equal even for different
+ // groups/directories — verify this is acceptable for how instances are tracked.
+ return Long.toString( creationTime ).hashCode();
+ }
+
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( this == o )
+ {
+ return true;
+ }
+ if ( !( o instanceof TemporaryGroupIndex ) )
+ {
+ return false;
+ }
+ return this.creationTime == ( (TemporaryGroupIndex) o ).creationTime;
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.merger;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Service;
+
+import javax.inject.Inject;
+import java.util.Date;
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M2
+ */
+@Service
+public class TemporaryGroupIndexCleaner
+{
+ private Logger log = LoggerFactory.getLogger( getClass() );
+
+ @Inject
+ private IndexMerger indexMerger;
+
+
+ public TemporaryGroupIndexCleaner( )
+ {
+
+ }
+
+ // 900000
+ @Scheduled(fixedDelay = 900000)
+ public void cleanTemporaryIndex()
+ {
+
+ indexMerger.getTemporaryGroupIndexes()
+ .stream()
+ .forEach( temporaryGroupIndex ->
+ {
+ // cleanup files older than the ttl
+ if ( new Date().getTime() - temporaryGroupIndex.getCreationTime() >
+ temporaryGroupIndex.getMergedIndexTtl() )
+ {
+ log.info( "cleanTemporaryIndex for groupId {}", temporaryGroupIndex.getGroupId() );
+ indexMerger.cleanTemporaryGroupIndex( temporaryGroupIndex );
+
+ }
+ }
+ );
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.search;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.model.ArchivaArtifactModel;
+import org.apache.commons.lang.StringUtils;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @author Olivier Lamy
+ */
+public class NoClassifierArtifactInfoFilter
+ implements ArtifactInfoFilter
+{
+ // Shared stateless instance; the filter keeps no per-call state.
+ public static final NoClassifierArtifactInfoFilter INSTANCE = new NoClassifierArtifactInfoFilter();
+
+ // Convenience single-element list for APIs that take a list of filters.
+ public static final List<? extends ArtifactInfoFilter> LIST = Arrays.asList( INSTANCE );
+
+ /**
+ * Accepts only artifacts without a classifier, i.e. main artifacts
+ * (sources/javadoc/etc. attachments are filtered out).
+ */
+ @Override
+ public boolean addArtifactInResult( ArchivaArtifactModel artifact, Map<String, SearchResultHit> currentResult )
+ {
+ return StringUtils.isBlank( artifact.getClassifier() );
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.util;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.commons.lang.StringUtils;
+
+/**
+ * SearchUtil - utility class for search.
+ */
+public class SearchUtil
+{
+    // Utility class: prevent instantiation.
+    private SearchUtil()
+    {
+    }
+
+    /**
+     * Builds the identifier of a search hit from its Maven coordinates.
+     * Blank (null/empty/whitespace) parts are rendered as empty strings;
+     * non-blank parts are trimmed.
+     *
+     * @param groupId    the group id, may be null or blank
+     * @param artifactId the artifact id, may be null or blank
+     * @param classifier the classifier, may be null or blank
+     * @param packaging  the packaging, may be null or blank
+     * @return a colon-separated id of the form {@code groupId:artifactId:classifier:packaging}
+     */
+    public static String getHitId( String groupId, String artifactId, String classifier, String packaging )
+    {
+        return ( StringUtils.isBlank( groupId ) ? "" : StringUtils.trim( groupId ) ) + ":" //
+            + ( StringUtils.isBlank( artifactId ) ? "" : StringUtils.trim( artifactId ) ) + ":" //
+            + ( StringUtils.isBlank( classifier ) ? "" : StringUtils.trim( classifier ) ) + ":" //
+            + ( StringUtils.isBlank( packaging ) ? "" : StringUtils.trim( packaging ) );
+    }
+}
--- /dev/null
+<?xml version="1.0"?>
+
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:context="http://www.springframework.org/schema/context"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans
+ http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
+ http://www.springframework.org/schema/context
+ http://www.springframework.org/schema/context/spring-context-3.0.xsd"
+ default-lazy-init="false">
+
+ <context:annotation-config/>
+ <context:component-scan base-package="org.apache.archiva.indexer.maven,org.apache.maven.index"/>
+
+
+ <bean name="taskScheduler#mergeRemoteIndexes"
+ class="org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler">
+ <property name="poolSize" value="4"/>
+ <property name="threadGroupName" value="mergeRemoteIndexes"/>
+ </bean>
+
+</beans>
\ No newline at end of file
--- /dev/null
+package org.apache.archiva.indexer.maven;
+
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied. See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*/
+
+import org.apache.archiva.common.utils.FileUtils;
+import org.apache.archiva.indexer.ArchivaIndexingContext;
+import org.apache.archiva.indexer.IndexCreationFailedException;
+import org.apache.archiva.repository.RepositoryRegistry;
+import org.apache.archiva.repository.RepositoryType;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.repository.features.RemoteIndexFeature;
+import org.apache.archiva.repository.maven2.MavenManagedRepository;
+import org.apache.archiva.repository.maven2.MavenRemoteRepository;
+import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
+import org.apache.maven.index.MAVEN;
+import org.apache.maven.index.QueryCreator;
+import org.apache.maven.index.context.IndexingContext;
+import org.apache.maven.index.expr.UserInputSearchExpression;
+import org.apache.maven.index_shaded.lucene.search.BooleanClause;
+import org.apache.maven.index_shaded.lucene.search.BooleanQuery;
+import org.apache.maven.index_shaded.lucene.search.Query;
+import org.junit.After;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.test.context.ContextConfiguration;
+
+import javax.inject.Inject;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.junit.Assert.*;
+
+@RunWith( ArchivaSpringJUnit4ClassRunner.class )
+@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
+public class MavenIndexManagerTest {
+
+ @Inject
+ RepositoryRegistry repositoryRegistry;
+
+
+ // State shared between the create*Context() helpers and the tests/tearDown.
+ private Path indexPath;
+ private MavenManagedRepository repository;
+ private ArchivaIndexingContext ctx;
+ private MavenRemoteRepository repositoryRemote;
+
+ @Inject
+ MavenIndexManager mavenIndexManager;
+
+ @Inject
+ QueryCreator queryCreator;
+
+
+ // Tears down the registry, closes the indexing context and removes the index directory.
+ @After
+ public void tearDown() {
+ repositoryRegistry.destroy();
+ if (ctx!=null) {
+ try {
+ ctx.close(true);
+ } catch (IOException e) {
+ // best-effort close during cleanup; failure is not relevant to the test outcome
+ }
+ }
+ if (indexPath!=null && Files.exists(indexPath)) {
+ FileUtils.deleteQuietly(indexPath);
+ }
+
+ }
+
+ // Scans a populated repository, packs the index and expects a non-empty .gz bundle.
+ @Test
+ public void pack() throws Exception {
+ createTestContext();
+ Path destDir = repository.getLocalPath().resolve("org/apache/archiva/archiva-webapp/1.0");
+ Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-webapp/1.0");
+ org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(),destDir.toFile());
+ mavenIndexManager.scan(ctx);
+ mavenIndexManager.pack(ctx);
+ assertTrue(Files.list(indexPath).filter(path -> {
+ try {
+ return path.getFileName().toString().endsWith(".gz") && Files.size(path) > 0;
+ } catch (IOException e) {
+ return false;
+ }
+ }).findAny().isPresent());
+ }
+
+ // Scans a populated repository and queries the resulting index by groupId.
+ @Test
+ public void scan() throws Exception {
+ createTestContext();
+ Path destDir = repository.getLocalPath().resolve("org/apache/archiva/archiva-webapp/1.0");
+ Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-webapp/1.0");
+ org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(),destDir.toFile());
+ mavenIndexManager.scan(ctx);
+
+ IndexingContext mvnCtx = mavenIndexManager.getMvnContext(ctx);
+ String term = "org.apache.archiva";
+ Query q = new BooleanQuery.Builder().add( queryCreator.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ),
+ BooleanClause.Occur.SHOULD ).build();
+ assertEquals(4, mvnCtx.acquireIndexSearcher().count(q));
+ }
+
+ /*
+ * Does only a index update via file uri, no HTTP uri
+ */
+ @Test
+ public void update() throws Exception {
+ createTestContext();
+ mavenIndexManager.pack(ctx);
+ ctx.close(false);
+ createTestContextForRemote();
+ mavenIndexManager.update(ctx, true);
+ }
+
+ // Adds two artifacts directly (without a scan) and expects both to be searchable.
+ @Test
+ public void addArtifactsToIndex() throws Exception {
+
+ ArchivaIndexingContext ctx = createTestContext();
+ Path destDir = repository.getLocalPath().resolve("org/apache/archiva/archiva-search/1.0");
+ Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-search/1.0");
+ org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(), destDir.toFile());
+ List<URI> uriList = new ArrayList<>();
+ uriList.add(destDir.resolve("archiva-search-1.0.jar").toUri());
+ uriList.add(destDir.resolve("archiva-search-1.0-sources.jar").toUri());
+ mavenIndexManager.addArtifactsToIndex(ctx, uriList);
+
+ IndexingContext mvnCtx = mavenIndexManager.getMvnContext(ctx);
+ String term = "org.apache.archiva";
+ Query q = new BooleanQuery.Builder().add( queryCreator.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ),
+ BooleanClause.Occur.SHOULD ).build();
+ assertEquals(2, mvnCtx.acquireIndexSearcher().count(q));
+ }
+
+ // Adds two artifacts then removes one; the hit count must drop from 2 to 1.
+ @Test
+ public void removeArtifactsFromIndex() throws Exception {
+ ArchivaIndexingContext ctx = createTestContext();
+ Path destDir = repository.getLocalPath().resolve("org/apache/archiva/archiva-search/1.0");
+ Path srcDir = Paths.get("src/test/maven-search-test-repo/org/apache/archiva/archiva-search/1.0");
+ org.apache.commons.io.FileUtils.copyDirectory(srcDir.toFile(), destDir.toFile());
+ List<URI> uriList = new ArrayList<>();
+ uriList.add(destDir.resolve("archiva-search-1.0.jar").toUri());
+ uriList.add(destDir.resolve("archiva-search-1.0-sources.jar").toUri());
+ mavenIndexManager.addArtifactsToIndex(ctx, uriList);
+
+ IndexingContext mvnCtx = mavenIndexManager.getMvnContext(ctx);
+ String term = "org.apache.archiva";
+ Query q = new BooleanQuery.Builder().add( queryCreator.constructQuery( MAVEN.GROUP_ID, new UserInputSearchExpression( term ) ),
+ BooleanClause.Occur.SHOULD ).build();
+ assertEquals(2, mvnCtx.acquireIndexSearcher().count(q));
+ uriList.remove(0);
+ mavenIndexManager.removeArtifactsFromIndex(ctx, uriList);
+ assertEquals(1, mvnCtx.acquireIndexSearcher().count(q));
+ }
+
+ @Test
+ public void supportsRepository() throws Exception {
+ assertTrue(mavenIndexManager.supportsRepository(RepositoryType.MAVEN));
+ assertFalse(mavenIndexManager.supportsRepository(RepositoryType.NPM));
+ }
+
+ // Creates a fresh managed-repository indexing context under target/repositories/test-repo.
+ private ArchivaIndexingContext createTestContext() throws URISyntaxException, IndexCreationFailedException, IOException {
+ indexPath = Paths.get("target/repositories/test-repo/.index-test");
+ FileUtils.deleteDirectory(indexPath);
+ repository = new MavenManagedRepository("test-repo", "Test Repo", Paths.get("target/repositories"));
+ repository.setLocation(new URI("test-repo"));
+ IndexCreationFeature icf = repository.getFeature(IndexCreationFeature.class).get();
+ icf.setIndexPath(new URI(".index-test"));
+ ctx = mavenIndexManager.createContext(repository);
+ return ctx;
+ }
+
+ // Creates a remote-repository indexing context pointing at the same location via a file URI.
+ private ArchivaIndexingContext createTestContextForRemote() throws URISyntaxException, IndexCreationFailedException, IOException {
+ indexPath = Paths.get("target/repositories/test-repo/.index-test");
+ Path repoPath = Paths.get("target/repositories").toAbsolutePath();
+ repositoryRemote = new MavenRemoteRepository("test-repo", "Test Repo", repoPath);
+ repositoryRemote.setLocation(repoPath.resolve("test-repo").toUri());
+ RemoteIndexFeature icf = repositoryRemote.getFeature(RemoteIndexFeature.class).get();
+ icf.setIndexUri(new URI(".index-test"));
+ ctx = mavenIndexManager.createContext(repositoryRemote);
+ return ctx;
+ }
+
+ // Verifies a freshly created context exposes the expected id, path and a non-empty index dir.
+ @Test
+ public void createContext() throws Exception {
+ ArchivaIndexingContext ctx = createTestContext();
+ assertNotNull(ctx);
+ assertEquals(repository, ctx.getRepository());
+ assertEquals("test-repo", ctx.getId());
+ assertEquals(indexPath.toAbsolutePath(), Paths.get(ctx.getPath()).toAbsolutePath());
+ assertTrue(Files.exists(indexPath));
+ List<Path> li = Files.list(indexPath).collect(Collectors.toList());
+ assertTrue(li.size()>0);
+
+ }
+
+}
\ No newline at end of file
--- /dev/null
+package org.apache.archiva.indexer.maven.search;
+
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied. See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*/
+
+import junit.framework.TestCase;
+import org.apache.archiva.admin.repository.proxyconnector.DefaultProxyConnectorAdmin;
+import org.apache.archiva.common.utils.FileUtils;
+import org.apache.archiva.configuration.ArchivaConfiguration;
+import org.apache.archiva.configuration.Configuration;
+import org.apache.archiva.configuration.ConfigurationListener;
+import org.apache.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.archiva.indexer.search.SearchResultHit;
+import org.apache.archiva.indexer.search.SearchResults;
+import org.apache.archiva.repository.Repository;
+import org.apache.archiva.repository.RepositoryRegistry;
+import org.apache.archiva.repository.features.IndexCreationFeature;
+import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
+import org.apache.commons.lang.SystemUtils;
+import org.apache.maven.index.ArtifactContext;
+import org.apache.maven.index.ArtifactContextProducer;
+import org.apache.maven.index.ArtifactScanningListener;
+import org.apache.maven.index.DefaultScannerListener;
+import org.apache.maven.index.Indexer;
+import org.apache.maven.index.IndexerEngine;
+import org.apache.maven.index.QueryCreator;
+import org.apache.maven.index.Scanner;
+import org.apache.maven.index.ScanningRequest;
+import org.apache.maven.index.ScanningResult;
+import org.apache.maven.index.context.IndexingContext;
+import org.easymock.EasyMock;
+import org.easymock.IMocksControl;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.runner.RunWith;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.test.context.ContextConfiguration;
+
+import javax.inject.Inject;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * @author Olivier Lamy
+ */
+@RunWith( ArchivaSpringJUnit4ClassRunner.class )
+@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
+public abstract class AbstractMavenRepositorySearch
+    extends TestCase
+{
+
+ protected Logger log = LoggerFactory.getLogger( getClass() );
+
+ public static String TEST_REPO_1 = "maven-search-test-repo";
+
+ public static String TEST_REPO_2 = "maven-search-test-repo-2";
+
+
+ public static String REPO_RELEASE = "repo-release";
+
+ // Subject under test; built in setUp() from the injected indexer components.
+ MavenRepositorySearch search;
+
+ // EasyMock-backed configuration shared by the registry and the admin components.
+ ArchivaConfiguration archivaConfig;
+
+ @Inject
+ ArtifactContextProducer artifactContextProducer;
+
+ @Inject
+ RepositoryRegistry repositoryRegistry;
+
+ @Inject
+ private IndexerEngine indexerEngine;
+
+ IMocksControl archivaConfigControl;
+
+ Configuration config;
+
+ @Inject
+ Indexer indexer;
+
+ @Inject
+ Scanner scanner;
+
+ @Inject
+ QueryCreator queryCreator;
+
+ // Creates three clean test repositories and wires a mocked ArchivaConfiguration
+ // into the registry. The mock is replayed for registry.reload() and then reset
+ // so each test can set up its own expectations.
+ @Before
+ @Override
+ public void setUp()
+ throws Exception
+ {
+ super.setUp();
+
+ FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" ) );
+ assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 + "/.indexer" )) );
+
+ FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" ) );
+ assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 + "/.indexer" )) );
+
+ archivaConfigControl = EasyMock.createControl();
+
+ archivaConfig = archivaConfigControl.createMock( ArchivaConfiguration.class );
+
+ DefaultProxyConnectorAdmin defaultProxyConnectorAdmin = new DefaultProxyConnectorAdmin();
+ defaultProxyConnectorAdmin.setArchivaConfiguration( archivaConfig );
+ repositoryRegistry.setArchivaConfiguration( archivaConfig );
+
+ search = new MavenRepositorySearch( indexer, repositoryRegistry, defaultProxyConnectorAdmin,
+ queryCreator );
+
+ assertNotNull( repositoryRegistry );
+
+ config = new Configuration();
+ config.addManagedRepository( createRepositoryConfig( TEST_REPO_1 ) );
+ config.addManagedRepository( createRepositoryConfig( TEST_REPO_2 ) );
+ config.addManagedRepository( createRepositoryConfig( REPO_RELEASE ) );
+
+ archivaConfig.addListener( EasyMock.anyObject( ConfigurationListener.class ) );
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration() ).andReturn(config).anyTimes();
+ archivaConfig.save(EasyMock.anyObject(Configuration.class));
+ EasyMock.expectLastCall().anyTimes();
+ archivaConfigControl.replay();
+ repositoryRegistry.reload();
+ archivaConfigControl.reset();
+ }
+
+ // Re-arms the mock so repository removal can read the configuration,
+ // then removes the repositories and deletes their directories.
+ @After
+ @Override
+ public void tearDown()
+ throws Exception
+ {
+ archivaConfigControl.reset();
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration() ).andReturn(config).anyTimes();
+ archivaConfig.save(EasyMock.anyObject(Configuration.class));
+ EasyMock.expectLastCall().anyTimes();
+ archivaConfigControl.replay();
+ repositoryRegistry.removeRepository(TEST_REPO_1);
+ repositoryRegistry.removeRepository(TEST_REPO_2);
+ repositoryRegistry.removeRepository(REPO_RELEASE);
+ repositoryRegistry.destroy();
+ FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 ) );
+ assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_1 )) );
+
+ FileUtils.deleteDirectory( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 ) );
+ assertFalse( Files.exists(Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "/target/repos/" + TEST_REPO_2 )) );
+
+ super.tearDown();
+ }
+
+ // Builds a default-layout managed repository config under target/repos/<repository>,
+ // creating the directory on disk if needed.
+ protected ManagedRepositoryConfiguration createRepositoryConfig( String repository )
+ {
+ ManagedRepositoryConfiguration repositoryConfig = new ManagedRepositoryConfiguration();
+ repositoryConfig.setId( repository );
+ repositoryConfig.setLocation( org.apache.archiva.common.utils.FileUtils.getBasedir() + "/target/repos/" + repository );
+ Path f = Paths.get( repositoryConfig.getLocation() );
+ if ( !Files.exists(f) )
+ {
+ try
+ {
+ Files.createDirectories( f );
+ }
+ catch ( IOException e )
+ {
+ log.error("Could not create directories for {}", f);
+ }
+ }
+ repositoryConfig.setLayout( "default" );
+ repositoryConfig.setName( repository );
+ repositoryConfig.setScanned( true );
+ repositoryConfig.setSnapshots( false );
+ repositoryConfig.setReleases( true );
+ repositoryConfig.setIndexDir(".indexer");
+
+ return repositoryConfig;
+ }
+
+ protected void createIndex( String repository, List<Path> filesToBeIndexed, boolean scan) throws Exception {
+ createIndex(repository, filesToBeIndexed, scan, null);
+ }
+
+ // Copies the fixture repository into target/repos/<repository>, (re)creates the
+ // indexing context, indexes the given files and optionally runs a full scan.
+ protected void createIndex( String repository, List<Path> filesToBeIndexed, boolean scan, Path indexDir)
+ throws Exception
+ {
+ Repository rRepo = repositoryRegistry.getRepository(repository);
+ IndexCreationFeature icf = rRepo.getFeature(IndexCreationFeature.class).get();
+
+
+ IndexingContext context = rRepo.getIndexingContext().getBaseContext(IndexingContext.class);
+
+ if ( context != null )
+ {
+ context.close(true);
+ }
+
+ Path repoDir = Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir()).resolve("target").resolve("repos").resolve(repository);
+
+ Path indexerDirectory = repoDir.resolve(".indexer" );
+
+ if ( Files.exists(indexerDirectory) )
+ {
+ FileUtils.deleteDirectory( indexerDirectory );
+ }
+
+ assertFalse( Files.exists(indexerDirectory) );
+
+ // remove a stale Lucene write lock left over from a previous run, if any
+ Path lockFile = repoDir.resolve(".indexer/write.lock" );
+ if ( Files.exists(lockFile) )
+ {
+ Files.delete(lockFile);
+ }
+
+ assertFalse( Files.exists(lockFile) );
+
+ Path repo = Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "src/test/" + repository );
+ assertTrue( Files.exists(repo) );
+ org.apache.commons.io.FileUtils.copyDirectory(repo.toFile(), repoDir.toFile());
+
+ if (indexDir==null) {
+ Path indexDirectory =
+ Paths.get(org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/index/test-" + Long.toString(System.currentTimeMillis()));
+ indexDirectory.toFile().deleteOnExit();
+ FileUtils.deleteDirectory(indexDirectory);
+ icf.setIndexPath(indexDirectory.toUri());
+ } else {
+
+ icf.setIndexPath(indexDir.toUri());
+ }
+ context = rRepo.getIndexingContext().getBaseContext(IndexingContext.class);
+
+
+ // minimize datas in memory
+// context.getIndexWriter().setMaxBufferedDocs( -1 );
+// context.getIndexWriter().setRAMBufferSizeMB( 1 );
+ for ( Path artifactFile : filesToBeIndexed )
+ {
+ assertTrue( "file not exists " + artifactFile, Files.exists(artifactFile) );
+ ArtifactContext ac = artifactContextProducer.getArtifactContext( context, artifactFile.toFile() );
+
+ if ( artifactFile.toString().endsWith( ".pom" ) )
+ {
+ ac.getArtifactInfo().setFileExtension( "pom" );
+ ac.getArtifactInfo().setPackaging( "pom" );
+ ac.getArtifactInfo().setClassifier( "pom" );
+ }
+ indexer.addArtifactToIndex( ac, context );
+ context.updateTimestamp( true );
+ }
+
+ if ( scan )
+ {
+ DefaultScannerListener listener = new DefaultScannerListener( context, indexerEngine, true, new ArtifactScanListener());
+ ScanningRequest req = new ScanningRequest(context, listener );
+ scanner.scan( req );
+ context.commit();
+ }
+ // force flushing
+ context.commit();
+ // context.getIndexWriter().commit();
+ context.setSearchable( true );
+
+ }
+
+ // Logging-only scan listener used by createIndex(); no assertions, just diagnostics.
+ static class ArtifactScanListener
+ implements ArtifactScanningListener
+ {
+ protected Logger log = LoggerFactory.getLogger( getClass() );
+
+ @Override
+ public void scanningStarted( IndexingContext ctx )
+ {
+ //
+ }
+
+ @Override
+ public void scanningFinished( IndexingContext ctx, ScanningResult result )
+ {
+ // no op
+ }
+
+ @Override
+ public void artifactError( ArtifactContext ac, Exception e )
+ {
+ log.debug( "artifactError {}", ac.getArtifact().getPath(), e );
+ }
+
+ @Override
+ public void artifactDiscovered( ArtifactContext ac )
+ {
+ log.debug( "artifactDiscovered {}:{}", //
+ ac.getArtifact() == null ? "" : ac.getArtifact().getPath(), //
+ ac.getArtifact() == null ? "" : ac.getArtifactInfo() );
+ }
+ }
+
+ // Renders search hits one per line for debugging failed assertions.
+ public String niceDisplay( SearchResults searchResults )
+ throws Exception
+ {
+ StringBuilder sb = new StringBuilder();
+ for ( SearchResultHit hit : searchResults.getHits() )
+ {
+ sb.append( hit.toString() ).append( SystemUtils.LINE_SEPARATOR );
+ }
+ return sb.toString();
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.maven.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.indexer.search.SearchFields;
+import org.apache.archiva.indexer.search.SearchResultHit;
+import org.apache.archiva.indexer.search.SearchResults;
+import org.apache.archiva.repository.RepositoryRegistry;
+import org.easymock.EasyMock;
+import org.junit.After;
+import org.junit.Test;
+
+import javax.inject.Inject;
+import java.nio.file.Path;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * @author Olivier Lamy
+ */
+public class MavenRepositorySearchOSGITest
+ extends AbstractMavenRepositorySearch
+{
+
+ @Inject
+ RepositoryRegistry repositoryRegistry;
+
+ @After
+ @Override
+ public void tearDown() throws Exception {
+ super.tearDown();
+ repositoryRegistry.destroy();
+ }
+
+ @Test
+ public void searchFelixWithSymbolicName()
+ throws Exception
+ {
+
+ createIndex( TEST_REPO_1, Collections.<Path>emptyList(), true );
+
+ List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
+
+ // search artifactId
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setBundleSymbolicName( "org.apache.felix.bundlerepository" );
+ searchFields.setBundleVersion( "1.6.6" );
+ searchFields.setRepositories( selectedRepos );
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 1, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "org.apache.felix", hit.getGroupId() );
+ assertEquals( "org.apache.felix.bundlerepository", hit.getArtifactId() );
+ assertEquals( "1.6.6", hit.getVersions().get( 0 ) );
+
+ assertEquals( "org.apache.felix.bundlerepository;uses:=\"org.osgi.framework\";version=\"2.0\"",
+ hit.getBundleExportPackage() );
+ assertEquals( "org.apache.felix.bundlerepository.RepositoryAdmin,org.osgi.service.obr.RepositoryAdmin",
+ hit.getBundleExportService() );
+ assertEquals( "org.apache.felix.bundlerepository", hit.getBundleSymbolicName() );
+ assertEquals( "1.6.6", hit.getBundleVersion() );
+ }
+
+}
--- /dev/null
+package org.apache.archiva.indexer.maven.search;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import junit.framework.TestCase;
+import org.apache.archiva.indexer.search.SearchResultHit;
+import org.apache.archiva.indexer.search.SearchResultLimits;
+import org.apache.archiva.indexer.search.SearchResults;
+import org.apache.archiva.indexer.util.SearchUtil;
+import org.apache.archiva.repository.RepositoryRegistry;
+import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
+import org.junit.After;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.test.context.ContextConfiguration;
+
+import java.util.Arrays;
+
+/**
+ * @author Olivier Lamy
+ */
+@RunWith( ArchivaSpringJUnit4ClassRunner.class )
+@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
+public class MavenRepositorySearchPaginateTest
+ extends TestCase
+{
+
+ @Autowired
+ RepositoryRegistry repositoryRegistry;
+
+ @After
+ public void endTests() {
+ assert repositoryRegistry!=null;
+ repositoryRegistry.destroy();
+ }
+
+ @Test
+ public void nonPaginatedResult()
+ throws Exception
+ {
+ MavenRepositorySearch search = new MavenRepositorySearch();
+
+ SearchResults searchResults = build( 10, new SearchResultLimits( 0 ) );
+
+ searchResults = search.paginate( searchResults );
+
+ assertEquals( 10, searchResults.getReturnedHitsCount() );
+
+ }
+
+ @Test
+ public void nonPaginatedHugeResult()
+ throws Exception
+ {
+ MavenRepositorySearch search = new MavenRepositorySearch();
+
+ SearchResults origSearchResults = build( 63, new SearchResultLimits( 0 ) );
+
+ SearchResults searchResults = search.paginate( origSearchResults );
+
+ assertEquals( 30, searchResults.getReturnedHitsCount() );
+
+ origSearchResults = build( 63, new SearchResultLimits( 1 ) );
+
+ searchResults = search.paginate( origSearchResults );
+
+ assertEquals( 30, searchResults.getReturnedHitsCount() );
+
+ }
+
+ @Test
+ public void paginatedResult()
+ throws Exception
+ {
+ MavenRepositorySearch search = new MavenRepositorySearch();
+
+ SearchResults searchResults = build( 32, new SearchResultLimits( 1 ) );
+
+ searchResults = search.paginate( searchResults );
+
+ assertEquals( 2, searchResults.getReturnedHitsCount() );
+
+ }
+
+
+ SearchResults build( int number, SearchResultLimits limits )
+ {
+ SearchResults searchResults = new SearchResults();
+ searchResults.setLimits( limits );
+ for ( int i = 0; i < number; i++ )
+ {
+ SearchResultHit hit = new SearchResultHit();
+ hit.setGroupId( "commons-foo" );
+ hit.setArtifactId( "commons-bar-" + i );
+ hit.setPackaging( "jar" );
+ hit.setVersions( Arrays.asList( "1.0" ) );
+ String id =
+ SearchUtil.getHitId( hit.getGroupId(), hit.getArtifactId(), hit.getClassifier(), hit.getPackaging() );
+ searchResults.addHit( id, hit );
+ }
+
+ searchResults.setTotalHits( number );
+ return searchResults;
+
+ }
+}
--- /dev/null
+package org.apache.archiva.indexer.maven.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.indexer.search.RepositorySearchException;
+import org.apache.archiva.indexer.search.SearchFields;
+import org.apache.archiva.indexer.search.SearchResultHit;
+import org.apache.archiva.indexer.search.SearchResultLimits;
+import org.apache.archiva.indexer.search.SearchResults;
+import org.apache.archiva.indexer.util.SearchUtil;
+import org.apache.archiva.test.utils.ArchivaSpringJUnit4ClassRunner;
+import org.apache.maven.index_shaded.lucene.index.IndexUpgrader;
+import org.codehaus.plexus.util.FileUtils;
+import org.easymock.EasyMock;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.test.context.ContextConfiguration;
+
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+
+
+@RunWith( ArchivaSpringJUnit4ClassRunner.class )
+@ContextConfiguration( locations = { "classpath*:/META-INF/spring-context.xml", "classpath:/spring-context.xml" } )
+public class MavenRepositorySearchTest
+ extends AbstractMavenRepositorySearch
+{
+
+
+ private void createSimpleIndex( boolean scan )
+ throws Exception
+ {
+ List<Path> files = new ArrayList<>();
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos", TEST_REPO_1,
+ "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos", TEST_REPO_1,
+ "/org/apache/archiva/archiva-test/1.0/archiva-test-1.0.jar" ));
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos", TEST_REPO_1,
+ "org/apache/archiva/archiva-test/2.0/archiva-test-2.0.jar" ));
+
+ createIndex( TEST_REPO_1, files, scan );
+ }
+
+ private void createIndexContainingMoreArtifacts( boolean scan )
+ throws Exception
+ {
+ List<Path> files = new ArrayList<>();
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-test/1.0/archiva-test-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-test/2.0/archiva-test-2.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-webapp/1.0/archiva-webapp-1.0.war" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
+ "target/repos/" + TEST_REPO_1 + "/com/artifactid-numeric/1.0/artifactid-numeric-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/com/artifactid-numeric123/1.0/artifactid-numeric123-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
+ "target/repos/" + TEST_REPO_1 + "/com/classname-search/1.0/classname-search-1.0.jar" ) );
+
+ createIndex( TEST_REPO_1, files, scan );
+ }
+
+ private void createIndexContainingMultipleArtifactsSameVersion( boolean scan )
+ throws Exception
+ {
+ List<Path> files = new ArrayList<>();
+
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
+
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.pom" ) );
+
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0-sources.jar" ) );
+
+ createIndex( TEST_REPO_1, files, scan );
+ }
+
+ @Test
+ public void testQuickSearch()
+ throws Exception
+ {
+ createSimpleIndex( false );
+
+ List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
+
+ // search artifactId
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+
+ SearchResultHit hit =
+ results.getSearchResultHit( SearchUtil.getHitId( "org.apache.archiva", "archiva-search", null, "jar" ) );
+ assertNotNull( "hit null in result " + results.getHits(), hit );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-search", hit.getArtifactId() );
+ assertEquals( "1.0", hit.getVersions().get( 0 ) );
+
+ archivaConfigControl.reset();
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ // search groupId
+ archivaConfigControl.replay();
+
+ results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( "total hits not 3", 3, results.getTotalHits() );
+
+ //TODO: search for class & package names
+ }
+
+ @Test
+ public void testQuickSearchNotWithClassifier()
+ throws Exception
+ {
+ createSimpleIndex( true );
+
+ List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
+
+ // search artifactId
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+
+ SearchResultHit hit =
+ results.getSearchResultHit( SearchUtil.getHitId( "org.apache.archiva", "archiva-search", null, "jar" ) );
+ assertNotNull( "hit null in result " + results.getHits(), hit );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-search", hit.getArtifactId() );
+ assertEquals( "1.0", hit.getVersions().get( 0 ) );
+
+ archivaConfigControl.reset();
+
+ // search groupId
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ results = search.search( "user", selectedRepos, "archiva-search", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( "total hits not 3, hits " + results.getHits(), 3, results.getTotalHits() );
+
+ //TODO: search for class & package names
+ }
+
+ @Test
+ public void testQuickSearchMultipleArtifactsSameVersion()
+ throws Exception
+ {
+ createIndexContainingMultipleArtifactsSameVersion( false );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ // search artifactId
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 3, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-search", hit.getArtifactId() );
+ assertEquals( "1.0", hit.getVersions().get( 0 ) );
+
+ //only 1 version of 1.0 is retrieved
+ assertEquals( 1, hit.getVersions().size() );
+ }
+
+ @Test
+ public void testMultipleArtifactsSameVersionWithClassifier()
+ throws Exception
+ {
+ createIndexContainingMultipleArtifactsSameVersion( true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ // search artifactId
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.apache.archiva" );
+ searchFields.setArtifactId( "archiva-search" );
+ searchFields.setClassifier( "sources" );
+ searchFields.setRepositories( selectedRepos );
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 1, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-search", hit.getArtifactId() );
+ assertEquals( "1.0", hit.getVersions().get( 0 ) );
+
+ //only 1 version of 1.0 is retrieved
+ assertEquals( 1, hit.getVersions().size() );
+ }
+
+ // search for existing artifact using multiple keywords
+ @Test
+ public void testQuickSearchWithMultipleKeywords()
+ throws Exception
+ {
+ createSimpleIndex( false );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "archiva search", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 1, results.getTotalHits() );
+ }
+
+ @Test
+ public void testQuickSearchWithPagination()
+ throws Exception
+ {
+ createSimpleIndex( true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ // page 1
+ SearchResultLimits limits = new SearchResultLimits( 0 );
+ limits.setPageSize( 1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "org", limits, Collections.emptyList() );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 1, results.getHits().size() );
+ assertEquals( "total hits not 9 for page1 " + results, 9, results.getTotalHits() );
+ assertEquals( "returned hits not 1 for page1 " + results, 1, results.getReturnedHitsCount() );
+ assertEquals( limits, results.getLimits() );
+
+ archivaConfigControl.reset();
+
+ // page 2
+ limits = new SearchResultLimits( 1 );
+ limits.setPageSize( 1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ results = search.search( "user", selectedRepos, "org", limits, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+
+ assertEquals( "hits not 1", 1, results.getHits().size() );
+ assertEquals( "total hits not 9 for page 2 " + results, 9, results.getTotalHits() );
+ assertEquals( "returned hits not 1 for page2 " + results, 1, results.getReturnedHitsCount() );
+ assertEquals( limits, results.getLimits() );
+ }
+
+ @Test
+ public void testArtifactFoundInMultipleRepositories()
+ throws Exception
+ {
+ createSimpleIndex( true );
+
+ List<Path> files = new ArrayList<>();
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
+ + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
+ + "/org/apache/archiva/archiva-search/1.1/archiva-search-1.1.jar" ) );
+ createIndex( TEST_REPO_2, files, false );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+ selectedRepos.add( TEST_REPO_2 );
+
+ config.addManagedRepository( createRepositoryConfig( TEST_REPO_2 ) );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ // wait lucene flush.....
+ Thread.sleep( 2000 );
+
+ SearchResults results = search.search( "user", selectedRepos, "archiva-search", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+
+ SearchResultHit hit =
+ results.getSearchResultHit( SearchUtil.getHitId( "org.apache.archiva", "archiva-search", null, "jar" ) );
+ assertNotNull(hit);
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-search", hit.getArtifactId() );
+ assertEquals( "not 2 versions for hit " + hit + "::" + niceDisplay( results ), 2, hit.getVersions().size() );
+ assertTrue( hit.getVersions().contains( "1.0" ) );
+ assertTrue( hit.getVersions().contains( "1.1" ) );
+
+ archivaConfigControl.reset();
+
+ // TODO: [BROWSE] in artifact info from browse, display all the repositories where the artifact is found
+ }
+
+ @Test
+ public void testNoMatchFound()
+ throws Exception
+ {
+ createSimpleIndex( false );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "dfghdfkweriuasndsaie", null, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 0, results.getTotalHits() );
+ }
+
+ @Test
+ public void testNoIndexFound()
+ throws Exception
+ {
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
+ assertNotNull( results );
+ assertEquals( 0, results.getTotalHits() );
+
+ archivaConfigControl.verify();
+ }
+
+ @Test
+ public void testRepositoryNotFound()
+ throws Exception
+ {
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( "non-existing-repo" );
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "org.apache.archiva", null, null );
+ assertNotNull( results );
+ assertEquals( 0, results.getTotalHits() );
+
+ archivaConfigControl.verify();
+ }
+
+ @Test
+ public void testSearchWithinSearchResults()
+ throws Exception
+ {
+ createSimpleIndex( true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ List<String> previousSearchTerms = new ArrayList<>();
+ previousSearchTerms.add( "archiva-test" );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", selectedRepos, "1.0", null, previousSearchTerms );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( "total hits not 1", 1, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-test", hit.getArtifactId() );
+ assertEquals( "versions not 1", 1, hit.getVersions().size() );
+ assertEquals( "1.0", hit.getVersions().get( 0 ) );
+ }
+
+ // tests for advanced search
+ @Test
+ public void testAdvancedSearch()
+ throws Exception
+ {
+ List<Path> files = new ArrayList<>();
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
+ + "/org/apache/archiva/archiva-search/1.0/archiva-search-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_2
+ + "/org/apache/archiva/archiva-search/1.1/archiva-search-1.1.jar" ) );
+ createIndex( TEST_REPO_2, files, false );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_2 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.apache.archiva" );
+ searchFields.setVersion( "1.0" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 1, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-search", hit.getArtifactId() );
+ assertEquals( "1.0", hit.getVersions().get( 0 ) );
+ }
+
+ @Test
+ public void testAdvancedSearchWithPagination()
+ throws Exception
+ {
+ createIndexContainingMoreArtifacts( false );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.apache.archiva" );
+ searchFields.setRepositories( selectedRepos );
+
+ // page 1
+
+ SearchResultLimits limits = new SearchResultLimits( 0 );
+ limits.setPageSize( 1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, limits );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 4, results.getTotalHits() );
+ assertEquals( 1, results.getHits().size() );
+
+ // page 2
+ archivaConfigControl.reset();
+
+ limits = new SearchResultLimits( 1 );
+ limits.setPageSize( 1 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ results = search.search( "user", searchFields, limits );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 4, results.getTotalHits() );
+ assertEquals( 1, results.getHits().size() );
+ }
+
+ // MRM-981 - artifactIds with numeric characters aren't found in advanced search
+ @Test
+ public void testAdvancedSearchArtifactIdHasNumericChar()
+ throws Exception
+ {
+ List<Path> files = new ArrayList<>();
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(),
+ "target/repos/" + TEST_REPO_1 + "/com/artifactid-numeric/1.0/artifactid-numeric-1.0.jar" ) );
+ files.add( Paths.get( org.apache.archiva.common.utils.FileUtils.getBasedir(), "target/repos/" + TEST_REPO_1
+ + "/com/artifactid-numeric123/1.0/artifactid-numeric123-1.0.jar" ) );
+ createIndex( TEST_REPO_1, files, true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setArtifactId( "artifactid-numeric" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 2, results.getTotalHits() );
+ }
+
+ @Test
+ public void testAdvancedSearchNoRepositoriesConfigured()
+ throws Exception
+ {
+ SearchFields searchFields = new SearchFields();
+ searchFields.setArtifactId( "archiva" );
+ searchFields.setRepositories( null );
+
+ try
+ {
+ search.search( "user", searchFields, null );
+ fail( "A RepositorySearchException should have been thrown." );
+ }
+ catch ( RepositorySearchException e )
+ {
+ assertEquals( "Repositories cannot be null.", e.getMessage() );
+ }
+ }
+
+ @Test
+ public void testAdvancedSearchSearchFieldsAreNull()
+ throws Exception
+ {
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setRepositories( selectedRepos );
+
+ try
+ {
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ fail( "A RepositorySearchException should have been thrown." );
+ }
+ catch ( RepositorySearchException e )
+ {
+ assertEquals( "No search fields set.", e.getMessage() );
+ }
+ }
+
+ @Test
+ public void testAdvancedSearchSearchFieldsAreBlank()
+ throws Exception
+ {
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "" );
+ searchFields.setArtifactId( "" );
+ searchFields.setVersion( "" );
+ searchFields.setPackaging( "" );
+ searchFields.setClassName( "" );
+
+ searchFields.setRepositories( selectedRepos );
+
+ try
+ {
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+ archivaConfigControl.replay();
+
+ search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ fail( "A RepositorySearchException should have been thrown." );
+ }
+ catch ( RepositorySearchException e )
+ {
+ assertEquals( "No search fields set.", e.getMessage() );
+ }
+ }
+
+ @Test
+ public void testAdvancedSearchAllSearchCriteriaSpecified()
+ throws Exception
+ {
+ createSimpleIndex( true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.apache.archiva" );
+ searchFields.setArtifactId( "archiva-test" );
+ searchFields.setVersion( "2.0" );
+ searchFields.setPackaging( "jar" );
+ searchFields.setClassName( "org.apache.archiva.test.App" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+
+ assertEquals( "total hits not 1 " + results, 1, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "org.apache.archiva", hit.getGroupId() );
+ assertEquals( "archiva-test", hit.getArtifactId() );
+ assertEquals( "version not 2.0", "2.0", hit.getVersions().get( 0 ) );
+ }
+
+ @Test
+ public void testAdvancedSearchJarArtifacts()
+ throws Exception
+ {
+ createIndexContainingMoreArtifacts( true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setPackaging( "jar" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( "not 8 but " + results.getTotalHits() + ":" + niceDisplay( results ), 8, results.getTotalHits() );
+ }
+
+ @Test
+ public void testAdvancedSearchWithIncorrectPackaging()
+ throws Exception
+ {
+ createSimpleIndex( true );
+
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.apache.archiva" );
+ searchFields.setArtifactId( "archiva-test" );
+ searchFields.setVersion( "2.0" );
+ searchFields.setPackaging( "war" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 0, results.getTotalHits() );
+ }
+
+ @Test
+ public void testAdvancedSearchClassname()
+ throws Exception
+ {
+ createIndexContainingMoreArtifacts( true );
+
+ List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setClassName( "com.classname.search.App" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( "totalHits not 1 results " + results, 1, results.getTotalHits() );
+
+ SearchResultHit hit = results.getHits().get( 0 );
+ assertEquals( "groupId not com", "com", hit.getGroupId() );
+ assertEquals( "arttifactId not classname-search", "classname-search", hit.getArtifactId() );
+ assertEquals( " hits.version(0) not 1.0", "1.0", hit.getVersions().get( 0 ) );
+ }
+
+ @Test
+ public void testAdvancedSearchNoIndexFound()
+ throws Exception
+ {
+ List<String> selectedRepos = new ArrayList<>();
+ selectedRepos.add( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.apache.archiva" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 0, results.getTotalHits() );
+ }
+
+ @Test
+ public void testAdvancedSearchClassNameInWar()
+ throws Exception
+ {
+ createIndexContainingMoreArtifacts( true );
+
+ List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setClassName( "SomeClass" );
+ searchFields.setRepositories( selectedRepos );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 1, results.getHits().size() );
+ assertEquals( "test-webapp", results.getHits().get( 0 ).getArtifactId() );
+ }
+
+ @Test
+ public void getAllGroupIds()
+ throws Exception
+ {
+ createIndexContainingMoreArtifacts( true );
+
+ List<String> selectedRepos = Arrays.asList( TEST_REPO_1 );
+
+ EasyMock.expect( archivaConfig.getConfiguration() ).andReturn( config ).times( 0, 2 );
+
+ archivaConfigControl.replay();
+
+ Collection<String> groupIds = search.getAllGroupIds( "user", selectedRepos );
+
+ archivaConfigControl.verify();
+
+ log.info( "groupIds: {}", groupIds );
+
+ assertEquals( 3, groupIds.size() );
+ assertTrue( groupIds.contains( "com" ) );
+ assertTrue( groupIds.contains( "org.apache.felix" ) );
+ assertTrue( groupIds.contains( "org.apache.archiva" ) );
+ }
+
+ @Test
+ public void testSearchWithUnknownRepo()
+ throws Exception
+ {
+ createIndexContainingMoreArtifacts( true );
+
+ List<String> selectedRepos = Arrays.asList( "foo" );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setClassName( "SomeClass" );
+ searchFields.setRepositories( selectedRepos );
+
+ archivaConfigControl.replay();
+
+ SearchResults results = search.search( "user", searchFields, null );
+
+ archivaConfigControl.verify();
+
+ assertNotNull( results );
+ assertEquals( 0, results.getHits().size() );
+ }
+
+ @Test
+ public void nolimitedResult()
+ throws Exception
+ {
+
+ Path repo = Paths.get( "target/repo-release" );
+ FileUtils.deleteDirectory(repo.toFile());
+ Path indexDirectory = repo.resolve(".index" );
+ FileUtils.copyDirectoryStructure( Paths.get( "src/test/repo-release" ).toFile(), repo.toFile() );
+
+ IndexUpgrader.main( new String[]{ indexDirectory.toAbsolutePath().toString() } );
+
+ createIndex(REPO_RELEASE, Collections.emptyList(), false, indexDirectory );
+
+// indexer.addIndexingContext( REPO_RELEASE, REPO_RELEASE, repo.toFile(), indexDirectory.toFile(),
+// repo.toUri().toURL().toExternalForm(),
+// indexDirectory.toUri().toURL().toString(), indexCreators );
+
+
+
+ SearchResultLimits limits = new SearchResultLimits( SearchResultLimits.ALL_PAGES );
+ limits.setPageSize( 300 );
+
+ EasyMock.expect( archivaConfig.getDefaultLocale() ).andReturn( Locale.getDefault( ) ).anyTimes();
+ EasyMock.expect( archivaConfig.getConfiguration()).andReturn(config).anyTimes();
+
+ archivaConfigControl.replay();
+
+ SearchResults searchResults = search.search( null, Arrays.asList( REPO_RELEASE ), //
+ "org.example", limits, //
+ Collections.emptyList() );
+
+ log.info( "results: {}", searchResults.getHits().size() );
+
+ assertEquals( 255, searchResults.getHits().size() );
+
+ SearchFields searchFields = new SearchFields();
+ searchFields.setGroupId( "org.example" );
+ searchFields.setRepositories( Arrays.asList( REPO_RELEASE ) );
+
+ searchResults = search.search( null, searchFields, limits );
+
+ log.info( "results: {}", searchResults.getHits().size() );
+
+ assertEquals( 255, searchResults.getHits().size() );
+
+ archivaConfigControl.verify();
+ }
+}
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-search</artifactId>
+ <packaging>jar</packaging>
+ <version>1.0</version>
+ <name>Archiva Search</name>
+ <url>http://archiva.apache.org</url>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>2.3</version>
+ </dependency>
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <version>1.2.8</version>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-search</artifactId>
+ <packaging>jar</packaging>
+ <version>1.1</version>
+ <name>Archiva Search</name>
+ <url>http://archiva.apache.org</url>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>2.3</version>
+ </dependency>
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <version>1.2.8</version>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>com</groupId>
+ <artifactId>artifactid-numeric</artifactId>
+ <packaging>jar</packaging>
+ <version>1.0</version>
+ <name>ArtifactID numeric - NOT</name>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>com</groupId>
+ <artifactId>artifactid-numeric123</artifactId>
+ <packaging>jar</packaging>
+ <version>1.0</version>
+ <name>ArtifactID numeric</name>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>com</groupId>
+ <artifactId>classname-search</artifactId>
+ <packaging>jar</packaging>
+ <version>1.0</version>
+ <name>classname-search</name>
+ <url>http://maven.apache.org</url>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-search</artifactId>
+ <packaging>jar</packaging>
+ <version>1.0</version>
+ <name>Archiva Search</name>
+ <url>http://archiva.apache.org</url>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>2.3</version>
+ </dependency>
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <version>1.2.8</version>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-test</artifactId>
+ <packaging>jar</packaging>
+ <version>1.0</version>
+ <name>archiva-test</name>
+ <url>http://maven.apache.org</url>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>2.3</version>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-test</artifactId>
+ <packaging>jar</packaging>
+ <version>2.0</version>
+ <name>Archiva Test</name>
+ <url>http://archiva.apache.org</url>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>2.3</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>1.4</version>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-webapp</artifactId>
+ <packaging>war</packaging>
+ <version>1.0</version>
+ <name>Archiva Webapp</name>
+ <url>http://archiva.apache.org</url>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>3.8.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <version>1.2.8</version>
+ </dependency>
+ </dependencies>
+</project>
--- /dev/null
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>felix-parent</artifactId>
+ <version>2.1</version>
+ <relativePath>../pom/pom.xml</relativePath>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <packaging>bundle</packaging>
+ <name>Apache Felix Bundle Repository</name>
+ <description>Bundle repository service.</description>
+ <artifactId>org.apache.felix.bundlerepository</artifactId>
+ <version>1.6.6</version>
+ <scm>
+ <connection>scm:svn:http://svn.apache.org/repos/asf/felix/releases/org.apache.felix.bundlerepository-1.6.6</connection>
+ <developerConnection>scm:svn:https://svn.apache.org/repos/asf/felix/releases/org.apache.felix.bundlerepository-1.6.6</developerConnection>
+ <url>http://svn.apache.org/repos/asf/felix/releases/org.apache.felix.bundlerepository-1.6.6</url>
+ </scm>
+ <dependencies>
+ <dependency>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>org.apache.felix.utils</artifactId>
+ <version>1.1.0</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>org.osgi.service.obr</artifactId>
+ <version>1.0.2</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>org.apache.felix.shell</artifactId>
+ <version>1.4.1</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>net.sf.kxml</groupId>
+ <artifactId>kxml2</artifactId>
+ <version>2.3.0</version>
+ <optional>true</optional>
+ <exclusions>
+ <exclusion>
+ <groupId>xmlpull</groupId>
+ <artifactId>xmlpull</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.osgi</groupId>
+ <artifactId>org.osgi.compendium</artifactId>
+ <version>4.0.0</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.osgi</groupId>
+ <artifactId>org.osgi.core</artifactId>
+ <version>4.1.0</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.woodstox</groupId>
+ <artifactId>woodstox-core-asl</artifactId>
+ <version>4.0.7</version>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.easymock</groupId>
+ <artifactId>easymock</artifactId>
+ <version>2.4</version>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.felix</groupId>
+ <artifactId>maven-bundle-plugin</artifactId>
+ <version>2.3.4</version>
+ <extensions>true</extensions>
+ <configuration>
+ <instructions>
+ <Export-Package>org.apache.felix.bundlerepository;version="2.0"</Export-Package>
+ <Private-Package>
+ org.kxml2.io,
+ org.xmlpull.v1,
+ org.apache.felix.bundlerepository.impl.*,
+ org.apache.felix.utils.*
+ </Private-Package>
+ <Import-Package>!javax.xml.parsers,!org.xml.sax,org.osgi.service.log;resolution:=optional,org.osgi.service.obr;resolution:=optional,javax.xml.stream;resolution:=optional,*</Import-Package>
+ <DynamicImport-Package>org.apache.felix.shell</DynamicImport-Package>
+ <Bundle-Activator>${project.artifactId}.impl.Activator</Bundle-Activator>
+ <Bundle-DocURL>http://felix.apache.org/site/apache-felix-osgi-bundle-repository.html</Bundle-DocURL>
+ <Bundle-Url>http://felix.apache.org/site/downloads.cgi</Bundle-Url>
+ <Bundle-Source>http://felix.apache.org/site/downloads.cgi</Bundle-Source>
+ <Bundle-SymbolicName>${project.artifactId}</Bundle-SymbolicName>
+ <Bundle-Vendor>The Apache Software Foundation</Bundle-Vendor>
+ <Export-Service>org.apache.felix.bundlerepository.RepositoryAdmin,org.osgi.service.obr.RepositoryAdmin</Export-Service>
+ <_versionpolicy>[$(version;==;$(@)),$(version;+;$(@)))</_versionpolicy>
+ <Include-Resource>META-INF/LICENSE=LICENSE,META-INF/LICENSE.kxml2=LICENSE.kxml2,META-INF/NOTICE=NOTICE,META-INF/DEPENDENCIES=DEPENDENCIES</Include-Resource>
+ </instructions>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.rat</groupId>
+ <artifactId>apache-rat-plugin</artifactId>
+ <configuration>
+ <excludeSubProjects>false</excludeSubProjects>
+ <useEclipseDefaultExcludes>true</useEclipseDefaultExcludes>
+ <useMavenDefaultExcludes>true</useMavenDefaultExcludes>
+ <excludes>
+ <param>doc/*</param>
+ <param>maven-eclipse.xml</param>
+ <param>.checkstyle</param>
+ <param>.externalToolBuilders/*</param>
+ </excludes>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+</project>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+
+<configuration status="debug">
+ <appenders>
+ <Console name="console" target="SYSTEM_OUT">
+ <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
+ </Console>
+ </appenders>
+ <loggers>
+
+ <logger name="org.apache.archiva.repository" level="info"/>
+ <logger name="org.apache.archiva.indexer" level="info" />
+
+ <logger name="JPOX" level="error"/>
+
+
+ <logger name="org.springframework" level="error"/>
+
+
+ <root level="info">
+ <appender-ref ref="console"/>
+ </root>
+ </loggers>
+</configuration>
+
+
--- /dev/null
+<?xml version="1.0"?>
+
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+<beans xmlns="http://www.springframework.org/schema/beans"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:context="http://www.springframework.org/schema/context" xmlns:tx="http://www.springframework.org/schema/tx"
+ xsi:schemaLocation="http://www.springframework.org/schema/beans
+ http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
+ http://www.springframework.org/schema/context
+ http://www.springframework.org/schema/context/spring-context-3.0.xsd http://www.springframework.org/schema/tx http://www.springframework.org/schema/tx/spring-tx.xsd"
+ default-lazy-init="false">
+
+ <context:annotation-config/>
+ <context:component-scan base-package="org.apache.archiva.indexer.maven,org.apache.archiva.repository,org.apache.archiva.repository.content.maven2" />
+
+
+ <bean name="wagon#file" scope="prototype" class="org.apache.maven.wagon.providers.file.FileWagon"/>
+
+ <bean name="scheduler" class="org.apache.archiva.redback.components.scheduler.DefaultScheduler">
+ <property name="properties">
+ <props>
+ <prop key="org.quartz.scheduler.instanceName">scheduler1</prop>
+ <prop key="org.quartz.threadPool.class">org.quartz.simpl.SimpleThreadPool</prop>
+ <prop key="org.quartz.threadPool.threadCount">2</prop>
+ <prop key="org.quartz.threadPool.threadPriority">4</prop>
+ <prop key="org.quartz.jobStore.class">org.quartz.simpl.RAMJobStore</prop>
+ </props>
+ </property>
+ </bean>
+ <alias name="userConfiguration#redback" alias="userConfiguration#default"/>
+
+ <!-- ***
+ JPA settings
+ *** -->
+ <bean name="entityManagerFactory" class="org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean">
+ <property name="jpaVendorAdapter" >
+ <bean class="org.springframework.orm.jpa.vendor.OpenJpaVendorAdapter" />
+ </property>
+ <property name="persistenceXmlLocation" value="classpath:META-INF/persistence-hsqldb.xml" />
+ <property name="jpaPropertyMap">
+ <map>
+ <entry key="openjpa.ConnectionURL" value="jdbc:hsqldb:mem:redback_database" />
+ <entry key="openjpa.ConnectionDriverName" value="org.hsqldb.jdbcDriver" />
+ <entry key="openjpa.ConnectionUserName" value="sa" />
+ <entry key="openjpa.ConnectionPassword" value="" />
+ <entry key="openjpa.Log" value="${openjpa.Log:DefaultLevel=INFO,Runtime=ERROR,Tool=ERROR,SQL=ERROR,Schema=ERROR,MetaData=ERROR}" />
+ <entry key="openjpa.jdbc.SynchronizeMappings" value="buildSchema(ForeignKeys=true)" />
+ <entry key="openjpa.jdbc.MappingDefaults"
+ value="ForeignKeyDeleteAction=restrict,JoinForeignKeyDeleteAction=restrict"/>
+ </map>
+ </property>
+
+ </bean>
+
+ <bean name="transactionManager" class="org.springframework.orm.jpa.JpaTransactionManager" >
+ <property name="entityManagerFactory" ref="entityManagerFactory" />
+ </bean>
+
+ <tx:annotation-driven />
+ <!-- ***
+ End of JPA settings
+ *** -->
+
+</beans>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven</artifactId>
+ <version>3.0.0-SNAPSHOT</version>
+ </parent>
+
+ <artifactId>archiva-maven-metadata</artifactId>
+
+ <name>Archiva :: Maven :: Metadata</name>
+
+
+ <properties>
+ <site.staging.base>${project.parent.parent.basedir}</site.staging.base>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-model</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-xml-tools</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+</project>
--- /dev/null
+package org.apache.archiva.maven2.metadata;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.archiva.model.ArchivaRepositoryMetadata;
+import org.apache.archiva.model.Plugin;
+import org.apache.archiva.model.SnapshotVersion;
+import org.apache.archiva.xml.XMLException;
+import org.apache.archiva.xml.XMLReader;
+import org.apache.commons.lang.math.NumberUtils;
+import org.dom4j.Element;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Date;
+
+/**
+ * @author Olivier Lamy
+ * @since 1.4-M3
+ */
+public class MavenMetadataReader
+{
+ /*
+ <?xml version="1.0" encoding="UTF-8"?>
+ <metadata modelVersion="1.1.0">
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva</artifactId>
+ <version>1.4-M3-SNAPSHOT</version>
+ <versioning>
+ <snapshot>
+ <timestamp>20120310.230917</timestamp>
+ <buildNumber>2</buildNumber>
+ </snapshot>
+ <lastUpdated>20120310230917</lastUpdated>
+ <snapshotVersions>
+ <snapshotVersion>
+ <extension>pom</extension>
+ <value>1.4-M3-20120310.230917-2</value>
+ <updated>20120310230917</updated>
+ </snapshotVersion>
+ </snapshotVersions>
+ </versioning>
+ </metadata>
+ */
+
+ private static final Logger log = LoggerFactory.getLogger( MavenMetadataReader.class );
+
+ /**
+ * Read and return the {@link org.apache.archiva.model.ArchivaRepositoryMetadata} object from the provided xml file.
+ *
+ * @param metadataFile the maven-metadata.xml file to read.
+ * @return the archiva repository metadata object that represents the provided file contents.
+ * @throws XMLException
+ */
+ public static ArchivaRepositoryMetadata read( Path metadataFile )
+ throws XMLException
+ {
+
+ XMLReader xml = new XMLReader( "metadata", metadataFile );
+ // invoke this to remove namespaces, see MRM-1136
+ xml.removeNamespaces();
+
+ ArchivaRepositoryMetadata metadata = new ArchivaRepositoryMetadata();
+
+ metadata.setGroupId( xml.getElementText( "//metadata/groupId" ) );
+ metadata.setArtifactId( xml.getElementText( "//metadata/artifactId" ) );
+ metadata.setVersion( xml.getElementText( "//metadata/version" ) );
+ Date modTime;
+ try
+ {
+ modTime = new Date(Files.getLastModifiedTime( metadataFile ).toMillis( ));
+ }
+ catch ( IOException e )
+ {
+ modTime = new Date();
+ log.error("Could not read modification time of {}", metadataFile);
+ }
+ metadata.setFileLastModified( modTime );
+ try
+ {
+ metadata.setFileSize( Files.size( metadataFile ) );
+ }
+ catch ( IOException e )
+ {
+ metadata.setFileSize( 0 );
+ log.error("Could not read file size of {}", metadataFile);
+ }
+
+ metadata.setLastUpdated( xml.getElementText( "//metadata/versioning/lastUpdated" ) );
+ metadata.setLatestVersion( xml.getElementText( "//metadata/versioning/latest" ) );
+ metadata.setReleasedVersion( xml.getElementText( "//metadata/versioning/release" ) );
+ metadata.setAvailableVersions( xml.getElementListText( "//metadata/versioning/versions/version" ) );
+
+ Element snapshotElem = xml.getElement( "//metadata/versioning/snapshot" );
+ if ( snapshotElem != null )
+ {
+ SnapshotVersion snapshot = new SnapshotVersion();
+ snapshot.setTimestamp( snapshotElem.elementTextTrim( "timestamp" ) );
+ String tmp = snapshotElem.elementTextTrim( "buildNumber" );
+ if ( NumberUtils.isNumber( tmp ) )
+ {
+ snapshot.setBuildNumber( NumberUtils.toInt( tmp ) );
+ }
+ metadata.setSnapshotVersion( snapshot );
+ }
+
+ for ( Element plugin : xml.getElementList( "//metadata/plugins/plugin" ) )
+ {
+ Plugin p = new Plugin();
+ p.setPrefix( plugin.elementTextTrim( "prefix" ) );
+ p.setArtifactId( plugin.elementTextTrim( "artifactId" ) );
+ p.setName( plugin.elementTextTrim( "name" ) );
+ metadata.addPlugin( p );
+ }
+
+ return metadata;
+
+ }
+}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven</artifactId>
+ <version>3.0.0-SNAPSHOT</version>
+ </parent>
+
+ <artifactId>archiva-maven-model</artifactId>
+ <name>Archiva :: Maven :: Model</name>
+
+
+ <properties>
+ <site.staging.base>${project.parent.parent.basedir}</site.staging.base>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-annotations</artifactId>
+ </dependency>
+ </dependencies>
+
+</project>
--- /dev/null
+package org.apache.archiva.maven2.model;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.List;
+
+@XmlRootElement( name = "artifact" )
+public class Artifact
+ implements Serializable
+{
+ // The (optional) context for this result.
+ private String context;
+
+ // Basic hit, direct to non-artifact resource.
+ private String url;
+
+ // Advanced hit, reference to groupId.
+ private String groupId;
+
+ // Advanced hit, reference to artifactId.
+ private String artifactId;
+
+ private String repositoryId;
+
+ private String version;
+
+ /**
+ * Plugin goal prefix (only if packaging is "maven-plugin")
+ */
+ private String prefix;
+
+ /**
+ * Plugin goals (only if packaging is "maven-plugin")
+ */
+ private List<String> goals;
+
+ /**
+ * contains osgi metadata Bundle-Version if available
+ *
+ * @since 1.4-M1
+ */
+ private String bundleVersion;
+
+ /**
+ * contains osgi metadata Bundle-SymbolicName if available
+ *
+ * @since 1.4-M1
+ */
+ private String bundleSymbolicName;
+
+ /**
+ * contains osgi metadata Export-Package if available
+ *
+ * @since 1.4-M1
+ */
+ private String bundleExportPackage;
+
+ /**
+ * contains osgi metadata Export-Service if available
+ *
+ * @since 1.4-M1
+ */
+ private String bundleExportService;
+
+ /**
+ * contains osgi metadata Bundle-Description if available
+ *
+ * @since 1.4-M1
+ */
+ private String bundleDescription;
+
+ /**
+ * contains osgi metadata Bundle-Name if available
+ *
+ * @since 1.4-M1
+ */
+ private String bundleName;
+
+ /**
+ * contains osgi metadata Bundle-License if available
+ *
+ * @since 1.4-M1
+ */
+ private String bundleLicense;
+
+ /**
+ * contains osgi metadata Bundle-DocURL if available
+ *
+ * @since 1.4-M1
+ */
+ private String bundleDocUrl;
+
+ /**
+ * contains osgi metadata Import-Package if available
+ *
+ * @since 1.4-M1
+ */
+ private String bundleImportPackage;
+
+ /**
+ * contains osgi metadata Require-Bundle if available
+ *
+ * @since 1.4-M1
+ */
+ private String bundleRequireBundle;
+
+ private String classifier;
+
+ private String packaging;
+
+ /**
+ * file extension of the artifact
+ *
+ * @since 1.4-M2
+ */
+ private String fileExtension;
+
+ /**
+ * human readable size : not available for all services
+ *
+ * @since 1.4-M3
+ */
+ private String size;
+
+ /**
+ * @since 1.4-M3
+ */
+ private String type;
+
+
+ /**
+ * @since 1.4-M3
+ */
+ private String path;
+
+ /**
+ * concat of artifactId+'-'+version+'.'+type
+ *
+ * @since 1.4-M3
+ */
+ private String id;
+
+ /**
+ * @since 1.4-M3
+ */
+ private String scope;
+
+
+ public Artifact()
+ {
+ // no op
+ }
+
+ public Artifact( String groupId, String artifactId, String version )
+ {
+ this.artifactId = artifactId;
+ this.groupId = groupId;
+ this.version = version;
+ }
+
+ /**
+ * @since 1.4-M3
+ */
+ public Artifact( String groupId, String artifactId, String version, String scope )
+ {
+ this( groupId, artifactId, version );
+ this.scope = scope;
+ }
+
+ /**
+ * @since 1.4-M3
+ */
+ public Artifact( String groupId, String artifactId, String version, String scope, String classifier )
+ {
+ this( groupId, artifactId, version );
+ this.scope = scope;
+ this.classifier = classifier;
+ }
+
+ public String getGroupId()
+ {
+ return groupId;
+ }
+
+ public String getArtifactId()
+ {
+ return artifactId;
+ }
+
+ public String getVersion()
+ {
+ return version;
+ }
+
+ public String getRepositoryId()
+ {
+ return repositoryId;
+ }
+
+ public void setGroupId( String groupId )
+ {
+ this.groupId = groupId;
+ }
+
+ public void setArtifactId( String artifactId )
+ {
+ this.artifactId = artifactId;
+ }
+
+ public void setVersion( String version )
+ {
+ this.version = version;
+ }
+
+ public void setRepositoryId( String repositoryId )
+ {
+ this.repositoryId = repositoryId;
+ }
+
+ public String getContext()
+ {
+ return context;
+ }
+
+ public void setContext( String context )
+ {
+ this.context = context;
+ }
+
+ public String getUrl()
+ {
+ return url;
+ }
+
+ public void setUrl( String url )
+ {
+ this.url = url;
+ }
+
+ public String getPrefix()
+ {
+ return prefix;
+ }
+
+ public void setPrefix( String prefix )
+ {
+ this.prefix = prefix;
+ }
+
+ public List<String> getGoals()
+ {
+ return goals;
+ }
+
+ public void setGoals( List<String> goals )
+ {
+ this.goals = goals;
+ }
+
+ public String getBundleVersion()
+ {
+ return bundleVersion;
+ }
+
+ public void setBundleVersion( String bundleVersion )
+ {
+ this.bundleVersion = bundleVersion;
+ }
+
+ public String getBundleSymbolicName()
+ {
+ return bundleSymbolicName;
+ }
+
+ public void setBundleSymbolicName( String bundleSymbolicName )
+ {
+ this.bundleSymbolicName = bundleSymbolicName;
+ }
+
+ public String getBundleExportPackage()
+ {
+ return bundleExportPackage;
+ }
+
+ public void setBundleExportPackage( String bundleExportPackage )
+ {
+ this.bundleExportPackage = bundleExportPackage;
+ }
+
+ public String getBundleExportService()
+ {
+ return bundleExportService;
+ }
+
+ public void setBundleExportService( String bundleExportService )
+ {
+ this.bundleExportService = bundleExportService;
+ }
+
+ public String getBundleDescription()
+ {
+ return bundleDescription;
+ }
+
+ public void setBundleDescription( String bundleDescription )
+ {
+ this.bundleDescription = bundleDescription;
+ }
+
+ public String getBundleName()
+ {
+ return bundleName;
+ }
+
+ public void setBundleName( String bundleName )
+ {
+ this.bundleName = bundleName;
+ }
+
+ public String getBundleLicense()
+ {
+ return bundleLicense;
+ }
+
+ public void setBundleLicense( String bundleLicense )
+ {
+ this.bundleLicense = bundleLicense;
+ }
+
+ public String getBundleDocUrl()
+ {
+ return bundleDocUrl;
+ }
+
+ public void setBundleDocUrl( String bundleDocUrl )
+ {
+ this.bundleDocUrl = bundleDocUrl;
+ }
+
+ public String getBundleImportPackage()
+ {
+ return bundleImportPackage;
+ }
+
+ public void setBundleImportPackage( String bundleImportPackage )
+ {
+ this.bundleImportPackage = bundleImportPackage;
+ }
+
+ public String getBundleRequireBundle()
+ {
+ return bundleRequireBundle;
+ }
+
+ public void setBundleRequireBundle( String bundleRequireBundle )
+ {
+ this.bundleRequireBundle = bundleRequireBundle;
+ }
+
+ public String getClassifier()
+ {
+ return classifier;
+ }
+
+ public void setClassifier( String classifier )
+ {
+ this.classifier = classifier;
+ }
+
+
+ public String getPackaging()
+ {
+ return packaging;
+ }
+
+ public void setPackaging( String packaging )
+ {
+ this.packaging = packaging;
+ }
+
+ public String getFileExtension()
+ {
+ return fileExtension;
+ }
+
+ public void setFileExtension( String fileExtension )
+ {
+ this.fileExtension = fileExtension;
+ }
+
+ public String getSize()
+ {
+ return size;
+ }
+
+ public void setSize( String size )
+ {
+ this.size = size;
+ }
+
+ public String getType()
+ {
+ return type;
+ }
+
+ public void setType( String type )
+ {
+ this.type = type;
+ }
+
+ public String getPath()
+ {
+ return path;
+ }
+
+ public void setPath( String path )
+ {
+ this.path = path;
+ }
+
+ public String getId()
+ {
+ return id;
+ }
+
+ public void setId( String id )
+ {
+ this.id = id;
+ }
+
+ public String getScope()
+ {
+ return scope;
+ }
+
+ public void setScope( String scope )
+ {
+ this.scope = scope;
+ }
+
+ @Override
+ public String toString()
+ {
+ final StringBuilder sb = new StringBuilder();
+ sb.append( "Artifact" );
+ sb.append( "{context='" ).append( context ).append( '\'' );
+ sb.append( ", url='" ).append( url ).append( '\'' );
+ sb.append( ", groupId='" ).append( groupId ).append( '\'' );
+ sb.append( ", artifactId='" ).append( artifactId ).append( '\'' );
+ sb.append( ", repositoryId='" ).append( repositoryId ).append( '\'' );
+ sb.append( ", version='" ).append( version ).append( '\'' );
+ sb.append( ", prefix='" ).append( prefix ).append( '\'' );
+ sb.append( ", goals=" ).append( goals );
+ sb.append( ", bundleVersion='" ).append( bundleVersion ).append( '\'' );
+ sb.append( ", bundleSymbolicName='" ).append( bundleSymbolicName ).append( '\'' );
+ sb.append( ", bundleExportPackage='" ).append( bundleExportPackage ).append( '\'' );
+ sb.append( ", bundleExportService='" ).append( bundleExportService ).append( '\'' );
+ sb.append( ", bundleDescription='" ).append( bundleDescription ).append( '\'' );
+ sb.append( ", bundleName='" ).append( bundleName ).append( '\'' );
+ sb.append( ", bundleLicense='" ).append( bundleLicense ).append( '\'' );
+ sb.append( ", bundleDocUrl='" ).append( bundleDocUrl ).append( '\'' );
+ sb.append( ", bundleImportPackage='" ).append( bundleImportPackage ).append( '\'' );
+ sb.append( ", bundleRequireBundle='" ).append( bundleRequireBundle ).append( '\'' );
+ sb.append( ", classifier='" ).append( classifier ).append( '\'' );
+ sb.append( ", packaging='" ).append( packaging ).append( '\'' );
+ sb.append( ", fileExtension='" ).append( fileExtension ).append( '\'' );
+ sb.append( ", size='" ).append( size ).append( '\'' );
+ sb.append( ", type='" ).append( type ).append( '\'' );
+ sb.append( ", path='" ).append( path ).append( '\'' );
+ sb.append( ", id='" ).append( id ).append( '\'' );
+ sb.append( '}' );
+ return sb.toString();
+ }
+
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( this == o )
+ {
+ return true;
+ }
+ if ( !( o instanceof Artifact ) )
+ {
+ return false;
+ }
+
+ Artifact artifact = (Artifact) o;
+
+ if ( !artifactId.equals( artifact.artifactId ) )
+ {
+ return false;
+ }
+ if ( bundleDescription != null
+ ? !bundleDescription.equals( artifact.bundleDescription )
+ : artifact.bundleDescription != null )
+ {
+ return false;
+ }
+ if ( bundleDocUrl != null ? !bundleDocUrl.equals( artifact.bundleDocUrl ) : artifact.bundleDocUrl != null )
+ {
+ return false;
+ }
+ if ( bundleExportPackage != null
+ ? !bundleExportPackage.equals( artifact.bundleExportPackage )
+ : artifact.bundleExportPackage != null )
+ {
+ return false;
+ }
+ if ( bundleExportService != null
+ ? !bundleExportService.equals( artifact.bundleExportService )
+ : artifact.bundleExportService != null )
+ {
+ return false;
+ }
+ if ( bundleImportPackage != null
+ ? !bundleImportPackage.equals( artifact.bundleImportPackage )
+ : artifact.bundleImportPackage != null )
+ {
+ return false;
+ }
+ if ( bundleLicense != null ? !bundleLicense.equals( artifact.bundleLicense ) : artifact.bundleLicense != null )
+ {
+ return false;
+ }
+ if ( bundleName != null ? !bundleName.equals( artifact.bundleName ) : artifact.bundleName != null )
+ {
+ return false;
+ }
+ if ( bundleRequireBundle != null
+ ? !bundleRequireBundle.equals( artifact.bundleRequireBundle )
+ : artifact.bundleRequireBundle != null )
+ {
+ return false;
+ }
+ if ( bundleSymbolicName != null
+ ? !bundleSymbolicName.equals( artifact.bundleSymbolicName )
+ : artifact.bundleSymbolicName != null )
+ {
+ return false;
+ }
+ if ( bundleVersion != null ? !bundleVersion.equals( artifact.bundleVersion ) : artifact.bundleVersion != null )
+ {
+ return false;
+ }
+ if ( classifier != null ? !classifier.equals( artifact.classifier ) : artifact.classifier != null )
+ {
+ return false;
+ }
+ if ( context != null ? !context.equals( artifact.context ) : artifact.context != null )
+ {
+ return false;
+ }
+ if ( fileExtension != null ? !fileExtension.equals( artifact.fileExtension ) : artifact.fileExtension != null )
+ {
+ return false;
+ }
+ if ( goals != null ? !goals.equals( artifact.goals ) : artifact.goals != null )
+ {
+ return false;
+ }
+ if ( !groupId.equals( artifact.groupId ) )
+ {
+ return false;
+ }
+ if ( id != null ? !id.equals( artifact.id ) : artifact.id != null )
+ {
+ return false;
+ }
+ if ( packaging != null ? !packaging.equals( artifact.packaging ) : artifact.packaging != null )
+ {
+ return false;
+ }
+ if ( path != null ? !path.equals( artifact.path ) : artifact.path != null )
+ {
+ return false;
+ }
+ if ( prefix != null ? !prefix.equals( artifact.prefix ) : artifact.prefix != null )
+ {
+ return false;
+ }
+ if ( repositoryId != null ? !repositoryId.equals( artifact.repositoryId ) : artifact.repositoryId != null )
+ {
+ return false;
+ }
+ if ( scope != null ? !scope.equals( artifact.scope ) : artifact.scope != null )
+ {
+ return false;
+ }
+ if ( size != null ? !size.equals( artifact.size ) : artifact.size != null )
+ {
+ return false;
+ }
+ if ( type != null ? !type.equals( artifact.type ) : artifact.type != null )
+ {
+ return false;
+ }
+ if ( url != null ? !url.equals( artifact.url ) : artifact.url != null )
+ {
+ return false;
+ }
+ if ( !version.equals( artifact.version ) )
+ {
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode()
+ {
+ int result = context != null ? context.hashCode() : 0;
+ result = 31 * result + ( url != null ? url.hashCode() : 0 );
+ result = 31 * result + groupId.hashCode();
+ result = 31 * result + artifactId.hashCode();
+ result = 31 * result + ( repositoryId != null ? repositoryId.hashCode() : 0 );
+ result = 31 * result + version.hashCode();
+ result = 31 * result + ( prefix != null ? prefix.hashCode() : 0 );
+ result = 31 * result + ( goals != null ? goals.hashCode() : 0 );
+ result = 31 * result + ( bundleVersion != null ? bundleVersion.hashCode() : 0 );
+ result = 31 * result + ( bundleSymbolicName != null ? bundleSymbolicName.hashCode() : 0 );
+ result = 31 * result + ( bundleExportPackage != null ? bundleExportPackage.hashCode() : 0 );
+ result = 31 * result + ( bundleExportService != null ? bundleExportService.hashCode() : 0 );
+ result = 31 * result + ( bundleDescription != null ? bundleDescription.hashCode() : 0 );
+ result = 31 * result + ( bundleName != null ? bundleName.hashCode() : 0 );
+ result = 31 * result + ( bundleLicense != null ? bundleLicense.hashCode() : 0 );
+ result = 31 * result + ( bundleDocUrl != null ? bundleDocUrl.hashCode() : 0 );
+ result = 31 * result + ( bundleImportPackage != null ? bundleImportPackage.hashCode() : 0 );
+ result = 31 * result + ( bundleRequireBundle != null ? bundleRequireBundle.hashCode() : 0 );
+ result = 31 * result + ( classifier != null ? classifier.hashCode() : 0 );
+ result = 31 * result + ( packaging != null ? packaging.hashCode() : 0 );
+ result = 31 * result + ( fileExtension != null ? fileExtension.hashCode() : 0 );
+ result = 31 * result + ( size != null ? size.hashCode() : 0 );
+ result = 31 * result + ( type != null ? type.hashCode() : 0 );
+ result = 31 * result + ( path != null ? path.hashCode() : 0 );
+ result = 31 * result + ( id != null ? id.hashCode() : 0 );
+ result = 31 * result + ( scope != null ? scope.hashCode() : 0 );
+ return result;
+ }
+}
--- /dev/null
+package org.apache.archiva.maven2.model;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @author Olivier Lamy
+ */
+@XmlRootElement( name = "treeEntry" )
+public class TreeEntry
+ implements Serializable
+{
+
+ private List<TreeEntry> childs = new ArrayList<>();
+
+ private Artifact artifact;
+
+ @JsonIgnore
+ private TreeEntry parent;
+
+ public TreeEntry()
+ {
+ // no op
+ }
+
+ public TreeEntry( Artifact artifact )
+ {
+ this.artifact = artifact;
+ }
+
+
+ public Artifact getArtifact()
+ {
+ return artifact;
+ }
+
+ public void setArtifact( Artifact artifact )
+ {
+ this.artifact = artifact;
+ }
+
+ public List<TreeEntry> getChilds()
+ {
+ return childs;
+ }
+
+ public void setChilds( List<TreeEntry> childs )
+ {
+ this.childs = childs;
+ }
+
+ @JsonIgnore
+ public TreeEntry getParent()
+ {
+ return parent;
+ }
+
+ @JsonIgnore
+ public void setParent( TreeEntry parent )
+ {
+ this.parent = parent;
+ }
+
+ @Override
+ public boolean equals( Object o )
+ {
+ if ( this == o )
+ {
+ return true;
+ }
+ if ( !( o instanceof TreeEntry ) )
+ {
+ return false;
+ }
+
+ TreeEntry treeEntry = (TreeEntry) o;
+
+ if ( artifact != null ? !artifact.equals( treeEntry.artifact ) : treeEntry.artifact != null )
+ {
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode()
+ {
+ return artifact != null ? artifact.hashCode() : 0;
+ }
+}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing,
+ ~ software distributed under the License is distributed on an
+ ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ~ KIND, either express or implied. See the License for the
+ ~ specific language governing permissions and limitations
+ ~ under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.archiva</groupId>
+ <artifactId>archiva-modules</artifactId>
+ <version>3.0.0-SNAPSHOT</version>
+ </parent>
+
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven</artifactId>
+ <name>Archiva :: Maven</name>
+ <packaging>pom</packaging>
+
+ <properties>
+ <site.staging.base>${project.parent.basedir}</site.staging.base>
+ </properties>
+
+ <modules>
+ <module>archiva-maven-common</module>
+ <module>archiva-maven-model</module>
+ <module>archiva-maven-metadata</module>
+ <module>archiva-maven-indexer</module>
+ </modules>
+</project>
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-indexer</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-indexer</artifactId>
<scope>test</scope>
</dependency>
<dependency>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-model</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-model</artifactId>
</dependency>
<dependency>
<artifactId>archiva-scheduler-repository</artifactId>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-indexer</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-indexer</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-model</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-model</artifactId>
</dependency>
<dependency>
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-indexer</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-indexer</artifactId>
<scope>test</scope>
</dependency>
<artifactId>archiva-scheduler-repository</artifactId>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-indexer</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-indexer</artifactId>
</dependency>
<dependency>
<artifactId>archiva-security</artifactId>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-indexer</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-indexer</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>metadata-repository-api</artifactId>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-metadata</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-metadata</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-configuration</artifactId>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-model</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-model</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.wagon</groupId>
<modules>
<module>archiva-base</module>
-
<module>archiva-scheduler</module>
<module>archiva-web</module>
<module>metadata</module>
<module>plugins</module>
+ <module>archiva-maven</module>
</modules>
<build>
<version>1.1</version>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-common</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-metadata</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-metadata</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-model</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-model</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.apache.archiva</groupId>
- <artifactId>archiva-maven2-indexer</artifactId>
+ <groupId>org.apache.archiva.maven</groupId>
+ <artifactId>archiva-maven-indexer</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>