package org.apache.archiva.repository.scanner.functors;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.commons.collections.Closure;
import org.apache.archiva.common.utils.BaseFile;
import org.apache.archiva.consumers.RepositoryContentConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;

/**
 * ConsumerProcessFileClosure
 * <p>
 * A commons-collections {@link Closure} that dispatches a single repository file
 * ({@link #setBasefile(BaseFile) basefile}) to each {@link RepositoryContentConsumer}
 * it is invoked with, optionally accumulating per-consumer wall-clock timings and
 * invocation counts into the maps supplied via
 * {@link #setConsumerTimings(Map)} and {@link #setConsumerCounts(Map)}.
 * <p>
 * Inputs that are not a {@code RepositoryContentConsumer} are silently ignored.
 * Exceptions thrown by a consumer are caught and logged so the surrounding
 * scan can continue with the remaining consumers.
 */
public class ConsumerProcessFileClosure
    implements Closure
{
    // Intentionally not static/final: a replacement logger can be injected via
    // setLogger(), presumably so a scan can route output to its own logger.
    private Logger log = LoggerFactory.getLogger( ConsumerProcessFileClosure.class );

    /** The file currently being dispatched to consumers. */
    private BaseFile basefile;

    /** Passed through to {@link RepositoryContentConsumer#processFile}. */
    private boolean executeOnEntireRepo;

    /** Optional accumulator: consumer id -> total processing time in ms. May be null. */
    private Map<String,Long> consumerTimings;

    /** Optional accumulator: consumer id -> number of files processed. May be null. */
    private Map<String,Long> consumerCounts;

    /**
     * Sends the configured {@link #basefile} to the given consumer.
     *
     * @param input expected to be a {@link RepositoryContentConsumer}; any other
     *              type is ignored without error.
     */
    @Override
    public void execute( Object input )
    {
        if ( input instanceof RepositoryContentConsumer )
        {
            RepositoryContentConsumer consumer = (RepositoryContentConsumer) input;

            String id = consumer.getId();
            try
            {
                // Safety check to avoid errors, if a parallel process removes files
                if ( basefile.exists() )
                {
                    log.debug( "Sending to consumer: {}", id );

                    long startTime = System.currentTimeMillis();
                    consumer.processFile( basefile.getRelativePath(), executeOnEntireRepo );
                    long endTime = System.currentTimeMillis();

                    if ( consumerTimings != null )
                    {
                        // Accumulate elapsed ms; first hit for this id starts from 0.
                        Long value = consumerTimings.get( id );
                        consumerTimings.put( id, ( value != null ? value : 0 ) + endTime - startTime );
                    }

                    if ( consumerCounts != null )
                    {
                        Long value = consumerCounts.get( id );
                        consumerCounts.put( id, ( value != null ? value : 0 ) + 1 );
                    }
                }
            }
            catch ( Exception e )
            {
                /* Intentionally Catch all exceptions.
                 * So that the discoverer processing can continue.
                 */
                // Parameterized logging (consistent with the debug call above);
                // the exception is passed as the last argument so SLF4J logs the stack trace.
                log.error( "Consumer [{}] had an error when processing file [{}]: {}",
                           id, basefile.getAbsolutePath(), e.getMessage(), e );
            }
        }
    }

    public BaseFile getBasefile()
    {
        return basefile;
    }

    public void setBasefile( BaseFile basefile )
    {
        this.basefile = basefile;
    }

    public boolean isExecuteOnEntireRepo()
    {
        return executeOnEntireRepo;
    }

    public void setExecuteOnEntireRepo( boolean executeOnEntireRepo )
    {
        this.executeOnEntireRepo = executeOnEntireRepo;
    }

    public void setConsumerTimings( Map<String, Long> consumerTimings )
    {
        this.consumerTimings = consumerTimings;
    }

    public void setConsumerCounts( Map<String, Long> consumerCounts )
    {
        this.consumerCounts = consumerCounts;
    }

    public Logger getLogger()
    {
        return log;
    }

    public void setLogger( Logger logger )
    {
        this.log = logger;
    }
}