/*
 * Sonar, open source software quality management tool.
 * Copyright (C) 2008-2011 SonarSource
 * mailto:contact AT sonarsource DOT com
 *
 * Sonar is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 3 of the License, or (at your option) any later version.
 *
 * Sonar is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Sonar; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
 */
package org.sonar.plugins.cpd;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sonar.api.CoreProperties;
import org.sonar.api.batch.SensorContext;
import org.sonar.api.database.model.ResourceModel;
import org.sonar.api.measures.CoreMetrics;
import org.sonar.api.measures.Measure;
import org.sonar.api.resources.*;
import org.sonar.api.utils.SonarException;
import org.sonar.batch.index.ResourcePersister;
import org.sonar.duplications.block.Block;
import org.sonar.duplications.block.BlockChunker;
import org.sonar.duplications.detector.original.OriginalCloneDetectionAlgorithm;
import org.sonar.duplications.index.CloneGroup;
import org.sonar.duplications.index.CloneIndex;
import org.sonar.duplications.index.ClonePart;
import org.sonar.duplications.java.JavaStatementBuilder;
import org.sonar.duplications.java.JavaTokenProducer;
import org.sonar.duplications.statement.Statement;
import org.sonar.duplications.statement.StatementChunker;
import org.sonar.duplications.token.TokenChunker;
import org.sonar.persistence.dao.DuplicationDao;
import org.sonar.plugins.cpd.index.DbDuplicationsIndex;
import org.sonar.plugins.cpd.index.SonarDuplicationsIndex;

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.*;

public class SonarEngine extends CpdEngine {

  private static final Logger LOG = LoggerFactory.getLogger(SonarEngine.class);

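  /**
   * Number of consecutive statements hashed together into one block by the {@link BlockChunker}.
   * Duplicated fragments shorter than this number of statements are not detected.
   */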
  private static final int BLOCK_SIZE = 10;

  /**
   * Maximum time allowed to analyse a single file (in seconds).
   */
  private static final int TIMEOUT = 5 * 60;

  private final ResourcePersister resourcePersister;
  private final DuplicationDao dao;

  /**
   * For dry runs, when there is no access to the database.
   */
  public SonarEngine() {
    this(null, null);
  }

  public SonarEngine(ResourcePersister resourcePersister, DuplicationDao dao) {
    this.resourcePersister = resourcePersister;
    this.dao = dao;
  }

  @Override
  public boolean isLanguageSupported(Language language) {
    return Java.INSTANCE.equals(language);
  }

  /**
   * @return true if cross-project analysis was enabled by the user, the database is available and no project branch is being analysed
   */
  private boolean isCrossProject(Project project) {
    return project.getConfiguration().getBoolean(CoreProperties.CPD_CROSS_RPOJECT, CoreProperties.CPD_CROSS_RPOJECT_DEFAULT_VALUE)
      && resourcePersister != null && dao != null
      && StringUtils.isBlank(project.getConfiguration().getString(CoreProperties.PROJECT_BRANCH_PROPERTY));
  }

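  /**
   * Builds the key under which the blocks of a resource are registered in the index:
   * the project key and the resource key, separated by a colon.
   */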
  private static String getFullKey(Project project, Resource resource) {
    return new StringBuilder(ResourceModel.KEY_SIZE)
      .append(project.getKey())
      .append(':')
      .append(resource.getKey())
      .toString();
  }

  @Override
  public void analyse(Project project, SensorContext context) {
    List<InputFile> inputFiles = project.getFileSystem().mainFiles(project.getLanguageKey());
    if (inputFiles.isEmpty()) {
      return;
    }

    // Create index
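    // When cross-project detection is possible, the in-memory index is backed by the database,
    // so that blocks from other analysed projects can also be searched.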
    final SonarDuplicationsIndex index;
    if (isCrossProject(project)) {
      LOG.info("Cross-project analysis enabled");
      index = new SonarDuplicationsIndex(new DbDuplicationsIndex(resourcePersister, project, dao));
    } else {
      LOG.info("Cross-project analysis disabled");
      index = new SonarDuplicationsIndex();
    }

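    // Chunking pipeline: characters are turned into tokens, tokens into statements,
    // and statements into hashed blocks of BLOCK_SIZE consecutive statements.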
    TokenChunker tokenChunker = JavaTokenProducer.build();
    StatementChunker statementChunker = JavaStatementBuilder.build();
    BlockChunker blockChunker = new BlockChunker(BLOCK_SIZE);

    for (InputFile inputFile : inputFiles) {
      LOG.debug("Populating index from {}", inputFile.getFile());
      Resource resource = getResource(inputFile);
      String resourceKey = getFullKey(project, resource);

      List<Statement> statements;

      Reader reader = null;
      try {
        reader = new InputStreamReader(new FileInputStream(inputFile.getFile()), project.getFileSystem().getSourceCharset());
        statements = statementChunker.chunk(tokenChunker.chunk(reader));
      } catch (FileNotFoundException e) {
        throw new SonarException(e);
      } finally {
        IOUtils.closeQuietly(reader);
      }

      List<Block> blocks = blockChunker.chunk(resourceKey, statements);
      index.insert(resource, blocks);
    }

    // Detect
    ExecutorService executorService = Executors.newSingleThreadExecutor();
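    // Detection for each file runs in a single-threaded executor so that it can be bounded by TIMEOUT;
    // when a file times out, it is skipped (clones stays null) instead of failing the whole analysis.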
    try {
      for (InputFile inputFile : inputFiles) {
        LOG.debug("Detection of duplications for {}", inputFile.getFile());
        Resource resource = getResource(inputFile);
        String resourceKey = getFullKey(project, resource);

        Collection<Block> fileBlocks = index.getByResource(resource, resourceKey);

        List<CloneGroup> clones;
        try {
          clones = executorService.submit(new Task(index, fileBlocks)).get(TIMEOUT, TimeUnit.SECONDS);
        } catch (TimeoutException e) {
          clones = null;
          LOG.warn("Timeout during detection of duplications for " + inputFile.getFile(), e);
        } catch (InterruptedException e) {
          throw new SonarException(e);
        } catch (ExecutionException e) {
          throw new SonarException(e);
        }

        save(context, resource, clones);
      }
    } finally {
      executorService.shutdown();
    }
  }

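  /**
   * Detection of duplications for the blocks of a single file against the whole index,
   * wrapped in a {@link Callable} so that it can be executed with a timeout.
   */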
  private static class Task implements Callable<List<CloneGroup>> {
    private final CloneIndex index;
    private final Collection<Block> fileBlocks;

    public Task(CloneIndex index, Collection<Block> fileBlocks) {
      this.index = index;
      this.fileBlocks = fileBlocks;
    }

    public List<CloneGroup> call() {
      return OriginalCloneDetectionAlgorithm.detect(index, fileBlocks);
    }
  }

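  /**
   * Maps a physical source file to its logical {@link JavaFile} resource (main sources, not unit tests).
   */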
  private Resource getResource(InputFile inputFile) {
    return JavaFile.fromRelativePath(inputFile.getRelativePath(), false);
  }

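  /**
   * Saves duplication measures and the duplications XML for one resource.
   * Does nothing when detection found nothing or timed out (clones is null).
   */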
  static void save(SensorContext context, Resource resource, List<CloneGroup> clones) {
    if (clones == null || clones.isEmpty()) {
      return;
    }
    // Calculate number of lines and blocks
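    // Only the parts that belong to the analysed resource (the origin of each clone group)
    // contribute to its duplicated lines and blocks.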
    Set<Integer> duplicatedLines = new HashSet<Integer>();
    double duplicatedBlocks = 0;
    for (CloneGroup clone : clones) {
      ClonePart origin = clone.getOriginPart();
      for (ClonePart part : clone.getCloneParts()) {
        if (part.getResourceId().equals(origin.getResourceId())) {
          duplicatedBlocks++;
          for (int duplicatedLine = part.getLineStart(); duplicatedLine < part.getLineStart() + part.getLines(); duplicatedLine++) {
            duplicatedLines.add(duplicatedLine);
          }
        }
      }
    }
    // Build XML
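    // One <g> element per clone group and one <b> element per duplicated part:
    // s = start line, l = number of lines, r = resource key.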
    StringBuilder xml = new StringBuilder();
    xml.append("<duplications>");
    for (CloneGroup clone : clones) {
      xml.append("<g>");
      for (ClonePart part : clone.getCloneParts()) {
        xml.append("<b s=\"").append(part.getLineStart())
            .append("\" l=\"").append(part.getLines())
            .append("\" r=\"").append(part.getResourceId())
            .append("\"/>");
      }
      xml.append("</g>");
    }
    xml.append("</duplications>");
    // Save
    context.saveMeasure(resource, CoreMetrics.DUPLICATED_FILES, 1d);
    context.saveMeasure(resource, CoreMetrics.DUPLICATED_LINES, (double) duplicatedLines.size());
    context.saveMeasure(resource, CoreMetrics.DUPLICATED_BLOCKS, duplicatedBlocks);
    context.saveMeasure(resource, new Measure(CoreMetrics.DUPLICATIONS_DATA, xml.toString()));
  }

}