Use types are origins for incremental KAPT and track generated source
This change introduces tracking of the structure of generated sources in order to, e.g., track classpath changes that impact generated sources. This fixes KT-42182. Also, origin tracking for isolating processors now uses types, allowing origin elements to come from the classpath. This fixes KT-34340. However, a classpath origin is used only to invalidate generated files when the type changes; processing will not be requested for that type. This is in line with the incremental annotation processing (incap) spec.
This commit is contained in:
committed by
Mikhael Bogdanov
parent
d512158c25
commit
c7e5beece5
+58
@@ -0,0 +1,58 @@
|
||||
/*
 * Copyright 2010-2020 JetBrains s.r.o. and Kotlin Programming Language contributors.
 * Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
 */

import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.RoundEnvironment;
import javax.lang.model.element.Element;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import java.io.IOException;
import java.io.Writer;
import java.util.Collections;
import java.util.Set;

/** Simple processor that generates a class for every annotated element (class, field, method). */
public class IncrementalProcessorReferencingClasspath extends AbstractProcessor {

    // Type that all generated sources will extend.
    public static final String CLASSPATH_TYPE = "com.example.FromClasspath";

    @Override
    public Set<String> getSupportedAnnotationTypes() {
        return Collections.singleton("example.ExampleAnnotation");
    }

    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        if (annotations.isEmpty()) return true;

        // Only one annotation type is supported, so the first (and only) entry is the one to process.
        TypeElement annotation = annotations.iterator().next();
        for (Element element : roundEnv.getElementsAnnotatedWith(annotation)) {
            boolean supportedKind =
                    element instanceof TypeElement
                            || element instanceof ExecutableElement
                            || element instanceof VariableElement;
            if (!supportedKind) continue;

            String generatedName = generatedClassName(element);
            String packageName = enclosingSimpleName(element);
            writeGeneratedClass(packageName, generatedName, element);
        }
        return false;
    }

    /** Derives the generated class name: the annotated element's simple name, first letter capitalized, plus "Generated". */
    private static String generatedClassName(Element element) {
        String name = element.getSimpleName().toString();
        return name.substring(0, 1).toUpperCase() + name.substring(1) + "Generated";
    }

    /**
     * Picks the package for the generated source. For an annotated class this is its enclosing element's
     * simple name; for methods and fields it is the grandparent's simple name (the element enclosing
     * the declaring class).
     */
    private static String enclosingSimpleName(Element element) {
        if (element instanceof TypeElement) {
            return element.getEnclosingElement().getSimpleName().toString();
        }
        return element.getEnclosingElement().getEnclosingElement().getSimpleName().toString();
    }

    /**
     * Emits a source file declaring an empty public class extending [CLASSPATH_TYPE], registering
     * {@code origin} as the originating element (this is what isolating-origin tracking relies on).
     */
    private void writeGeneratedClass(String packageName, String name, Element origin) {
        try (Writer writer = processingEnv.getFiler().createSourceFile(packageName + "." + name, origin).openWriter()) {
            writer.append("package ").append(packageName).append(";");
            writer.append("\npublic class ").append(name).append(" extends ").append(CLASSPATH_TYPE).append(" {}");
        } catch (IOException ignored) {
            // Best-effort test fixture: generation failures are deliberately swallowed.
        }
    }
}
|
||||
+35
-11
@@ -27,6 +27,7 @@ class KaptIncrementalWithAggregatingApt : KaptIncrementalIT() {
|
||||
override fun defaultBuildOptions(): BuildOptions =
|
||||
super.defaultBuildOptions().copy(
|
||||
incremental = true,
|
||||
debug=false,
|
||||
kaptOptions = KaptOptions(
|
||||
verbose = true,
|
||||
useWorkers = true,
|
||||
@@ -171,7 +172,7 @@ class KaptIncrementalWithAggregatingApt : KaptIncrementalIT() {
|
||||
}
|
||||
project.build("build") {
|
||||
assertSuccessful()
|
||||
assertTrue(getProcessedSources(output).isEmpty())
|
||||
assertTrue(output.contains("Skipping annotation processing as all sources are up-to-date."))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -275,8 +276,9 @@ class KaptIncrementalWithAggregatingApt : KaptIncrementalIT() {
|
||||
assertEquals(
|
||||
setOf(
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/bar/NoAnnotationsKt.java").canonicalPath,
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/error/NonExistentClass.java").canonicalPath
|
||||
), getProcessedSources(output)
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/error/NonExistentClass.java").canonicalPath,
|
||||
fileInWorkingDir("build/generated/source/kapt/main/bar/WithAnnotationGenerated.java").canonicalPath.takeUnless { isBinary },
|
||||
).filterNotNull().toSet(), getProcessedSources(output)
|
||||
)
|
||||
|
||||
checkAggregatingResource { lines ->
|
||||
@@ -302,9 +304,9 @@ class KaptIncrementalWithAggregatingApt : KaptIncrementalIT() {
|
||||
assertEquals(
|
||||
setOf(
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/baz/BazClass.java").canonicalPath,
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/error/NonExistentClass.java").canonicalPath
|
||||
),
|
||||
getProcessedSources(output)
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/error/NonExistentClass.java").canonicalPath,
|
||||
fileInWorkingDir("build/generated/source/kapt/main/bar/WithAnnotationGenerated.java").canonicalPath.takeUnless { isBinary },
|
||||
).filterNotNull().toSet(), getProcessedSources(output)
|
||||
)
|
||||
|
||||
checkAggregatingResource { lines ->
|
||||
@@ -330,9 +332,9 @@ class KaptIncrementalWithAggregatingApt : KaptIncrementalIT() {
|
||||
assertEquals(
|
||||
setOf(
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/baz/BazClass.java").canonicalPath,
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/error/NonExistentClass.java").canonicalPath
|
||||
),
|
||||
getProcessedSources(output)
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/error/NonExistentClass.java").canonicalPath,
|
||||
fileInWorkingDir("build/generated/source/kapt/main/bar/WithAnnotationGenerated.java").canonicalPath.takeUnless { isBinary },
|
||||
).filterNotNull().toSet(), getProcessedSources(output)
|
||||
)
|
||||
|
||||
checkAggregatingResource { lines ->
|
||||
@@ -351,8 +353,30 @@ class KaptIncrementalWithAggregatingApt : KaptIncrementalIT() {
|
||||
assertEquals(
|
||||
setOf(
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/bar/NoAnnotationsKt.java").canonicalPath,
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/error/NonExistentClass.java").canonicalPath
|
||||
), getProcessedSources(output)
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/error/NonExistentClass.java").canonicalPath,
|
||||
fileInWorkingDir("build/generated/source/kapt/main/bar/WithAnnotationGenerated.java").canonicalPath.takeUnless { isBinary },
|
||||
fileInWorkingDir("build/generated/source/kapt/main/BazClass/BazNestedGenerated.java").canonicalPath.takeUnless { isBinary },
|
||||
).filterNotNull().toSet(), getProcessedSources(output)
|
||||
)
|
||||
|
||||
checkAggregatingResource { lines ->
|
||||
assertEquals(2, lines.size)
|
||||
assertTrue(lines.contains("WithAnnotationGenerated"))
|
||||
assertTrue(lines.contains("BazNestedGenerated"))
|
||||
}
|
||||
}
|
||||
|
||||
// make sure that changing the origin of isolating that produced
|
||||
project.projectFile("withAnnotation.kt").modify { current -> current.substringBeforeLast("}") + "\nfun otherFunction() {} }" }
|
||||
project.build("build", options = buildOptions) {
|
||||
assertSuccessful()
|
||||
|
||||
assertEquals(
|
||||
setOf(
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/bar/WithAnnotation.java").canonicalPath,
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/error/NonExistentClass.java").canonicalPath,
|
||||
fileInWorkingDir("build/generated/source/kapt/main/BazClass/BazNestedGenerated.java").canonicalPath.takeUnless { isBinary },
|
||||
).filterNotNull().toSet(), getProcessedSources(output)
|
||||
)
|
||||
|
||||
checkAggregatingResource { lines ->
|
||||
|
||||
+57
@@ -6,10 +6,14 @@
|
||||
package org.jetbrains.kotlin.gradle
|
||||
|
||||
import org.jetbrains.kotlin.gradle.incapt.IncrementalProcessor
|
||||
import org.jetbrains.kotlin.gradle.incapt.IncrementalProcessorReferencingClasspath
|
||||
import org.jetbrains.kotlin.gradle.util.modify
|
||||
import org.junit.Assert.assertEquals
|
||||
import org.junit.Assume
|
||||
import org.junit.Test
|
||||
import org.objectweb.asm.ClassWriter
|
||||
import org.objectweb.asm.Opcodes
|
||||
import org.objectweb.asm.Type
|
||||
import test.kt33617.MyClass
|
||||
import java.io.File
|
||||
import java.util.zip.ZipEntry
|
||||
@@ -192,6 +196,59 @@ class KaptIncrementalWithIsolatingApt : KaptIncrementalIT() {
|
||||
assertSuccessful()
|
||||
}
|
||||
}
|
||||
|
||||
/** Regression test for https://youtrack.jetbrains.com/issue/KT-42182. */
|
||||
@Test
|
||||
fun testGeneratedSourcesImpactedByClasspathChanges() {
|
||||
val project = Project(
|
||||
"kaptIncrementalCompilationProject",
|
||||
GradleVersionRequired.None
|
||||
).apply {
|
||||
setupIncrementalAptProject("ISOLATING", procClass = IncrementalProcessorReferencingClasspath::class.java)
|
||||
}
|
||||
project.gradleSettingsScript().writeText("include ':', ':lib'")
|
||||
val classpathTypeSource = project.projectDir.resolve("lib").run {
|
||||
mkdirs()
|
||||
resolve("build.gradle").writeText("apply plugin: 'java'")
|
||||
val source = resolve("src/main/java/" + IncrementalProcessorReferencingClasspath.CLASSPATH_TYPE.replace(".", "/") + ".java")
|
||||
source.parentFile.mkdirs()
|
||||
|
||||
source.writeText(
|
||||
"""
|
||||
package ${IncrementalProcessorReferencingClasspath.CLASSPATH_TYPE.substringBeforeLast(".")};
|
||||
public class ${IncrementalProcessorReferencingClasspath.CLASSPATH_TYPE.substringAfterLast(".")} {}
|
||||
""".trimIndent()
|
||||
)
|
||||
return@run source
|
||||
}
|
||||
project.gradleBuildScript().appendText(
|
||||
"""
|
||||
|
||||
dependencies {
|
||||
implementation project(':lib')
|
||||
}
|
||||
""".trimIndent()
|
||||
)
|
||||
project.build("clean", "kaptKotlin") {
|
||||
assertSuccessful()
|
||||
}
|
||||
|
||||
// change type that all generated sources reference
|
||||
classpathTypeSource.writeText(classpathTypeSource.readText().replace("}", "int i = 10;\n}"))
|
||||
project.build("build") {
|
||||
assertSuccessful()
|
||||
assertEquals(
|
||||
setOf(
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/foo/A.java").canonicalPath,
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/bar/B.java").canonicalPath,
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/bar/UseBKt.java").canonicalPath,
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/baz/UtilKt.java").canonicalPath,
|
||||
fileInWorkingDir("build/tmp/kapt3/stubs/main/error/NonExistentClass.java").canonicalPath
|
||||
),
|
||||
getProcessedSources(output)
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private const val patternApt = "Processing java sources with annotation processors:"
|
||||
|
||||
@@ -60,10 +60,16 @@ open class KaptContext(val options: KaptOptions, val withJdk: Boolean, val logge
|
||||
JavaClassCacheManager(it)
|
||||
}
|
||||
if (options.flags[KaptFlag.INCREMENTAL_APT]) {
|
||||
sourcesToReprocess =
|
||||
sourcesToReprocess = run {
|
||||
val start = System.currentTimeMillis()
|
||||
cacheManager?.invalidateAndGetDirtyFiles(
|
||||
options.changedFiles, options.classpathChanges
|
||||
) ?: SourcesToReprocess.FullRebuild
|
||||
).also {
|
||||
if (logger.isVerbose) {
|
||||
logger.info("Computing sources to reprocess took ${System.currentTimeMillis() - start}[ms].")
|
||||
}
|
||||
}
|
||||
}?: SourcesToReprocess.FullRebuild
|
||||
|
||||
if (sourcesToReprocess == SourcesToReprocess.FullRebuild) {
|
||||
// remove all generated sources and classes
|
||||
|
||||
+10
-8
@@ -14,6 +14,7 @@ import com.sun.tools.javac.processing.JavacProcessingEnvironment
|
||||
import com.sun.tools.javac.tree.JCTree
|
||||
import org.jetbrains.kotlin.base.kapt3.KaptFlag
|
||||
import org.jetbrains.kotlin.kapt3.base.incremental.*
|
||||
import org.jetbrains.kotlin.kapt3.base.javac.KaptJavaCompiler
|
||||
import org.jetbrains.kotlin.kapt3.base.util.KaptBaseError
|
||||
import org.jetbrains.kotlin.kapt3.base.util.KaptLogger
|
||||
import org.jetbrains.kotlin.kapt3.base.util.isJava9OrLater
|
||||
@@ -40,6 +41,13 @@ fun KaptContext.doAnnotationProcessing(
|
||||
|
||||
val compilerAfterAP: JavaCompiler
|
||||
try {
|
||||
if (javaSourceFiles.isEmpty() && aggregatedTypes.isEmpty() && additionalSources.isEmpty()) {
|
||||
if (logger.isVerbose) {
|
||||
logger.info("Skipping annotation processing as all sources are up-to-date.")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if (isJava9OrLater()) {
|
||||
val initProcessAnnotationsMethod = JavaCompiler::class.java.declaredMethods.single { it.name == "initProcessAnnotations" }
|
||||
initProcessAnnotationsMethod.invoke(compiler, wrappedProcessors, emptyList<JavaFileObject>(), emptyList<String>())
|
||||
@@ -66,11 +74,6 @@ fun KaptContext.doAnnotationProcessing(
|
||||
CompileState.PARSE, compiler.enterTrees(parsedJavaFiles + additionalSources)
|
||||
)
|
||||
|
||||
val generatedSourcesListener = sourcesStructureListener?.let {
|
||||
compiler.getTaskListeners().remove(it)
|
||||
GeneratedTypesTaskListener(cacheManager!!.javaCache)
|
||||
}?.also { compiler.getTaskListeners().add(it) }
|
||||
|
||||
val additionalClassNames = JavacList.from(aggregatedTypes)
|
||||
if (isJava9OrLater()) {
|
||||
val processAnnotationsMethod =
|
||||
@@ -78,13 +81,12 @@ fun KaptContext.doAnnotationProcessing(
|
||||
processAnnotationsMethod.invoke(compiler, analyzedFiles, additionalClassNames)
|
||||
compiler
|
||||
} else {
|
||||
compiler.processAnnotations(analyzedFiles, additionalClassNames).also {
|
||||
generatedSourcesListener?.let { compiler.getTaskListeners().remove(it) }
|
||||
}
|
||||
compiler.processAnnotations(analyzedFiles, additionalClassNames)
|
||||
}
|
||||
} catch (e: AnnotationProcessingError) {
|
||||
throw KaptBaseError(KaptBaseError.Kind.EXCEPTION, e.cause ?: e)
|
||||
}
|
||||
sourcesStructureListener?.let { compiler.getTaskListeners().remove(it) }
|
||||
|
||||
cacheManager?.updateCache(processors, sourcesStructureListener?.failureReason != null)
|
||||
|
||||
|
||||
+84
-40
@@ -10,16 +10,19 @@ import java.io.Serializable
|
||||
|
||||
class IncrementalAptCache : Serializable {
|
||||
|
||||
private val aggregatingGenerated: MutableMap<File, String?> = mutableMapOf()
|
||||
private val aggregatingGenerated: MutableSet<File> = mutableSetOf()
|
||||
private val aggregatedTypes: MutableSet<String> = linkedSetOf()
|
||||
private val isolatingMapping: MutableMap<File, Pair<String?, File>> = mutableMapOf()
|
||||
// Annotations claimed by aggregating annotation processors
|
||||
private val aggregatingClaimedAnnotations: MutableSet<String> = mutableSetOf()
|
||||
private val isolatingMapping: MutableMap<File, String> = mutableMapOf()
|
||||
|
||||
var isIncremental = true
|
||||
private set
|
||||
|
||||
fun updateCache(processors: List<IncrementalProcessor>): Boolean {
|
||||
fun updateCache(processors: List<IncrementalProcessor>, failedToAnalyzeSources: Boolean): Boolean {
|
||||
if (failedToAnalyzeSources) {
|
||||
invalidateCache()
|
||||
return false
|
||||
}
|
||||
|
||||
val aggregating = mutableListOf<IncrementalProcessor>()
|
||||
val isolating = mutableListOf<IncrementalProcessor>()
|
||||
val nonIncremental = mutableListOf<IncrementalProcessor>()
|
||||
@@ -38,60 +41,68 @@ class IncrementalAptCache : Serializable {
|
||||
|
||||
aggregatingGenerated.clear()
|
||||
aggregating.forEach {
|
||||
it.getGeneratedToSourcesAll().mapValuesTo(aggregatingGenerated) { (_, value) ->
|
||||
value?.first
|
||||
}
|
||||
aggregatingGenerated.addAll(it.getGeneratedToSources().keys)
|
||||
}
|
||||
|
||||
aggregatingClaimedAnnotations.clear()
|
||||
aggregatingClaimedAnnotations.addAll(aggregating.flatMap { it.supportedAnnotationTypes })
|
||||
|
||||
aggregatedTypes.clear()
|
||||
aggregatedTypes.addAll(aggregating.flatMap { it.getAggregatedTypes() })
|
||||
|
||||
for (isolatingProcessor in isolating) {
|
||||
isolatingProcessor.getGeneratedToSourcesAll().forEach {
|
||||
isolatingMapping[it.key] = it.value!!.first to it.value!!.second!!
|
||||
isolating.forEach {
|
||||
it.getGeneratedToSources().forEach { (file, type) ->
|
||||
isolatingMapping[file] = type!!
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
fun getAggregatingClaimedAnnotations(): Set<String> = aggregatingClaimedAnnotations
|
||||
|
||||
/** Returns generated Java sources originating from aggregating APs. */
|
||||
fun invalidateAggregating(): Pair<List<File>, List<String>> {
|
||||
val dirtyAggregating = aggregatingGenerated.keys.filter { it.isJavaFileOrClass() }
|
||||
aggregatingGenerated.forEach { it.key.delete() }
|
||||
/**
|
||||
* Invalidates all data collected about aggregating APs, making the cache ready for the next round of data collection. Also,
|
||||
* all files generated by aggregating APs are deleted.
|
||||
*/
|
||||
fun invalidateAggregating() {
|
||||
aggregatingGenerated.forEach { it.delete() }
|
||||
aggregatingGenerated.clear()
|
||||
|
||||
val dirtyAggregated = ArrayList(aggregatedTypes)
|
||||
aggregatedTypes.clear()
|
||||
|
||||
return dirtyAggregating to dirtyAggregated
|
||||
}
|
||||
|
||||
/** Returns generated Java sources originating from the specified sources, and generated by isloating APs. */
|
||||
fun invalidateIsolatingGenerated(fromSources: Set<File>): Pair<List<File>, Set<String>> {
|
||||
val allInvalidated = mutableListOf<File>()
|
||||
val invalidatedClassIds = mutableSetOf<String>()
|
||||
var changedSources = fromSources.toSet()
|
||||
/**
|
||||
* Prepares isolating processors for incremental compilation. The specified generated files are removed, and mapping
|
||||
* information is deleted for them. The invalidation is non-transitive.
|
||||
*/
|
||||
fun invalidateIsolatingForOriginTypes(originatingTypes: Set<String>) {
|
||||
val isolatingGenerated = mutableSetOf<File>()
|
||||
isolatingMapping.forEach { (file, type) ->
|
||||
if (type in originatingTypes) {
|
||||
isolatingGenerated.add(file)
|
||||
}
|
||||
}
|
||||
|
||||
isolatingGenerated.forEach {
|
||||
isolatingMapping.remove(it)
|
||||
it.delete()
|
||||
}
|
||||
}
|
||||
|
||||
fun getIsolatingGeneratedTypesForOrigins(
|
||||
originatingTypes: Set<String>,
|
||||
typeInfoProvider: (Collection<File>) -> Set<String>
|
||||
): List<String> {
|
||||
val allGeneratedTypes = mutableListOf<String>()
|
||||
var currentOrigins = originatingTypes.toSet()
|
||||
|
||||
// We need to do it in a loop because mapping could be: [AGenerated.java -> A.java, AGeneratedGenerated.java -> AGenerated.java]
|
||||
while (changedSources.isNotEmpty()) {
|
||||
val generated = isolatingMapping.filter { changedSources.contains(it.value.second) }.keys
|
||||
generated.forEach {
|
||||
if (it.isJavaFileOrClass()) {
|
||||
allInvalidated.add(it)
|
||||
isolatingMapping[it]?.first?.let { invalidatedClassIds.add(it) }
|
||||
while (currentOrigins.isNotEmpty()) {
|
||||
val generated = mutableSetOf<File>()
|
||||
isolatingMapping.forEach { (file, origin) ->
|
||||
if (origin in currentOrigins) {
|
||||
generated.add(file)
|
||||
}
|
||||
|
||||
it.delete()
|
||||
isolatingMapping.remove(it)
|
||||
}
|
||||
changedSources = generated
|
||||
currentOrigins = typeInfoProvider(generated)
|
||||
allGeneratedTypes.addAll(currentOrigins)
|
||||
}
|
||||
return allInvalidated to invalidatedClassIds
|
||||
return allGeneratedTypes
|
||||
}
|
||||
|
||||
private fun File.isJavaFileOrClass() = extension == "java" || extension == "class"
|
||||
@@ -100,5 +111,38 @@ class IncrementalAptCache : Serializable {
|
||||
isIncremental = false
|
||||
aggregatingGenerated.clear()
|
||||
isolatingMapping.clear()
|
||||
aggregatedTypes.clear()
|
||||
}
|
||||
|
||||
/** Gets the originating type for the specified type generated by isolating AP. */
|
||||
fun getOriginForGeneratedIsolatingType(generatedType: String, sourceFileProvider: (String) -> File?): String {
|
||||
val generatedFile = checkNotNull(sourceFileProvider(generatedType)) { "Unable to find source for $generatedType" }
|
||||
return isolatingMapping.getValue(generatedFile)
|
||||
}
|
||||
|
||||
/** Returns types that were processed by aggregating APs. */
|
||||
fun getAggregatingOrigins(): Set<String> = aggregatedTypes
|
||||
|
||||
/** Returns all types generated by aggregating APs. */
|
||||
fun getAggregatingGeneratedTypes(typeInfoProvider: (Collection<File>) -> Set<String>): Set<String> {
|
||||
val generatedAggregating: MutableSet<File> = HashSet(aggregatingGenerated.size)
|
||||
aggregatingGenerated.forEach {
|
||||
if (it.isJavaFileOrClass()) {
|
||||
generatedAggregating.add(it)
|
||||
}
|
||||
}
|
||||
return typeInfoProvider(generatedAggregating)
|
||||
}
|
||||
|
||||
/** Returns all types generated by isolating APs. */
|
||||
fun getIsolatingGeneratedTypes(typeInfoProvider: (Collection<File>) -> Set<String>): Set<String> {
|
||||
val generatedIsolating: MutableSet<File> = HashSet(isolatingMapping.size)
|
||||
|
||||
isolatingMapping.keys.forEach {
|
||||
if (it.isJavaFileOrClass()) {
|
||||
generatedIsolating.add(it)
|
||||
}
|
||||
}
|
||||
return typeInfoProvider(generatedIsolating)
|
||||
}
|
||||
}
|
||||
@@ -19,8 +19,18 @@ class JavaClassCacheManager(val file: File) : Closeable {
|
||||
private var closed = false
|
||||
|
||||
fun updateCache(processors: List<IncrementalProcessor>, failedToAnalyzeSources: Boolean) {
|
||||
if (failedToAnalyzeSources || !aptCache.updateCache(processors)) {
|
||||
if (!aptCache.updateCache(processors, failedToAnalyzeSources)) {
|
||||
javaCache.invalidateAll()
|
||||
return
|
||||
}
|
||||
// Compilation is fully incremental, record types defined in generated .class files
|
||||
processors.forEach { processor ->
|
||||
processor.getGeneratedClassFilesToTypes().forEach { (classFile, type) ->
|
||||
val typeInformation = SourceFileStructure(classFile.toURI()).also {
|
||||
it.addDeclaredType(type)
|
||||
}
|
||||
javaCache.addSourceStructure(typeInformation)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -39,38 +49,93 @@ class JavaClassCacheManager(val file: File) : Closeable {
|
||||
}
|
||||
|
||||
val changes = Changes(changedSources, dirtyClasspathFqNames.toSet())
|
||||
val filesToReprocess = javaCache.invalidateEntriesForChangedFiles(changes)
|
||||
val aggregatingGeneratedTypes = aptCache.getAggregatingGeneratedTypes(javaCache::getTypesForFiles)
|
||||
val impactedTypes = getAllImpactedTypes(changes, aggregatingGeneratedTypes)
|
||||
val isolatingGeneratedTypes = aptCache.getIsolatingGeneratedTypes(javaCache::getTypesForFiles)
|
||||
|
||||
return when (filesToReprocess) {
|
||||
is SourcesToReprocess.FullRebuild -> SourcesToReprocess.FullRebuild
|
||||
is SourcesToReprocess.Incremental -> {
|
||||
val toReprocess = filesToReprocess.toReprocess.toMutableSet()
|
||||
val sourcesToReprocess = changedSources.toMutableSet()
|
||||
val classNamesToReprocess = mutableListOf<String>()
|
||||
var shouldProcessAggregating = false
|
||||
|
||||
val (invalidatedIsolatingGenerated, invalidatedIsolatingId) = aptCache.invalidateIsolatingGenerated(toReprocess)
|
||||
val generatedDirtyTypes = javaCache.invalidateGeneratedTypes(invalidatedIsolatingGenerated).toMutableSet() /*+*/
|
||||
|
||||
val aggregatedTypes = mutableListOf<String>()
|
||||
if (!toReprocess.isEmpty()) {
|
||||
// only if there are some files to reprocess we should invalidate the aggregating ones
|
||||
val (aggregatingGenerated, aggregatedTypes1) = aptCache.invalidateAggregating()
|
||||
aggregatedTypes.addAll(aggregatedTypes1)
|
||||
generatedDirtyTypes.addAll(javaCache.invalidateGeneratedTypes(aggregatingGenerated))
|
||||
|
||||
toReprocess.addAll(
|
||||
javaCache.invalidateEntriesAnnotatedWith(aptCache.getAggregatingClaimedAnnotations())
|
||||
)
|
||||
for (impactedType in impactedTypes) {
|
||||
if (impactedType !in isolatingGeneratedTypes && impactedType !in aggregatingGeneratedTypes) {
|
||||
// Reprocess only if original source
|
||||
javaCache.getSourceForType(impactedType)?.let {
|
||||
sourcesToReprocess.add(it)
|
||||
}
|
||||
|
||||
SourcesToReprocess.Incremental(
|
||||
toReprocess.toList(),
|
||||
generatedDirtyTypes,
|
||||
aggregatedTypes.also {
|
||||
it.removeAll(filesToReprocess.dirtyTypes)
|
||||
it.removeAll(generatedDirtyTypes)
|
||||
it.removeAll(invalidatedIsolatingId)
|
||||
})
|
||||
} else if (impactedType in isolatingGeneratedTypes) {
|
||||
// this is a generated type by isolating AP
|
||||
val isolatingOrigin = aptCache.getOriginForGeneratedIsolatingType(impactedType, javaCache::getSourceForType)
|
||||
if (isolatingOrigin in impactedTypes) {
|
||||
// we'll process origin, no need to do it now
|
||||
continue
|
||||
}
|
||||
val originSource = javaCache.getSourceForType(isolatingOrigin)
|
||||
if (originSource?.extension == "java") {
|
||||
sourcesToReprocess.add(originSource)
|
||||
} else if (originSource?.extension == "class") {
|
||||
// This is a generated .class file that we need to reprocess.
|
||||
classNamesToReprocess.add(isolatingOrigin)
|
||||
} else {
|
||||
// This is a type from classpath that was used as origin, just ignore it. It is used just to remove the generated file.
|
||||
}
|
||||
} else {
|
||||
// processed separately
|
||||
shouldProcessAggregating = true
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldProcessAggregating || sourcesToReprocess.isNotEmpty() || classNamesToReprocess.isNotEmpty()) {
|
||||
for (aggregatingOrigin in aptCache.getAggregatingOrigins()) {
|
||||
if (aggregatingOrigin in impactedTypes) continue
|
||||
|
||||
val originSource = javaCache.getSourceForType(aggregatingOrigin)
|
||||
if (originSource?.extension == "java") {
|
||||
sourcesToReprocess.add(originSource)
|
||||
} else if (originSource?.extension == "class") {
|
||||
// This is a generated .class file that we need to reprocess.
|
||||
classNamesToReprocess.add(aggregatingOrigin)
|
||||
}
|
||||
}
|
||||
|
||||
// Invalidate state only if there are some files that will be reprocessed
|
||||
javaCache.invalidateDataForTypes(impactedTypes)
|
||||
aptCache.invalidateAggregating()
|
||||
// for isolating, invalidate both own types and classpath types
|
||||
aptCache.invalidateIsolatingForOriginTypes(impactedTypes)
|
||||
aptCache.invalidateIsolatingForOriginTypes(dirtyClasspathFqNames)
|
||||
}
|
||||
|
||||
return SourcesToReprocess.Incremental(sourcesToReprocess.toList(), impactedTypes, classNamesToReprocess)
|
||||
}
|
||||
|
||||
private fun getAllImpactedTypes(changes: Changes, aggregatingGeneratedTypes: Set<String>): MutableSet<String> {
|
||||
val impactedTypes = javaCache.getAllImpactedTypes(changes)
|
||||
|
||||
/**
|
||||
* In order to find all impacted types we do the following:
|
||||
* - impacted types is a set of types that have changed from the previous compilation
|
||||
* - if there is a changed source or a source file that is impacted by type changes, we'll need to run aggregating APs, so we
|
||||
* invalidate all aggregating generated types, and add them to set of impacted types.
|
||||
* - using this new impacted types set, we find all types generated by isolating APs with origins in those types
|
||||
* - if there are some generated types by isolating APs, we'll need to run aggregating APs, so we invalidate all aggregating types
|
||||
* and add them to impacted types.
|
||||
* - using the final value of impacted types we get all generated types by isolating APs and add them to impacted types
|
||||
*/
|
||||
if (changes.sourceChanges.isNotEmpty() || impactedTypes.isNotEmpty()) {
|
||||
// Any source change or any source impacted by type change invalidates aggregating APs generated types
|
||||
impactedTypes.addAll(aggregatingGeneratedTypes)
|
||||
}
|
||||
aptCache.getIsolatingGeneratedTypesForOrigins(changes.dirtyFqNamesFromClasspath, javaCache::getTypesForFiles).let {
|
||||
if (it.isNotEmpty()) {
|
||||
impactedTypes.addAll(it)
|
||||
impactedTypes.addAll(aggregatingGeneratedTypes)
|
||||
}
|
||||
}
|
||||
aptCache.getIsolatingGeneratedTypesForOrigins(impactedTypes, javaCache::getTypesForFiles).let {
|
||||
impactedTypes.addAll(it)
|
||||
}
|
||||
return impactedTypes
|
||||
}
|
||||
|
||||
private fun maybeGetAptCacheFromFile(): IncrementalAptCache {
|
||||
|
||||
+70
-84
@@ -11,17 +11,18 @@ import java.io.ObjectOutputStream
|
||||
import java.io.Serializable
|
||||
import java.lang.IllegalArgumentException
|
||||
import java.net.URI
|
||||
import java.util.regex.Pattern
|
||||
|
||||
/**
|
||||
* Stores type information about processed and generated sources. For .java files a fine-grained type information
|
||||
* exists i.e we know all referenced types. For .class files we only know which type is defined in the .class file.
|
||||
*/
|
||||
class JavaClassCache() : Serializable {
|
||||
private var sourceCache = mutableMapOf<URI, SourceFileStructure>()
|
||||
|
||||
/** Record these separately because we only need to know where each generated type is coming from. */
|
||||
private var generatedTypes = mutableMapOf<File, MutableList<String>>()
|
||||
|
||||
/** Map from types to files they are mentioned in. */
|
||||
@Transient
|
||||
private var dependencyCache = mutableMapOf<String, MutableSet<URI>>()
|
||||
|
||||
@Transient
|
||||
private var nonTransitiveCache = mutableMapOf<String, MutableSet<URI>>()
|
||||
|
||||
@@ -29,21 +30,31 @@ class JavaClassCache() : Serializable {
|
||||
sourceCache[sourceStructure.sourceFile] = sourceStructure
|
||||
}
|
||||
|
||||
fun addGeneratedType(type: String, generatedFile: File) {
|
||||
val typesInFile = generatedTypes[generatedFile] ?: ArrayList(1)
|
||||
typesInFile.add(type)
|
||||
generatedTypes[generatedFile] = typesInFile
|
||||
/** Invalidates types for these files, and return the list of invalidated types.*/
|
||||
fun invalidateTypesForFiles(files: List<File>): Set<String> {
|
||||
val typesFromFiles = HashSet<String>()
|
||||
for (file in files) {
|
||||
sourceCache.remove(file.toURI())?.getDeclaredTypes()?.let {
|
||||
typesFromFiles.addAll(it)
|
||||
}
|
||||
}
|
||||
return typesFromFiles
|
||||
}
|
||||
|
||||
fun invalidateGeneratedTypes(files: List<File>): Set<String> {
|
||||
return files.mapNotNull { generatedTypes.remove(it) }.flatten().toSet()
|
||||
/** Returns all types defined in these files. */
|
||||
fun getTypesForFiles(files: Collection<File>): Set<String> {
|
||||
val typesFromFiles = HashSet<String>(files.size)
|
||||
for (file in files) {
|
||||
sourceCache[file.toURI()]?.getDeclaredTypes()?.let {
|
||||
typesFromFiles.addAll(it)
|
||||
}
|
||||
}
|
||||
return typesFromFiles
|
||||
}
|
||||
|
||||
private fun readObject(input: ObjectInputStream) {
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
sourceCache = input.readObject() as MutableMap<URI, SourceFileStructure>
|
||||
@Suppress("UNCHECKED_CAST")
|
||||
generatedTypes = input.readObject() as MutableMap<File, MutableList<String>>
|
||||
|
||||
dependencyCache = HashMap(sourceCache.size * 4)
|
||||
for (sourceInfo in sourceCache.values) {
|
||||
@@ -71,7 +82,6 @@ class JavaClassCache() : Serializable {
|
||||
|
||||
private fun writeObject(output: ObjectOutputStream) {
|
||||
output.writeObject(sourceCache)
|
||||
output.writeObject(generatedTypes)
|
||||
}
|
||||
|
||||
fun isAlreadyProcessed(sourceFile: URI): Boolean {
|
||||
@@ -84,8 +94,7 @@ class JavaClassCache() : Serializable {
|
||||
return true
|
||||
}
|
||||
return try {
|
||||
val fileFromUri = File(sourceFile)
|
||||
sourceCache.containsKey(sourceFile) || generatedTypes.containsKey(fileFromUri)
|
||||
sourceCache.containsKey(sourceFile)
|
||||
} catch (e: IllegalArgumentException) {
|
||||
// unable to create File instance, avoid processing these files
|
||||
true
|
||||
@@ -96,89 +105,66 @@ class JavaClassCache() : Serializable {
|
||||
internal fun getStructure(sourceFile: File) = sourceCache[sourceFile.toURI()]
|
||||
|
||||
/**
|
||||
* Invalidate cache entries for the specified files, and any files that depend on the changed ones. It returns the set of files that
|
||||
* should be re-processed.
|
||||
* */
|
||||
fun invalidateEntriesForChangedFiles(changes: Changes): SourcesToReprocess {
|
||||
val allDirtyFiles = mutableSetOf<URI>()
|
||||
var currentDirtyFiles = changes.sourceChanges.map { it.toURI() }.toMutableSet()
|
||||
|
||||
for (classpathFqName in changes.dirtyFqNamesFromClasspath) {
|
||||
nonTransitiveCache[classpathFqName]?.let {
|
||||
allDirtyFiles.addAll(it)
|
||||
* Compute the list of types that are impacted by source changes i.e [Changes.sourceChanges] and [Changes.dirtyFqNamesFromClasspath]
|
||||
* i.e classpath changes. The search is transitive, if a file is impacted, all files referencing types defined in that file are
|
||||
* also considered impacted. Only original sources and generated sources are reported as impacted (final result does not contain
|
||||
* classpath types).
|
||||
*/
|
||||
fun getAllImpactedTypes(changes: Changes): MutableSet<String> {
|
||||
fun findImpactedTypes(changedType: String, transitiveDeps: MutableSet<String>, nonTransitiveDeps: MutableSet<String>) {
|
||||
dependencyCache[changedType]?.let { impactedSources ->
|
||||
impactedSources.forEach {
|
||||
transitiveDeps.addAll(sourceCache.getValue(it).getDeclaredTypes())
|
||||
}
|
||||
}
|
||||
|
||||
dependencyCache[classpathFqName]?.let {
|
||||
currentDirtyFiles.addAll(it)
|
||||
nonTransitiveCache[changedType]?.let { impactedSources ->
|
||||
impactedSources.forEach {
|
||||
nonTransitiveDeps.addAll(sourceCache.getValue(it).getDeclaredTypes())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
val allDirtyTypes = mutableSetOf<String>()
|
||||
var currentDirtyTypes = getTypesForFiles(changes.sourceChanges).toMutableSet()
|
||||
|
||||
while (currentDirtyFiles.isNotEmpty()) {
|
||||
changes.dirtyFqNamesFromClasspath.forEach { classpathChange ->
|
||||
findImpactedTypes(classpathChange, currentDirtyTypes, allDirtyTypes)
|
||||
}
|
||||
|
||||
val nextRound = mutableSetOf<URI>()
|
||||
for (dirtyFile in currentDirtyFiles) {
|
||||
allDirtyFiles.add(dirtyFile)
|
||||
|
||||
val structure = sourceCache.remove(dirtyFile) ?: continue
|
||||
val dirtyTypes = structure.getDeclaredTypes()
|
||||
allDirtyTypes.addAll(dirtyTypes)
|
||||
|
||||
dirtyTypes.forEach { type ->
|
||||
nonTransitiveCache[type]?.let {
|
||||
allDirtyFiles.addAll(it)
|
||||
}
|
||||
|
||||
dependencyCache[type]?.let {
|
||||
nextRound.addAll(it)
|
||||
}
|
||||
}
|
||||
while (currentDirtyTypes.isNotEmpty()) {
|
||||
val nextRound = mutableSetOf<String>()
|
||||
for (dirtyType in currentDirtyTypes) {
|
||||
allDirtyTypes.add(dirtyType)
|
||||
findImpactedTypes(dirtyType, nextRound, allDirtyTypes)
|
||||
}
|
||||
|
||||
currentDirtyFiles = nextRound.filter { !allDirtyFiles.contains(it) }.toMutableSet()
|
||||
currentDirtyTypes = nextRound.filterTo(HashSet()) { it !in allDirtyTypes }
|
||||
}
|
||||
|
||||
return SourcesToReprocess.Incremental(allDirtyFiles.map { File(it) }, allDirtyTypes, emptyList())
|
||||
}
|
||||
|
||||
/**
|
||||
* For aggregating annotation processors, we always need to reprocess all files annotated with an annotation claimed by the aggregating
|
||||
* annotation processor. This search is not transitive.
|
||||
*/
|
||||
fun invalidateEntriesAnnotatedWith(annotations: Set<String>): Set<File> {
|
||||
val patterns: List<Pattern> = if ("*" in annotations) {
|
||||
// optimize this case - create only one pattern
|
||||
listOf(Pattern.compile(".*"))
|
||||
} else {
|
||||
annotations.map {
|
||||
Pattern.compile(
|
||||
// These are already valid import statements, otherwise run fails when loading the annotation processor.
|
||||
// Handles structure; TypeName [.*] e.g. org.jetbrains.annotations.NotNull and org.jetbrains.annotations.*
|
||||
it.replace(".", "\\.").replace("*", ".+")
|
||||
)
|
||||
}
|
||||
}
|
||||
val matchesAnyPattern = { name: String -> patterns.any { it.matcher(name).matches() } }
|
||||
|
||||
val toReprocess = mutableSetOf<URI>()
|
||||
|
||||
for (cacheEntry in sourceCache) {
|
||||
if (cacheEntry.value.getMentionedAnnotations().any(matchesAnyPattern)) {
|
||||
toReprocess.add(cacheEntry.key)
|
||||
}
|
||||
}
|
||||
|
||||
toReprocess.forEach {
|
||||
sourceCache.remove(it)
|
||||
}
|
||||
|
||||
return toReprocess.map { File(it) }.toSet()
|
||||
return allDirtyTypes
|
||||
}
|
||||
|
||||
internal fun invalidateAll() {
|
||||
sourceCache.clear()
|
||||
generatedTypes.clear()
|
||||
}
|
||||
|
||||
fun getSourceForType(type: String): File? {
|
||||
sourceCache.forEach { (fileUri, typeInfo) ->
|
||||
if (type in typeInfo.getDeclaredTypes()) {
|
||||
return File(fileUri)
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
fun invalidateDataForTypes(impactedTypes: MutableSet<String>) {
|
||||
val allSources = mutableSetOf<URI>()
|
||||
sourceCache.forEach { (fileUri, typeInfo) ->
|
||||
if (typeInfo.getDeclaredTypes().any { it in impactedTypes }) {
|
||||
allSources.add(fileUri)
|
||||
}
|
||||
}
|
||||
|
||||
allSources.forEach { sourceCache.remove(it) }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
+35
-30
@@ -70,11 +70,17 @@ class IncrementalProcessor(private val processor: Processor, private val kind: D
|
||||
}
|
||||
|
||||
fun isUnableToRunIncrementally() = !kind.canRunIncrementally
|
||||
fun getGeneratedToSources() = dependencyCollector.value.getGeneratedToSources()
|
||||
fun getGeneratedToSourcesAll() = dependencyCollector.value.getGeneratedToSourcesAll()
|
||||
fun getAggregatedTypes() = dependencyCollector.value.getAggregatedTypes()
|
||||
fun getRuntimeType(): RuntimeProcType = dependencyCollector.value.getRuntimeType()
|
||||
|
||||
/** Mapping fromm generated file to type that were used as originating elements. For aggregating APs types will be [null]. */
|
||||
fun getGeneratedToSources(): Map<File, String?> = dependencyCollector.value.getGeneratedToSources()
|
||||
|
||||
/** All top-level types that were processed by aggregating APs. */
|
||||
fun getAggregatedTypes() = dependencyCollector.value.getAggregatedTypes()
|
||||
|
||||
/** Mapping from generated class file to type defined in that file. */
|
||||
fun getGeneratedClassFilesToTypes(): Map<File, String> = dependencyCollector.value.getGeneratedClassFilesToTypes()
|
||||
|
||||
fun getRuntimeType(): RuntimeProcType = dependencyCollector.value.getRuntimeType()
|
||||
|
||||
override fun process(annotations: Set<TypeElement>, roundEnv: RoundEnvironment): Boolean {
|
||||
if (getRuntimeType() == RuntimeProcType.AGGREGATING) {
|
||||
@@ -122,8 +128,9 @@ internal class AnnotationProcessorDependencyCollector(
|
||||
private val runtimeProcType: RuntimeProcType,
|
||||
private val warningCollector: (String) -> Unit
|
||||
) {
|
||||
private val generatedToSource = mutableMapOf<File, Pair<String?, File?>?>()
|
||||
private val generatedToSource = mutableMapOf<File, String?>()
|
||||
private val aggregatedTypes = mutableSetOf<String>()
|
||||
private val generatedClassFilesToTypes = mutableMapOf<File, String>()
|
||||
|
||||
private var isFullRebuild = !runtimeProcType.isIncremental
|
||||
|
||||
@@ -131,7 +138,7 @@ internal class AnnotationProcessorDependencyCollector(
|
||||
if (isFullRebuild) return
|
||||
|
||||
if (supportedAnnotationTypes.contains("*")) {
|
||||
aggregatedTypes.addAll(getTopLevelClassNames(roundEnv.rootElements?.filterNotNull() ?: emptyList()))
|
||||
aggregatedTypes.addAll(getTopLevelClassNames(roundEnv.rootElements?.filterNotNull() ?: emptySet()))
|
||||
} else {
|
||||
for (annotation in annotations) {
|
||||
aggregatedTypes.addAll(
|
||||
@@ -149,31 +156,40 @@ internal class AnnotationProcessorDependencyCollector(
|
||||
if (isFullRebuild) return
|
||||
|
||||
val generatedFile = File(createdFile)
|
||||
if (generatedFile.extension == "class") {
|
||||
if (classId == null) {
|
||||
isFullRebuild = true
|
||||
warningCollector.invoke(
|
||||
"Unable to determine type defined in $generatedFile."
|
||||
)
|
||||
return
|
||||
}
|
||||
generatedClassFilesToTypes[generatedFile] = classId
|
||||
}
|
||||
|
||||
if (runtimeProcType == RuntimeProcType.AGGREGATING) {
|
||||
generatedToSource[generatedFile] = classId to null
|
||||
generatedToSource[generatedFile] = null
|
||||
} else {
|
||||
val srcFiles = getSrcFiles(originatingElements)
|
||||
if (srcFiles.size != 1) {
|
||||
val srcClasses = getTopLevelClassNames(originatingElements.filterNotNull())
|
||||
if (srcClasses.size != 1) {
|
||||
isFullRebuild = true
|
||||
warningCollector.invoke(
|
||||
"Expected 1 originating source file when generating $generatedFile, " +
|
||||
"but detected ${srcFiles.size}: [${srcFiles.joinToString()}]."
|
||||
"but detected ${srcClasses.size}: [${srcClasses.joinToString()}]."
|
||||
)
|
||||
} else {
|
||||
generatedToSource[generatedFile] = classId to srcFiles.single()
|
||||
generatedToSource[generatedFile] = srcClasses.single()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
internal fun getGeneratedToSources(): Map<File, File?> = if (isFullRebuild) emptyMap() else generatedToSource.mapValues { (_, value) ->
|
||||
value?.second
|
||||
}
|
||||
|
||||
internal fun getGeneratedToSourcesAll(): Map<File, Pair<String?, File?>?> =
|
||||
if (isFullRebuild) emptyMap() else generatedToSource
|
||||
/** Mapping from generated files to top level class names that cause that file generation. */
|
||||
internal fun getGeneratedToSources(): Map<File, String?> = if (isFullRebuild) emptyMap() else generatedToSource
|
||||
|
||||
internal fun getAggregatedTypes(): Set<String> = if (isFullRebuild) emptySet() else aggregatedTypes
|
||||
|
||||
internal fun getGeneratedClassFilesToTypes(): Map<File, String> = if (isFullRebuild) emptyMap() else generatedClassFilesToTypes
|
||||
|
||||
internal fun getRuntimeType(): RuntimeProcType {
|
||||
return if (isFullRebuild) {
|
||||
RuntimeProcType.NON_INCREMENTAL
|
||||
@@ -183,17 +199,6 @@ internal class AnnotationProcessorDependencyCollector(
|
||||
}
|
||||
}
|
||||
|
||||
private fun getSrcFiles(elements: Array<out Element?>): Set<File> {
|
||||
return elements.filterNotNull().mapNotNull { elem ->
|
||||
var origin = elem
|
||||
while (origin.enclosingElement != null && origin.enclosingElement !is PackageElement) {
|
||||
origin = origin.enclosingElement
|
||||
}
|
||||
val uri = (origin as? Symbol.ClassSymbol)?.sourcefile?.toUri()?.takeIf { it.isAbsolute }
|
||||
uri?.let { File(it).canonicalFile }
|
||||
}.toSet()
|
||||
}
|
||||
|
||||
private const val PACKAGE_TYPE_NAME = "package-info"
|
||||
|
||||
fun getElementName(current: Element?): String? {
|
||||
@@ -211,8 +216,8 @@ fun getElementName(current: Element?): String? {
|
||||
return null
|
||||
}
|
||||
|
||||
private fun getTopLevelClassNames(elements: Collection<Element>): Collection<String> {
|
||||
return elements.mapNotNull { elem ->
|
||||
private fun getTopLevelClassNames(elements: Collection<Element>): Set<String> {
|
||||
return elements.mapNotNullTo(HashSet()) { elem ->
|
||||
var origin = elem
|
||||
while (origin.enclosingElement != null && origin.enclosingElement !is PackageElement) {
|
||||
origin = origin.enclosingElement
|
||||
|
||||
-24
@@ -246,28 +246,4 @@ private class ConstantTreeVisitor(val sourceStructure: SourceFileStructure) : Tr
|
||||
|
||||
sourceStructure.addMentionedConstant(containingClass.qualifiedName.toString(), name.toString())
|
||||
}
|
||||
}
|
||||
|
||||
class GeneratedTypesTaskListener(private val cache: JavaClassCache) : TaskListener {
|
||||
|
||||
override fun started(e: TaskEvent) {
|
||||
// do nothing, we just process on finish
|
||||
}
|
||||
|
||||
override fun finished(e: TaskEvent) {
|
||||
if (e.kind != TaskEvent.Kind.ENTER || cache.isAlreadyProcessed(e.sourceFile.toUri())) return
|
||||
|
||||
val treeVisitor = object : SimpleTreeVisitor<Void, Void>() {
|
||||
override fun visitClass(node: ClassTree, p: Void?): Void? {
|
||||
node as JCTree.JCClassDecl
|
||||
cache.addGeneratedType(node.sym.fullname.toString(), File(e.sourceFile.toUri()))
|
||||
|
||||
node.members.forEach { visit(it, null) }
|
||||
return null
|
||||
}
|
||||
}
|
||||
e.compilationUnit.typeDecls.forEach {
|
||||
it.accept(treeVisitor, null)
|
||||
}
|
||||
}
|
||||
}
|
||||
+2
-2
@@ -36,8 +36,8 @@ class DynamicIncrementalProcessorTest {
|
||||
|
||||
assertEquals(
|
||||
mapOf(
|
||||
generatedSources.resolve("test/UserGenerated.java") to File("plugins/kapt3/kapt3-base/testData/runner/incremental/User.java").absoluteFile,
|
||||
generatedSources.resolve("test/AddressGenerated.java") to File("plugins/kapt3/kapt3-base/testData/runner/incremental/Address.java").absoluteFile
|
||||
generatedSources.resolve("test/UserGenerated.java") to "test.User",
|
||||
generatedSources.resolve("test/AddressGenerated.java") to "test.Address"
|
||||
),
|
||||
dynamic.getGeneratedToSources()
|
||||
)
|
||||
|
||||
+13
-13
@@ -35,8 +35,8 @@ class IsolationgIncrementalProcessorTest {
|
||||
|
||||
assertEquals(
|
||||
mapOf(
|
||||
generatedSources.resolve("test/UserGenerated.java") to File("plugins/kapt3/kapt3-base/testData/runner/incremental/User.java").absoluteFile,
|
||||
generatedSources.resolve("test/AddressGenerated.java") to File("plugins/kapt3/kapt3-base/testData/runner/incremental/Address.java").absoluteFile
|
||||
generatedSources.resolve("test/UserGenerated.java") to "test.User",
|
||||
generatedSources.resolve("test/AddressGenerated.java") to "test.Address"
|
||||
),
|
||||
isolating.getGeneratedToSources()
|
||||
)
|
||||
@@ -62,12 +62,12 @@ class IsolationgIncrementalProcessorTest {
|
||||
|
||||
assertEquals(
|
||||
mapOf(
|
||||
generatedSources.resolve("test/UserGenerated.java") to TEST_DATA_DIR.resolve("User.java").absoluteFile,
|
||||
generatedSources.resolve("test/UserGeneratedClass.class") to TEST_DATA_DIR.resolve("User.java").absoluteFile,
|
||||
generatedSources.resolve("test/UserGeneratedResource") to TEST_DATA_DIR.resolve("User.java").absoluteFile,
|
||||
generatedSources.resolve("test/AddressGenerated.java") to TEST_DATA_DIR.resolve("Address.java").absoluteFile,
|
||||
generatedSources.resolve("test/AddressGeneratedClass.class") to TEST_DATA_DIR.resolve("Address.java").absoluteFile,
|
||||
generatedSources.resolve("test/AddressGeneratedResource") to TEST_DATA_DIR.resolve("Address.java").absoluteFile
|
||||
generatedSources.resolve("test/UserGenerated.java") to "test.User",
|
||||
generatedSources.resolve("test/UserGeneratedClass.class") to "test.User",
|
||||
generatedSources.resolve("test/UserGeneratedResource") to "test.User",
|
||||
generatedSources.resolve("test/AddressGenerated.java") to "test.Address",
|
||||
generatedSources.resolve("test/AddressGeneratedClass.class") to "test.Address",
|
||||
generatedSources.resolve("test/AddressGeneratedResource") to "test.Address"
|
||||
),
|
||||
isolating.getGeneratedToSources()
|
||||
)
|
||||
@@ -95,15 +95,15 @@ class IsolationgIncrementalProcessorTest {
|
||||
isolating.forEach { assertEquals(RuntimeProcType.ISOLATING, it.getRuntimeType()) }
|
||||
assertEquals(
|
||||
mapOf(
|
||||
generatedSources.resolve("test/UserGenerated.java") to File("plugins/kapt3/kapt3-base/testData/runner/incremental/User.java").absoluteFile,
|
||||
generatedSources.resolve("test/AddressGenerated.java") to File("plugins/kapt3/kapt3-base/testData/runner/incremental/Address.java").absoluteFile
|
||||
generatedSources.resolve("test/UserGenerated.java") to "test.User",
|
||||
generatedSources.resolve("test/AddressGenerated.java") to "test.Address"
|
||||
), isolating[0].getGeneratedToSources()
|
||||
)
|
||||
|
||||
assertEquals(
|
||||
mapOf(
|
||||
generatedSources.resolve("test/UserGeneratedTwo.java") to File("plugins/kapt3/kapt3-base/testData/runner/incremental/User.java").absoluteFile,
|
||||
generatedSources.resolve("test/AddressGeneratedTwo.java") to File("plugins/kapt3/kapt3-base/testData/runner/incremental/Address.java").absoluteFile
|
||||
generatedSources.resolve("test/UserGeneratedTwo.java") to "test.User",
|
||||
generatedSources.resolve("test/AddressGeneratedTwo.java") to "test.Address"
|
||||
), isolating[1].getGeneratedToSources()
|
||||
)
|
||||
}
|
||||
@@ -116,7 +116,7 @@ class IsolationgIncrementalProcessorTest {
|
||||
|
||||
assertEquals(
|
||||
mapOf(
|
||||
generatedSources.resolve("test/UserGenerated.java") to File("plugins/kapt3/kapt3-base/testData/runner/incremental/User.java").absoluteFile
|
||||
generatedSources.resolve("test/UserGenerated.java") to "test.User"
|
||||
), isolating[0].getGeneratedToSources()
|
||||
)
|
||||
}
|
||||
|
||||
+7
-29
@@ -12,7 +12,6 @@ import org.junit.Rule
|
||||
import org.junit.Test
|
||||
import org.junit.rules.TemporaryFolder
|
||||
import java.io.File
|
||||
import java.io.ObjectOutputStream
|
||||
|
||||
class JavaClassCacheManagerTest {
|
||||
|
||||
@@ -56,7 +55,8 @@ class JavaClassCacheManagerTest {
|
||||
}
|
||||
prepareForIncremental()
|
||||
|
||||
val dirtyFiles = cache.invalidateAndGetDirtyFiles(listOf(File("Mentioned.java")), emptyList()) as SourcesToReprocess.Incremental
|
||||
val dirtyFiles =
|
||||
cache.invalidateAndGetDirtyFiles(listOf(File("Mentioned.java").absoluteFile), emptyList()) as SourcesToReprocess.Incremental
|
||||
assertEquals(
|
||||
listOf(
|
||||
File("Mentioned.java").absoluteFile,
|
||||
@@ -84,7 +84,8 @@ class JavaClassCacheManagerTest {
|
||||
}
|
||||
prepareForIncremental()
|
||||
|
||||
val dirtyFiles = cache.invalidateAndGetDirtyFiles(listOf(File("Mentioned.java")), emptyList()) as SourcesToReprocess.Incremental
|
||||
val dirtyFiles =
|
||||
cache.invalidateAndGetDirtyFiles(listOf(File("Mentioned.java").absoluteFile), emptyList()) as SourcesToReprocess.Incremental
|
||||
assertEquals(
|
||||
listOf(
|
||||
File("Mentioned.java").absoluteFile,
|
||||
@@ -112,7 +113,8 @@ class JavaClassCacheManagerTest {
|
||||
}
|
||||
prepareForIncremental()
|
||||
|
||||
val dirtyFiles = cache.invalidateAndGetDirtyFiles(listOf(File("TwoTypes.java")), emptyList()) as SourcesToReprocess.Incremental
|
||||
val dirtyFiles =
|
||||
cache.invalidateAndGetDirtyFiles(listOf(File("TwoTypes.java").absoluteFile), emptyList()) as SourcesToReprocess.Incremental
|
||||
assertEquals(
|
||||
listOf(
|
||||
File("TwoTypes.java").absoluteFile,
|
||||
@@ -155,7 +157,7 @@ class JavaClassCacheManagerTest {
|
||||
|
||||
val dirtyFiles =
|
||||
cache.invalidateAndGetDirtyFiles(
|
||||
listOf(File("Constants.java")), emptyList()
|
||||
listOf(File("Constants.java").absoluteFile), emptyList()
|
||||
) as SourcesToReprocess.Incremental
|
||||
assertEquals(
|
||||
listOf(File("Constants.java").absoluteFile, File("MentionsConst.java").absoluteFile),
|
||||
@@ -163,30 +165,6 @@ class JavaClassCacheManagerTest {
|
||||
)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun testWithAnnotations() {
|
||||
SourceFileStructure(File("Annotated1.java").toURI()).also {
|
||||
it.addDeclaredType("test.Annotated1")
|
||||
it.addMentionedAnnotations("test.Annotation")
|
||||
cache.javaCache.addSourceStructure(it)
|
||||
}
|
||||
SourceFileStructure(File("Annotated2.java").toURI()).also {
|
||||
it.addDeclaredType("test.Annotated2")
|
||||
it.addMentionedAnnotations("com.test.MyAnnotation")
|
||||
cache.javaCache.addSourceStructure(it)
|
||||
}
|
||||
SourceFileStructure(File("Annotated3.java").toURI()).also {
|
||||
it.addDeclaredType("test.Annotated3")
|
||||
it.addMentionedAnnotations("Runnable")
|
||||
cache.javaCache.addSourceStructure(it)
|
||||
}
|
||||
prepareForIncremental()
|
||||
|
||||
assertEquals(setOf(File("Annotated1.java").absoluteFile), cache.javaCache.invalidateEntriesAnnotatedWith(setOf("test.Annotation")))
|
||||
assertEquals(setOf(File("Annotated2.java").absoluteFile), cache.javaCache.invalidateEntriesAnnotatedWith(setOf("com.test.*")))
|
||||
assertEquals(setOf(File("Annotated3.java").absoluteFile), cache.javaCache.invalidateEntriesAnnotatedWith(setOf("*")))
|
||||
}
|
||||
|
||||
private fun prepareForIncremental() {
|
||||
cache.close()
|
||||
cache = JavaClassCacheManager(cacheDir)
|
||||
|
||||
+3
-3
@@ -36,7 +36,7 @@ class TestSimpleIncrementalAptCache {
|
||||
fun testAggregatingAnnotations() {
|
||||
runProcessor(SimpleProcessor().toAggregating())
|
||||
|
||||
val dirtyFiles = cache.invalidateAndGetDirtyFiles(listOf(TEST_DATA_DIR.resolve("User.java")), emptyList()) as SourcesToReprocess.Incremental
|
||||
val dirtyFiles = cache.invalidateAndGetDirtyFiles(listOf(TEST_DATA_DIR.resolve("User.java").absoluteFile), emptyList()) as SourcesToReprocess.Incremental
|
||||
assertEquals(
|
||||
listOf(TEST_DATA_DIR.resolve("User.java").absoluteFile, TEST_DATA_DIR.resolve("Address.java").absoluteFile),
|
||||
dirtyFiles.toReprocess
|
||||
@@ -49,7 +49,7 @@ class TestSimpleIncrementalAptCache {
|
||||
fun testIsolatingAnnotations() {
|
||||
runProcessor(SimpleProcessor().toIsolating())
|
||||
|
||||
val dirtyFiles = cache.invalidateAndGetDirtyFiles(listOf(TEST_DATA_DIR.resolve("User.java")), emptyList()) as SourcesToReprocess.Incremental
|
||||
val dirtyFiles = cache.invalidateAndGetDirtyFiles(listOf(TEST_DATA_DIR.resolve("User.java").absoluteFile), emptyList()) as SourcesToReprocess.Incremental
|
||||
assertFalse(generatedSources.resolve("test/UserGenerated.java").exists())
|
||||
assertEquals(
|
||||
listOf(TEST_DATA_DIR.resolve("User.java").absoluteFile),
|
||||
@@ -61,7 +61,7 @@ class TestSimpleIncrementalAptCache {
|
||||
fun testNonIncremental() {
|
||||
runProcessor(SimpleProcessor().toNonIncremental())
|
||||
|
||||
val dirtyFiles = cache.invalidateAndGetDirtyFiles(listOf(TEST_DATA_DIR.resolve("User.java")), emptyList())
|
||||
val dirtyFiles = cache.invalidateAndGetDirtyFiles(listOf(TEST_DATA_DIR.resolve("User.java").absoluteFile), emptyList())
|
||||
assertTrue(dirtyFiles is SourcesToReprocess.FullRebuild)
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user