From: zjshen@apache.org
To: common-commits@hadoop.apache.org
Reply-To: common-dev@hadoop.apache.org
Date: Mon, 13 Jul 2015 18:59:34 -0000
Subject: [22/48] hadoop git commit: HADOOP-12194. Support for incremental generation in the protoc plugin.

HADOOP-12194. Support for incremental generation in the protoc plugin.

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ca5d06b4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ca5d06b4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ca5d06b4

Branch: refs/heads/YARN-2928
Commit: ca5d06b44f12ac0e48454b791c12e7e9bba1f399
Parents: 7a9b6ad
Author: Andrew Wang
Authored: Wed Jul 8 11:09:43 2015 -0700
Committer: Zhijie Shen
Committed: Mon Jul 13 11:43:29 2015 -0700

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 hadoop-maven-plugins/pom.xml                    |   8 +
 .../hadoop/maven/plugin/protoc/ProtocMojo.java  | 188 +++++++++++++++++--
 3 files changed, 185 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ca5d06b4/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 8ab109d..6cc6b71 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -719,6 +719,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12172. FsShell mkdir -p makes an unnecessary check for the
     existence of the parent. (cnauroth)
 
+    HADOOP-12194. Support for incremental generation in the protoc plugin.
+    (wang)
+
   BUG FIXES
 
     HADOOP-11802: DomainSocketWatcher thread terminates sometimes after there

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ca5d06b4/hadoop-maven-plugins/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-maven-plugins/pom.xml b/hadoop-maven-plugins/pom.xml
index b48b9ac..b39c22b 100644
--- a/hadoop-maven-plugins/pom.xml
+++ b/hadoop-maven-plugins/pom.xml
@@ -47,6 +47,14 @@
       <version>${maven.plugin-tools.version}</version>
       <scope>provided</scope>
     </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+    </dependency>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ca5d06b4/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java
----------------------------------------------------------------------
diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java
index 465b713..b9be33e 100644
--- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java
@@ -22,11 +22,21 @@ import org.apache.maven.plugins.annotations.LifecyclePhase;
 import org.apache.maven.plugins.annotations.Mojo;
 import org.apache.maven.plugins.annotations.Parameter;
 import org.apache.maven.project.MavenProject;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
 
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
 import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
-
+import java.util.Map;
+import java.util.zip.CRC32;
 
 @Mojo(name="protoc", defaultPhase = LifecyclePhase.GENERATE_SOURCES)
 public class ProtocMojo extends AbstractMojo {
@@ -49,6 +59,118 @@ public class ProtocMojo extends AbstractMojo {
   @Parameter(required=true)
   private String protocVersion;
 
+  @Parameter(defaultValue =
+      "${project.build.directory}/hadoop-maven-plugins-protoc-checksums.json")
+  private String checksumPath;
+
+  /**
+   * Compares include and source file checksums against previously computed
+   * checksums stored in a json file in the build directory.
+   */
+  public class ChecksumComparator {
+
+    private final Map<String, Long> storedChecksums;
+    private final Map<String, Long> computedChecksums;
+
+    private final File checksumFile;
+
+    ChecksumComparator(String checksumPath) throws IOException {
+      checksumFile = new File(checksumPath);
+      // Read in the checksums
+      if (checksumFile.exists()) {
+        ObjectMapper mapper = new ObjectMapper();
+        storedChecksums = mapper
+            .readValue(checksumFile, new TypeReference<Map<String, Long>>() {
+            });
+      } else {
+        storedChecksums = new HashMap<>(0);
+      }
+      computedChecksums = new HashMap<>();
+    }
+
+    public boolean hasChanged(File file) throws IOException {
+      if (!file.exists()) {
+        throw new FileNotFoundException(
+            "Specified protoc include or source does not exist: " + file);
+      }
+      if (file.isDirectory()) {
+        return hasDirectoryChanged(file);
+      } else if (file.isFile()) {
+        return hasFileChanged(file);
+      } else {
+        throw new IOException("Not a file or directory: " + file);
+      }
+    }
+
+    private boolean hasDirectoryChanged(File directory) throws IOException {
+      File[] listing = directory.listFiles();
+      boolean changed = false;
+      // Do not exit early, since we need to compute and save checksums
+      // for each file within the directory.
+      for (File f : listing) {
+        if (f.isDirectory()) {
+          if (hasDirectoryChanged(f)) {
+            changed = true;
+          }
+        } else if (f.isFile()) {
+          if (hasFileChanged(f)) {
+            changed = true;
+          }
+        } else {
+          getLog().debug("Skipping entry that is not a file or directory: "
+              + f);
+        }
+      }
+      return changed;
+    }
+
+    private boolean hasFileChanged(File file) throws IOException {
+      long computedCsum = computeChecksum(file);
+
+      // Return if the generated csum matches the stored csum
+      Long storedCsum = storedChecksums.get(file.getCanonicalPath());
+      if (storedCsum == null || storedCsum.longValue() != computedCsum) {
+        // It has changed.
+        return true;
+      }
+      return false;
+    }
+
+    private long computeChecksum(File file) throws IOException {
+      // If we've already computed the csum, reuse the computed value
+      final String canonicalPath = file.getCanonicalPath();
+      if (computedChecksums.containsKey(canonicalPath)) {
+        return computedChecksums.get(canonicalPath);
+      }
+      // Compute the csum for the file
+      CRC32 crc = new CRC32();
+      byte[] buffer = new byte[1024*64];
+      try (BufferedInputStream in =
+          new BufferedInputStream(new FileInputStream(file))) {
+        while (true) {
+          int read = in.read(buffer);
+          if (read <= 0) {
+            break;
+          }
+          crc.update(buffer, 0, read);
+        }
+      }
+      // Save it in the generated map and return
+      final long computedCsum = crc.getValue();
+      computedChecksums.put(canonicalPath, computedCsum);
+      return crc.getValue();
+    }
+
+    public void writeChecksums() throws IOException {
+      ObjectMapper mapper = new ObjectMapper();
+      try (BufferedOutputStream out = new BufferedOutputStream(
+          new FileOutputStream(checksumFile))) {
+        mapper.writeValue(out, computedChecksums);
+        getLog().info("Wrote protoc checksums to file " + checksumFile);
+      }
+    }
+  }
+
   public void execute() throws MojoExecutionException {
     try {
       List<String> command = new ArrayList<String>();
@@ -58,7 +180,7 @@ public class ProtocMojo extends AbstractMojo {
       List<String> out = new ArrayList<String>();
       if (exec.run(command, out) == 127) {
         getLog().error("protoc, not found at: " + protocCommand);
-        throw new MojoExecutionException("protoc failure");
+        throw new MojoExecutionException("protoc failure");
       } else {
         if (out.isEmpty()) {
           getLog().error("stdout: " + out);
@@ -67,36 +189,74 @@ public class ProtocMojo extends AbstractMojo {
         } else {
           if (!out.get(0).endsWith(protocVersion)) {
             throw new MojoExecutionException(
-                "protoc version is '" + out.get(0) + "', expected version is '"
-                    + protocVersion + "'");
+                "protoc version is '" + out.get(0) + "', expected version is '"
+                + protocVersion + "'");
           }
         }
       }
       if (!output.mkdirs()) {
         if (!output.exists()) {
-          throw new MojoExecutionException("Could not create directory: " +
-              output);
+          throw new MojoExecutionException(
+              "Could not create directory: " + output);
         }
       }
+
+      // Whether the import or source protoc files have changed.
+      ChecksumComparator comparator = new ChecksumComparator(checksumPath);
+      boolean importsChanged = false;
+
       command = new ArrayList<String>();
       command.add(protocCommand);
       command.add("--java_out=" + output.getCanonicalPath());
       if (imports != null) {
         for (File i : imports) {
+          if (comparator.hasChanged(i)) {
+            importsChanged = true;
+          }
           command.add("-I" + i.getCanonicalPath());
         }
       }
+      // Filter to generate classes for just the changed source files.
+      List<File> changedSources = new ArrayList<>();
+      boolean sourcesChanged = false;
       for (File f : FileSetUtils.convertFileSetToFiles(source)) {
-        command.add(f.getCanonicalPath());
+        // Need to recompile if the source has changed, or if any import has
+        // changed.
+        if (comparator.hasChanged(f) || importsChanged) {
+          sourcesChanged = true;
+          changedSources.add(f);
+          command.add(f.getCanonicalPath());
+        }
       }
-      exec = new Exec(this);
-      out = new ArrayList<String>();
-      if (exec.run(command, out) != 0) {
-        getLog().error("protoc compiler error");
-        for (String s : out) {
-          getLog().error(s);
+
+      if (!sourcesChanged && !importsChanged) {
+        getLog().info("No changes detected in protoc files, skipping "
+            + "generation.");
+      } else {
+        if (getLog().isDebugEnabled()) {
+          StringBuilder b = new StringBuilder();
+          b.append("Generating classes for the following protoc files: [");
+          String prefix = "";
+          for (File f : changedSources) {
+            b.append(prefix);
+            b.append(f.toString());
+            prefix = ", ";
+          }
+          b.append("]");
+          getLog().debug(b.toString());
         }
-        throw new MojoExecutionException("protoc failure");
+
+        exec = new Exec(this);
+        out = new ArrayList<String>();
+        if (exec.run(command, out) != 0) {
+          getLog().error("protoc compiler error");
+          for (String s : out) {
+            getLog().error(s);
+          }
+          throw new MojoExecutionException("protoc failure");
+        }
+        // Write the new checksum file on success.
+        comparator.writeChecksums();
       }
     } catch (Throwable ex) {
      throw new MojoExecutionException(ex.toString(), ex);
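
----------------------------------------------------------------------

As a rough illustration of the change-detection scheme in the patch above: the plugin CRC32s every include and source file, compares the values against the checksums saved by the previous run, skips protoc when nothing differs, and rewrites the saved checksums only after a successful run. The standalone sketch below is not part of the commit; the class name, the file name "checksums.properties", and the use of java.util.Properties in place of the plugin's Jackson-written JSON file are assumptions made purely for the example.

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Properties;
import java.util.zip.CRC32;

// Hypothetical standalone demo of the incremental-generation idea:
// checksum each input file, compare against values saved by the previous
// run, and rewrite the saved values afterwards. Properties stands in for
// the JSON checksum file the plugin writes with Jackson.
public class IncrementalCheckDemo {

  // CRC32 over the file contents, mirroring the checksum computation above.
  static long checksum(File file) throws IOException {
    CRC32 crc = new CRC32();
    byte[] buffer = new byte[64 * 1024];
    try (BufferedInputStream in =
        new BufferedInputStream(new FileInputStream(file))) {
      int read;
      while ((read = in.read(buffer)) > 0) {
        crc.update(buffer, 0, read);
      }
    }
    return crc.getValue();
  }

  public static void main(String[] args) throws IOException {
    File store = new File("checksums.properties"); // stands in for the JSON file
    Properties stored = new Properties();
    if (store.exists()) {
      try (FileInputStream in = new FileInputStream(store)) {
        stored.load(in);
      }
    }

    boolean changed = false;
    Properties computed = new Properties();
    for (String arg : args) {                       // e.g. paths to .proto files
      File f = new File(arg);
      long csum = checksum(f);
      String key = f.getCanonicalPath();
      String previous = stored.getProperty(key);
      if (previous == null || Long.parseLong(previous) != csum) {
        changed = true;                             // this input needs regeneration
      }
      computed.setProperty(key, Long.toString(csum));
    }

    System.out.println(changed
        ? "changes detected, would run protoc"
        : "no changes detected, generation skipped");

    // Persist the new checksums for the next run (the plugin does this only
    // after protoc succeeds).
    try (FileOutputStream out = new FileOutputStream(store)) {
      computed.store(out, "protoc input checksums");
    }
  }
}

Run twice over the same .proto files, the second invocation reports that generation can be skipped unless a file was edited in between, which is the behaviour the mojo change introduces for repeated builds.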