From a982dda90e788a7bf2acea2e449943eb6d39cab9 Mon Sep 17 00:00:00 2001 From: Anthony Morris Date: Tue, 18 Oct 2016 15:56:05 +0100 Subject: [PATCH] Upstream changes to archive patcher: - Add a DeltaFriendlyOldBlobSizeLimiter - Fixes for bsdiff for some edge cases --- .classpath | 2 + .../explainer/PatchExplainer.java | 15 +- .../explainer/PatchExplainerTest.java | 18 +- .../DeltaFriendlyOldBlobSizeLimiter.java | 118 +++++++ .../generator/FileByFileV1DeltaGenerator.java | 42 +-- .../generator/PreDiffExecutor.java | 25 +- .../generator/PreDiffPlanner.java | 16 +- .../generator/QualifiedRecommendation.java | 14 + .../generator/RecommendationModifier.java | 5 +- .../generator/TotalRecompressionLimiter.java | 28 +- .../generator/bsdiff/BsDiffMatcher.java | 13 +- .../DeltaFriendlyOldBlobSizeLimiterTest.java | 316 ++++++++++++++++++ .../FileByFileV1DeltaGeneratorTest.java | 2 +- .../generator/PreDiffPlannerTest.java | 41 ++- .../TotalRecompressionLimiterTest.java | 24 +- .../FileByFileV1IntegrationTest.java | 4 +- .../archivepatcher/tools/FileByFileTool.java | 49 ++- .../tools/PatchExplainerTool.java | 34 +- 18 files changed, 655 insertions(+), 111 deletions(-) create mode 100644 generator/src/main/java/com/google/archivepatcher/generator/DeltaFriendlyOldBlobSizeLimiter.java create mode 100644 generator/src/test/java/com/google/archivepatcher/generator/DeltaFriendlyOldBlobSizeLimiterTest.java diff --git a/.classpath b/.classpath index a846001d..ff66d5a5 100644 --- a/.classpath +++ b/.classpath @@ -4,8 +4,10 @@ + + diff --git a/explainer/src/main/java/com/google/archivepatcher/explainer/PatchExplainer.java b/explainer/src/main/java/com/google/archivepatcher/explainer/PatchExplainer.java index ddb969d3..8a01fa84 100644 --- a/explainer/src/main/java/com/google/archivepatcher/explainer/PatchExplainer.java +++ b/explainer/src/main/java/com/google/archivepatcher/explainer/PatchExplainer.java @@ -89,14 +89,14 @@ public PatchExplainer(Compressor compressor, DeltaGenerator deltaGenerator) { * * @param oldFile the old file * @param newFile the new file - * @param recommendationModifier optionally, a {@link RecommendationModifier} to use during patch + * @param recommendationModifiers optionally, {@link RecommendationModifier}s to use during patch * planning. If null, a normal patch is generated. * @return a list of the explanations for each entry that would be * @throws IOException if unable to read data * @throws InterruptedException if any thread interrupts this thread */ public List explainPatch( - File oldFile, File newFile, RecommendationModifier recommendationModifier) + File oldFile, File newFile, RecommendationModifier... 
recommendationModifiers) throws IOException, InterruptedException { List result = new ArrayList<>(); @@ -118,11 +118,12 @@ public List explainPatch( } Uncompressor uncompressor = new DeflateUncompressor(); - PreDiffExecutor executor = - new PreDiffExecutor.Builder() - .readingOriginalFiles(oldFile, newFile) - .withRecommendationModifier(recommendationModifier) - .build(); + PreDiffExecutor.Builder builder = + new PreDiffExecutor.Builder().readingOriginalFiles(oldFile, newFile); + for (RecommendationModifier modifier : recommendationModifiers) { + builder.withRecommendationModifier(modifier); + } + PreDiffExecutor executor = builder.build(); PreDiffPlan plan = executor.prepareForDiffing(); try (TempFileHolder oldTemp = new TempFileHolder(); TempFileHolder newTemp = new TempFileHolder(); diff --git a/explainer/src/test/java/com/google/archivepatcher/explainer/PatchExplainerTest.java b/explainer/src/test/java/com/google/archivepatcher/explainer/PatchExplainerTest.java index b3ebb102..1a5a379a 100644 --- a/explainer/src/test/java/com/google/archivepatcher/explainer/PatchExplainerTest.java +++ b/explainer/src/test/java/com/google/archivepatcher/explainer/PatchExplainerTest.java @@ -163,7 +163,7 @@ public void testExplainPatch_CompressedBytesIdentical() throws Exception { save(bytes, oldFile); save(bytes, newFile); PatchExplainer explainer = new PatchExplainer(null, null); - List explanations = explainer.explainPatch(oldFile, newFile, null); + List explanations = explainer.explainPatch(oldFile, newFile); EntryExplanation expected = new EntryExplanation( @@ -178,7 +178,7 @@ public void testExplainPatch_CompressedBytesChanged_UncompressedUnchanged() thro save(oldBytes, oldFile); save(newBytes, newFile); PatchExplainer explainer = new PatchExplainer(null, null); - List explanations = explainer.explainPatch(oldFile, newFile, null); + List explanations = explainer.explainPatch(oldFile, newFile); // The compressed bytes changed, but the uncompressed bytes are the same. Thus the patch size // should be zero, because the entries are actually identical in the delta-friendly files. // Additionally no diffing or compression should be performed. @@ -201,7 +201,7 @@ public void testExplainPatch_CompressedBytesChanged_UncompressedChanged() throws FakeCompressor fakeCompressor = new FakeCompressor(FakeDeltaGenerator.OUTPUT.getBytes("US-ASCII")); PatchExplainer explainer = new PatchExplainer(fakeCompressor, fakeDeltaGenerator); - List explanations = explainer.explainPatch(oldFile, newFile, null); + List explanations = explainer.explainPatch(oldFile, newFile); // The compressed bytes changed, and so did the uncompressed bytes. The patch size should be // non-zero because the entries are not identical in the delta-friendly files. EntryExplanation expected = @@ -250,7 +250,7 @@ public void testExplainPatch_BothEntriesUncompressed_BytesUnchanged() throws Exc save(oldBytes, oldFile); save(newBytes, newFile); PatchExplainer explainer = new PatchExplainer(null, null); - List explanations = explainer.explainPatch(oldFile, newFile, null); + List explanations = explainer.explainPatch(oldFile, newFile); // The uncompressed bytes are the same. Thus the patch size should be zero, because the entries // are identical in the delta-friendly files. Additionally no diffing or compression should be // performed. 
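[Editor's illustration, not part of the patch] With the varargs signature above, callers pass zero or more modifiers straight to explainPatch instead of a single nullable argument. A minimal sketch, assuming this project's DeflateCompressor and BsDiffDeltaGenerator classes plus hypothetical paths and byte budgets:

    PatchExplainer explainer =
        new PatchExplainer(new DeflateCompressor(), new BsDiffDeltaGenerator());
    List<EntryExplanation> explanations =
        explainer.explainPatch(
            new File("old.zip"),
            new File("new.zip"),
            new TotalRecompressionLimiter(50L * 1024 * 1024),         // hypothetical 50 MiB cap
            new DeltaFriendlyOldBlobSizeLimiter(200L * 1024 * 1024)); // hypothetical 200 MiB cap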
@@ -273,7 +273,7 @@ public void testExplainPatch_BothEntriesUncompressed_BytesChanged() throws Excep FakeCompressor fakeCompressor = new FakeCompressor(FakeDeltaGenerator.OUTPUT.getBytes("US-ASCII")); PatchExplainer explainer = new PatchExplainer(fakeCompressor, fakeDeltaGenerator); - List explanations = explainer.explainPatch(oldFile, newFile, null); + List explanations = explainer.explainPatch(oldFile, newFile); // The uncompressed bytes are not the same. Thus the patch size should be non-zero. EntryExplanation expected = new EntryExplanation( @@ -297,7 +297,7 @@ public void testExplainPatch_CompressedChangedToUncompressed() throws Exception FakeCompressor fakeCompressor = new FakeCompressor(FakeDeltaGenerator.OUTPUT.getBytes("US-ASCII")); PatchExplainer explainer = new PatchExplainer(fakeCompressor, fakeDeltaGenerator); - List explanations = explainer.explainPatch(oldFile, newFile, null); + List explanations = explainer.explainPatch(oldFile, newFile); EntryExplanation expected = new EntryExplanation( path(ENTRY_A1_STORED), @@ -320,7 +320,7 @@ public void testExplainPatch_UncompressedChangedToCompressed() throws Exception FakeCompressor fakeCompressor = new FakeCompressor(FakeDeltaGenerator.OUTPUT.getBytes("US-ASCII")); PatchExplainer explainer = new PatchExplainer(fakeCompressor, fakeDeltaGenerator); - List explanations = explainer.explainPatch(oldFile, newFile, null); + List explanations = explainer.explainPatch(oldFile, newFile); EntryExplanation expected = new EntryExplanation( path(ENTRY_A1_LEVEL_6), @@ -354,7 +354,7 @@ public void testExplainPatch_Unsuitable() throws Exception { FakeCompressor fakeCompressor = new FakeCompressor(FakeDeltaGenerator.OUTPUT.getBytes("US-ASCII")); PatchExplainer explainer = new PatchExplainer(fakeCompressor, fakeDeltaGenerator); - List explanations = explainer.explainPatch(oldFile, newFile, null); + List explanations = explainer.explainPatch(oldFile, newFile); EntryExplanation expected = new EntryExplanation( path(ENTRY_A1_LEVEL_6), @@ -373,7 +373,7 @@ public void testExplainPatch_NewFile() throws Exception { FakeCompressor fakeCompressor = new FakeCompressor(ENTRY_B_LEVEL_6.getCompressedBinaryContent()); PatchExplainer explainer = new PatchExplainer(fakeCompressor, null); - List explanations = explainer.explainPatch(oldFile, newFile, null); + List explanations = explainer.explainPatch(oldFile, newFile); EntryExplanation expected = new EntryExplanation( path(ENTRY_B_LEVEL_6), diff --git a/generator/src/main/java/com/google/archivepatcher/generator/DeltaFriendlyOldBlobSizeLimiter.java b/generator/src/main/java/com/google/archivepatcher/generator/DeltaFriendlyOldBlobSizeLimiter.java new file mode 100644 index 00000000..4cafebc0 --- /dev/null +++ b/generator/src/main/java/com/google/archivepatcher/generator/DeltaFriendlyOldBlobSizeLimiter.java @@ -0,0 +1,118 @@ +// Copyright 2016 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package com.google.archivepatcher.generator; + +import java.io.File; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; + +/** + * Limits the size of the delta-friendly old blob, which is an implicit limitation on the amount of + * temp space required to apply a patch. + * + *

This class implements the following algorithm: + * + *
  1. Check the size of the old archive and subtract it from the maximum size; this is the number + * of bytes that can be used to uncompress entries in the delta-friendly old file. + *
  2. Identify all of the {@link QualifiedRecommendation}s that have {@link + * Recommendation#uncompressOldEntry} set to true. These identify all the entries + * that would be uncompressed in the delta-friendly old file. + *
  3. Sort those {@link QualifiedRecommendation}s in order of decreasing uncompressed size. + *
  4. Iterate over the list in order. For each entry, calculate the difference between the + * uncompressed size and the compressed size; this is the number of bytes that would be + * consumed to transform the data from compressed to uncompressed in the delta-friendly old + * file. If the number of bytes that would be consumed is less than the number of bytes + * remaining before hitting the cap, retain it; else, discard it. + *
  5. Return the resulting list of the retained entries. Note that the order of this list may not + * be the same as the input order (i.e., it has been sorted in order of decreasing uncompressed + * size). + *
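[Editor's illustration, not part of the patch] A worked sketch of the budget arithmetic from steps 1 and 4 above, mirroring the getModifiedRecommendations implementation below; all sizes are hypothetical:

    // Step 1: with a 100 MiB limit and an 80 MiB old archive, 20 MiB may be spent on expansion.
    long maxSizeBytes = 100L * 1024 * 1024;
    long oldArchiveSizeBytes = 80L * 1024 * 1024; // oldFile.length()
    long bytesRemaining = maxSizeBytes - oldArchiveSizeBytes; // 20 MiB

    // Step 4: uncompressing an entry that is 5 MiB compressed / 12 MiB uncompressed grows the
    // delta-friendly old blob by the 7 MiB difference, which fits the remaining budget.
    long extraBytesConsumed = (12L * 1024 * 1024) - (5L * 1024 * 1024); // 7 MiB
    boolean retain = bytesRemaining - extraBytesConsumed >= 0; // true; 13 MiB would remain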
+ */ +public class DeltaFriendlyOldBlobSizeLimiter implements RecommendationModifier { + + /** The maximum size of the delta-friendly old blob. */ + private final long maxSizeBytes; + + private static final Comparator COMPARATOR = + new UncompressedOldEntrySizeComparator(); + + /** + * Create a new limiter that will restrict the total size of the delta-friendly old blob. + * + * @param maxSizeBytes the maximum size of the delta-friendly old blob + */ + public DeltaFriendlyOldBlobSizeLimiter(long maxSizeBytes) { + if (maxSizeBytes < 0) { + throw new IllegalArgumentException("maxSizeBytes must be non-negative: " + maxSizeBytes); + } + this.maxSizeBytes = maxSizeBytes; + } + + @Override + public List getModifiedRecommendations( + File oldFile, File newFile, List originalRecommendations) { + + List sorted = sortRecommendations(originalRecommendations); + + List result = new ArrayList<>(sorted.size()); + long bytesRemaining = maxSizeBytes - oldFile.length(); + for (QualifiedRecommendation originalRecommendation : sorted) { + if (!originalRecommendation.getRecommendation().uncompressOldEntry) { + // Keep the original recommendation, no need to track size since it won't be uncompressed. + result.add(originalRecommendation); + } else { + long extraBytesConsumed = + originalRecommendation.getOldEntry().getUncompressedSize() + - originalRecommendation.getOldEntry().getCompressedSize(); + if (bytesRemaining - extraBytesConsumed >= 0) { + // Keep the original recommendation, but also subtract from the remaining space. + result.add(originalRecommendation); + bytesRemaining -= extraBytesConsumed; + } else { + // Update the recommendation to prevent uncompressing this tuple. + result.add( + new QualifiedRecommendation( + originalRecommendation.getOldEntry(), + originalRecommendation.getNewEntry(), + Recommendation.UNCOMPRESS_NEITHER, + RecommendationReason.RESOURCE_CONSTRAINED)); + } + } + } + return result; + } + + private static List sortRecommendations( + List originalRecommendations) { + List sorted = + new ArrayList(originalRecommendations); + Collections.sort(sorted, COMPARATOR); + Collections.reverse(sorted); + return sorted; + } + + /** Helper class implementing the sort order described in the class documentation. */ + private static class UncompressedOldEntrySizeComparator + implements Comparator { + @Override + public int compare(QualifiedRecommendation qr1, QualifiedRecommendation qr2) { + return Long.compare( + qr1.getOldEntry().getUncompressedSize(), qr2.getOldEntry().getUncompressedSize()); + } + } +} diff --git a/generator/src/main/java/com/google/archivepatcher/generator/FileByFileV1DeltaGenerator.java b/generator/src/main/java/com/google/archivepatcher/generator/FileByFileV1DeltaGenerator.java index 67c4bde8..8c817619 100644 --- a/generator/src/main/java/com/google/archivepatcher/generator/FileByFileV1DeltaGenerator.java +++ b/generator/src/main/java/com/google/archivepatcher/generator/FileByFileV1DeltaGenerator.java @@ -20,33 +20,33 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; /** * Generates file-by-file patches. */ public class FileByFileV1DeltaGenerator implements DeltaGenerator { - /** Optional modifier for planning and patch generation. */ - private final RecommendationModifier recommendationModifier; - - /** - * Constructs a new generator for File-by-File v1 patches, using the default configuration. 
This - * is equivalent to calling {@link #FileByFileV1DeltaGenerator(RecommendationModifier)} with a - * null {@link RecommendationModifier} argument. - */ - public FileByFileV1DeltaGenerator() { - this(null); - } + /** Optional modifiers for planning and patch generation. */ + private final List recommendationModifiers; /** * Constructs a new generator for File-by-File v1 patches, using the specified configuration. * - * @param recommendationModifier optionally, a {@link RecommendationModifier} to use for modifying - * the planning phase of patch generation. This can be used to, e.g., limit the total amount - * of recompression that a patch applier needs to do. + * @param recommendationModifiers optionally, {@link RecommendationModifier}s to use for modifying + * the planning phase of patch generation. These can be used to, e.g., limit the total amount + * of recompression that a patch applier needs to do. Modifiers are applied in the order they + * are specified. */ - public FileByFileV1DeltaGenerator(RecommendationModifier recommendationModifier) { - this.recommendationModifier = recommendationModifier; + public FileByFileV1DeltaGenerator(RecommendationModifier... recommendationModifiers) { + if (recommendationModifiers != null) { + this.recommendationModifiers = + Collections.unmodifiableList(Arrays.asList(recommendationModifiers)); + } else { + this.recommendationModifiers = Collections.emptyList(); + } } /** @@ -70,12 +70,14 @@ public void generateDelta(File oldFile, File newFile, OutputStream patchOut) TempFileHolder deltaFile = new TempFileHolder(); FileOutputStream deltaFileOut = new FileOutputStream(deltaFile.file); BufferedOutputStream bufferedDeltaOut = new BufferedOutputStream(deltaFileOut)) { - PreDiffExecutor executor = + PreDiffExecutor.Builder builder = new PreDiffExecutor.Builder() .readingOriginalFiles(oldFile, newFile) - .writingDeltaFriendlyFiles(deltaFriendlyOldFile.file, deltaFriendlyNewFile.file) - .withRecommendationModifier(recommendationModifier) - .build(); + .writingDeltaFriendlyFiles(deltaFriendlyOldFile.file, deltaFriendlyNewFile.file); + for (RecommendationModifier modifier : recommendationModifiers) { + builder.withRecommendationModifier(modifier); + } + PreDiffExecutor executor = builder.build(); PreDiffPlan preDiffPlan = executor.prepareForDiffing(); DeltaGenerator deltaGenerator = getDeltaGenerator(); deltaGenerator.generateDelta( diff --git a/generator/src/main/java/com/google/archivepatcher/generator/PreDiffExecutor.java b/generator/src/main/java/com/google/archivepatcher/generator/PreDiffExecutor.java index 35198860..7a83ed6d 100644 --- a/generator/src/main/java/com/google/archivepatcher/generator/PreDiffExecutor.java +++ b/generator/src/main/java/com/google/archivepatcher/generator/PreDiffExecutor.java @@ -22,6 +22,7 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -38,7 +39,8 @@ public static final class Builder { private File originalNewFile; private File deltaFriendlyOldFile; private File deltaFriendlyNewFile; - private RecommendationModifier recommendationModifier; + private List recommendationModifiers = + new ArrayList(); /** * Sets the original, read-only input files to the patch generation process. 
This has to be @@ -75,14 +77,17 @@ public Builder writingDeltaFriendlyFiles(File deltaFriendlyOldFile, File deltaFr } /** - * Sets an optional {@link RecommendationModifier} to be used during the generation of the + * Appends an optional {@link RecommendationModifier} to be used during the generation of the * {@link PreDiffPlan} and/or delta-friendly blobs. * * @param recommendationModifier the modifier to set * @return this builder */ public Builder withRecommendationModifier(RecommendationModifier recommendationModifier) { - this.recommendationModifier = recommendationModifier; + if (recommendationModifier == null) { + throw new IllegalArgumentException("recommendationModifier cannot be null"); + } + this.recommendationModifiers.add(recommendationModifier); return this; } @@ -101,7 +106,7 @@ public PreDiffExecutor build() { originalNewFile, deltaFriendlyOldFile, deltaFriendlyNewFile, - recommendationModifier); + recommendationModifiers); } } @@ -123,8 +128,10 @@ public PreDiffExecutor build() { */ private final File deltaFriendlyNewFile; - /** Optional {@link RecommendationModifier} to be used for modifying the patch to be generated. */ - private final RecommendationModifier recommendationModifier; + /** + * Optional {@link RecommendationModifier}s to be used for modifying the patch to be generated. + */ + private final List recommendationModifiers; /** Constructs a new PreDiffExecutor to work with the specified configuration. */ private PreDiffExecutor( @@ -132,12 +139,12 @@ private PreDiffExecutor( File originalNewFile, File deltaFriendlyOldFile, File deltaFriendlyNewFile, - RecommendationModifier recommendationModifier) { + List recommendationModifiers) { this.originalOldFile = originalOldFile; this.originalNewFile = originalNewFile; this.deltaFriendlyOldFile = deltaFriendlyOldFile; this.deltaFriendlyNewFile = deltaFriendlyNewFile; - this.recommendationModifier = recommendationModifier; + this.recommendationModifiers = recommendationModifiers; } /** @@ -220,7 +227,7 @@ private PreDiffPlan generatePreDiffPlan() throws IOException { originalNewFile, originalNewArchiveZipEntriesByPath, originalNewArchiveJreDeflateParametersByPath, - recommendationModifier); + recommendationModifiers.toArray(new RecommendationModifier[] {})); return preDiffPlanner.generatePreDiffPlan(); } } diff --git a/generator/src/main/java/com/google/archivepatcher/generator/PreDiffPlanner.java b/generator/src/main/java/com/google/archivepatcher/generator/PreDiffPlanner.java index 19edf05d..6b2d1ee6 100644 --- a/generator/src/main/java/com/google/archivepatcher/generator/PreDiffPlanner.java +++ b/generator/src/main/java/com/google/archivepatcher/generator/PreDiffPlanner.java @@ -22,6 +22,7 @@ import java.io.File; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -58,10 +59,10 @@ class PreDiffPlanner { private final Map newArchiveJreDeflateParametersByPath; /** - * Optional {@link RecommendationModifier} that will be applied after the default recommendations + * Optional {@link RecommendationModifier}s that will be applied after the default recommendations * have been made but before the {@link PreDiffPlan} is constructed. 
*/ - private final RecommendationModifier recommendationModifier; + private final List recommendationModifiers; /** * Constructs a new planner that will work on the specified inputs @@ -72,7 +73,7 @@ class PreDiffPlanner { * @param newArchiveZipEntriesByPath the entries in the new archive, with paths as keys * @param newArchiveJreDeflateParametersByPath the {@link JreDeflateParameters} for each entry in * the new archive, with paths as keys - * @param recommendationModifier optionally, a {@link RecommendationModifier} to be applied after + * @param recommendationModifiers optionally, {@link RecommendationModifier}s to be applied after * the default recommendations have been made but before the {@link PreDiffPlan} is generated * in {@link #generatePreDiffPlan()}. */ @@ -82,13 +83,14 @@ class PreDiffPlanner { File newFile, Map newArchiveZipEntriesByPath, Map newArchiveJreDeflateParametersByPath, - RecommendationModifier recommendationModifier) { + RecommendationModifier... recommendationModifiers) { this.oldFile = oldFile; this.oldArchiveZipEntriesByPath = oldArchiveZipEntriesByPath; this.newFile = newFile; this.newArchiveZipEntriesByPath = newArchiveZipEntriesByPath; this.newArchiveJreDeflateParametersByPath = newArchiveJreDeflateParametersByPath; - this.recommendationModifier = recommendationModifier; + this.recommendationModifiers = + Collections.unmodifiableList(Arrays.asList(recommendationModifiers)); } /** @@ -100,9 +102,9 @@ class PreDiffPlanner { */ PreDiffPlan generatePreDiffPlan() throws IOException { List recommendations = getDefaultRecommendations(); - if (recommendationModifier != null) { + for (RecommendationModifier modifier : recommendationModifiers) { // Allow changing the recommendations base on arbitrary criteria. - recommendations = recommendationModifier.getModifiedRecommendations(recommendations); + recommendations = modifier.getModifiedRecommendations(oldFile, newFile, recommendations); } // Process recommendations to extract ranges for decompression & recompression diff --git a/generator/src/main/java/com/google/archivepatcher/generator/QualifiedRecommendation.java b/generator/src/main/java/com/google/archivepatcher/generator/QualifiedRecommendation.java index 99f9fe23..1ae15743 100644 --- a/generator/src/main/java/com/google/archivepatcher/generator/QualifiedRecommendation.java +++ b/generator/src/main/java/com/google/archivepatcher/generator/QualifiedRecommendation.java @@ -136,4 +136,18 @@ public boolean equals(Object obj) { } return true; } + + @Override + public String toString() { + return "QualifiedRecommendation [oldEntry=" + + oldEntry.getFileName() + + ", newEntry=" + + newEntry.getFileName() + + ", recommendation=" + + recommendation + + ", reason=" + + reason + + "]"; + } + } \ No newline at end of file diff --git a/generator/src/main/java/com/google/archivepatcher/generator/RecommendationModifier.java b/generator/src/main/java/com/google/archivepatcher/generator/RecommendationModifier.java index e8974270..364cf635 100644 --- a/generator/src/main/java/com/google/archivepatcher/generator/RecommendationModifier.java +++ b/generator/src/main/java/com/google/archivepatcher/generator/RecommendationModifier.java @@ -14,6 +14,7 @@ package com.google.archivepatcher.generator; +import java.io.File; import java.util.List; /** @@ -28,9 +29,11 @@ public interface RecommendationModifier { * the results of {@link QualifiedRecommendation#getRecommendation()} and {@link * QualifiedRecommendation#getReason()} to any sane values. 
* + * @param oldFile the old file that is being diffed + * @param newFile the new file that is being diffed * @param originalRecommendations the original recommendations * @return the updated list of recommendations */ public List getModifiedRecommendations( - List originalRecommendations); + File oldFile, File newFile, List originalRecommendations); } diff --git a/generator/src/main/java/com/google/archivepatcher/generator/TotalRecompressionLimiter.java b/generator/src/main/java/com/google/archivepatcher/generator/TotalRecompressionLimiter.java index e76feceb..3b42da23 100644 --- a/generator/src/main/java/com/google/archivepatcher/generator/TotalRecompressionLimiter.java +++ b/generator/src/main/java/com/google/archivepatcher/generator/TotalRecompressionLimiter.java @@ -14,6 +14,7 @@ package com.google.archivepatcher.generator; +import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -35,25 +36,26 @@ *

This class implements the following algorithm: + * + *
  1. Identify all of the {@link QualifiedRecommendation}s that have {@link - * Recommendation#uncompressNewEntry} set to true. These identify all the entries - * that have changed and that require recompression. - *
  2. Sort those {@link QualifiedRecommendation}s in order of decreasing uncompressed size. - *
  3. Iterate over the list in order. For each entry, if the uncompressed size is less than the - * number of uncompressed bytes remaining before hitting the cap, retain it; else, discard it. - *
  4. Return the resulting list of the retained entries. Note that the order of this list may not - * be the same as the input order (i.e., it has been sorted in order of decreasing compressed - * size). + *
  1. Identify all of the {@link QualifiedRecommendation}s that have {@link + * Recommendation#uncompressNewEntry} set to true. These identify all the entries + * that have changed and that require recompression. + *
  2. Sort those {@link QualifiedRecommendation}s in order of decreasing uncompressed size. + *
  3. Iterate over the list in order. For each entry, if the uncompressed size is less than the + * number of uncompressed bytes remaining before hitting the cap, retain it; else, discard it. + *
  4. Return the resulting list of the retained entries. Note that the order of this list may not + * be the same as the input order (i.e., it has been sorted in order of decreasing uncompressed + * size). + *
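[Editor's illustration, not part of the patch] A minimal sketch of combining this limiter with the DeltaFriendlyOldBlobSizeLimiter via the varargs constructor added elsewhere in this patch; budgets and paths are hypothetical. Modifiers run in the order they are given:

    FileByFileV1DeltaGenerator generator =
        new FileByFileV1DeltaGenerator(
            new TotalRecompressionLimiter(50L * 1024 * 1024),          // cap apply-time recompression
            new DeltaFriendlyOldBlobSizeLimiter(200L * 1024 * 1024));  // cap the delta-friendly old blob
    try (OutputStream patchOut =
        new BufferedOutputStream(new FileOutputStream(new File("patch.fbf")))) {
      generator.generateDelta(new File("old.zip"), new File("new.zip"), patchOut);
    }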
* * This algorithm attempts to preserve the largest changed resources needing recompression, assuming - * that these are the most likely to be delta-friendly and are therefore represent the best patch - * size savings. This may not be true in all cases but is likely in practice. + * that these are the most likely to be delta-friendly and therefore represent the best patch size + * savings. This may not be true in all cases but is likely in practice. * *

Please note that this algorithm does not limit the size of the temporary files needed * to apply a patch. In particular it does not limit the size of the "delta-friendly old * blob" that is generated during the patch-apply step, since that blob may contain an arbitrary - * amount of compressed resources that are not considered here. + * amount of compressed resources that are not considered here. To limit the size of the + * delta-friendly old blob, use a {@link DeltaFriendlyOldBlobSizeLimiter}. */ public class TotalRecompressionLimiter implements RecommendationModifier { @@ -80,7 +82,7 @@ public TotalRecompressionLimiter(long maxBytesToRecompress) { @Override public List getModifiedRecommendations( - List originalRecommendations) { + File oldFile, File newFile, List originalRecommendations) { List sorted = new ArrayList(originalRecommendations); diff --git a/generator/src/main/java/com/google/archivepatcher/generator/bsdiff/BsDiffMatcher.java b/generator/src/main/java/com/google/archivepatcher/generator/bsdiff/BsDiffMatcher.java index 6eff0d63..e4568233 100644 --- a/generator/src/main/java/com/google/archivepatcher/generator/bsdiff/BsDiffMatcher.java +++ b/generator/src/main/java/com/google/archivepatcher/generator/bsdiff/BsDiffMatcher.java @@ -52,6 +52,12 @@ class BsDiffMatcher implements Matcher { */ private final int mMinimumMatchLength; + /** + * A limit on how many total match lengths encountered, to exit the match extension loop in next() + * and prevent O(n^2) behavior. + */ + private final long mTotalMatchLenBudget = 1L << 26; // ~64 million. + /** * The number of bytes, |n|, which match between newData[mNewPos ... mNewPos + n] and * oldData[mOldPos ... mOldPos + n]. @@ -96,6 +102,9 @@ public Matcher.NextMatch next() throws IOException, InterruptedException { // The size of the range for which |numMatches| has been computed. int matchesCacheSize = 0; + // Sum over all match lengths encountered, to exit loop if we take too long to compute. + long totalMatchLen = 0; + while (mNewPos < newData.length()) { if (Thread.interrupted()) { throw new InterruptedException(); @@ -104,6 +113,7 @@ public Matcher.NextMatch next() throws IOException, InterruptedException { BsDiff.searchForMatch(mGroupArray, oldData, newData, mNewPos, 0, (int) oldData.length()); mOldPos = match.start; mMatchLen = match.length; + totalMatchLen += mMatchLen; // Update |numMatches| for the new value of |matchLen|. for (; matchesCacheSize < mMatchLen; ++matchesCacheSize) { @@ -119,7 +129,8 @@ public Matcher.NextMatch next() throws IOException, InterruptedException { } } - if (mMatchLen > numMatches + mMinimumMatchLength) { + // Also return if we've been trying to extend a large match for a long time. + if (mMatchLen > numMatches + mMinimumMatchLength || totalMatchLen >= mTotalMatchLenBudget) { return Matcher.NextMatch.of(true, mOldPos, mNewPos); } diff --git a/generator/src/test/java/com/google/archivepatcher/generator/DeltaFriendlyOldBlobSizeLimiterTest.java b/generator/src/test/java/com/google/archivepatcher/generator/DeltaFriendlyOldBlobSizeLimiterTest.java new file mode 100644 index 00000000..0bbacafe --- /dev/null +++ b/generator/src/test/java/com/google/archivepatcher/generator/DeltaFriendlyOldBlobSizeLimiterTest.java @@ -0,0 +1,316 @@ +// Copyright 2016 Google Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.archivepatcher.generator; + +import java.io.File; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +/** Tests for {@link DeltaFriendlyOldBlobSizeLimiter}. */ +@RunWith(JUnit4.class) +@SuppressWarnings("javadoc") +public class DeltaFriendlyOldBlobSizeLimiterTest { + private static final int DEFLATE_COMPRESSION_METHOD = 8; + + private static final MinimalZipEntry UNIMPORTANT = makeFakeEntry("/unimportant", 1337, 1337); + private static final MinimalZipEntry ENTRY_A_100K = + makeFakeEntry("/a/100k", 100 * 1024, 200 * 1024); + private static final MinimalZipEntry ENTRY_B_200K = + makeFakeEntry("/b/200k", 100 * 1024, 300 * 1024); + private static final MinimalZipEntry ENTRY_C_300K = + makeFakeEntry("/c/300k", 100 * 1024, 400 * 1024); + private static final MinimalZipEntry ENTRY_D_400K = + makeFakeEntry("/d/400k", 100 * 1024, 500 * 1024); + private static final MinimalZipEntry IGNORED_A = makeFakeEntry("/ignored/a", 1234, 5678); + private static final MinimalZipEntry IGNORED_B = makeFakeEntry("/ignored/b", 5678, 9101112); + private static final MinimalZipEntry IGNORED_C = makeFakeEntry("/ignored/c", 9101112, 13141516); + + // First four recommendations are all ones where uncompression of the old resource is required. + // Note that there is a mix of UNCOMPRESS_OLD and UNCOMPRESS_BOTH, both of which will have the + // "old" entry flagged for uncompression (i.e., should be relevant to the filtering logic). + private static final QualifiedRecommendation REC_A_100K = + new QualifiedRecommendation( + ENTRY_A_100K, + UNIMPORTANT, + Recommendation.UNCOMPRESS_BOTH, + RecommendationReason.COMPRESSED_BYTES_CHANGED); + private static final QualifiedRecommendation REC_B_200K = + new QualifiedRecommendation( + ENTRY_B_200K, + UNIMPORTANT, + Recommendation.UNCOMPRESS_OLD, + RecommendationReason.UNCOMPRESSED_CHANGED_TO_COMPRESSED); + private static final QualifiedRecommendation REC_C_300K = + new QualifiedRecommendation( + ENTRY_C_300K, + UNIMPORTANT, + Recommendation.UNCOMPRESS_BOTH, + RecommendationReason.COMPRESSED_BYTES_CHANGED); + private static final QualifiedRecommendation REC_D_400K = + new QualifiedRecommendation( + ENTRY_D_400K, + UNIMPORTANT, + Recommendation.UNCOMPRESS_BOTH, + RecommendationReason.COMPRESSED_CHANGED_TO_UNCOMPRESSED); + + // Remaining recommendations are all ones where recompression is NOT required. Note the mixture of + // UNCOMPRESS_NEITHER and UNCOMPRESS_OLD, neither of which will have the "new" entry flagged for + // recompression (ie., must be ignored by the filtering logic). 
+ private static final QualifiedRecommendation REC_IGNORED_A_UNCHANGED = + new QualifiedRecommendation( + IGNORED_A, + UNIMPORTANT, + Recommendation.UNCOMPRESS_NEITHER, + RecommendationReason.COMPRESSED_BYTES_IDENTICAL); + private static final QualifiedRecommendation REC_IGNORED_B_BOTH_UNCOMPRESSED = + new QualifiedRecommendation( + IGNORED_B, + UNIMPORTANT, + Recommendation.UNCOMPRESS_NEITHER, + RecommendationReason.BOTH_ENTRIES_UNCOMPRESSED); + private static final QualifiedRecommendation REC_IGNORED_C_UNSUITABLE = + new QualifiedRecommendation( + IGNORED_C, + UNIMPORTANT, + Recommendation.UNCOMPRESS_NEITHER, + RecommendationReason.UNSUITABLE); + + /** Convenience reference to all the recommendations that should be ignored by filtering. */ + private static final List ALL_IGNORED_RECS = + Collections.unmodifiableList( + Arrays.asList( + REC_IGNORED_A_UNCHANGED, REC_IGNORED_B_BOTH_UNCOMPRESSED, REC_IGNORED_C_UNSUITABLE)); + + /** Convenience reference to all the recommendations that are subject to filtering. */ + private static final List ALL_RECS = + Collections.unmodifiableList( + Arrays.asList( + REC_IGNORED_A_UNCHANGED, + REC_A_100K, + REC_IGNORED_B_BOTH_UNCOMPRESSED, + REC_D_400K, + REC_IGNORED_C_UNSUITABLE, + REC_B_200K, + REC_C_300K)); + + /** + * Make a structurally valid but totally bogus {@link MinimalZipEntry} for the purpose of testing + * the {@link RecommendationModifier}. + * + * @param path the path to set on the entry, to help with debugging + * @param compressedSize the compressed size of the entry, in bytes + * @param uncompressedSize the uncompressed size of the entry, in bytes + */ + private static MinimalZipEntry makeFakeEntry( + String path, long compressedSize, long uncompressedSize) { + try { + return new MinimalZipEntry( + DEFLATE_COMPRESSION_METHOD, // == deflate + 0, // crc32OfUncompressedData (ignored for this test) + compressedSize, + uncompressedSize, + path.getBytes("UTF8"), + true, // generalPurposeFlagBit11 (true=UTF8) + 0 // fileOffsetOfLocalEntry (ignored for this test) + ); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); // Impossible on any modern system + } + } + + @Test + public void testNegativeLimit() { + try { + new DeltaFriendlyOldBlobSizeLimiter(-1); + Assert.fail("Set a negative limit"); + } catch (IllegalArgumentException expected) { + // Pass + } + } + + /** + * Asserts that the two collections contain exactly the same elements. This isn't as rigorous as + * it should be, but is ok for this test scenario. Checks the contents but not the iteration order + * of the collections handed in. + */ + private static void assertEquivalence(Collection c1, Collection c2) { + String errorMessage = "Expected " + c1 + " but was " + c2; + Assert.assertEquals(errorMessage, c1.size(), c2.size()); + Assert.assertTrue(errorMessage, c1.containsAll(c2)); + Assert.assertTrue(errorMessage, c2.containsAll(c1)); + } + + /** + * Given {@link QualifiedRecommendation}s, manufacture equivalents altered in the way that the + * {@link DeltaFriendlyOldBlobSizeLimiter} would. + * + * @param originals the original recommendations + * @return the altered recommendations + */ + private static final List suppressed( + QualifiedRecommendation... 
originals) { + List result = new ArrayList<>(originals.length); + for (QualifiedRecommendation original : originals) { + result.add( + new QualifiedRecommendation( + original.getOldEntry(), + original.getNewEntry(), + Recommendation.UNCOMPRESS_NEITHER, + RecommendationReason.RESOURCE_CONSTRAINED)); + } + return result; + } + + private File tempFile = null; + + @Before + public void setup() throws IOException { + // Make an empty file to test the recommender's limitation logic + tempFile = File.createTempFile("DeltaFriendlyOldBlobSizeLimiterTest", "test"); + tempFile.deleteOnExit(); + } + + @After + public void tearDown() { + tempFile.delete(); + } + + @Test + public void testZeroLimit() { + DeltaFriendlyOldBlobSizeLimiter limiter = new DeltaFriendlyOldBlobSizeLimiter(0); + List expected = new ArrayList(); + expected.addAll(suppressed(REC_A_100K, REC_B_200K, REC_C_300K, REC_D_400K)); + expected.addAll(ALL_IGNORED_RECS); + assertEquivalence(expected, limiter.getModifiedRecommendations(tempFile, tempFile, ALL_RECS)); + } + + @Test + public void testMaxLimit() { + DeltaFriendlyOldBlobSizeLimiter limiter = new DeltaFriendlyOldBlobSizeLimiter(Long.MAX_VALUE); + assertEquivalence(ALL_RECS, limiter.getModifiedRecommendations(tempFile, tempFile, ALL_RECS)); + } + + @Test + public void testLimit_ExactlySmallest() { + long limit = + REC_A_100K.getOldEntry().getUncompressedSize() + - REC_A_100K.getOldEntry().getCompressedSize(); // Exactly large enough + DeltaFriendlyOldBlobSizeLimiter limiter = new DeltaFriendlyOldBlobSizeLimiter(limit); + List expected = new ArrayList(); + expected.add(REC_A_100K); + expected.addAll(suppressed(REC_B_200K, REC_C_300K, REC_D_400K)); + expected.addAll(ALL_IGNORED_RECS); + assertEquivalence(expected, limiter.getModifiedRecommendations(tempFile, tempFile, ALL_RECS)); + } + + @Test + public void testLimit_EdgeUnderSmallest() { + long limit = + REC_A_100K.getOldEntry().getUncompressedSize() + - REC_A_100K.getOldEntry().getCompressedSize() + - 1; // 1 byte too small + DeltaFriendlyOldBlobSizeLimiter limiter = new DeltaFriendlyOldBlobSizeLimiter(limit); + List expected = new ArrayList(); + expected.addAll(suppressed(REC_A_100K, REC_B_200K, REC_C_300K, REC_D_400K)); + expected.addAll(ALL_IGNORED_RECS); + assertEquivalence(expected, limiter.getModifiedRecommendations(tempFile, tempFile, ALL_RECS)); + } + + @Test + public void testLimit_EdgeOverSmallest() { + long limit = + REC_A_100K.getOldEntry().getUncompressedSize() + - REC_A_100K.getOldEntry().getCompressedSize() + + 1; // 1 byte extra room + DeltaFriendlyOldBlobSizeLimiter limiter = new DeltaFriendlyOldBlobSizeLimiter(limit); + List expected = new ArrayList(); + expected.add(REC_A_100K); + expected.addAll(suppressed(REC_B_200K, REC_C_300K, REC_D_400K)); + expected.addAll(ALL_IGNORED_RECS); + assertEquivalence(expected, limiter.getModifiedRecommendations(tempFile, tempFile, ALL_RECS)); + } + + @Test + public void testLimit_ExactlyLargest() { + long limit = + REC_D_400K.getOldEntry().getUncompressedSize() + - REC_D_400K.getOldEntry().getCompressedSize(); // Exactly large enough + DeltaFriendlyOldBlobSizeLimiter limiter = new DeltaFriendlyOldBlobSizeLimiter(limit); + List expected = new ArrayList(); + expected.add(REC_D_400K); + expected.addAll(suppressed(REC_A_100K, REC_B_200K, REC_C_300K)); + expected.addAll(ALL_IGNORED_RECS); + assertEquivalence(expected, limiter.getModifiedRecommendations(tempFile, tempFile, ALL_RECS)); + } + + @Test + public void testLimit_EdgeUnderLargest() { + long limit = + 
REC_D_400K.getOldEntry().getUncompressedSize() + - REC_D_400K.getOldEntry().getCompressedSize() + - 1; // 1 byte too small + DeltaFriendlyOldBlobSizeLimiter limiter = new DeltaFriendlyOldBlobSizeLimiter(limit); + List expected = new ArrayList(); + expected.add(REC_C_300K); + expected.addAll(suppressed(REC_A_100K, REC_B_200K, REC_D_400K)); + expected.addAll(ALL_IGNORED_RECS); + assertEquivalence(expected, limiter.getModifiedRecommendations(tempFile, tempFile, ALL_RECS)); + } + + @Test + public void testLimit_EdgeOverLargest() { + long limit = + REC_D_400K.getOldEntry().getUncompressedSize() + - REC_D_400K.getOldEntry().getCompressedSize() + + 1; // 1 byte extra room + DeltaFriendlyOldBlobSizeLimiter limiter = new DeltaFriendlyOldBlobSizeLimiter(limit); + List expected = new ArrayList(); + expected.add(REC_D_400K); + expected.addAll(suppressed(REC_A_100K, REC_B_200K, REC_C_300K)); + expected.addAll(ALL_IGNORED_RECS); + assertEquivalence(expected, limiter.getModifiedRecommendations(tempFile, tempFile, ALL_RECS)); + } + + @Test + public void testLimit_Complex() { + // A more nuanced test. Here we set up a limit of 600k - big enough to get the largest and the + // THIRD largest files. The second largest will fail because there isn't enough space after + // adding the first largest, and the fourth largest will fail because there is not enough space + // after adding the third largest. Tricky. + long limit = + (REC_D_400K.getOldEntry().getUncompressedSize() + - REC_D_400K.getOldEntry().getCompressedSize()) + + (REC_B_200K.getOldEntry().getUncompressedSize() + - REC_B_200K.getOldEntry().getCompressedSize()); + DeltaFriendlyOldBlobSizeLimiter limiter = new DeltaFriendlyOldBlobSizeLimiter(limit); + List expected = new ArrayList(); + expected.add(REC_B_200K); + expected.add(REC_D_400K); + expected.addAll(suppressed(REC_A_100K, REC_C_300K)); + expected.addAll(ALL_IGNORED_RECS); + assertEquivalence(expected, limiter.getModifiedRecommendations(tempFile, tempFile, ALL_RECS)); + } +} diff --git a/generator/src/test/java/com/google/archivepatcher/generator/FileByFileV1DeltaGeneratorTest.java b/generator/src/test/java/com/google/archivepatcher/generator/FileByFileV1DeltaGeneratorTest.java index 971251e9..a43d168b 100644 --- a/generator/src/test/java/com/google/archivepatcher/generator/FileByFileV1DeltaGeneratorTest.java +++ b/generator/src/test/java/com/google/archivepatcher/generator/FileByFileV1DeltaGeneratorTest.java @@ -37,7 +37,7 @@ public class FileByFileV1DeltaGeneratorTest { @Test public void testGenerateDelta_BaseCase() throws Exception { // Simple test of generating a patch with no changes. 
- FileByFileV1DeltaGenerator generator = new FileByFileV1DeltaGenerator(null); + FileByFileV1DeltaGenerator generator = new FileByFileV1DeltaGenerator(); ByteArrayOutputStream buffer = new ByteArrayOutputStream(); try (TempFileHolder oldArchive = new TempFileHolder(); TempFileHolder newArchive = new TempFileHolder()) { diff --git a/generator/src/test/java/com/google/archivepatcher/generator/PreDiffPlannerTest.java b/generator/src/test/java/com/google/archivepatcher/generator/PreDiffPlannerTest.java index 89a1da78..9ba39e54 100644 --- a/generator/src/test/java/com/google/archivepatcher/generator/PreDiffPlannerTest.java +++ b/generator/src/test/java/com/google/archivepatcher/generator/PreDiffPlannerTest.java @@ -47,9 +47,6 @@ @SuppressWarnings("javadoc") public class PreDiffPlannerTest { - // For test clarity - private static final RecommendationModifier NOOP_RECOMMENDATION_MODIFIER = null; - // All the A and B entries consist of a chunk of text followed by a standard corpus of text from // the DefaultDeflateCompatibilityDiviner that ensures the tests will be able to discriminate // between any compression level. Without this additional corpus text, multiple compression levels @@ -240,7 +237,7 @@ private void corruptCompressionMethod(File tempFile, UnitTestZipEntry unitTestEn } private PreDiffPlan invokeGeneratePreDiffPlan( - File oldFile, File newFile, RecommendationModifier recommendationModifier) + File oldFile, File newFile, RecommendationModifier... recommendationModifiers) throws IOException { Map originalOldArchiveZipEntriesByPath = new LinkedHashMap(); @@ -268,7 +265,7 @@ private PreDiffPlan invokeGeneratePreDiffPlan( newFile, originalNewArchiveZipEntriesByPath, originalNewArchiveJreDeflateParametersByPath, - recommendationModifier); + recommendationModifiers); return preDiffPlanner.generatePreDiffPlan(); } @@ -291,7 +288,7 @@ public void testGeneratePreDiffPlan_OneCompressedEntry_Unchanged() throws IOExce byte[] bytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_6)); File oldFile = storeAndMapArchive(bytes); File newFile = storeAndMapArchive(bytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to leave the entry alone in both the old and new archives (empty plans). Assert.assertTrue(plan.getOldFileUncompressionPlan().isEmpty()); @@ -310,7 +307,7 @@ public void testGeneratePreDiffPlan_OneCompressedEntry_LengthsChanged() throws I byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_9)); File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to uncompress the entry in both the old and new archives. 
Assert.assertEquals(1, plan.getOldFileUncompressionPlan().size()); @@ -338,7 +335,7 @@ public void testGeneratePreDiffPlan_OneCompressedEntry_BytesChanged() throws IOE UnitTestZipArchive.makeTestZip(Collections.singletonList(FIXED_LENGTH_ENTRY_C2_LEVEL_6)); File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to uncompress the entry in both the old and new archives. Assert.assertEquals(1, plan.getOldFileUncompressionPlan().size()); @@ -364,7 +361,7 @@ public void testGeneratePreDiffPlan_OneUncompressedEntry() throws IOException { byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_STORED)); File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to do nothing because both entries are already uncompressed Assert.assertTrue(plan.getOldFileUncompressionPlan().isEmpty()); @@ -383,7 +380,7 @@ public void testGeneratePreDiffPlan_OneEntry_CompressedToUncompressed() throws I byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_STORED)); File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to uncompress the entry in the old archive and do nothing in the new // archive (empty plan) @@ -406,7 +403,7 @@ public void testGeneratePreDiffPlan_OneEntry_UncompressedToCompressed() throws I byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_A_LEVEL_6)); File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to do nothing in the old archive (empty plan) and uncompress the entry in // the new archive @@ -431,7 +428,7 @@ public void testGeneratePreDiffPlan_OneEntry_UncompressedToUndivinable() throws File newFile = storeAndMapArchive(newBytes); // Deliberately break the entry in the new file so that it will not be divinable corruptEntryData(newFile, ENTRY_A_LEVEL_6); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan WOULD be to do nothing in the old archive (empty plan) and uncompress the entry in // the new archive, but because the new entry is un-divinable it cannot be recompressed and so @@ -454,7 +451,7 @@ public void testGeneratePreDiffPlan_OneEntry_OldUncompressed_NewNonDeflate() thr File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); corruptCompressionMethod(newFile, ENTRY_A_LEVEL_9); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); 
Assert.assertNotNull(plan); // The plan should be to do nothing (empty plans) because the the entry in the old archive is // already uncompressed and the entry in the new archive is not compressed with deflate (i.e., @@ -477,7 +474,7 @@ public void testGeneratePreDiffPlan_OneEntry_OldNonDeflate_NewUncompressed() thr File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); corruptCompressionMethod(oldFile, ENTRY_A_LEVEL_9); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to do nothing (empty plans) because the the entry in the old archive is // not compressed with deflate, so there is no point in trying to do anything at all. @@ -500,7 +497,7 @@ public void testGeneratePreDiffPlan_OneEntry_BothNonDeflate() throws IOException File newFile = storeAndMapArchive(newBytes); corruptCompressionMethod(oldFile, ENTRY_A_LEVEL_6); corruptCompressionMethod(newFile, ENTRY_A_LEVEL_9); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to do nothing (empty plans) because the entries are not compressed with // deflate @@ -521,7 +518,7 @@ public void testGeneratePreDiffPlan_TwoDifferentEntries_DifferentPaths() throws byte[] newBytes = UnitTestZipArchive.makeTestZip(Collections.singletonList(ENTRY_B_LEVEL_6)); File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to do nothing (empty plans) because entry A is only in the old archive and // entry B is only in the new archive, so there is nothing to diff. @@ -541,7 +538,7 @@ public void testGeneratePreDiffPlan_TwoEntriesEachArchive_SwappingOrder() throws UnitTestZipArchive.makeTestZip(Arrays.asList(ENTRY_B_LEVEL_9, ENTRY_A_LEVEL_9)); File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to uncompress both entries, but the order is important. File order should // be in both plans. @@ -568,7 +565,7 @@ public void testGeneratePreDiffPlan_SimpleRename_Unchanged() throws IOException UnitTestZipArchive.makeTestZip(Collections.singletonList(SHADOW_ENTRY_A_LEVEL_6)); File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to do nothing (empty plans) because the bytes are identical in both files // so the entries should remain compressed. 
However, unlike the case where there was no match, @@ -595,7 +592,7 @@ public void testGeneratePreDiffPlan_SimpleRename_CompressionLevelChanged() throw UnitTestZipArchive.makeTestZip(Collections.singletonList(SHADOW_ENTRY_A_LEVEL_9)); File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to uncompress both entries so that a super-efficient delta can be done. Assert.assertEquals(1, plan.getOldFileUncompressionPlan().size()); @@ -634,7 +631,7 @@ public void testGeneratePreDiffPlan_ClonedAndCompressionLevelChanged() throws IO Arrays.asList(SHADOW_ENTRY_A_LEVEL_1, SHADOW_ENTRY_A_LEVEL_9)); File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to uncompress both entries so that a super-efficient delta can be done. // Critically there should only be ONE command for the old file uncompression step! @@ -673,7 +670,7 @@ public void testGeneratePreDiffPlan_SimpleRename_CompressedToUncompressed() thro UnitTestZipArchive.makeTestZip(Collections.singletonList(SHADOW_ENTRY_A_STORED)); File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to uncompress the old entry so that a super-efficient delta can be done. // The new entry isn't touched because it is already uncompressed. @@ -701,7 +698,7 @@ public void testGeneratePreDiffPlan_SimpleRename_UncompressedToCompressed() thro UnitTestZipArchive.makeTestZip(Collections.singletonList(SHADOW_ENTRY_A_LEVEL_6)); File oldFile = storeAndMapArchive(oldBytes); File newFile = storeAndMapArchive(newBytes); - PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile, NOOP_RECOMMENDATION_MODIFIER); + PreDiffPlan plan = invokeGeneratePreDiffPlan(oldFile, newFile); Assert.assertNotNull(plan); // The plan should be to uncompress the new entry so that a super-efficient delta can be done. // The old entry isn't touched because it is already uncompressed. diff --git a/generator/src/test/java/com/google/archivepatcher/generator/TotalRecompressionLimiterTest.java b/generator/src/test/java/com/google/archivepatcher/generator/TotalRecompressionLimiterTest.java index d31d74df..4e5d5f04 100644 --- a/generator/src/test/java/com/google/archivepatcher/generator/TotalRecompressionLimiterTest.java +++ b/generator/src/test/java/com/google/archivepatcher/generator/TotalRecompressionLimiterTest.java @@ -14,6 +14,7 @@ package com.google.archivepatcher.generator; +import java.io.File; import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.Arrays; @@ -25,11 +26,14 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; -/** Tests for {@link TempFileHolder}. */ +/** Tests for {@link TotalRecompressionLimiter}. 
*/ @RunWith(JUnit4.class) @SuppressWarnings("javadoc") public class TotalRecompressionLimiterTest { + private static final File OLD_FILE = null; + private static final File NEW_FILE = null; + private static final MinimalZipEntry UNIMPORTANT = makeFakeEntry("/unimportant", 1337); private static final MinimalZipEntry ENTRY_A_100K = makeFakeEntry("/a/100k", 100 * 1024); private static final MinimalZipEntry ENTRY_B_200K = makeFakeEntry("/b/200k", 200 * 1024); @@ -193,13 +197,13 @@ public void testZeroLimit() { List expected = new ArrayList(); expected.addAll(suppressed(REC_A_100K, REC_B_200K, REC_C_300K, REC_D_400K)); expected.addAll(ALL_IGNORED_RECS); - assertEquivalence(expected, limiter.getModifiedRecommendations(ALL_RECS)); + assertEquivalence(expected, limiter.getModifiedRecommendations(OLD_FILE, NEW_FILE, ALL_RECS)); } @Test public void testMaxLimit() { TotalRecompressionLimiter limiter = new TotalRecompressionLimiter(Long.MAX_VALUE); - assertEquivalence(ALL_RECS, limiter.getModifiedRecommendations(ALL_RECS)); + assertEquivalence(ALL_RECS, limiter.getModifiedRecommendations(OLD_FILE, NEW_FILE, ALL_RECS)); } @Test @@ -210,7 +214,7 @@ public void testLimit_ExactlySmallest() { expected.add(REC_A_100K); expected.addAll(suppressed(REC_B_200K, REC_C_300K, REC_D_400K)); expected.addAll(ALL_IGNORED_RECS); - assertEquivalence(expected, limiter.getModifiedRecommendations(ALL_RECS)); + assertEquivalence(expected, limiter.getModifiedRecommendations(OLD_FILE, NEW_FILE, ALL_RECS)); } @Test @@ -220,7 +224,7 @@ public void testLimit_EdgeUnderSmallest() { List expected = new ArrayList(); expected.addAll(suppressed(REC_A_100K, REC_B_200K, REC_C_300K, REC_D_400K)); expected.addAll(ALL_IGNORED_RECS); - assertEquivalence(expected, limiter.getModifiedRecommendations(ALL_RECS)); + assertEquivalence(expected, limiter.getModifiedRecommendations(OLD_FILE, NEW_FILE, ALL_RECS)); } @Test @@ -231,7 +235,7 @@ public void testLimit_EdgeOverSmallest() { expected.add(REC_A_100K); expected.addAll(suppressed(REC_B_200K, REC_C_300K, REC_D_400K)); expected.addAll(ALL_IGNORED_RECS); - assertEquivalence(expected, limiter.getModifiedRecommendations(ALL_RECS)); + assertEquivalence(expected, limiter.getModifiedRecommendations(OLD_FILE, NEW_FILE, ALL_RECS)); } @Test @@ -242,7 +246,7 @@ public void testLimit_ExactlyLargest() { expected.add(REC_D_400K); expected.addAll(suppressed(REC_A_100K, REC_B_200K, REC_C_300K)); expected.addAll(ALL_IGNORED_RECS); - assertEquivalence(expected, limiter.getModifiedRecommendations(ALL_RECS)); + assertEquivalence(expected, limiter.getModifiedRecommendations(OLD_FILE, NEW_FILE, ALL_RECS)); } @Test @@ -253,7 +257,7 @@ public void testLimit_EdgeUnderLargest() { expected.add(REC_C_300K); expected.addAll(suppressed(REC_A_100K, REC_B_200K, REC_D_400K)); expected.addAll(ALL_IGNORED_RECS); - assertEquivalence(expected, limiter.getModifiedRecommendations(ALL_RECS)); + assertEquivalence(expected, limiter.getModifiedRecommendations(OLD_FILE, NEW_FILE, ALL_RECS)); } @Test @@ -264,7 +268,7 @@ public void testLimit_EdgeOverLargest() { expected.add(REC_D_400K); expected.addAll(suppressed(REC_A_100K, REC_B_200K, REC_C_300K)); expected.addAll(ALL_IGNORED_RECS); - assertEquivalence(expected, limiter.getModifiedRecommendations(ALL_RECS)); + assertEquivalence(expected, limiter.getModifiedRecommendations(OLD_FILE, NEW_FILE, ALL_RECS)); } @Test @@ -282,6 +286,6 @@ public void testLimit_Complex() { expected.add(REC_D_400K); expected.addAll(suppressed(REC_A_100K, REC_C_300K)); expected.addAll(ALL_IGNORED_RECS); - 
assertEquivalence(expected, limiter.getModifiedRecommendations(ALL_RECS)); + assertEquivalence(expected, limiter.getModifiedRecommendations(OLD_FILE, NEW_FILE, ALL_RECS)); } }
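As the calls above show, the RecommendationModifier contract now receives the old and new archives alongside the recommendations to modify; the tests can pass null for both files only because TotalRecompressionLimiter never reads them. For illustration, here is a minimal sketch of a custom modifier written against that three-argument shape. The signature and return type are inferred from the test calls in this patch rather than quoted from the interface, and the class and parameter names are hypothetical; it assumes RecommendationModifier is an interface, as its use here suggests:

    import com.google.archivepatcher.generator.QualifiedRecommendation;
    import com.google.archivepatcher.generator.RecommendationModifier;
    import java.io.File;
    import java.util.ArrayList;
    import java.util.List;

    /** Hypothetical modifier that passes every recommendation through unchanged. */
    public class PassThroughRecommendationModifier implements RecommendationModifier {
      @Override
      public List<QualifiedRecommendation> getModifiedRecommendations(
          File oldFile, File newFile, List<QualifiedRecommendation> originalRecommendations) {
        // A real implementation could inspect oldFile and newFile here, as
        // DeltaFriendlyOldBlobSizeLimiter presumably does, before deciding
        // which entries should remain compressed.
        return new ArrayList<QualifiedRecommendation>(originalRecommendations);
      }
    }
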
The size limit is \"soft\" in that \n" + + " the delta-friendly old blob needs to at least contain the original data that was\n" + + " within it; but the limit specified here will constrain any attempt to uncompress\n" + + " the content. If the limit is less than or equal to the size of the old file, no\n" + + " uncompression will be performed at all. Otherwise, the old file can expand into\n" + + " delta-friendly old blob until the size reaches this limit.\n" + "\nExamples:\n" + " To generate a patch from OLD to NEW, saving the patch in PATCH:\n" + " java -cp com.google.archivepatcher.tools.FileByFileTool --generate \\\n" @@ -99,6 +112,7 @@ public void run(String... args) throws IOException, InterruptedException { String newPath = null; String patchPath = null; Long totalRecompressionLimit = null; + Long deltaFriendlyOldBlobSizeLimit = null; Mode mode = null; Iterator argIterator = new LinkedList(Arrays.asList(args)).iterator(); while (argIterator.hasNext()) { @@ -118,6 +132,11 @@ public void run(String... args) throws IOException, InterruptedException { if (totalRecompressionLimit < 0) { exitWithUsage("--trl cannot be negative: " + totalRecompressionLimit); } + } else if ("--dfobsl".equals(arg)) { + deltaFriendlyOldBlobSizeLimit = Long.parseLong(popOrDie(argIterator, "--dfobsl")); + if (deltaFriendlyOldBlobSizeLimit < 0) { + exitWithUsage("--dfobsl cannot be negative: " + deltaFriendlyOldBlobSizeLimit); + } } else { exitWithUsage("unknown argument: " + arg); } @@ -128,10 +147,18 @@ public void run(String... args) throws IOException, InterruptedException { if (mode == Mode.APPLY && totalRecompressionLimit != null) { exitWithUsage("--trl can only be used with --generate"); } + if (mode == Mode.APPLY && deltaFriendlyOldBlobSizeLimit != null) { + exitWithUsage("--dfobsl can only be used with --generate"); + } File oldFile = getRequiredFileOrDie(oldPath, "old file"); if (mode == Mode.GENERATE) { File newFile = getRequiredFileOrDie(newPath, "new file"); - generatePatch(oldFile, newFile, new File(patchPath), totalRecompressionLimit); + generatePatch( + oldFile, + newFile, + new File(patchPath), + totalRecompressionLimit, + deltaFriendlyOldBlobSizeLimit); } else { // mode == Mode.APPLY File patchFile = getRequiredFileOrDie(patchPath, "patch file"); applyPatch(oldFile, patchFile, new File(newPath)); @@ -146,17 +173,29 @@ public void run(String... 
args) throws IOException, InterruptedException { * @param patchFile the patch file (will be written) * @param totalRecompressionLimit optional limit for total number of bytes of recompression to * allow in the resulting patch + * @param deltaFriendlyOldBlobSizeLimit optional limit for the size of the delta-friendly old + * blob, which implies a limit on the temporary space needed to apply the generated patch * @throws IOException if anything goes wrong * @throws InterruptedException if any thread has interrupted the current thread */ public static void generatePatch( - File oldFile, File newFile, File patchFile, Long totalRecompressionLimit) + File oldFile, + File newFile, + File patchFile, + Long totalRecompressionLimit, + Long deltaFriendlyOldBlobSizeLimit) throws IOException, InterruptedException { - RecommendationModifier recommendationModifier = null; + List<RecommendationModifier> recommendationModifiers = new ArrayList<RecommendationModifier>(); if (totalRecompressionLimit != null) { - recommendationModifier = new TotalRecompressionLimiter(totalRecompressionLimit); + recommendationModifiers.add(new TotalRecompressionLimiter(totalRecompressionLimit)); + } + if (deltaFriendlyOldBlobSizeLimit != null) { + recommendationModifiers.add( + new DeltaFriendlyOldBlobSizeLimiter(deltaFriendlyOldBlobSizeLimit)); } - FileByFileV1DeltaGenerator generator = new FileByFileV1DeltaGenerator(recommendationModifier); + FileByFileV1DeltaGenerator generator = + new FileByFileV1DeltaGenerator( + recommendationModifiers.toArray(new RecommendationModifier[] {})); try (FileOutputStream patchOut = new FileOutputStream(patchFile); BufferedOutputStream bufferedPatchOut = new BufferedOutputStream(patchOut)) { generator.generateDelta(oldFile, newFile, bufferedPatchOut);
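For callers using the generator API directly rather than through FileByFileTool, the same pair of limits can be wired up in code. The following is a minimal, self-contained sketch assuming only the constructors and calls that appear in the diffs above; the varargs constructor of FileByFileV1DeltaGenerator is inferred from the array-based call in generatePatch, and the limit values are arbitrary:

    import com.google.archivepatcher.generator.DeltaFriendlyOldBlobSizeLimiter;
    import com.google.archivepatcher.generator.FileByFileV1DeltaGenerator;
    import com.google.archivepatcher.generator.TotalRecompressionLimiter;
    import java.io.BufferedOutputStream;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;

    public class LimitedPatchGeneratorExample {
      public static void main(String[] args) throws IOException, InterruptedException {
        File oldFile = new File(args[0]);
        File newFile = new File(args[1]);
        File patchFile = new File(args[2]);
        // Roughly equivalent to: --generate --trl 1048576 --dfobsl 10485760
        FileByFileV1DeltaGenerator generator =
            new FileByFileV1DeltaGenerator(
                new TotalRecompressionLimiter(1024 * 1024), // cap recompression at 1 MiB
                new DeltaFriendlyOldBlobSizeLimiter(10L * 1024 * 1024)); // cap blob at 10 MiB
        try (FileOutputStream patchOut = new FileOutputStream(patchFile);
            BufferedOutputStream bufferedPatchOut = new BufferedOutputStream(patchOut)) {
          generator.generateDelta(oldFile, newFile, bufferedPatchOut);
        }
      }
    }

Omitting both modifiers, as the integration test above now does with new FileByFileV1DeltaGenerator(), yields an unconstrained patch.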
diff --git a/tools/src/main/java/com/google/archivepatcher/tools/PatchExplainerTool.java b/tools/src/main/java/com/google/archivepatcher/tools/PatchExplainerTool.java index bf8d82be..a5589c50 100644 --- a/tools/src/main/java/com/google/archivepatcher/tools/PatchExplainerTool.java +++ b/tools/src/main/java/com/google/archivepatcher/tools/PatchExplainerTool.java @@ -17,6 +17,7 @@ import com.google.archivepatcher.explainer.EntryExplanation; import com.google.archivepatcher.explainer.PatchExplainer; import com.google.archivepatcher.explainer.PatchExplanation; +import com.google.archivepatcher.generator.DeltaFriendlyOldBlobSizeLimiter; import com.google.archivepatcher.generator.RecommendationModifier; import com.google.archivepatcher.generator.RecommendationReason; import com.google.archivepatcher.generator.TotalRecompressionLimiter; @@ -27,6 +28,7 @@ import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.text.NumberFormat; +import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.LinkedList; @@ -44,6 +46,7 @@ public class PatchExplainerTool extends AbstractTool { + " --old the old file\n" + " --new the new file\n" + " --trl optionally, the total bytes of recompression to allow (see below)\n" + + " --dfobsl optionally, a limit on the total size of the delta-friendly old blob (see below)\n" + " --json output JSON results instead of plain text\n" + "\nTotal Recompression Limit (trl):\n" + " When generating a patch, a limit can be specified on the total number of bytes to\n" @@ -52,6 +55,15 @@ public class PatchExplainerTool extends AbstractTool { + " be expended applying the patch on the target platform. To properly explain a\n" + " patch that had such a limitation, it is necessary to specify the same limitation\n" + " here.\n" + + "\nDelta Friendly Old Blob Size Limit (dfobsl):\n" + + " When generating a patch, a limit can be specified on the total size of the delta-\n" + + " friendly old blob. This implicitly limits the size of the temporary file that\n" + + " needs to be created when applying the patch. The size limit is \"soft\" in that\n" + + " the delta-friendly old blob must at least contain the original data that was\n" + + " within it, but the limit specified here will constrain any attempt to uncompress\n" + + " the content. If the limit is less than or equal to the size of the old file, no\n" + + " uncompression will be performed at all. Otherwise, the old file can expand into\n" + + " the delta-friendly old blob until the size reaches this limit.\n" + "\nExamples:\n" + " To explain a patch from OLD to NEW, dumping plain human-readable text output:\n" + " java -cp com.google.archivepatcher.tools.PatchExplainerTool \\\n" @@ -90,6 +102,7 @@ public void run(String... args) throws IOException, InterruptedException { String oldPath = null; String newPath = null; Long totalRecompressionLimit = null; + Long deltaFriendlyOldBlobSizeLimit = null; boolean outputJson = false; Iterator<String> argIterator = new LinkedList<String>(Arrays.asList(args)).iterator(); while (argIterator.hasNext()) { @@ -105,6 +118,11 @@ public void run(String... args) throws IOException, InterruptedException { if (totalRecompressionLimit < 0) { exitWithUsage("--trl cannot be negative: " + totalRecompressionLimit); } + } else if ("--dfobsl".equals(arg)) { + deltaFriendlyOldBlobSizeLimit = Long.parseLong(popOrDie(argIterator, "--dfobsl")); + if (deltaFriendlyOldBlobSizeLimit < 0) { + exitWithUsage("--dfobsl cannot be negative: " + deltaFriendlyOldBlobSizeLimit); + } } else { exitWithUsage("unknown argument: " + arg); } @@ -119,12 +137,20 @@ public void run(String... args) throws IOException, InterruptedException { compressor.setCompressionLevel(9); PatchExplainer explainer = new PatchExplainer(new DeflateCompressor(), new BsDiffDeltaGenerator()); - RecommendationModifier recommendationModifier = null; + List<RecommendationModifier> recommendationModifiers = new ArrayList<RecommendationModifier>(); if (totalRecompressionLimit != null) { - recommendationModifier = new TotalRecompressionLimiter(totalRecompressionLimit); + recommendationModifiers.add(new TotalRecompressionLimiter(totalRecompressionLimit)); + } + if (deltaFriendlyOldBlobSizeLimit != null) { + recommendationModifiers.add( + new DeltaFriendlyOldBlobSizeLimiter(deltaFriendlyOldBlobSizeLimit)); } PatchExplanation patchExplanation = - new PatchExplanation(explainer.explainPatch(oldFile, newFile, recommendationModifier)); + new PatchExplanation( + explainer.explainPatch( + oldFile, + newFile, + recommendationModifiers.toArray(new RecommendationModifier[] {}))); if (outputJson) { patchExplanation.writeJson(new PrintWriter(System.out)); } else { @@ -199,7 +225,7 @@ private static String toPlainText(EntryExplanation explanation) { if (explanation.getCompressedSizeInPatch() > 0) { String metadata = ""; if (explanation.getReasonIncludedIfNotNew() == RecommendationReason.RESOURCE_CONSTRAINED) { - metadata = " (forced to stay compressed by total recompression limit)"; + metadata = " (forced to stay compressed by a limit)"; } return "Changed file '" + path