Documentation and l10n
torusrxxx committed May 14, 2024
1 parent f875d4b commit 523b6f0
Showing 3 changed files with 79 additions and 34 deletions.
48 changes: 39 additions & 9 deletions src/freenet/client/filter/RIFFFilter.java
@@ -12,7 +12,9 @@
import freenet.support.Logger;
import freenet.support.Logger.LogLevel;

// RIFF file format filter for several formats, such as AVI, WAV, MID, and WebP
/** RIFF file format filter for several formats, such as AVI, WAV, MID, and WebP
*
*/
public abstract class RIFFFilter implements ContentDataFilter {
private static final byte[] magicNumber = new byte[] {'R', 'I', 'F', 'F'};
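For orientation: a RIFF file starts with the ASCII bytes "RIFF" (the magicNumber above), then a 32-bit little-endian size counting everything after the size field, then a form-type FourCC such as "WEBP" or "WAVE", followed by chunks. A minimal sketch, not part of this commit (class and method names are illustrative):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Illustrative sketch only: the header layout RIFFFilter validates.
public final class RiffHeaderSketch {
    // "RIFF", then a 32-bit little-endian size counting everything after the size field,
    // then the form-type FourCC (e.g. "WEBP", "WAVE", "AVI "), then the chunks.
    static byte[] minimalHeader(String formType, int chunkBytes) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        out.write("RIFF".getBytes(StandardCharsets.US_ASCII));
        out.writeInt(Integer.reverseBytes(4 + chunkBytes)); // form type + all chunk data
        out.write(formType.getBytes(StandardCharsets.US_ASCII));
        return bytes.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        // The smallest size field readFilter accepts is 12: 4 bytes of form type plus one 8-byte chunk header.
        System.out.println(minimalHeader("WEBP", 8).length); // prints 12: RIFF magic, size field, form type
    }
}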

@@ -31,7 +33,7 @@ public void readFilter(InputStream input, OutputStream output, String charset, M
out.write(magicNumber);
if(fileSize < 0) {
// FIXME Video with more than 2 GiB data need unsigned format
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), l10n("dataTooBig"));
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), l10n("data2GB"));
}
if(fileSize < 12) {
// There couldn't be any chunk in such a small file
@@ -50,7 +52,7 @@ public void readFilter(InputStream input, OutputStream output, String charset, M
in.readFully(fccType);
ckSize = readLittleEndianInt(in);
if(ckSize < 0 || remainingSize < ckSize + 8 + (ckSize & 1)) {
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "Chunk size is too big");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), l10n("dataTooBig"));
}
remainingSize -= ckSize + 8 + (ckSize & 1);
readFilterChunk(fccType, ckSize, context, in, out, charset, otherParams, schemeHostAndPort, cb);
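To make the size check above concrete: each chunk occupies its 8-byte header (FourCC plus 32-bit size field), plus ckSize data bytes, plus one padding byte when ckSize is odd, which is exactly the ckSize + 8 + (ckSize & 1) subtracted from remainingSize. A worked example of that accounting (the class name is hypothetical):

public final class RiffChunkMath {
    /** On-disk footprint of one chunk: FourCC (4) + size field (4) + ckSize data bytes + a pad byte when ckSize is odd. */
    static long footprint(int ckSize) {
        return 8L + ckSize + (ckSize & 1);
    }

    public static void main(String[] args) {
        System.out.println(footprint(13)); // 22: 8 + 13 + 1 padding byte
        System.out.println(footprint(14)); // 22: 8 + 14, no padding needed
    }
}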
@@ -70,21 +72,36 @@ public void readFilter(InputStream input, OutputStream output, String charset, M
EOFCheck(context);
}

/** Get the FourCC to identify this file format
* @return array of four bytes
*/
protected abstract byte[] getChunkMagicNumber();

// Create a context object holding the context states
/** Create a context object holding the context states
* @return context object
*/
protected abstract Object createContext();

protected abstract void readFilterChunk(byte[] ID, int size, Object context, DataInputStream input, DataOutputStream output, String charset, Map<String, String> otherParams,
String schemeHostAndPort, FilterCallback cb) throws DataFilterException, IOException;

/** Check for invalid conditions after EOF is reached
* @param context context object
* @throws DataFilterException
*/
protected abstract void EOFCheck(Object context) throws DataFilterException;

private static String l10n(String key) {
return NodeL10n.getBase().getString("RIFFFilter."+key);
}

// Pass through bytes to output unchanged
/** Pass through bytes to output unchanged
* @param in Input stream
* @param out Output stream
* @param size Number of bytes to copy
* @throws DataFilterException
* @throws IOException
*/
protected void passthroughBytes(DataInputStream in, DataOutputStream out, int size) throws DataFilterException, IOException {
if(size < 0)
{
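The body of passthroughBytes is not fully shown in this diff; as a rough sketch under that caveat, a bounded copy of exactly size bytes might look like the following (buffer size and names are assumptions, not the repository's code):

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;

// Rough sketch only; the actual RIFFFilter implementation may differ.
final class PassthroughSketch {
    static void copyExactly(DataInputStream in, DataOutputStream out, int size) throws IOException {
        byte[] buffer = new byte[4096];
        int remaining = size;
        while (remaining > 0) {
            int read = in.read(buffer, 0, Math.min(buffer.length, remaining));
            if (read < 0) {
                throw new EOFException("Stream ended before " + size + " bytes were copied");
            }
            out.write(buffer, 0, read);
            remaining -= read;
        }
    }
}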
@@ -113,7 +130,13 @@ protected void passthroughBytes(DataInputStream in, DataOutputStream out, int si
}
}

// Write a JUNK chunk for unsupported data
/** Write a JUNK chunk for unsupported data
* @param in Input stream
* @param out Output stream
* @param size Size of the chunk; if the size is odd, a padding byte is added
* @throws DataFilterException
* @throws IOException
*/
protected void writeJunkChunk(DataInputStream in, DataOutputStream out, int size) throws DataFilterException, IOException {
size += size % 2; // Add a padding if necessary
if(in.skip(size) < size) {
@@ -147,15 +170,22 @@ protected void writeJunkChunk(DataInputStream in, DataOutputStream out, int size
}
}

// Read a little endian int
// readInt and writeInt are big endian, but RIFF use little endian
/** Read a little endian int. readInt and writeInt are big endian, but RIFF uses little endian
* @param stream Stream to read from
* @return the int value read, converted from little-endian byte order
* @throws IOException
*/
protected final static int readLittleEndianInt(DataInputStream stream) throws IOException {
int a;
a = stream.readInt();
return Integer.reverseBytes(a);
}

// Write a little endian int
/** Write a little endian int
* @param stream Stream to write to
* @param a Value to write
* @throws IOException
*/
protected final static void writeLittleEndianInt(DataOutputStream stream, int a) throws IOException {
stream.writeInt(Integer.reverseBytes(a));
}
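Since DataInputStream.readInt and DataOutputStream.writeInt are big endian, the helpers above wrap them with Integer.reverseBytes. A small standalone demonstration, not from the repository:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

final class LittleEndianDemo {
    public static void main(String[] args) throws IOException {
        // RIFF stores the value 12 as the bytes 0C 00 00 00.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        new DataOutputStream(bytes).writeInt(Integer.reverseBytes(12)); // writes 0C 00 00 00
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(Integer.reverseBytes(in.readInt())); // prints 12
    }
}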
53 changes: 30 additions & 23 deletions src/freenet/client/filter/WebPFilter.java
@@ -5,6 +5,7 @@
import java.io.IOException;
import java.util.Map;

import freenet.l10n.NodeL10n;
import freenet.support.Logger;
import freenet.support.Logger.LogLevel;

@@ -32,6 +33,7 @@ class WebPFilterContext {
public boolean hasVP8 = false;
public boolean hasVP8L = false;
}

@Override
protected Object createContext() {
return new WebPFilterContext();
@@ -40,19 +42,19 @@ protected Object createContext() {
@Override
protected void readFilterChunk(byte[] ID, int size, Object context, DataInputStream input, DataOutputStream output, String charset, Map<String, String> otherParams,
String schemeHostAndPort, FilterCallback cb) throws DataFilterException, IOException {
boolean logMINOR = Logger.shouldLog(LogLevel.MINOR, this.getClass());
boolean logDEBUG = Logger.shouldLog(LogLevel.DEBUG, this.getClass());
WebPFilterContext ctx = (WebPFilterContext)context;
if(ID[0] == 'V' && ID[1] == 'P' && ID[2] == '8' && ID[3] == ' ') {
// VP8 Lossy format: RFC 6386
// Most WebP files just contain a single chunk of this kind
if(ctx.hasVP8 || ctx.hasVP8L || ctx.hasANIM) {
throw new DataFilterException("WebP error", "WebP error", "Unexpected VP8 chunk was encountered");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "Unexpected VP8 chunk was encountered");
}
if(size < 10) {
throw new DataFilterException("WebP error", "WebP error", "The VP8 chunk was too small to be valid");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "The VP8 chunk was too small to be valid");
}
output.write(ID);
if(logMINOR) Logger.minor(this, "Passing through WebP VP8 block with " + size + " bytes.");
if(logDEBUG) Logger.debug(this, "Passing through WebP VP8 block with " + size + " bytes.");
VP8PacketFilter VP8filter = new VP8PacketFilter(true);
// Just read 6 bytes of the header to validate
byte[] buf = new byte[6];
@@ -67,34 +69,35 @@ protected void readFilterChunk(byte[] ID, int size, Object context, DataInputStr
} else if(ID[0] == 'V' && ID[1] == 'P' && ID[2] == '8' && ID[3] == 'L') {
// VP8 Lossless format: https://chromium.googlesource.com/webm/libwebp/+/refs/tags/v1.4.0/doc/webp-lossless-bitstream-spec.txt
if(ctx.hasVP8 || ctx.hasVP8L || ctx.hasANIM || ctx.hasALPH) {
throw new DataFilterException("WebP error", "WebP error", "Unexpected VP8L chunk was encountered");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "Unexpected VP8L chunk was encountered");
}
//output.write(ID);
//output.writeInt(((size & 0xff000000) >> 24) | ((size & 0x00ff0000) >> 8) | ((size & 0x0000ff00) << 8) | ((size & 0x000000ff) << 24));
// CVE-2023-4863 is an exploit against the libwebp (before version 1.3.2) implementation of the WebP lossless format, and it could be triggered from animation and alpha channel data as well. This is serious enough that we must not let it through.
// TODO: Check for CVE-2023-4863 exploit!
ctx.hasVP8L = true;
throw new DataFilterException("WebP lossless format is currently not supported", "WebP lossless format is currently not supported", "WebP lossless format is currently not supported by the filter. It could contain CVE-2023-4863 exploit.");
throw new DataFilterException(l10n("losslessUnsupportedTitle"), l10n("losslessUnsupportedTitle"), l10n("losslessUnsupported"));
} else if(ID[0] == 'A' && ID[1] == 'L' && ID[2] == 'P' && ID[3] == 'H') {
if(ctx.hasVP8L || ctx.hasANIM || ctx.hasALPH || (!ctx.hasVP8X) || ((ctx.VP8XFlags & ALPHA_FLAG) == 0)) {
// Only applicable to VP8 images. VP8L already has alpha channel, so does not need this.
throw new DataFilterException("WebP error", "WebP error", "Unexpected ALPH chunk was encountered");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "Unexpected ALPH chunk was encountered");
}
if(size == 0) {
throw new DataFilterException("WebP error", "WebP error", "Unexpected empty ALPH chunk");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "Unexpected empty ALPH chunk");
}
// Alpha channel
int flags = input.readUnsignedByte();
if((flags & 2) != 0) {
// Reserved bit is set
throw new DataFilterException("WebP error", "WebP error", "WebP alpha channel contains reserved bits");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "WebP alpha channel contains reserved bits");
}
if((flags & 0xc0) != 0) {
// Compression is not uncompressed
// TODO: Check for CVE-2023-4863 exploit!
throw new DataFilterException("WebP lossless format is currently not supported", "WebP lossless format is currently not supported", "WebP alpha channel using lossless compression is currently not supported by the filter. It could contain CVE-2023-4863 exploit.");
throw new DataFilterException(l10n("alphUnsupportedTitle"), l10n("alphUnsupportedTitle"), l10n("alphUnsupported"));
}
output.write(ID);
if(logDEBUG) Logger.debug(this, "Passing through WebP ALPH block with " + size + " bytes.");
writeLittleEndianInt(output, size);
output.writeByte(flags);
passthroughBytes(input, output, size - 1);
@@ -103,39 +106,39 @@ protected void readFilterChunk(byte[] ID, int size, Object context, DataInputStr
ctx.hasALPH = true;
} else if(ID[0] == 'A' && ID[1] == 'N' && ID[2] == 'I' && ID[3] == 'M') {
if(ctx.hasVP8 || ctx.hasVP8L || ctx.hasANIM) {
throw new DataFilterException("WebP error", "WebP error", "Unexpected ANIM chunk was encountered");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "Unexpected ANIM chunk was encountered");
}
// Global animation parameters
//output.write(ID);
//output.writeInt(((size & 0xff000000) >> 24) | ((size & 0x00ff0000) >> 8) | ((size & 0x0000ff00) << 8) | ((size & 0x000000ff) << 24));
// TODO: Check for CVE-2023-4863 exploit!
ctx.hasANIM = true;
throw new DataFilterException("WebP animation format is currently not supported", "WebP animation format is currently not supported", "WebP animation format is currently not supported by the filter. It could contain CVE-2023-4863 exploit.");
throw new DataFilterException(l10n("animUnsupportedTitle"), l10n("animUnsupportedTitle"), l10n("animUnsupported"));
} else if(ID[0] == 'A' && ID[1] == 'N' && ID[2] == 'M' && ID[3] == 'F') {
// Animation frame
if((ctx.VP8XFlags & ANIMATION_FLAG) == 0 || ctx.hasVP8 || ctx.hasVP8L || !ctx.hasANIM) {
// Animation frame in static WebP file - Unexpected
throw new DataFilterException("WebP error", "WebP error", "Unexpected ANMF chunk was encountered");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "Unexpected ANMF chunk was encountered");
} else {
ctx.hasANMF = true;
//output.write(ID);
//output.writeInt(((size & 0xff000000) >> 24) | ((size & 0x00ff0000) >> 8) | ((size & 0x0000ff00) << 8) | ((size & 0x000000ff) << 24));
// TODO: Check for CVE-2023-4863 exploit!
throw new DataFilterException("WebP animation format is currently not supported", "WebP animation format is currently not supported", "WebP animation format is currently not supported by the filter. It could contain CVE-2023-4863 exploit.");
throw new DataFilterException(l10n("animUnsupportedTitle"), l10n("animUnsupportedTitle"), l10n("animUnsupported"));
}
} else if(ID[0] == 'V' && ID[1] == 'P' && ID[2] == '8' && ID[3] == 'X') {
// meta information
if(ctx.hasVP8 || ctx.hasVP8L || ctx.hasANIM || ctx.hasVP8X) {
// This should be the first chunk of the file
throw new DataFilterException("WebP error", "WebP error", "Unexpected VP8X chunk was encountered");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "Unexpected VP8X chunk was encountered");
}
ctx.VP8XFlags = readLittleEndianInt(input);
if((ctx.VP8XFlags & ~ALL_VALID_FLAGS) != 0) {
// Has reserved flags or uses unsupported image fragmentation
throw new DataFilterException("WebP error", "WebP error", "VP8X header has reserved flags");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "VP8X header has reserved flags");
}
if(size != 10) {
throw new DataFilterException("WebP error", "WebP error", "VP8X header is too small or too big");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "VP8X header is too small or too big");
}
output.write(ID);
writeLittleEndianInt(output, size);
@@ -153,24 +156,24 @@ protected void readFilterChunk(byte[] ID, int size, Object context, DataInputStr
height++;
if(width > 16384 || height > 16384) {
// VP8 lossy format couldn't encode more than 16384 pixels in width or height. Check again when lossless format is supported.
throw new DataFilterException("WebP error", "WebP error", "WebP image size is too big");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "WebP image size is too big");
}
output.write(widthHeight);
} else if(ID[0] == 'I' && ID[1] == 'C' && ID[2] == 'C' && ID[3] == 'P') {
// ICC Color Profile
if(logMINOR) Logger.minor(this, "WebP image has ICCP block with " + size + " bytes converted into JUNK chunk.");
if(logDEBUG) Logger.debug(this, "WebP image has ICCP block with " + size + " bytes converted into JUNK chunk.");
writeJunkChunk(input, output, size);
} else if(ID[0] == 'E' && ID[1] == 'X' && ID[2] == 'I' && ID[3] == 'F') {
// EXIF metadata
if(logMINOR) Logger.minor(this, "WebP image has EXIF block with " + size + " bytes converted into JUNK chunk.");
if(logDEBUG) Logger.debug(this, "WebP image has EXIF block with " + size + " bytes converted into JUNK chunk.");
writeJunkChunk(input, output, size);
} else if(ID[0] == 'X' && ID[1] == 'M' && ID[2] == 'P' && ID[3] == ' ') {
// XMP metadata
if(logMINOR) Logger.minor(this, "WebP image has XMP block with " + size + " bytes converted into JUNK chunk.");
if(logDEBUG) Logger.debug(this, "WebP image has XMP block with " + size + " bytes converted into JUNK chunk.");
writeJunkChunk(input, output, size);
} else {
// Unknown block
if(logMINOR) Logger.minor(this, "WebP image has Unknown block with " + size + " bytes converted into JUNK chunk.");
if(logDEBUG) Logger.debug(this, "WebP image has Unknown block with " + size + " bytes converted into JUNK chunk.");
writeJunkChunk(input, output, size);
}
}
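The VP8X handling above reads a 6-byte widthHeight block holding the canvas width minus one and height minus one as 24-bit little-endian values, which is why the code increments both before the 16384 limit check. A hedged sketch of that decoding, based on the WebP container spec rather than this diff (names are illustrative):

final class Vp8xCanvasSize {
    /** Decodes the 6-byte block after the VP8X flags: width-1 and height-1, each 24-bit little endian. */
    static int[] decode(byte[] widthHeight) {
        int width  = (widthHeight[0] & 0xFF) | ((widthHeight[1] & 0xFF) << 8) | ((widthHeight[2] & 0xFF) << 16);
        int height = (widthHeight[3] & 0xFF) | ((widthHeight[4] & 0xFF) << 8) | ((widthHeight[5] & 0xFF) << 16);
        return new int[] { width + 1, height + 1 };
    }

    public static void main(String[] args) {
        // 0x3FFF + 1 = 16384, the largest canvas dimension the filter accepts.
        System.out.println(decode(new byte[] { (byte) 0xFF, 0x3F, 0x00, (byte) 0xFF, 0x3F, 0x00 })[0]);
    }
}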
@@ -179,7 +182,11 @@ protected void readFilterChunk(byte[] ID, int size, Object context, DataInputStr
protected void EOFCheck(Object context) throws DataFilterException {
WebPFilterContext ctx = (WebPFilterContext)context;
if(ctx.hasVP8 == false && ctx.hasVP8L == false && ctx.hasANMF == false) {
throw new DataFilterException("WebP error", "WebP error", "No image chunk in the WebP file is found");
throw new DataFilterException(l10n("invalidTitle"), l10n("invalidTitle"), "No image chunk in the WebP file is found");
}
}

private static String l10n(String key) {
return NodeL10n.getBase().getString("WebPFilter."+key);
}
}
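Both filters now share the same localization pattern: a private l10n(String) helper prefixes the key with the class name and looks it up through NodeL10n, so the hardcoded English messages can move into freenet.l10n.en.properties. A self-contained illustration of the key-prefixing idea (the Map here is a stand-in for NodeL10n's string table, not its real API):

import java.util.Map;

final class L10nPrefixDemo {
    // Stand-in for the string table loaded from freenet.l10n.en.properties.
    static final Map<String, String> STRINGS = Map.of(
            "RIFFFilter.invalidTitle", "Invalid RIFF file",
            "WebPFilter.invalidTitle", "Invalid WebP file");

    // Mirrors the private l10n(String) helpers added in this commit: the class name is the key prefix.
    static String l10n(String filterClass, String key) {
        return STRINGS.getOrDefault(filterClass + "." + key, filterClass + "." + key);
    }

    public static void main(String[] args) {
        System.out.println(l10n("WebPFilter", "invalidTitle")); // prints "Invalid WebP file"
    }
}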
12 changes: 10 additions & 2 deletions src/freenet/l10n/freenet.l10n.en.properties
@@ -1622,8 +1622,9 @@ PproxyToadlet.unloadPluginTitle=Unload plugin?
PproxyToadlet.unloadPluginWithName=Are you sure you wish to unload ${name}?
PproxyToadlet.unloadPurge=Remove plugin from cache
PproxyToadlet.versionTitle=Version
RIFFFilter.invalidTitle=Invalid RIFF chunk
RIFFFilter.dataTooBig=Data size is bigger than 2 GiB
RIFFFilter.invalidTitle=Invalid RIFF file
RIFFFilter.data2GB=Data size is bigger than 2 GiB
RIFFFilter.dataTooBig=Chunk data size exceeds the file size limit
RIFFFilter.invalidStream=The RIFF stream was found to be malformed, and could not be sanitized.
SaltedHashFreenetStore.shortResizeProgress=Datastore(${name}) resize in progress: ${processed}/${total}
SaltedHashFreenetStore.shortRebuildProgressNew=Datastore(${name}) maintenance in progress: ${processed}/${total} (converting to new format)
@@ -2247,6 +2248,13 @@ UserAlertsToadlet.title=Status messages
UserAlertsToadlet.noMessages=No messages
VorbisBitstreamFilter.MalformedTitle=Malformed Vorbis Bitstream
VorbisBitstreamFilter.MalformedMessage=The Vorbis bitstream is not correctly formatted, and could not be properly validated.
WebPFilter.animUnsupportedTitle=WebP animation is currently not supported
WebPFilter.animUnsupported=WebP animation is currently not supported by the filter, because it could contain frames using the lossless encoding. The WebP lossless format has a known buffer overflow exploit. When viewed in unpatched browsers and applications, it can compromise the security of the system. Therefore, the content filter cannot ensure the safety of this animation.
WebPFilter.alphUnsupportedTitle=WebP alpha channel with lossless compression is currently not supported
WebPFilter.alphUnsupported=This image contains an alpha channel encoded with the WebP lossless compression. The WebP lossless format has a known buffer overflow exploit. When viewed in unpatched browsers and applications, it can compromise the security of the system. Therefore, the content filter cannot ensure the safety of this image.
WebPFilter.invalidTitle=Invalid WebP file
WebPFilter.losslessUnsupportedTitle=WebP lossless format is currently not supported
WebPFilter.losslessUnsupported=The WebP lossless format has a known buffer overflow exploit. When viewed in unpatched browsers and applications, it can compromise the security of the system. Therefore, the content filter cannot ensure the safety of this image.
WelcomeToadlet.activityTitle=Current Activity
WelcomeToadlet.arkFetchCount=ARK Fetchers: ${total}
WelcomeToadlet.alertsSummary=Status messages summary (click for details or to change something)
