Big update, added support for TeaVM (javascript)

This commit is contained in:
Andrea Cavalli 2018-06-11 22:41:11 +02:00
parent e9967062fd
commit e0fd70d0ef
265 changed files with 25108 additions and 383 deletions

View File

@ -1,11 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="test" value="true"/>
@ -18,13 +13,24 @@
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<classpathentry excluding="org/warp/picalculator/gui/graphicengine/html/*" kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/classes" path="src/jar-specific/java">
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/rules">
<attributes>
<attribute name="maven.pomderived" value="true"/>
<attribute name="optional" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry excluding="org/warp/picalculator/gui/graphicengine/html/*" kind="src" output="target/classes" path="src/jar-specific/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>

View File

@ -5,4 +5,5 @@ encoding//src/main/java=UTF-8
encoding//src/main/java/org/warp/picalculator/gui/expression/blocks/BlockParenthesis.java=UTF-8
encoding//src/main/java/org/warp/picalculator/math/MathematicalSymbols.java=UTF-8
encoding//src/main/resources=UTF-8
encoding//src/main/rules=UTF-8
encoding//src/test/java=UTF-8

Binary file not shown.

BIN
math-rules-cache.zip114 Normal file

Binary file not shown.

159
pom.xml
View File

@ -8,15 +8,19 @@
<version>1.0-SNAPSHOT</version>
<name>WarpPI Calculator</name>
<url>http://warp.ovh</url>
<properties>
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<src.dir>src/main/java</src.dir>
</properties>
<src.dir>src/main/java</src.dir>
<src.dir2>src/main/java</src.dir2>
<src.resdir>src/main/resources</src.resdir>
</properties>
<repositories>
<repository>
<id>teavm-dev</id>
<url>https://dl.bintray.com/konsoletyper/teavm</url>
<url>https://dl.bintray.com/konsoletyper/teavm</url>
<snapshots> <enabled>true</enabled> </snapshots>
<layout>default</layout>
</repository>
<repository>
<id>oss-snapshots-repo</id>
@ -28,12 +32,21 @@
</snapshots>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>1_teavm-dev</id>
<url>https://dl.bintray.com/konsoletyper/teavm</url>
<snapshots> <enabled>true</enabled> </snapshots>
<layout>default</layout>
</pluginRepository>
</pluginRepositories>
<profiles>
<profile>
<id>jarprofile</id>
<properties>
<src.dir>jar-specific</src.dir>
</properties>
<properties>
<src.dir>jar-specific</src.dir>
<src.resdir>src/main/rules</src.resdir>
</properties>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
@ -64,14 +77,9 @@
<version>1.3.2</version>
</dependency>
<dependency>
<groupId>org.eclipse.jdt.core.compiler</groupId>
<artifactId>ecj</artifactId>
<version>4.6.1</version>
</dependency>
<dependency>
<groupId>ar.com.hjg</groupId>
<artifactId>pngj</artifactId>
<version>2.1.0</version>
<groupId>org.eclipse.jdt.core.compiler</groupId>
<artifactId>ecj</artifactId>
<version>4.6.1</version>
</dependency>
</dependencies>
<build>
@ -82,6 +90,9 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<excludes>
<exclude>org/warp/picalculator/gui/graphicengine/html/*</exclude>
</excludes>
<source>1.8</source>
<target>1.8</target>
<encoding>UTF-8</encoding>
@ -92,21 +103,48 @@
</profile>
<profile>
<id>jsprofile</id>
<properties>
<src.dir>js-specific</src.dir>
</properties>
<properties>
<src.dir>js-specific</src.dir>
<src.dir2>src/main/rules</src.dir2>
</properties>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>org.teavm</groupId>
<artifactId>teavm-classlib</artifactId>
<version>0.6.0-dev-529</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>versions-maven-plugin</artifactId>
<version>2.5</version>
<configuration>
<includes>
<include>org.teavm:*</include>
</includes>
</configuration>
<executions>
<execution>
<goals>
<goal>use-latest-versions</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<excludes>
<exclude>org/warp/picalculator/gui/graphicengine/cpu/*</exclude>
<exclude>org/warp/picalculator/gui/graphicengine/cpu/CPUEngine</exclude>
<exclude>org/warp/picalculator/gui/graphicengine/cpu/CPURenderer</exclude>
<exclude>org/warp/picalculator/gui/graphicengine/cpu/SwingWindow</exclude>
<exclude>org/warp/picalculator/gui/graphicengine/gpu/*</exclude>
<exclude>org/warp/picalculator/gui/graphicengine/headless24bit/*</exclude>
<exclude>org/warp/picalculator/gui/graphicengine/headless256/*</exclude>
@ -121,16 +159,7 @@
<plugin>
<groupId>org.teavm</groupId>
<artifactId>teavm-maven-plugin</artifactId>
<version>0.5.1</version>
<dependencies>
<!-- This dependency is required by TeaVM to emulate subset of Java
class library -->
<dependency>
<groupId>org.teavm</groupId>
<artifactId>teavm-classlib</artifactId>
<version>0.5.1</version>
</dependency>
</dependencies>
<version>0.6.0-dev-529</version>
<executions>
<execution>
<goals>
@ -165,34 +194,56 @@
<artifactId>gson</artifactId>
<version>2.8.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-io/commons-io -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.6</version>
</dependency>
<!-- <dependency>
<groupId>ar.com.hjg</groupId>
<artifactId>pngj</artifactId>
<version>2.1.0</version>
</dependency>-->
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/resources</directory>
</resource>
<resource>
<directory>${src.resdir}</directory>
</resource>
</resources>
<finalName>WarpPICalculator</finalName>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>3.0.0</version>
<executions>
<execution>
<id>add-source</id>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>
${basedir}/src/main/java
</source>
<source>
${basedir}/src/${src.dir}/java
</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>3.0.0</version>
<executions>
<execution>
<id>add-source</id>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>
${basedir}/src/main/java
</source>
<source>
${basedir}/src/${src.dir}/java
</source>
<source>
${basedir}/${src.dir2}
</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<!-- download source code in Eclipse, best practice -->
<plugin>
@ -201,7 +252,7 @@
<version>2.9</version>
<configuration>
<downloadSources>true</downloadSources>
<downloadJavadocs>false</downloadJavadocs>
<downloadJavadocs>true</downloadJavadocs>
</configuration>
</plugin>

276
pom.xml.versionsBackup Normal file
View File

@ -0,0 +1,276 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.warp.picalculator</groupId>
<artifactId>warppi-calculator</artifactId>
<packaging>jar</packaging>
<version>1.0-SNAPSHOT</version>
<name>WarpPI Calculator</name>
<url>http://warp.ovh</url>
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<src.dir>src/main/java</src.dir>
</properties>
<repositories>
<repository>
<id>teavm-dev</id>
<url>https://dl.bintray.com/konsoletyper/teavm</url>
</repository>
<repository>
<id>oss-snapshots-repo</id>
<name>Sonatype OSS Maven Repository</name>
<url>https://oss.sonatype.org/content/groups/public</url>
<snapshots>
<enabled>true</enabled>
<updatePolicy>always</updatePolicy>
</snapshots>
</repository>
</repositories>
<profiles>
<profile>
<id>jarprofile</id>
<properties>
<src.dir>jar-specific</src.dir>
</properties>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<dependencies>
<dependency>
<groupId>org.jogamp.jogl</groupId>
<artifactId>jogl-all-main</artifactId>
<version>2.3.2</version>
</dependency>
<dependency>
<groupId>org.jogamp.gluegen</groupId>
<artifactId>gluegen-rt-main</artifactId>
<version>2.3.2</version>
</dependency>
<dependency>
<groupId>com.pi4j</groupId>
<artifactId>pi4j-core</artifactId>
<version>1.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.fusesource.jansi</groupId>
<artifactId>jansi</artifactId>
<version>1.15</version>
</dependency>
<dependency>
<groupId>net.lingala.zip4j</groupId>
<artifactId>zip4j</artifactId>
<version>1.3.2</version>
</dependency>
<dependency>
<groupId>org.eclipse.jdt.core.compiler</groupId>
<artifactId>ecj</artifactId>
<version>4.6.1</version>
</dependency>
<dependency>
<groupId>ar.com.hjg</groupId>
<artifactId>pngj</artifactId>
<version>2.1.0</version>
</dependency>
</dependencies>
<build>
<plugins>
<!-- Set a compiler level -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
<encoding>UTF-8</encoding>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>jsprofile</id>
<properties>
<src.dir>js-specific</src.dir>
</properties>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>versions-maven-plugin</artifactId>
<version>2.5</version>
<configuration>
<includes>
<include>org.teavm:*</include>
</includes>
</configuration>
<executions>
<execution>
<goals>
<goal>use-latest-versions</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<excludes>
<exclude>org/warp/picalculator/gui/graphicengine/cpu/*</exclude>
<exclude>org/warp/picalculator/gui/graphicengine/gpu/*</exclude>
<exclude>org/warp/picalculator/gui/graphicengine/headless24bit/*</exclude>
<exclude>org/warp/picalculator/gui/graphicengine/headless256/*</exclude>
<exclude>org/warp/picalculator/gui/graphicengine/headless8/*</exclude>
<exclude>org/warp/picalculator/gui/graphicengine/framebuffer/*</exclude>
</excludes>
<source>1.8</source>
<target>1.8</target>
<encoding>UTF-8</encoding>
</configuration>
</plugin>
<plugin>
<groupId>org.teavm</groupId>
<artifactId>teavm-maven-plugin</artifactId>
<version>0.5.1</version>
<dependencies>
<!-- This dependency is required by TeaVM to emulate subset of Java
class library -->
<dependency>
<groupId>org.teavm</groupId>
<artifactId>teavm-classlib</artifactId>
<version>0.5.1</version>
</dependency>
</dependencies>
<executions>
<execution>
<goals>
<goal>compile</goal>
</goals>
<phase>process-classes</phase>
<configuration>
<mainClass>org.warp.picalculator.Main</mainClass>
<mainPageIncluded>true</mainPageIncluded>
<debugInformationGenerated>true</debugInformationGenerated>
<sourceMapsGenerated>true</sourceMapsGenerated>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>org.teavm</groupId>
<artifactId>teavm-classlib</artifactId>
<version>0.5.1</version>
</dependency>
<!--<dependency> <groupId>junit</groupId> <artifactId>junit</artifactId>
<version>4.12</version> <scope>test</scope> </dependency> -->
<dependency>
<groupId>it.unimi.dsi</groupId>
<artifactId>fastutil</artifactId>
<version>7.2.0</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.2</version>
</dependency>
</dependencies>
<build>
<finalName>WarpPICalculator</finalName>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>build-helper-maven-plugin</artifactId>
<version>3.0.0</version>
<executions>
<execution>
<id>add-source</id>
<phase>generate-sources</phase>
<goals>
<goal>add-source</goal>
</goals>
<configuration>
<sources>
<source>
${basedir}/src/main/java
</source>
<source>
${basedir}/src/${src.dir}/java
</source>
</sources>
</configuration>
</execution>
</executions>
</plugin>
<!-- download source code in Eclipse, best practice -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-eclipse-plugin</artifactId>
<version>2.9</version>
<configuration>
<downloadSources>true</downloadSources>
<downloadJavadocs>true</downloadJavadocs>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>3.0.2</version>
<configuration>
<encoding>UTF-8</encoding>
</configuration>
</plugin>
<!-- Maven Assembly Plugin -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.4.1</version>
<configuration>
<!-- get all project dependencies -->
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<!-- MainClass in manifest makes an executable jar -->
<archive>
<manifest>
<mainClass>org.warp.picalculator.Main</mainClass>
</manifest>
</archive>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<!-- bind to the packaging phase -->
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
<!-- <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId> <version>2.4</version> <configuration>
<failOnError>false</failOnError> <source>1.8</source> <target>1.8</target>
<archive> <manifest> <addClasspath>true</addClasspath> <classpathPrefix>libs/</classpathPrefix>
<mainClass>org.warp.picalculator.Main</mainClass> </manifest> </archive>
</configuration> </plugin> </plugins> </build> -->
</project>

View File

@ -0,0 +1,199 @@
package ar.com.hjg.pngj;
import java.io.IOException;
import java.io.InputStream;
/**
 * Pulls bytes from an {@link InputStream} and pushes them into an {@link IBytesConsumer}.
 */
public class BufferedStreamFeeder {
	private static final int DEFAULT_BUFFER_SIZE = 8192;

	private InputStream source; // underlying stream; set to null after close()
	private byte[] buffer; // staging buffer between the stream and the consumer
	private int pending; // bytes sitting in buffer that have not yet been fed to the consumer
	private int pos; // index of the first undelivered byte in buffer
	private boolean endOfStream = false;
	private boolean shouldCloseStream = true; // close the underlying stream when close() is called?
	private boolean strictFeed = false; // if true, a feed() that delivers nothing throws

	/** Creates a feeder with the default buffer size. By default, the stream will be closed on close(). */
	public BufferedStreamFeeder(InputStream is) {
		this(is, DEFAULT_BUFFER_SIZE);
	}

	/** Creates a feeder with an explicit buffer size (values below 1 fall back to the default). */
	public BufferedStreamFeeder(InputStream is, int bufsize) {
		source = is;
		buffer = new byte[bufsize < 1 ? DEFAULT_BUFFER_SIZE : bufsize];
	}

	/**
	 * Returns the input stream.
	 *
	 * @return Input Stream from which bytes are read
	 */
	public InputStream getStream() {
		return source;
	}

	/**
	 * Feeds bytes to the consumer, with no upper bound other than what is buffered/available.
	 * Returns the bytes actually consumed; returns 0 only if the stream is EOF or the consumer is done.
	 */
	public int feed(IBytesConsumer consumer) {
		return feed(consumer, Integer.MAX_VALUE);
	}

	/**
	 * Feeds the consumer at most {@code maxbytes} bytes.
	 * Returns 0 only on stream EOF (or maxbytes=0); returns a negative value if the consumer is done.
	 * May return less than maxbytes without either side being done.
	 */
	public int feed(IBytesConsumer consumer, int maxbytes) {
		if (pending == 0)
			refillBuffer();
		// cap the delivery at maxbytes, when that is a meaningful (non-negative, smaller) limit
		int limit = (maxbytes >= 0 && maxbytes < pending) ? maxbytes : pending;
		int consumed = 0;
		if (limit > 0) {
			consumed = consumer.consume(buffer, pos, limit);
			if (consumed > 0) {
				pos += consumed;
				pending -= consumed;
			}
		}
		if (strictFeed && consumed < 1)
			throw new PngjInputException("Failed to feed bytes (premature ending?)");
		return consumed;
	}

	/**
	 * Feeds the consumer in a loop until either the stream is EOF or the consumer refuses more bytes;
	 * the caller can distinguish the two cases via {@link #hasMoreToFeed()}.
	 *
	 * @return total bytes actually consumed
	 */
	public long feedAll(IBytesConsumer consumer) {
		long total = 0;
		for (;;) {
			if (!hasMoreToFeed())
				break;
			int delivered = feed(consumer);
			if (delivered < 1)
				break;
			total += delivered;
		}
		return total;
	}

	/**
	 * Feeds exactly nbytes, retrying as often as necessary.
	 *
	 * @param consumer Consumer
	 * @param nbytes Number of bytes
	 * @return true if success, false otherwise (EOF on stream, or consumer is done)
	 */
	public boolean feedFixed(IBytesConsumer consumer, int nbytes) {
		for (int remaining = nbytes; remaining > 0;) {
			int delivered = feed(consumer, remaining);
			if (delivered < 1)
				return false;
			remaining -= delivered;
		}
		return true;
	}

	/**
	 * If no bytes are pending for the consumer, tries to fill the buffer from the stream.
	 */
	protected void refillBuffer() {
		if (pending > 0 || endOfStream)
			return; // still holding undelivered data, or nothing more to read
		try {
			pos = 0;
			pending = source.read(buffer);
			if (pending < 0)
				close(); // stream EOF: mark eof and release the stream
		} catch (IOException e) {
			throw new PngjInputException(e);
		}
	}

	/**
	 * Returns true if there is more data to feed the consumer. Internally grabs more bytes from the
	 * stream if necessary.
	 */
	public boolean hasMoreToFeed() {
		if (!endOfStream)
			refillBuffer();
		return pending > 0;
	}

	/**
	 * @param closeStream If true, the underlying stream will be closed when close() is called
	 */
	public void setCloseStream(boolean closeStream) {
		shouldCloseStream = closeStream;
	}

	/**
	 * Closes this object.
	 *
	 * Sets EOF=true, and closes the stream if <tt>closeStream</tt> is true
	 *
	 * This can be called internally, or from outside.
	 *
	 * Idempotent, secure, never throws exception.
	 **/
	public void close() {
		endOfStream = true;
		buffer = null;
		pending = 0;
		pos = 0;
		if (source != null && shouldCloseStream) {
			try {
				source.close();
			} catch (Exception e) {
				// deliberately swallowed: close() promises never to throw
			}
		}
		source = null;
	}

	/**
	 * Sets a new underlying input stream, allowing this object to be reused. The old stream is not
	 * closed and the state is not reset (call close() beforehand if you want that).
	 *
	 * @param is
	 */
	public void setInputStream(InputStream is) { // to reuse this object
		source = is;
		endOfStream = false;
	}

	/**
	 * @return EOF on stream, or close() was called
	 */
	public boolean isEof() {
		return endOfStream;
	}

	/**
	 * If this flag is set (default: false), any call to feed() that feeds zero bytes will throw an
	 * exception. Useful to avoid infinite loops in some scenarios.
	 *
	 * @param failIfNoFeed
	 */
	public void setFailIfNoFeed(boolean failIfNoFeed) {
		strictFeed = failIfNoFeed;
	}
}

View File

@ -0,0 +1,216 @@
package ar.com.hjg.pngj;
import ar.com.hjg.pngj.chunks.ChunkRaw;
/**
 * Parses a PNG chunk, consuming bytes in one mode: {@link ChunkReaderMode#BUFFER}, {@link ChunkReaderMode#PROCESS},
 * {@link ChunkReaderMode#SKIP}.
 * <p>
 * It calls {@link #chunkDone()} when done. Also calls {@link #processData(byte[], int, int)} if <code>PROCESS</code>
 * mode. Apart from that, it's totally agnostic (it doesn't know about IDAT chunks, or PNG general structure)
 * <p>
 * The object wraps a ChunkRaw instance (content filled only if BUFFER mode); it should be short lived (one instance
 * created for each chunk, and discarded after reading), but the wrapped chunkRaw can be (usually is) long lived.
 */
public abstract class ChunkReader {
	/**
	 * see {@link ChunkReaderMode}
	 */
	public final ChunkReaderMode mode;
	private final ChunkRaw chunkRaw;
	// by default, this is false for SKIP, true elsewhere; can be changed via setCrcCheck()
	private boolean crcCheck;
	/**
	 * How many bytes have been read for this chunk, data only
	 */
	protected int read = 0;
	private int crcn = 0; // how many bytes have been read from crc (0..4; 4 means the chunk is done)

	/**
	 * Modes of ChunkReader chunk processing.
	 */
	public enum ChunkReaderMode {
		/**
		 * Stores full chunk data in buffer
		 */
		BUFFER,
		/**
		 * Does not store content, processes on the fly, calling processData() for each partial read
		 */
		PROCESS,
		/**
		 * Does not store nor process - implies crcCheck=false (by default).
		 */
		SKIP;
	}

	/**
	 * The constructor creates also a chunkRaw, preallocated if mode = ChunkReaderMode.BUFFER
	 *
	 * @param clen Declared length of the chunk data (must be non-negative)
	 * @param id Chunk id (must be exactly 4 characters)
	 * @param offsetInPng Informational, is stored in chunkRaw
	 * @param mode Processing mode, see {@link ChunkReaderMode}
	 */
	public ChunkReader(int clen, String id, long offsetInPng, ChunkReaderMode mode) {
		if (mode == null || id.length() != 4 || clen < 0)
			throw new PngjExceptionInternal("Bad chunk paramenters: " + mode);
		this.mode = mode;
		chunkRaw = new ChunkRaw(clen, id, mode == ChunkReaderMode.BUFFER);
		chunkRaw.setOffset(offsetInPng);
		this.crcCheck = mode == ChunkReaderMode.SKIP ? false : true; // can be changed with setter
	}

	/**
	 * Returns raw chunk (data can be empty or not, depending on ChunkReaderMode)
	 *
	 * @return Raw chunk - never null
	 */
	public ChunkRaw getChunkRaw() {
		return chunkRaw;
	}

	/**
	 * Consumes data for the chunk (data and CRC). This never consumes more bytes than for this chunk.
	 *
	 * In PROCESS mode this can call processData() (not more than once per call)
	 *
	 * If this ends the chunk (included CRC) it checks CRC (if checking) and calls chunkDone()
	 *
	 * @param buf
	 * @param off
	 * @param len
	 * @return How many bytes have been consumed
	 */
	public final int feedBytes(byte[] buf, int off, int len) {
		if (len == 0)
			return 0;
		if (len < 0)
			throw new PngjException("negative length??");
		// very first byte of the chunk: seed the CRC with the 4-byte chunk ID
		if (read == 0 && crcn == 0 && crcCheck)
			chunkRaw.updateCrc(chunkRaw.idbytes, 0, 4); // initializes crc calculation with the Chunk ID
		int bytesForData = chunkRaw.len - read; // bytesForData : bytes to be actually read from chunk data
		if (bytesForData > len)
			bytesForData = len;
		// we want to call processData even for empty chunks (IEND:bytesForData=0) at least once
		if (bytesForData > 0 || crcn == 0) {
			// in buffer mode we compute the CRC at the end
			if (crcCheck && mode != ChunkReaderMode.BUFFER && bytesForData > 0)
				chunkRaw.updateCrc(buf, off, bytesForData);
			if (mode == ChunkReaderMode.BUFFER) {
				// just copy the contents to the internal buffer
				if (chunkRaw.data != buf && bytesForData > 0) {
					// if the buffer passed is the same as this one, we don't copy; the caller should know what he's doing
					System.arraycopy(buf, off, chunkRaw.data, read, bytesForData);
				}
			} else if (mode == ChunkReaderMode.PROCESS) {
				processData(read, buf, off, bytesForData);
			} else {
				// mode == ChunkReaderMode.SKIP; nothing to do
			}
			read += bytesForData;
			off += bytesForData;
			len -= bytesForData;
		}
		int crcRead = 0;
		if (read == chunkRaw.len) { // data done - read crc?
			crcRead = 4 - crcn;
			if (crcRead > len)
				crcRead = len;
			if (crcRead > 0) {
				if (buf != chunkRaw.crcval)
					System.arraycopy(buf, off, chunkRaw.crcval, crcn, crcRead);
				crcn += crcRead;
				if (crcn == 4) { // full CRC available: verify (if enabled) and finish the chunk
					if (crcCheck) {
						if (mode == ChunkReaderMode.BUFFER) { // in buffer mode we compute the CRC on one single call
							chunkRaw.updateCrc(chunkRaw.data, 0, chunkRaw.len);
						}
						chunkRaw.checkCrc();
					}
					chunkDone();
				}
			}
		}
		return bytesForData + crcRead;
	}

	/**
	 * Chunk has been fully read?
	 *
	 * @return true if we have read all chunk, including trailing CRC
	 */
	public final boolean isDone() {
		return crcn == 4; // has read all 4 bytes from the crc
	}

	/**
	 * Determines if CRC should be checked. This should be called before starting reading.
	 *
	 * @param crcCheck
	 */
	public void setCrcCheck(boolean crcCheck) {
		// cannot enable CRC checking once data has already flowed past without being hashed
		if (read != 0 && crcCheck && !this.crcCheck)
			throw new PngjException("too late!");
		this.crcCheck = crcCheck;
	}

	/**
	 * This method will only be called in PROCESS mode, probably several times, each time with a new fragment of data
	 * inside the chunk. For chunks with zero-length data, this will still be called once.
	 *
	 * It's guaranteed that the data corresponds exclusively to this chunk data (no crc, no data from other chunks)
	 *
	 * @param offsetInchunk data bytes that had already been read/processed for this chunk
	 * @param buf
	 * @param off
	 * @param len
	 */
	protected abstract void processData(int offsetInchunk, byte[] buf, int off, int len);

	/**
	 * This method will be called (in all modes) when the full chunk -including crc- has been read
	 */
	protected abstract void chunkDone();

	// NOTE(review): always false here; presumably overridden by IDAT-like readers elsewhere — confirm
	public boolean isFromDeflatedSet() {
		return false;
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + ((chunkRaw == null) ? 0 : chunkRaw.hashCode());
		return result;
	}

	/**
	 * Equality (and hash) is basically delegated to the ChunkRaw
	 */
	@Override
	public boolean equals(Object obj) { // delegates to chunkraw
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		ChunkReader other = (ChunkReader) obj;
		if (chunkRaw == null) {
			if (other.chunkRaw != null)
				return false;
		} else if (!chunkRaw.equals(other.chunkRaw))
			return false;
		return true;
	}

	@Override
	public String toString() {
		return chunkRaw.toString();
	}
}

View File

@ -0,0 +1,30 @@
package ar.com.hjg.pngj;
/**
 * Loads a PNG as a plain sequence of buffered chunks, without distinguishing IDAT from the rest.
 *
 * Useful for chunk-level surgery, e.g. inserting or deleting an ancillary chunk.
 **/
public class ChunkSeqBuffering extends ChunkSeqReader {
	protected boolean checkCrc = true; // whether CRCs are verified while reading

	public ChunkSeqBuffering() {
		super();
	}

	/** Enables or disables CRC verification for subsequently read chunks. */
	public void setCheckCrc(boolean doCheck) {
		checkCrc = doCheck;
	}

	/** Never treats any chunk as IDAT-like: everything is simply buffered. */
	@Override
	protected boolean isIdatKind(String id) {
		return false;
	}

	@Override
	protected boolean shouldCheckCrc(int len, String id) {
		return checkCrc;
	}
}

View File

@ -0,0 +1,396 @@
package ar.com.hjg.pngj;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.Arrays;
import ar.com.hjg.pngj.ChunkReader.ChunkReaderMode;
import ar.com.hjg.pngj.chunks.ChunkHelper;
/**
* Consumes a stream of bytes that consist of a series of PNG-like chunks.
* <p>
* This has little intelligence, it's quite low-level and general (it could even be used for a MNG stream, for example).
* It supports signature recognition and idat deflate
*/
public class ChunkSeqReader implements IBytesConsumer {
protected static final int SIGNATURE_LEN = 8;
protected final boolean withSignature;
private byte[] buf0 = new byte[8]; // for signature or chunk starts
private int buf0len = 0;
private boolean signatureDone = false;
private boolean done = false; // ended, normally or not
private int chunkCount = 0;
private long bytesCount = 0;
private DeflatedChunksSet curReaderDeflatedSet; // one instance is created for each
// "idat-like set". Normally one.
private ChunkReader curChunkReader;
private long idatBytes; // this is only for the IDAT (not mrerely "idat-like")
/**
 * Creates a ChunkSeqReader that expects the 8-byte PNG signature before the first chunk.
 */
public ChunkSeqReader() {
	this(true);
}
/**
 * @param withSignature If true, the stream is assumed to be prepended by the 8-byte signature
 */
public ChunkSeqReader(boolean withSignature) {
	this.withSignature = withSignature;
	// with no signature expected, skip straight to chunk parsing
	signatureDone = !withSignature;
}
/**
 * Consumes (in general, partially) a number of bytes. A single call never involves more than one chunk.
 *
 * When the signature is read, it calls checkSignature()
 *
 * When the start of a chunk is detected, it calls {@link #startNewChunk(int, String, long)}
 *
 * When data from a chunk is being read, it delegates to {@link ChunkReader#feedBytes(byte[], int, int)}
 *
 * The caller might want to call this method more than once in succession
 *
 * This should rarely be overridden
 *
 * @param buffer
 * @param offset Offset in buffer
 * @param len Valid bytes that can be consumed
 * @return processed bytes, in the 1-len range. -1 if done. Only returns 0 if len=0.
 **/
public int consume(byte[] buffer, int offset, int len) {
	if (done)
		return -1;
	if (len == 0)
		return 0; // nothing to do
	if (len < 0)
		throw new PngjInputException("Bad len: " + len);
	int processed = 0;
	if (signatureDone) {
		if (curChunkReader == null || curChunkReader.isDone()) { // new chunk: read first 8 bytes
			// accumulate the 8-byte chunk header (4-byte length + 4-byte id) in buf0
			int read0 = 8 - buf0len;
			if (read0 > len)
				read0 = len;
			System.arraycopy(buffer, offset, buf0, buf0len, read0);
			buf0len += read0;
			processed += read0;
			bytesCount += read0;
			// len -= read0;
			// offset += read0;
			if (buf0len == 8) { // end reading chunk length and id
				chunkCount++;
				int clen = PngHelperInternal.readInt4fromBytes(buf0, 0);
				String cid = ChunkHelper.toString(buf0, 4, 4);
				// bytesCount - 8 = offset in the PNG stream where this chunk header started
				startNewChunk(clen, cid, bytesCount - 8);
				buf0len = 0;
			}
		} else { // reading chunk, delegates to curChunkReader
			int read1 = curChunkReader.feedBytes(buffer, offset, len);
			processed += read1;
			bytesCount += read1;
		}
	} else { // reading signature
		int read = SIGNATURE_LEN - buf0len;
		if (read > len)
			read = len;
		System.arraycopy(buffer, offset, buf0, buf0len, read);
		buf0len += read;
		if (buf0len == SIGNATURE_LEN) {
			checkSignature(buf0); // throws if the signature is invalid
			buf0len = 0;
			signatureDone = true;
		}
		processed += read;
		bytesCount += read;
	}
	return processed;
}
/**
 * Tries to feed exactly <tt>len</tt> bytes, calling {@link #consume(byte[], int, int)} repeatedly
 * until everything is consumed.
 *
 * This should only be used in callback mode
 *
 * @return true if all bytes were consumed, false if the consumer stopped early
 */
public boolean feedAll(byte[] buf, int off, int len) {
	int remaining = len;
	int position = off;
	while (remaining > 0) {
		int consumed = consume(buf, position, remaining);
		if (consumed < 1)
			return false;
		position += consumed;
		remaining -= consumed;
	}
	return true;
}
/**
 * Called for all chunks when a chunk start has been read (id and length), before the chunk data itself is read. It
 * creates a new ChunkReader (field accessible via {@link #getCurChunkReader()}) in the corresponding mode, and
 * eventually a curReaderDeflatedSet (field accessible via {@link #getCurReaderDeflatedSet()})
 *
 * To decide the mode and options, it calls {@link #shouldCheckCrc(int, String)},
 * {@link #shouldSkipContent(int, String)}, {@link #isIdatKind(String)}. Those methods should be overridden in
 * preference to this; if overridden, this should be called first.
 *
 * The respective {@link ChunkReader#chunkDone()} method is directed to this {@link #postProcessChunk(ChunkReader)}.
 *
 * Instead of overriding this, see also {@link #createChunkReaderForNewChunk(String, int, long, boolean)}
 */
protected void startNewChunk(int len, String id, long offset) {
	// keep a running total of the true IDAT payload length
	if (id.equals(ChunkHelper.IDAT))
		idatBytes += len;
	boolean checkCrc = shouldCheckCrc(len, id);
	boolean skip = shouldSkipContent(len, id);
	boolean isIdatType = isIdatKind(id);
	// PngHelperInternal.debug("start new chunk id=" + id + " off=" + offset + " skip=" + skip + " idat=" +
	// isIdatType);
	// first see if we should terminate an active curReaderDeflatedSet
	boolean forCurrentIdatSet = false;
	if (curReaderDeflatedSet != null)
		forCurrentIdatSet = curReaderDeflatedSet.ackNextChunkId(id);
	if (isIdatType && !skip) { // IDAT non skipped: create a DeflatedChunkReader owned by a idatSet
		if (!forCurrentIdatSet) {
			// a new idat-like set begins; the previous one must have finished cleanly
			if (curReaderDeflatedSet != null && !curReaderDeflatedSet.isDone())
				throw new PngjInputException("new IDAT-like chunk when previous was not done");
			curReaderDeflatedSet = createIdatSet(id);
		}
		curChunkReader = new DeflatedChunkReader(len, id, checkCrc, offset, curReaderDeflatedSet) {
			@Override
			protected void chunkDone() {
				super.chunkDone();
				postProcessChunk(this);
			}
		};
	} else { // for non-idat chunks (or skipped idat like)
		curChunkReader = createChunkReaderForNewChunk(id, len, offset, skip);
		if (!checkCrc)
			curChunkReader.setCrcCheck(false);
	}
}
/**
 * This will be called for all chunks (even skipped), except for IDAT-like non-skipped chunks
 *
 * The default behaviour is to create a ChunkReader in BUFFER mode (or SKIP if skip==true) that calls
 * {@link #postProcessChunk(ChunkReader)} (always) when done.
 *
 * @param id Chunk id
 * @param len Chunk length
 * @param offset offset inside PNG stream, merely informative
 * @param skip flag: if true, the content will not be buffered (nor processed)
 * @return a newly created ChunkReader that will create the ChunkRaw and then be discarded
 */
protected ChunkReader createChunkReaderForNewChunk(String id, int len, long offset, boolean skip) {
	return new ChunkReader(len, id, offset, skip ? ChunkReaderMode.SKIP : ChunkReaderMode.BUFFER) {
		@Override
		protected void chunkDone() {
			postProcessChunk(this);
		}

		@Override
		protected void processData(int offsetinChhunk, byte[] buf, int off, int len) {
			// never reached: BUFFER/SKIP modes do not invoke processData
			throw new PngjExceptionInternal("should never happen");
		}
	};
}
/**
 * Called in all modes after a chunk has been completely read.
 *
 * This implementation only checks the id of the first chunk, and reacts to the IEND chunk (sets done=true).
 *
 * Further processing should be done by overriding this (call this implementation first!).
 */
protected void postProcessChunk(ChunkReader chunkR) { // called after chunk is read
  String chunkId = chunkR.getChunkRaw().id;
  if (chunkCount == 1) {
    String expectedFirst = firstChunkId();
    if (expectedFirst != null && !expectedFirst.equals(chunkId))
      throw new PngjInputException("Bad first chunk: " + chunkId + " expected: "
          + firstChunkId());
  }
  if (chunkId.equals(endChunkId()))
    done = true;
}
/**
 * DeflatedChunksSet factory. This implementation is quite dummy, it usually should be overridden.
 */
protected DeflatedChunksSet createIdatSet(String id) {
  // arbitrary buffer sizes; subclasses normally override this with real row lengths
  final int arbitraryRowLen = 1024;
  return new DeflatedChunksSet(id, arbitraryRowLen, arbitraryRowLen);
}
/**
 * Decides if this Chunk is of "IDAT" kind (in concrete: if it is, and if it's not to be skipped, a DeflatedChunksSet
 * will be created to inflate it and process the inflated data)
 *
 * This implementation always returns false
 *
 * @param id Chunk id
 * @return true if the chunk belongs to a deflated (IDAT-like) set
 */
protected boolean isIdatKind(String id) {
  return false;
}
/**
 * Chunks can be skipped depending on id and/or length. Skipped chunks are still processed, but their data will be
 * null, and CRC will never be checked
 *
 * @param len Chunk length
 * @param id Chunk id
 * @return true if the chunk content should be skipped (this implementation never skips)
 */
protected boolean shouldSkipContent(int len, String id) {
  return false;
}
/**
 * Decides whether the CRC of a chunk should be verified while reading.
 *
 * @param len Chunk length
 * @param id Chunk id
 * @return true if CRC must be checked (this implementation always checks)
 */
protected boolean shouldCheckCrc(int len, String id) {
  return true;
}
/**
 * Validates the 8-byte PNG signature.
 *
 * @param buf Signature bytes. Should be of length 8
 * @throws PngjInputException if the signature is not the PNG one
 */
protected void checkSignature(byte[] buf) {
  boolean valid = Arrays.equals(buf, PngHelperInternal.getPngIdSignature());
  if (!valid)
    throw new PngjInputException("Bad PNG signature");
}
/**
 * If false, we are still reading the signature
 *
 * @return true if the signature has been read (or if this stream has no signature)
 */
public boolean isSignatureDone() {
  return signatureDone;
}
/**
 * If true, we either have processed the IEND chunk, or close() has been called, or a fatal error has happened
 */
public boolean isDone() {
  return done;
}
/**
 * Total of bytes read so far (buffered or not).
 *
 * @return number of bytes consumed by this reader
 */
public long getBytesCount() {
  return bytesCount;
}
/**
 * @return Chunks already read, including partial reading (currently reading)
 */
public int getChunkCount() {
  return chunkCount;
}
/**
 * Currently reading chunk, or just ended reading
 *
 * @return null only if still reading the signature
 */
public ChunkReader getCurChunkReader() {
  return curChunkReader;
}
/**
 * The latest deflated set (typically IDAT chunks) reader. Notice that there could be several idat sets (eg for APNG)
 *
 * @return current (or last) DeflatedChunksSet, possibly null
 */
public DeflatedChunksSet getCurReaderDeflatedSet() {
  return curReaderDeflatedSet;
}
/**
 * Closes this object and releases resources. For normal termination or abort. Secure and idempotent.
 */
public void close() { // forced closing
  DeflatedChunksSet deflatedSet = curReaderDeflatedSet;
  if (deflatedSet != null)
    deflatedSet.close();
  done = true;
}
/**
 * Returns true if we are not in the middle of a chunk: we have just ended reading the past chunk, or we are at the
 * start, or at the end of the signature, or we are done.
 */
public boolean isAtChunkBoundary() {
  if (bytesCount == 0 || bytesCount == 8 || done)
    return true;
  if (curChunkReader == null)
    return true;
  return curChunkReader.isDone();
}
/**
 * Which should be the id of the first chunk
 *
 * @return null if you don't want it to be checked
 */
protected String firstChunkId() {
  return "IHDR";
}
/**
 * Helper method, reports amount of bytes inside IDAT chunks.
 *
 * @return Bytes in IDAT chunks
 */
public long getIdatBytes() {
  return idatBytes;
}
/**
 * Which should be the id of the last chunk
 *
 * @return "IEND"
 */
protected String endChunkId() {
  return "IEND";
}
/**
 * Reads all content from a file. Helper method, only for callback mode
 *
 * @param f PNG file to read
 * @throws PngjInputException if the file cannot be opened (the original FileNotFoundException is kept as cause)
 */
public void feedFromFile(File f) {
  try {
    feedFromInputStream(new FileInputStream(f), true);
  } catch (FileNotFoundException e) {
    // preserve the original exception as cause instead of discarding it
    throw new PngjInputException(e.getMessage(), e);
  }
}
/**
 * Reads all content from an input stream. Helper method, only for callback mode
 *
 * @param is input stream with the full PNG content
 * @param closeStream Closes the input stream when done (or if error)
 */
public void feedFromInputStream(InputStream is, boolean closeStream) {
  BufferedStreamFeeder feeder = new BufferedStreamFeeder(is);
  feeder.setCloseStream(closeStream);
  try {
    feeder.feedAll(this);
  } finally {
    // release our own state first, then the feeder (which may close the stream)
    close();
    feeder.close();
  }
}
/**
 * Same as {@link #feedFromInputStream(InputStream, boolean)}, closing the stream when done.
 */
public void feedFromInputStream(InputStream is) {
  feedFromInputStream(is, true);
}
}

View File

@ -0,0 +1,313 @@
package ar.com.hjg.pngj;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import ar.com.hjg.pngj.ChunkReader.ChunkReaderMode;
import ar.com.hjg.pngj.chunks.ChunkFactory;
import ar.com.hjg.pngj.chunks.ChunkHelper;
import ar.com.hjg.pngj.chunks.ChunkLoadBehaviour;
import ar.com.hjg.pngj.chunks.ChunksList;
import ar.com.hjg.pngj.chunks.PngChunk;
import ar.com.hjg.pngj.chunks.PngChunkIDAT;
import ar.com.hjg.pngj.chunks.PngChunkIEND;
import ar.com.hjg.pngj.chunks.PngChunkIHDR;
import ar.com.hjg.pngj.chunks.PngChunkPLTE;
/**
 * Adds to ChunkSeqReader the storing of PngChunk, with a PngFactory, and imageInfo + deinterlacer.
 * <p>
 * Most usual PNG reading should use this class, or a {@link PngReader}, which is a thin wrapper over this.
 */
public class ChunkSeqReaderPng extends ChunkSeqReader {

  protected ImageInfo imageInfo; // initialized at parsing the IHDR
  protected ImageInfo curImageInfo; // can vary, for apng
  protected Deinterlacer deinterlacer;
  protected int currentChunkGroup = -1; // one of the ChunksList.CHUNK_GROUP_* constants

  /**
   * All chunks, but some of them can have the buffer empty (IDAT and skipped)
   */
  protected ChunksList chunksList = null;
  protected final boolean callbackMode;
  private long bytesAncChunksLoaded = 0; // bytes loaded from buffered chunks non-critical chunks (data only)
  private boolean checkCrc = true;
  // --- parameters to be set prior to reading ---
  private boolean includeNonBufferedChunks = false;
  private Set<String> chunksToSkip = new HashSet<String>();
  private long maxTotalBytesRead = 0; // 0 = no limit
  private long skipChunkMaxSize = 0; // 0 = no limit
  private long maxBytesMetadata = 0; // 0 = no limit
  private IChunkFactory chunkFactory;
  private ChunkLoadBehaviour chunkLoadBehaviour = ChunkLoadBehaviour.LOAD_CHUNK_ALWAYS;

  /**
   * @param callbackMode true for callback (async) processing, false for polled mode
   */
  public ChunkSeqReaderPng(boolean callbackMode) {
    super();
    this.callbackMode = callbackMode;
    chunkFactory = new ChunkFactory(); // default factory
  }

  // Enforces the PNG chunk ordering constraints (IHDR .. PLTE .. IDAT .. IEND),
  // advancing currentChunkGroup or throwing on an out-of-place chunk.
  private void updateAndCheckChunkGroup(String id) {
    if (id.equals(PngChunkIHDR.ID)) { // IDHR
      if (currentChunkGroup < 0)
        currentChunkGroup = ChunksList.CHUNK_GROUP_0_IDHR;
      else
        throw new PngjInputException("unexpected chunk " + id);
    } else if (id.equals(PngChunkPLTE.ID)) { // PLTE
      if ((currentChunkGroup == ChunksList.CHUNK_GROUP_0_IDHR || currentChunkGroup == ChunksList.CHUNK_GROUP_1_AFTERIDHR))
        currentChunkGroup = ChunksList.CHUNK_GROUP_2_PLTE;
      else
        throw new PngjInputException("unexpected chunk " + id);
    } else if (id.equals(PngChunkIDAT.ID)) { // IDAT (no necessarily the first)
      if ((currentChunkGroup >= ChunksList.CHUNK_GROUP_0_IDHR && currentChunkGroup <= ChunksList.CHUNK_GROUP_4_IDAT))
        currentChunkGroup = ChunksList.CHUNK_GROUP_4_IDAT;
      else
        throw new PngjInputException("unexpected chunk " + id);
    } else if (id.equals(PngChunkIEND.ID)) { // END
      if ((currentChunkGroup >= ChunksList.CHUNK_GROUP_4_IDAT))
        currentChunkGroup = ChunksList.CHUNK_GROUP_6_END;
      else
        throw new PngjInputException("unexpected chunk " + id);
    } else { // ancillary
      if (currentChunkGroup <= ChunksList.CHUNK_GROUP_1_AFTERIDHR)
        currentChunkGroup = ChunksList.CHUNK_GROUP_1_AFTERIDHR;
      else if (currentChunkGroup <= ChunksList.CHUNK_GROUP_3_AFTERPLTE)
        currentChunkGroup = ChunksList.CHUNK_GROUP_3_AFTERPLTE;
      else
        currentChunkGroup = ChunksList.CHUNK_GROUP_5_AFTERIDAT;
    }
  }

  /**
   * Applies the skipping policies (explicit skip list, size limits, load behaviour).
   * Critical chunks are never skipped; exceeding maxTotalBytesRead aborts the read.
   */
  @Override
  public boolean shouldSkipContent(int len, String id) {
    if (super.shouldSkipContent(len, id))
      return true;
    if (ChunkHelper.isCritical(id))
      return false;// critical chunks are never skipped
    if (maxTotalBytesRead > 0 && len + getBytesCount() > maxTotalBytesRead)
      throw new PngjInputException("Maximum total bytes to read exceeeded: " + maxTotalBytesRead
          + " offset:" + getBytesCount() + " len=" + len);
    if (chunksToSkip.contains(id))
      return true; // specific skip
    if (skipChunkMaxSize > 0 && len > skipChunkMaxSize)
      return true; // too big chunk
    if (maxBytesMetadata > 0 && len > maxBytesMetadata - bytesAncChunksLoaded)
      return true; // too much ancillary chunks loaded
    switch (chunkLoadBehaviour) {
      case LOAD_CHUNK_IF_SAFE:
        if (!ChunkHelper.isSafeToCopy(id))
          return true;
        break;
      case LOAD_CHUNK_NEVER:
        return true;
      default:
        break;
    }
    return false;
  }

  /**
   * @return bytes loaded from buffered ancillary chunks (data only)
   */
  public long getBytesChunksLoaded() {
    return bytesAncChunksLoaded;
  }

  /**
   * @return current chunk group, one of the ChunksList.CHUNK_GROUP_* constants (-1 before IHDR)
   */
  public int getCurrentChunkGroup() {
    return currentChunkGroup;
  }

  /**
   * Replaces the set of chunk ids to be skipped.
   */
  public void setChunksToSkip(String... chunksToSkip) {
    this.chunksToSkip.clear();
    for (String c : chunksToSkip)
      this.chunksToSkip.add(c);
  }

  public void addChunkToSkip(String chunkToSkip) {
    this.chunksToSkip.add(chunkToSkip);
  }

  public void dontSkipChunk(String chunkToSkip) {
    this.chunksToSkip.remove(chunkToSkip);
  }

  /**
   * @return true if we have not yet reached the IDAT chunk group
   */
  public boolean firstChunksNotYetRead() {
    return getCurrentChunkGroup() < ChunksList.CHUNK_GROUP_4_IDAT;
  }

  /**
   * On IHDR, builds the ImageInfo, (optionally) the Deinterlacer, and the ChunksList;
   * for buffered (or explicitly included) chunks, parses them via the chunk factory and stores them.
   */
  @Override
  protected void postProcessChunk(ChunkReader chunkR) {
    super.postProcessChunk(chunkR);
    if (chunkR.getChunkRaw().id.equals(PngChunkIHDR.ID)) {
      PngChunkIHDR ch = new PngChunkIHDR(null);
      ch.parseFromRaw(chunkR.getChunkRaw());
      imageInfo = ch.createImageInfo();
      curImageInfo = imageInfo;
      if (ch.isInterlaced())
        deinterlacer = new Deinterlacer(curImageInfo);
      chunksList = new ChunksList(imageInfo);
    }
    if (chunkR.mode == ChunkReaderMode.BUFFER && countChunkTypeAsAncillary(chunkR.getChunkRaw().id)) {
      bytesAncChunksLoaded += chunkR.getChunkRaw().len;
    }
    if (chunkR.mode == ChunkReaderMode.BUFFER || includeNonBufferedChunks) {
      PngChunk chunk = chunkFactory.createChunk(chunkR.getChunkRaw(), getImageInfo());
      chunksList.appendReadChunk(chunk, currentChunkGroup);
    }
    if (isDone()) {
      processEndPng();
    }
  }

  // decides whether a chunk counts against the maxBytesMetadata budget
  protected boolean countChunkTypeAsAncillary(String id) {
    return !ChunkHelper.isCritical(id);
  }

  @Override
  protected DeflatedChunksSet createIdatSet(String id) {
    IdatSet ids = new IdatSet(id, getCurImgInfo(), deinterlacer);
    ids.setCallbackMode(callbackMode);
    return ids;
  }

  /**
   * @return the current deflated set as an IdatSet, or null if there is none (or it is of another kind)
   */
  public IdatSet getIdatSet() {
    DeflatedChunksSet c = getCurReaderDeflatedSet();
    return c instanceof IdatSet ? (IdatSet) c : null;
  }

  @Override
  protected boolean isIdatKind(String id) {
    return id.equals(PngChunkIDAT.ID);
  }

  @Override
  public int consume(byte[] buf, int off, int len) {
    return super.consume(buf, off, len);
  }

  /**
   * sets a custom chunk factory. This is typically called with a custom class extends ChunkFactory, to adds custom
   * chunks to the default well-know ones
   *
   * @param chunkFactory
   */
  public void setChunkFactory(IChunkFactory chunkFactory) {
    this.chunkFactory = chunkFactory;
  }

  /**
   * Things to be done after IEND processing. This is not called if prematurely closed.
   */
  protected void processEndPng() {
    // nothing to do
  }

  public ImageInfo getImageInfo() {
    return imageInfo;
  }

  public boolean isInterlaced() {
    return deinterlacer != null;
  }

  public Deinterlacer getDeinterlacer() {
    return deinterlacer;
  }

  @Override
  protected void startNewChunk(int len, String id, long offset) {
    // validate ordering before the superclass starts reading the chunk
    updateAndCheckChunkGroup(id);
    super.startNewChunk(len, id, offset);
  }

  @Override
  public void close() {
    if (currentChunkGroup != ChunksList.CHUNK_GROUP_6_END)// this could only happen if forced close
      currentChunkGroup = ChunksList.CHUNK_GROUP_6_END;
    super.close();
  }

  /**
   * @return all stored chunks (NOTE: requires that IHDR was already read, since the list is created then)
   */
  public List<PngChunk> getChunks() {
    return chunksList.getChunks();
  }

  public void setMaxTotalBytesRead(long maxTotalBytesRead) {
    this.maxTotalBytesRead = maxTotalBytesRead;
  }

  public long getSkipChunkMaxSize() {
    return skipChunkMaxSize;
  }

  public void setSkipChunkMaxSize(long skipChunkMaxSize) {
    this.skipChunkMaxSize = skipChunkMaxSize;
  }

  public long getMaxBytesMetadata() {
    return maxBytesMetadata;
  }

  public void setMaxBytesMetadata(long maxBytesMetadata) {
    this.maxBytesMetadata = maxBytesMetadata;
  }

  public long getMaxTotalBytesRead() {
    return maxTotalBytesRead;
  }

  @Override
  protected boolean shouldCheckCrc(int len, String id) {
    return checkCrc;
  }

  public boolean isCheckCrc() {
    return checkCrc;
  }

  public void setCheckCrc(boolean checkCrc) {
    this.checkCrc = checkCrc;
  }

  public boolean isCallbackMode() {
    return callbackMode;
  }

  public Set<String> getChunksToSkip() {
    return chunksToSkip;
  }

  public void setChunkLoadBehaviour(ChunkLoadBehaviour chunkLoadBehaviour) {
    this.chunkLoadBehaviour = chunkLoadBehaviour;
  }

  /**
   * @return current image info (can differ from {@link #getImageInfo()} for APNG)
   */
  public ImageInfo getCurImgInfo() {
    return curImageInfo;
  }

  /**
   * Replaces the current image info (used for APNG frames); recreates the deinterlacer if there was one.
   */
  public void updateCurImgInfo(ImageInfo iminfo) {
    if (!iminfo.equals(curImageInfo)) {
      curImageInfo = iminfo;
    }
    if (deinterlacer != null)
      deinterlacer = new Deinterlacer(curImageInfo); // we could reset it, but...
  }

  /**
   * If true, the chunks with no data (because skipped or because processed like IDAT-type) are still stored in the
   * PngChunks list, which might be more informative.
   *
   * Setting this to false saves a few bytes
   *
   * Default: false
   *
   * @param includeNonBufferedChunks
   */
  public void setIncludeNonBufferedChunks(boolean includeNonBufferedChunks) {
    this.includeNonBufferedChunks = includeNonBufferedChunks;
  }
}

View File

@ -0,0 +1,70 @@
package ar.com.hjg.pngj;
import java.util.ArrayList;
import java.util.List;
import ar.com.hjg.pngj.ChunkReader.ChunkReaderMode;
import ar.com.hjg.pngj.chunks.ChunkRaw;
/**
 * This simple reader skips all chunks contents and stores the chunkRaw in a list. Useful to read chunks structure.
 *
 * Optionally the contents might be processed. This doesn't distinguish IDAT chunks
 */
public class ChunkSeqSkipping extends ChunkSeqReader {

  // raw chunks seen so far, in order of appearance
  private List<ChunkRaw> chunks = new ArrayList<ChunkRaw>();
  // if true, contents are truly skipped and CRC is not computed; set once at construction
  private final boolean skip;

  /**
   * @param skipAll if true, contents will be truly skipped, and CRC will not be computed
   */
  public ChunkSeqSkipping(boolean skipAll) {
    super(true);
    skip = skipAll;
  }

  /**
   * Equivalent to {@code ChunkSeqSkipping(true)}
   */
  public ChunkSeqSkipping() {
    this(true);
  }

  @Override
  protected ChunkReader createChunkReaderForNewChunk(String id, int len, long offset, boolean skip) {
    return new ChunkReader(len, id, offset, skip ? ChunkReaderMode.SKIP : ChunkReaderMode.PROCESS) {
      @Override
      protected void chunkDone() {
        postProcessChunk(this);
      }

      @Override
      protected void processData(int offsetinChhunk, byte[] buf, int off, int len) {
        processChunkContent(getChunkRaw(), offsetinChhunk, buf, off, len);
      }
    };
  }

  /**
   * Hook called with chunk content when not skipping; does nothing by default.
   */
  protected void processChunkContent(ChunkRaw chunkRaw, int offsetinChhunk, byte[] buf, int off,
      int len) {
    // does nothing
  }

  @Override
  protected void postProcessChunk(ChunkReader chunkR) {
    super.postProcessChunk(chunkR);
    chunks.add(chunkR.getChunkRaw());
  }

  @Override
  protected boolean shouldSkipContent(int len, String id) {
    return skip;
  }

  @Override
  protected boolean isIdatKind(String id) {
    return false;
  }

  /**
   * @return raw chunks read so far, in order of appearance
   */
  public List<ChunkRaw> getChunks() {
    return chunks;
  }
}

View File

@ -0,0 +1,83 @@
package ar.com.hjg.pngj;
import ar.com.hjg.pngj.chunks.PngChunkFDAT;
/**
 *
 * Specialization of ChunkReader, for IDAT-like chunks. These chunks are part of a set of similar chunks (contiguous
 * normally, not necessarily) which conforms a zlib stream
 */
public class DeflatedChunkReader extends ChunkReader {

  protected final DeflatedChunksSet deflatedChunksSet; // the set (shared zlib stream) this chunk belongs to
  protected boolean alsoBuffer = false; // rare: also keep a raw copy of the bytes
  protected boolean skipBytes = false; // fDAT (APNG): the first 4 bytes (sequence number) are skipped
  protected byte[] skippedBytes; // only for fDAT
  protected int seqNumExpected = -1; // only for fDAT

  public DeflatedChunkReader(int clen, String chunkid, boolean checkCrc, long offsetInPng,
      DeflatedChunksSet iDatSet) {
    super(clen, chunkid, offsetInPng, ChunkReaderMode.PROCESS);
    this.deflatedChunksSet = iDatSet;
    if (chunkid.equals(PngChunkFDAT.ID)) {
      skipBytes = true;
      skippedBytes = new byte[4]; // fDAT carries a 4-byte sequence number before the deflated data
    }
    iDatSet.appendNewChunk(this); // register this chunk in the set
  }

  /**
   * Delegates to ChunkReaderDeflatedSet.processData()
   */
  @Override
  protected void processData(int offsetInchunk, byte[] buf, int off, int len) {
    if (skipBytes && offsetInchunk < 4) {// only for APNG (sigh)
      // capture (and consume) the leading sequence-number bytes
      for (int oc = offsetInchunk; oc < 4 && len > 0; oc++, off++, len--)
        skippedBytes[oc] = buf[off];
    }
    if (len > 0) { // delegate to idatSet
      deflatedChunksSet.processBytes(buf, off, len);
      if (alsoBuffer) { // very rare!
        System.arraycopy(buf, off, getChunkRaw().data, read, len);
      }
    }
  }

  /**
   * only a sanity check for fDAT: verifies the sequence number, when one was expected
   */
  @Override
  protected void chunkDone() {
    if (skipBytes && getChunkRaw().id.equals(PngChunkFDAT.ID)) {
      if (seqNumExpected >= 0) {
        int seqNum = PngHelperInternal.readInt4fromBytes(skippedBytes, 0);
        if (seqNum != seqNumExpected)
          throw new PngjInputException("bad chunk sequence for fDAT chunk " + seqNum + " expected "
              + seqNumExpected);
      }
    }
  }

  @Override
  public boolean isFromDeflatedSet() {
    return true;
  }

  /**
   * In some rare cases you might want to also buffer the data? Must be called before any data has been read.
   */
  public void setAlsoBuffer() {
    if (read > 0)
      throw new RuntimeException("too late");
    alsoBuffer = true;
    getChunkRaw().allocData();
  }

  /** only relevant for fDAT */
  public void setSeqNumExpected(int seqNumExpected) {
    this.seqNumExpected = seqNumExpected;
  }
}

View File

@ -0,0 +1,417 @@
package ar.com.hjg.pngj;
import java.util.zip.DataFormatException;
import java.util.zip.Inflater;
/**
 * A set of IDAT-like chunks which, concatenated, form a zlib stream.
 * <p>
 * The inflated stream is intended to be read as a sequence of "rows", of which the caller knows the lengths (not
 * necessarily equal) and number.
 * <p>
 * Eg: For IDAT non-interlaced images, a row has bytesPerRow + 1 filter byte<br>
 * For interlaced images, the lengths are variable.
 * <p>
 * This class can work in sync (polled) mode or async (callback) mode. But for callback mode the method
 * processRowCallback() must be overridden
 * <p>
 * See {@link IdatSet}, which is mostly used and has a slightly simpler use.<br>
 * See <code>DeflatedChunkSetTest</code> for example of use.
 */
public class DeflatedChunksSet {

  protected byte[] row; // a "row" here means a raw (uncompressed filtered) part of the IDAT stream,
  // normally a image row (or subimage row for interlaced) plus a filter byte
  private int rowfilled; // effective/valid length of row
  private int rowlen; // what amount of bytes is to be interpreted as a complete "row". can change
  // (for interlaced)
  private int rown; // only coincide with image row if non-interlaced - incremented by
  // setNextRowSize()

  /*
   * States: WAITING_FOR_INPUT ROW_READY WORK_DONE TERMINATED
   *
   * processBytes() is externally called, prohibited in READY (in DONE it's ignored)
   *
   * WARNING: inflater.finished() != DONE (not enough, not necessary) DONE means that we have already uncompressed all
   * the data of interest.
   *
   * In non-callback mode, prepareForNextRow() is also externally called, in
   *
   * Flow: - processBytes() calls inflateData() - inflateData() : if buffer is filled goes to READY else if !
   * inf.finished goes to WAITING else if any data goes to READY (incomplete data to be read) else goes to DONE - in
   * Callback mode, after going to READY, n=processCallback() is called and then prepareForNextRow(n) is called. - in
   * Polled mode, prepareForNextRow(n) must be called from outside (after checking state=READY) - prepareForNextRow(n)
   * goes to DONE if n==0 calls inflateData() again - end() goes to DONE
   */
  private enum State {
    WAITING_FOR_INPUT, // waiting for more input
    ROW_READY, // ready for consumption (might be less than fully filled), ephemeral for CALLBACK
    // mode
    WORK_DONE, // all data of interest has been read, but we might accept still more trailing chunks
    // (we'll ignore them)
    TERMINATED; // we are done, and also won't accept more IDAT chunks

    public boolean isDone() {
      return this == WORK_DONE || this == TERMINATED;
    } // the caller has already uncompressed all the data of interest or EOF

    public boolean isTerminated() {
      return this == TERMINATED;
    } // we dont accept more chunks
  }

  State state = State.WAITING_FOR_INPUT; // never null

  private Inflater inf;
  private final boolean infOwn; // true if we own the inflater (we created it)

  private DeflatedChunkReader curChunk;

  private boolean callbackMode = true;
  private long nBytesIn = 0; // count the total compressed bytes that have been fed
  private long nBytesOut = 0; // count the total uncompressed bytes
  int chunkNum = -1; // incremented at each new chunk start
  int firstChunqSeqNum = -1; // expected seq num for first chunk. used only for fDAT (APNG)

  /**
   * All IDAT-like chunks that form a same DeflatedChunksSet should have the same id
   */
  public final String chunkid;

  /**
   * @param initialRowLen Length in bytes of first "row" (see description)
   * @param maxRowLen Max length in bytes of "rows"
   * @param inflater Can be null. If not null, must be already reset (and it must be closed/released by caller!)
   */
  public DeflatedChunksSet(String chunkid, int initialRowLen, int maxRowLen, Inflater inflater,
      byte[] buffer) {
    this.chunkid = chunkid;
    this.rowlen = initialRowLen;
    if (initialRowLen < 1 || maxRowLen < initialRowLen)
      throw new PngjException("bad inital row len " + initialRowLen);
    if (inflater != null) {
      this.inf = inflater;
      infOwn = false;
    } else {
      this.inf = new Inflater();
      infOwn = true; // inflater is own, we will release on close()
    }
    // reuse the caller's buffer only if it is large enough for the first row
    this.row = buffer != null && buffer.length >= initialRowLen ? buffer : new byte[maxRowLen];
    rown = -1;
    this.state = State.WAITING_FOR_INPUT;
    try {
      prepareForNextRow(initialRowLen);
    } catch (RuntimeException e) {
      close();
      throw e;
    }
  }

  public DeflatedChunksSet(String chunkid, int initialRowLen, int maxRowLen) {
    this(chunkid, initialRowLen, maxRowLen, null, null);
  }

  // called by each new DeflatedChunkReader belonging to this set
  protected void appendNewChunk(DeflatedChunkReader cr) {
    // all chunks must have same id
    if (!this.chunkid.equals(cr.getChunkRaw().id))
      throw new PngjInputException("Bad chunk inside IdatSet, id:" + cr.getChunkRaw().id
          + ", expected:" + this.chunkid);
    this.curChunk = cr;
    chunkNum++;
    if (firstChunqSeqNum >= 0)
      cr.setSeqNumExpected(chunkNum + firstChunqSeqNum);
  }

  /**
   * Feeds the inflater with the compressed bytes
   *
   * In poll mode, the caller should not call repeatedly this, without consuming first, checking
   * isDataReadyForConsumer()
   *
   * @param buf
   * @param off
   * @param len
   */
  protected void processBytes(byte[] buf, int off, int len) {
    nBytesIn += len;
    // PngHelperInternal.LOGGER.info("processing compressed bytes in chunkreader : " + len);
    if (len < 1 || state.isDone())
      return;
    if (state == State.ROW_READY)
      throw new PngjInputException("this should only be called if waitingForMoreInput");
    if (inf.needsDictionary() || !inf.needsInput())
      throw new RuntimeException("should not happen");
    inf.setInput(buf, off, len);
    // PngHelperInternal.debug("entering processs bytes, state=" + state +
    // " callback="+callbackMode);
    if (isCallbackMode()) {
      // in callback mode, drain all complete rows immediately, notifying the callback for each
      while (inflateData()) {
        int nextRowLen = processRowCallback();
        prepareForNextRow(nextRowLen);
        if (isDone())
          processDoneCallback();
      }
    } else
      inflateData();
  }

  /*
   * This never inflates more than one row This returns true if this has resulted in a row being ready and preprocessed
   * with preProcessRow (in callback mode, we should call immediately processRowCallback() and
   * prepareForNextRow(nextRowLen)
   */
  private boolean inflateData() {
    try {
      // PngHelperInternal.debug("entering inflateData bytes, state=" + state +
      // " callback="+callbackMode);
      if (state == State.ROW_READY)
        throw new PngjException("invalid state");// assert
      if (state.isDone())
        return false;
      int ninflated = 0;
      if (row == null || row.length < rowlen)
        row = new byte[rowlen]; // should not happen
      if (rowfilled < rowlen && !inf.finished()) {
        try {
          ninflated = inf.inflate(row, rowfilled, rowlen - rowfilled);
        } catch (DataFormatException e) {
          throw new PngjInputException("error decompressing zlib stream ", e);
        }
        rowfilled += ninflated;
        nBytesOut += ninflated;
      }
      // decide the next state from how much was inflated and whether the stream ended
      State nextstate = null;
      if (rowfilled == rowlen)
        nextstate = State.ROW_READY; // complete row, process it
      else if (!inf.finished())
        nextstate = State.WAITING_FOR_INPUT;
      else if (rowfilled > 0)
        nextstate = State.ROW_READY; // complete row, process it
      else {
        nextstate = State.WORK_DONE; // eof, no more data
      }
      state = nextstate;
      if (state == State.ROW_READY) {
        preProcessRow();
        return true;
      }
    } catch (RuntimeException e) {
      close();
      throw e;
    }
    return false;
  }

  /**
   * Called automatically in all modes when a full row has been inflated.
   */
  protected void preProcessRow() {

  }

  /**
   * Callback, must be implemented in callbackMode
   * <p>
   * This should use {@link #getRowFilled()} and {@link #getInflatedRow()} to access the row.
   * <p>
   * Must return bytes of next row, for next callback.
   */
  protected int processRowCallback() {
    throw new PngjInputException("not implemented");
  }

  /**
   * Callback, to be implemented in callbackMode
   * <p>
   * This will be called once to notify state done
   */
  protected void processDoneCallback() {}

  /**
   * Inflated buffer.
   *
   * The effective length is given by {@link #getRowFilled()}
   */
  public byte[] getInflatedRow() {
    return row;
  }

  /**
   * Should be called after the previous row was processed
   * <p>
   * Pass 0 or negative to signal that we are done (not expecting more bytes)
   * <p>
   * This resets {@link #rowfilled}
   * <p>
   * In polled (non-callback) mode this also tries to inflate the next row immediately.
   */
  public void prepareForNextRow(int len) {
    rowfilled = 0;
    rown++;
    if (len < 1) {
      rowlen = 0;
      done();
    } else if (inf.finished()) {
      rowlen = 0;
      done();
    } else {
      state = State.WAITING_FOR_INPUT;
      rowlen = len;
      if (!callbackMode)
        inflateData();
    }
  }

  /**
   * In this state, the object is waiting for more input to inflate.
   * <p>
   * Only in this state it's legal to feed this
   */
  public boolean isWaitingForMoreInput() {
    return state == State.WAITING_FOR_INPUT;
  }

  /**
   * In this state, the object is waiting the caller to retrieve inflated data
   * <p>
   * Effective length: see {@link #getRowFilled()}
   */
  public boolean isRowReady() {
    return state == State.ROW_READY;
  }

  /**
   * In this state, all relevant data has been uncompressed and retrieved (exceptionally, the reading has ended
   * prematurely).
   * <p>
   * We can still feed this object, but the bytes will be swallowed/ignored.
   */
  public boolean isDone() {
    return state.isDone();
  }

  public boolean isTerminated() {
    return state.isTerminated();
  }

  /**
   * This will be called by the owner to report us the next chunk to come. We can make our own internal changes and
   * checks. This returns true if we acknowledge the next chunk as part of this set
   */
  public boolean ackNextChunkId(String id) {
    if (state.isTerminated())
      return false;
    else if (id.equals(chunkid)) {
      return true;
    } else {
      if (!allowOtherChunksInBetween(id)) {
        if (state.isDone()) {
          if (!isTerminated())
            terminate();
          return false;
        } else {
          throw new PngjInputException("Unexpected chunk " + id + " while " + chunkid
              + " set is not done");
        }
      } else
        return true;
    }
  }

  protected void terminate() {
    close();
  }

  /**
   * This should be called when discarding this object, or for aborting. Secure, idempotent Don't use this just to
   * notify this object that it has no more work to do, see {@link #done()}
   * */
  public void close() {
    try {
      if (!state.isTerminated()) {
        state = State.TERMINATED;
      }
      if (infOwn && inf != null) {
        inf.end();// we end the Inflater only if we created it
        inf = null;
      }
    } catch (Exception e) {
    }
  }

  /**
   * Forces the DONE state, this object won't uncompress more data. It's still not terminated, it will accept more IDAT
   * chunks, but will ignore them.
   */
  public void done() {
    if (!isDone())
      state = State.WORK_DONE;
  }

  /**
   * Target size of the current row, including filter byte. <br>
   * should coincide (or be less than) with row.length
   */
  public int getRowLen() {
    return rowlen;
  }

  /** This the amount of valid bytes in the buffer */
  public int getRowFilled() {
    return rowfilled;
  }

  /**
   * Get current (last) row number.
   * <p>
   * This corresponds to the raw numeration of rows as seen by the deflater. Not the same as the real image row, if
   * interlaced.
   *
   */
  public int getRown() {
    return rown;
  }

  /**
   * Some IDAT-like set can allow other chunks in between (APNG?).
   * <p>
   * Normally false.
   *
   * @param id Id of the other chunk that appeared in middle of this set.
   * @return true if allowed
   */
  public boolean allowOtherChunksInBetween(String id) {
    return false;
  }

  /**
   * Callback mode = async processing
   */
  public boolean isCallbackMode() {
    return callbackMode;
  }

  public void setCallbackMode(boolean callbackMode) {
    this.callbackMode = callbackMode;
  }

  /** total number of bytes that have been fed to this object */
  public long getBytesIn() {
    return nBytesIn;
  }

  /** total number of bytes that have been uncompressed */
  public long getBytesOut() {
    return nBytesOut;
  }

  @Override
  public String toString() {
    StringBuilder sb =
        new StringBuilder("idatSet : " + curChunk.getChunkRaw().id + " state=" + state + " rows="
            + rown + " bytes=" + nBytesIn + "/" + nBytesOut);
    return sb.toString();
  }
}

View File

@ -0,0 +1,199 @@
package ar.com.hjg.pngj;
public class Deinterlacer {
final ImageInfo imi;
private int pass; // 1-7
private int rows, cols;
int dY, dX, oY, oX; // current step and offset (in pixels)
int oXsamples, dXsamples; // step in samples
// current row in the virtual subsampled image; this increments (by 1) from 0 to rows/dy 7 times
private int currRowSubimg;
// in the real image, this will cycle from 0 to im.rows in different steps, 7 times
private int currRowReal;
private int currRowSeq; // not counting empty rows
int totalRows;
private boolean ended = false;
public Deinterlacer(ImageInfo iminfo) {
this.imi = iminfo;
pass = 0;
currRowSubimg = -1;
currRowReal = -1;
currRowSeq = 0;
ended = false;
totalRows = 0; // lazy compute
setPass(1);
setRow(0);
}
/** this refers to the row currRowSubimg */
private void setRow(int n) { // This should be called only intercally, in sequential order
currRowSubimg = n;
currRowReal = n * dY + oY;
if (currRowReal < 0 || currRowReal >= imi.rows)
throw new PngjExceptionInternal("bad row - this should not happen");
}
/** Skips passes with no rows. Return false is no more rows */
boolean nextRow() {
currRowSeq++;
if (rows == 0 || currRowSubimg >= rows - 1) { // next pass
if (pass == 7) {
ended = true;
return false;
}
setPass(pass + 1);
if (rows == 0) {
currRowSeq--;
return nextRow();
}
setRow(0);
} else {
setRow(currRowSubimg + 1);
}
return true;
}
boolean isEnded() {
return ended;
}
void setPass(int p) {
if (this.pass == p)
return;
pass = p;
byte[] pp = paramsForPass(p);// dx,dy,ox,oy
dX = pp[0];
dY = pp[1];
oX = pp[2];
oY = pp[3];
rows = imi.rows > oY ? (imi.rows + dY - 1 - oY) / dY : 0;
cols = imi.cols > oX ? (imi.cols + dX - 1 - oX) / dX : 0;
if (cols == 0)
rows = 0; // well, really...
dXsamples = dX * imi.channels;
oXsamples = oX * imi.channels;
}
static byte[] paramsForPass(final int p) {// dx,dy,ox,oy
switch (p) {
case 1:
return new byte[] {8, 8, 0, 0};
case 2:
return new byte[] {8, 8, 4, 0};
case 3:
return new byte[] {4, 8, 0, 4};
case 4:
return new byte[] {4, 4, 2, 0};
case 5:
return new byte[] {2, 4, 0, 2};
case 6:
return new byte[] {2, 2, 1, 0};
case 7:
return new byte[] {1, 2, 0, 1};
default:
throw new PngjExceptionInternal("bad interlace pass" + p);
}
}
/**
* current row number inside the "sub image"
*/
int getCurrRowSubimg() {
return currRowSubimg;
}
/**
* current row number inside the "real image"
*/
int getCurrRowReal() {
return currRowReal;
}
/**
* current pass number (1-7)
*/
int getPass() {
return pass;
}
/**
* How many rows has the current pass?
**/
int getRows() {
return rows;
}
/**
* How many columns (pixels) are there in the current row
*/
int getCols() {
return cols;
}
public int getPixelsToRead() {
return getCols();
}
public int getBytesToRead() { // not including filter byte
return (imi.bitspPixel * getPixelsToRead() + 7) / 8;
}
public int getdY() {
return dY;
}
/*
* in pixels
*/
public int getdX() {
return dX;
}
public int getoY() {
return oY;
}
/*
* in pixels
*/
public int getoX() {
return oX;
}
/**
 * Total number of rows over all 7 passes (empty passes contribute nothing). Computed
 * lazily on first call and cached.
 */
public int getTotalRows() {
    if (totalRows == 0) { // not computed yet
        int sum = 0;
        for (int p = 1; p <= 7; p++) {
            final byte[] params = paramsForPass(p); // dx dy ox oy
            final int nrows = imi.rows > params[3] ? (imi.rows + params[1] - 1 - params[3]) / params[1] : 0;
            final int ncols = imi.cols > params[2] ? (imi.cols + params[0] - 1 - params[2]) / params[0] : 0;
            if (nrows > 0 && ncols > 0)
                sum += nrows;
        }
        totalRows = sum;
    }
    return totalRows;
}
/**
 * Total unfiltered bytes in the image over all 7 passes, including one filter byte per row.
 */
public long getTotalRawBytes() { // including the filter byte
    long total = 0;
    for (int p = 1; p <= 7; p++) {
        final byte[] params = paramsForPass(p); // dx dy ox oy
        final int nrows = imi.rows > params[3] ? (imi.rows + params[1] - 1 - params[3]) / params[1] : 0;
        final int ncols = imi.cols > params[2] ? (imi.cols + params[0] - 1 - params[2]) / params[0] : 0;
        final int rowbytes = (imi.bitspPixel * ncols + 7) / 8; // without the filter byte
        if (nrows > 0 && ncols > 0)
            total += nrows * (1 + (long) rowbytes);
    }
    return total;
}
/**
 * sequential row counter across all passes, incremented on each nextRow()
 */
public int getCurrRowSeq() {
return currRowSeq;
}
}

View File

@ -0,0 +1,124 @@
package ar.com.hjg.pngj;
import java.util.HashMap;
/**
 * Internal PNG predictor filter type.
 *
 * Negative values are pseudo types: global strategies for writing that (can) result in different real filters for
 * different rows.
 */
public enum FilterType {
    /**
     * No filter.
     */
    FILTER_NONE(0),
    /**
     * SUB filter (uses same row)
     */
    FILTER_SUB(1),
    /**
     * UP filter (uses previous row)
     */
    FILTER_UP(2),
    /**
     * AVERAGE filter
     */
    FILTER_AVERAGE(3),
    /**
     * PAETH predictor
     */
    FILTER_PAETH(4),
    /**
     * Default strategy: select one of the standard filters depending on global image parameters
     */
    FILTER_DEFAULT(-1),
    /**
     * @deprecated use {@link #FILTER_ADAPTIVE_FAST}
     */
    @Deprecated
    FILTER_AGGRESSIVE(-2),
    /**
     * @deprecated use {@link #FILTER_ADAPTIVE_MEDIUM} or {@link #FILTER_ADAPTIVE_FULL}
     */
    @Deprecated
    FILTER_VERYAGGRESSIVE(-4),
    /**
     * Adaptive strategy, sampling each row (or almost)
     */
    FILTER_ADAPTIVE_FULL(-4),
    /**
     * Adaptive strategy, skipping some rows
     */
    FILTER_ADAPTIVE_MEDIUM(-3), // samples about 1/4 row
    /**
     * Adaptive strategy, skipping many rows - more speed
     */
    FILTER_ADAPTIVE_FAST(-2), // samples each 8 or 16 rows
    /**
     * Experimental
     */
    FILTER_SUPER_ADAPTIVE(-10), //
    /**
     * Preserves the filter passed in original row.
     */
    FILTER_PRESERVE(-40),
    /**
     * Uses all filters, one per line, cyclically. Only for tests.
     */
    FILTER_CYCLIC(-50),
    /**
     * Not specified, placeholder for unknown or NA filters.
     */
    FILTER_UNKNOWN(-100);

    /** Numeric filter code as stored in the PNG stream (negative for pseudo strategies). */
    public final int val;

    private FilterType(int val) {
        this.val = val;
    }

    // Reverse lookup by code. For duplicated pseudo codes (-2, -4) the constant declared
    // last wins (FILTER_ADAPTIVE_FAST, FILTER_ADAPTIVE_FULL) - getByVal relies on this.
    private static final HashMap<Integer, FilterType> byVal;

    static {
        byVal = new HashMap<Integer, FilterType>();
        for (FilterType ft : values()) {
            byVal.put(ft.val, ft);
        }
    }

    /** Returns the FilterType for a numeric code, or null if no constant has that value. */
    public static FilterType getByVal(int i) {
        return byVal.get(i);
    }

    /** only considers standard filter codes (0-4) */
    public static boolean isValidStandard(int i) {
        return i >= 0 && i <= 4;
    }

    public static boolean isValidStandard(FilterType fy) {
        return fy != null && isValidStandard(fy.val);
    }

    /** true for the adaptive pseudo strategies (codes -2..-4) */
    public static boolean isAdaptive(FilterType fy) {
        return fy.val <= -2 && fy.val >= -4;
    }

    /**
     * Returns all "standard" filters
     */
    public static FilterType[] getAllStandard() {
        return new FilterType[] {FILTER_NONE, FILTER_SUB, FILTER_UP, FILTER_AVERAGE, FILTER_PAETH};
    }

    public static FilterType[] getAllStandardNoneLast() {
        return new FilterType[] {FILTER_SUB, FILTER_UP, FILTER_AVERAGE, FILTER_PAETH, FILTER_NONE};
    }

    public static FilterType[] getAllStandardExceptNone() {
        return new FilterType[] {FILTER_SUB, FILTER_UP, FILTER_AVERAGE, FILTER_PAETH};
    }

    static FilterType[] getAllStandardForFirstRow() {
        return new FilterType[] {FILTER_SUB, FILTER_NONE};
    }
}

View File

@ -0,0 +1,14 @@
package ar.com.hjg.pngj;
/**
 * Bytes consumer. Objects implementing this interface can act as bytes consumers, that are "fed" with bytes.
 */
public interface IBytesConsumer {
/**
 * Consumes up to len bytes from buf, starting at offset.
 * <p>
 * Returns the number of bytes actually consumed. A negative return value signals that the consumer is done and
 * refuses to eat more bytes. This should only return 0 if len is 0.
 */
int consume(byte[] buf, int offset, int len);
}

View File

@ -0,0 +1,20 @@
package ar.com.hjg.pngj;
import ar.com.hjg.pngj.chunks.ChunkRaw;
import ar.com.hjg.pngj.chunks.PngChunk;
/**
 * Factory to create a {@link PngChunk} from a {@link ChunkRaw}.
 * <p>
 * Used by {@link PngReader}
 */
public interface IChunkFactory {
/**
 * @param chunkRaw Chunk in raw form. Its data field can be null if it was skipped or processed directly (eg IDAT)
 * @param imgInfo Not normally necessary, but some chunks want this info
 * @return should never return null.
 */
public PngChunk createChunk(ChunkRaw chunkRaw, ImageInfo imgInfo);
}

View File

@ -0,0 +1,129 @@
package ar.com.hjg.pngj;
import java.io.OutputStream;
import ar.com.hjg.pngj.chunks.ChunkHelper;
import ar.com.hjg.pngj.chunks.ChunkRaw;
/**
 * Outputs a sequence of IDAT-like chunks, filled progressively until the max chunk length is reached (or until
 * flush()).
 */
public class IDatChunkWriter {

    private static final int MAX_LEN_DEFAULT = 32768; // 32K rather arbitrary - data only

    private final OutputStream outputStream;
    private final int maxChunkLen;
    private byte[] buf; // chunk data buffer; null after close()

    private int offset = 0; // bytes already filled in buf
    private int availLen; // free bytes remaining in buf (maxChunkLen - offset)
    private long totalBytesWriten = 0; // including header+crc
    private int chunksWriten = 0;

    public IDatChunkWriter(OutputStream outputStream) {
        this(outputStream, 0);
    }

    /**
     * @param maxChunkLength Maximum data bytes per chunk; non-positive selects the default (32K)
     */
    public IDatChunkWriter(OutputStream outputStream, int maxChunkLength) {
        this.outputStream = outputStream;
        this.maxChunkLen = maxChunkLength > 0 ? maxChunkLength : MAX_LEN_DEFAULT;
        buf = new byte[maxChunkLen];
        availLen = maxChunkLen - offset;
        postReset();
    }

    /**
     * @param b Preallocated buffer; if null, a default-sized (32K) buffer is allocated
     */
    public IDatChunkWriter(OutputStream outputStream, byte[] b) {
        this.outputStream = outputStream;
        this.buf = b != null ? b : new byte[MAX_LEN_DEFAULT];
        // BUGFIX: was b.length, which threw NullPointerException when b == null despite the guard above
        this.maxChunkLen = this.buf.length;
        availLen = maxChunkLen - offset;
        postReset();
    }

    protected byte[] getChunkId() {
        return ChunkHelper.b_IDAT;
    }

    /**
     * Writes a chunk if there is more than minLenToWrite.
     *
     * This is normally called internally, but can be called explicitly to force flush.
     */
    public final void flush() {
        if (offset > 0 && offset >= minLenToWrite()) {
            ChunkRaw c = new ChunkRaw(offset, getChunkId(), false);
            c.data = buf;
            c.writeChunk(outputStream);
            totalBytesWriten += c.len + 12; // 12 = length(4) + id(4) + crc(4)
            chunksWriten++;
            offset = 0;
            availLen = maxChunkLen;
            postReset();
        }
    }

    public int getOffset() {
        return offset;
    }

    public int getAvailLen() {
        return availLen;
    }

    /** triggers a flush+reset if appropriate */
    public void incrementOffset(int n) {
        offset += n;
        availLen -= n;
        if (availLen < 0)
            throw new PngjOutputException("Anomalous situation");
        if (availLen == 0) {
            flush();
        }
    }

    /**
     * this should rarely be used, the normal way (to avoid double copying) is to get the buffer and write directly
     * to it
     */
    public void write(byte[] b, int o, int len) {
        while (len > 0) {
            int n = len <= availLen ? len : availLen;
            System.arraycopy(b, o, buf, offset, n);
            incrementOffset(n);
            len -= n;
            o += n;
        }
    }

    /** this will be called after reset */
    protected void postReset() {
        // fdat could override this (and minLenToWrite) to add a prefix
    }

    protected int minLenToWrite() {
        return 1;
    }

    public void close() {
        flush();
        offset = 0;
        buf = null;
    }

    /**
     * You can write directly to this buffer, using {@link #getOffset()} and {@link #getAvailLen()}. You should call
     * {@link #incrementOffset(int)} immediately after.
     */
    public byte[] getBuf() {
        return buf;
    }

    public long getTotalBytesWriten() {
        return totalBytesWriten;
    }

    public int getChunksWriten() {
        return chunksWriten;
    }
}

View File

@ -0,0 +1,41 @@
package ar.com.hjg.pngj;
/**
 * General format-translated image line.
 * <p>
 * The methods from this interface provide translation from/to PNG raw unfiltered pixel data, for each image line. This
 * doesn't make any assumptions about the underlying storage.
 * <p>
 * The user of this library will not normally use these methods, but instead will cast to a more concrete
 * implementation, such as {@link ImageLineInt} or {@link ImageLineByte}, with its methods for accessing the pixel
 * values.
 */
public interface IImageLine {
/**
 * Extracts pixels from a raw unfiltered PNG row. Len is the total amount of bytes in the array, including the first
 * byte (filter type)
 *
 * Arguments offset and step (0 and 1 for non interlaced) are in PIXELS. It's guaranteed that when step==1 then
 * offset=0
 *
 * Notice that when step!=1 the data is partial, this method will be called several times
 *
 * Warning: the data in array 'raw' starts at position 0 and has 'len' consecutive bytes. 'offset' and 'step' refer to
 * the pixels in destination
 */
void readFromPngRaw(byte[] raw, int len, int offset, int step);
/**
 * This is called when the read for the line has been completed (eg for interlaced). It's called exactly once for each
 * line. This is provided in case the class needs to do some postprocessing.
 */
void endReadFromPngRaw();
/**
 * Writes the line to a PNG raw byte array, in the unfiltered PNG format. Notice that the first byte is the filter
 * type; you should write it only if you know it.
 */
void writeToPngRaw(byte[] raw);
}

View File

@ -0,0 +1,23 @@
package ar.com.hjg.pngj;
/**
 * This interface is just for the sake of unifying some methods of {@link ImageLineHelper} that can use both
 * {@link ImageLineInt} or {@link ImageLineByte}. It's not very useful outside that, and the user should not rely much
 * on this.
 */
public interface IImageLineArray {
public ImageInfo getImageInfo();
public FilterType getFilterType();
/**
 * length of the backing array (should correspond to samples)
 */
public int getSize();
/**
 * Gets the i-th element of the array (for 0 to size-1). The meaning of this is type dependent. For ImageLineInt and
 * ImageLineByte it is the sample value.
 */
public int getElem(int i);
}

View File

@ -0,0 +1,8 @@
package ar.com.hjg.pngj;
/**
 * Image line factory: creates a fresh {@link IImageLine} for a given image geometry.
 */
public interface IImageLineFactory<T extends IImageLine> {
public T createImageLine(ImageInfo iminfo);
}

View File

@ -0,0 +1,53 @@
package ar.com.hjg.pngj;
/**
 * Set of {@link IImageLine} elements.
 * <p>
 * This is actually a "virtual" set, it can be implemented in several ways; for example
 * <ul>
 * <li>Cursor-like: stores only one line, which is implicitly moved when requested</li>
 * <li>All lines: all lines stored as an array of <tt>IImageLine</tt></li>
 * <li>
 * Subset of lines: eg, only first 3 lines, or odd numbered lines. Or a band of neighbouring lines that is moved like a
 * cursor.</li>
 * The ImageLine that PngReader returns is hosted by a IImageLineSet (this abstraction allows the implementation to deal
 * with interlaced images cleanly) but the library user does not normally need to know that (or rely on that), except
 * for the {@link PngReader#readRows()} method.
 * </ul>
 */
public interface IImageLineSet<T extends IImageLine> {
/**
 * Asks for the imageline corresponding to row <tt>n</tt> in the original image (zero based). This can trigger side
 * effects in this object (eg, advance a cursor, set current row number...) In some scenarios, this should be
 * considered as an alias to (pseudocode) <tt>positionAtLine(n); getCurrentLine();</tt>
 * <p>
 * Throws exception if not available. The caller is supposed to know what he/she is doing
 **/
public IImageLine getImageLine(int n);
/**
 * Like {@link #getImageLine(int)} but uses the raw numbering inside the LineSet. This makes little sense for a cursor
 *
 * @param n Should normally go from 0 to {@link #size()}
 * @return
 */
public IImageLine getImageLineRawNum(int n);
/**
 * Returns true if the set contains row <tt>n</tt> (in the original image, zero based) currently allocated.
 * <p>
 * If it's a single-cursor, this should return true only if it's positioned there. (notice that hasImageLine(n) can
 * return false, but getImageLine(n) can be ok)
 *
 **/
public boolean hasImageLine(int n);
/**
 * Internal count of allocated rows. This is informational, it should rarely be important for the caller.
 **/
public int size();
}

View File

@ -0,0 +1,24 @@
package ar.com.hjg.pngj;
/**
 * Factory of {@link IImageLineSet}, used by {@link PngReader}.
 * <p>
 *
 * @param <T> Generic type of IImageLine
 */
public interface IImageLineSetFactory<T extends IImageLine> {
/**
 * Creates a new {@link IImageLineSet}
 *
 * If singleCursor=true, the caller will read and write one row fully at a time, in order (it'll never try to read out
 * of order lines), so the implementation can opt to allocate only one line.
 *
 * @param imgInfo Image info
 * @param singleCursor : will read/write one row at a time
 * @param nlines : how many lines we plan to read
 * @param noffset : how many lines we want to skip from the original image (normally 0)
 * @param step : row step (normally 1)
 */
public IImageLineSet<T> create(ImageInfo imgInfo, boolean singleCursor, int nlines, int noffset,
int step);
}

View File

@ -0,0 +1,7 @@
package ar.com.hjg.pngj;
import java.io.OutputStream;
/**
 * Factory of {@link PngWriter} instances, allowing callers to customize how writers are created
 * for a given output stream and image geometry.
 */
public interface IPngWriterFactory {
public PngWriter createPngWriter(OutputStream outputStream, ImageInfo imgInfo);
}

View File

@ -0,0 +1,242 @@
package ar.com.hjg.pngj;
import java.util.Arrays;
import java.util.zip.Checksum;
import java.util.zip.Inflater;
/**
 * This object processes the concatenation of IDAT chunks.
 * <p>
 * It extends {@link DeflatedChunksSet}, adding the intelligence to unfilter rows, and to understand row lengths in
 * terms of ImageInfo and (eventually) Deinterlacer
 */
public class IdatSet extends DeflatedChunksSet {
// current and previous unfiltered rows; swapped on each row (index 0 keeps the filter-type byte)
protected byte rowUnfiltered[];
protected byte rowUnfilteredPrev[];
protected final ImageInfo imgInfo; // in the case of APNG this is the frame image
protected final Deinterlacer deinterlacer; // null for non-interlaced images
final RowInfo rowinfo; // info for the last processed row, for debug
protected int filterUseStat[] = new int[5]; // for stats: one counter per standard filter type
/**
 * @param id Chunk id (first chunk), should be shared by all concatenated chunks
 * @param iminfo Image info
 * @param deinterlacer Not null if interlaced
 */
public IdatSet(String id, ImageInfo iminfo, Deinterlacer deinterlacer) {
this(id, iminfo, deinterlacer, null, null);
}
/**
 * Special constructor with preallocated buffer.
 * <p>
 * Same as {@link #IdatSet(String, ImageInfo, Deinterlacer)}, but you can pass an Inflater (will be reset internally),
 * and a buffer (will be used only if its size is enough)
 */
public IdatSet(String id, ImageInfo iminfo, Deinterlacer deinterlacer, Inflater inf, byte[] buffer) {
// row length: for interlaced images the current pass decides it; +1 is always the filter byte
super(id, deinterlacer != null ? deinterlacer.getBytesToRead() + 1 : iminfo.bytesPerRow + 1,
iminfo.bytesPerRow + 1, inf, buffer);
this.imgInfo = iminfo;
this.deinterlacer = deinterlacer;
this.rowinfo = new RowInfo(iminfo, deinterlacer);
}
/**
 * Applies PNG un-filter to inflated raw line. Result in {@link #getUnfilteredRow()} {@link #getRowLen()}
 */
public void unfilterRow() {
unfilterRow(rowinfo.bytesRow);
}
// nbytes: NOT including the filter byte. leaves result in rowUnfiltered
protected void unfilterRow(int nbytes) {
if (rowUnfiltered == null || rowUnfiltered.length < row.length) {
rowUnfiltered = new byte[row.length];
rowUnfilteredPrev = new byte[row.length];
}
// first row of a (sub)image: the "previous row" must read as zeros after the swap below
if (rowinfo.rowNsubImg == 0)
Arrays.fill(rowUnfiltered, (byte) 0); // see swap that follows
// swap
byte[] tmp = rowUnfiltered;
rowUnfiltered = rowUnfilteredPrev;
rowUnfilteredPrev = tmp;
int ftn = row[0];
if (!FilterType.isValidStandard(ftn))
throw new PngjInputException("Filter type " + ftn + " invalid");
FilterType ft = FilterType.getByVal(ftn);
filterUseStat[ftn]++;
rowUnfiltered[0] = row[0]; // we copy the filter type, can be useful
switch (ft) {
case FILTER_NONE:
unfilterRowNone(nbytes);
break;
case FILTER_SUB:
unfilterRowSub(nbytes);
break;
case FILTER_UP:
unfilterRowUp(nbytes);
break;
case FILTER_AVERAGE:
unfilterRowAverage(nbytes);
break;
case FILTER_PAETH:
unfilterRowPaeth(nbytes);
break;
default:
throw new PngjInputException("Filter type " + ftn + " not implemented");
}
}
// j indexes the sample one pixel to the left (<=0 means "no left neighbour", treated as 0)
private void unfilterRowAverage(final int nbytes) {
int i, j, x;
for (j = 1 - imgInfo.bytesPixel, i = 1; i <= nbytes; i++, j++) {
x = j > 0 ? (rowUnfiltered[j] & 0xff) : 0;
rowUnfiltered[i] = (byte) (row[i] + (x + (rowUnfilteredPrev[i] & 0xFF)) / 2);
}
}
private void unfilterRowNone(final int nbytes) {
for (int i = 1; i <= nbytes; i++) {
rowUnfiltered[i] = (byte) (row[i]);
}
}
// x = left neighbour, y = upper-left neighbour (both 0 at the row start)
private void unfilterRowPaeth(final int nbytes) {
int i, j, x, y;
for (j = 1 - imgInfo.bytesPixel, i = 1; i <= nbytes; i++, j++) {
x = j > 0 ? (rowUnfiltered[j] & 0xFF) : 0;
y = j > 0 ? (rowUnfilteredPrev[j] & 0xFF) : 0;
rowUnfiltered[i] =
(byte) (row[i] + PngHelperInternal
.filterPaethPredictor(x, rowUnfilteredPrev[i] & 0xFF, y));
}
}
private void unfilterRowSub(final int nbytes) {
int i, j;
// first pixel has no left neighbour: copied as-is
for (i = 1; i <= imgInfo.bytesPixel; i++) {
rowUnfiltered[i] = (byte) (row[i]);
}
for (j = 1, i = imgInfo.bytesPixel + 1; i <= nbytes; i++, j++) {
rowUnfiltered[i] = (byte) (row[i] + rowUnfiltered[j]);
}
}
private void unfilterRowUp(final int nbytes) {
for (int i = 1; i <= nbytes; i++) {
rowUnfiltered[i] = (byte) (row[i] + rowUnfilteredPrev[i]);
}
}
/**
 * does the unfiltering of the inflated row, and updates row info
 */
@Override
protected void preProcessRow() {
super.preProcessRow();
rowinfo.update(getRown());
unfilterRow();
rowinfo.updateBuf(rowUnfiltered, rowinfo.bytesRow + 1);
}
/**
 * Method for async/callback mode .
 * <p>
 * In callback mode will be called as soon as each row is retrieved (inflated and unfiltered), after
 * {@link #preProcessRow()}
 * <p>
 * This is a dummy implementation (this normally should be overridden) that does nothing more than compute the length
 * of next row.
 * <p>
 * The return value is essential
 * <p>
 *
 * @return Length of next row, in bytes (including filter byte), non-positive if done
 */
@Override
protected int processRowCallback() {
int bytesNextRow = advanceToNextRow();
return bytesNextRow;
}
@Override
protected void processDoneCallback() {
super.processDoneCallback();
}
/**
 * Signals that we are done with the previous row, begin reading the next one.
 * <p>
 * In polled mode, calls setNextRowLen()
 * <p>
 * Warning: after calling this, the unfilterRow is invalid!
 *
 * @return Returns nextRowLen
 */
public int advanceToNextRow() {
// PngHelperInternal.LOGGER.info("advanceToNextRow");
int bytesNextRow;
if (deinterlacer == null) {
bytesNextRow = getRown() >= imgInfo.rows - 1 ? 0 : imgInfo.bytesPerRow + 1;
} else {
boolean more = deinterlacer.nextRow();
bytesNextRow = more ? deinterlacer.getBytesToRead() + 1 : 0;
}
if (!isCallbackMode()) { // in callback mode, setNextRowLen() is called internally
prepareForNextRow(bytesNextRow);
}
return bytesNextRow;
}
public boolean isRowReady() {
return !isWaitingForMoreInput();
}
/**
 * Unfiltered row.
 * <p>
 * This should be called only if {@link #isRowReady()} returns true.
 * <p>
 * To get real length, use {@link #getRowLen()}
 * <p>
 *
 * @return Unfiltered row, includes filter byte
 */
public byte[] getUnfilteredRow() {
return rowUnfiltered;
}
public Deinterlacer getDeinterlacer() {
return deinterlacer;
}
// folds the unfiltered row (without the filter byte) into the given checksums
void updateCrcs(Checksum... idatCrcs) {
for (Checksum idatCrca : idatCrcs)
if (idatCrca != null)// just for testing
idatCrca.update(getUnfilteredRow(), 1, getRowFilled() - 1);
}
@Override
public void close() {
super.close();
rowUnfiltered = null;// not really necessary...
rowUnfilteredPrev = null;
}
/**
 * Only for debug/stats
 *
 * @return Array of 5 integers (sum equal numbers of rows) counting each filter use
 */
public int[] getFilterUseStat() {
return filterUseStat;
}
}

View File

@ -0,0 +1,255 @@
package ar.com.hjg.pngj;
import java.util.zip.Checksum;
/**
 * Simple immutable wrapper for basic image info.
 * <p>
 * Some parameters are redundant, but the constructor receives an 'orthogonal' subset.
 * <p>
 * ref: http://www.w3.org/TR/PNG/#11IHDR
 */
public class ImageInfo {
/**
 * Absolute allowed maximum value for rows and cols (2^24 ~16 million). (bytesPerRow must fit in a 32bit integer,
 * though total amount of pixels not necessarily).
 */
public static final int MAX_COLS_ROW = 16777216;
/**
 * Cols= Image width, in pixels.
 */
public final int cols;
/**
 * Rows= Image height, in pixels
 */
public final int rows;
/**
 * Bits per sample (per channel) in the buffer (1-2-4-8-16). This is 8-16 for RGB/ARGB images, 1-2-4-8 for grayscale.
 * For indexed images, number of bits per palette index (1-2-4-8)
 */
public final int bitDepth;
/**
 * Number of channels, as used internally: 3 for RGB, 4 for RGBA, 2 for GA (gray with alpha), 1 for grayscale or
 * indexed.
 */
public final int channels;
/**
 * Flag: true if has alpha channel (RGBA/GA)
 */
public final boolean alpha;
/**
 * Flag: true if is grayscale (G/GA)
 */
public final boolean greyscale;
/**
 * Flag: true if image is indexed, i.e., it has a palette
 */
public final boolean indexed;
/**
 * Flag: true if image internally uses less than one byte per sample (bit depth 1-2-4)
 */
public final boolean packed;
/**
 * Bits used for each pixel in the buffer: channels * bitDepth
 */
public final int bitspPixel;
/**
 * rounded up value: this is only used internally for filter
 */
public final int bytesPixel;
/**
 * ceil(bitspp*cols/8) - does not include filter
 */
public final int bytesPerRow;
/**
 * Equals cols * channels
 */
public final int samplesPerRow;
/**
 * Amount of "packed samples" : when several samples are stored in a single byte (bitdepth 1,2 4) they are counted as
 * one "packed sample". This is less than samplesPerRow only when bitdepth is 1-2-4 (flag packed = true)
 * <p>
 * This equals the number of elements in the scanline array if working with packedMode=true
 * <p>
 * For internal use, client code should rarely access this.
 */
public final int samplesPerRowPacked;
// lazily computed caches; not synchronized (this class is otherwise immutable)
private long totalPixels = -1; // lazy getter
private long totalRawBytes = -1; // lazy getter
/**
 * Short constructor: assumes truecolor (RGB/RGBA)
 */
public ImageInfo(int cols, int rows, int bitdepth, boolean alpha) {
this(cols, rows, bitdepth, alpha, false, false);
}
/**
 * Full constructor
 *
 * @param cols Width in pixels
 * @param rows Height in pixels
 * @param bitdepth Bits per sample, in the buffer : 8-16 for RGB true color and greyscale
 * @param alpha Flag: has an alpha channel (RGBA or GA)
 * @param grayscale Flag: is gray scale (any bitdepth, with or without alpha)
 * @param indexed Flag: has palette
 * @throws PngjException if the combination of parameters is invalid (e.g. palette+greyscale, bad bitdepth, bad size)
 */
public ImageInfo(int cols, int rows, int bitdepth, boolean alpha, boolean grayscale,
boolean indexed) {
this.cols = cols;
this.rows = rows;
this.alpha = alpha;
this.indexed = indexed;
this.greyscale = grayscale;
if (greyscale && indexed)
throw new PngjException("palette and greyscale are mutually exclusive");
this.channels = (grayscale || indexed) ? (alpha ? 2 : 1) : (alpha ? 4 : 3);
// http://www.w3.org/TR/PNG/#11IHDR
this.bitDepth = bitdepth;
this.packed = bitdepth < 8;
this.bitspPixel = (channels * this.bitDepth);
this.bytesPixel = (bitspPixel + 7) / 8;
this.bytesPerRow = (bitspPixel * cols + 7) / 8;
this.samplesPerRow = channels * this.cols;
this.samplesPerRowPacked = packed ? bytesPerRow : samplesPerRow;
// several checks
switch (this.bitDepth) {
case 1:
case 2:
case 4:
if (!(this.indexed || this.greyscale))
throw new PngjException("only indexed or grayscale can have bitdepth=" + this.bitDepth);
break;
case 8:
break;
case 16:
if (this.indexed)
throw new PngjException("indexed can't have bitdepth=" + this.bitDepth);
break;
default:
throw new PngjException("invalid bitdepth=" + this.bitDepth);
}
if (cols < 1 || cols > MAX_COLS_ROW)
throw new PngjException("invalid cols=" + cols + " ???");
if (rows < 1 || rows > MAX_COLS_ROW)
throw new PngjException("invalid rows=" + rows + " ???");
if (samplesPerRow < 1)
throw new PngjException("invalid image parameters (overflow?)");
}
/**
 * returns a copy with different size
 *
 * @param cols if non-positive, the original is used
 * @param rows if non-positive, the original is used
 * @return a new copy with the specified size and same properties
 */
public ImageInfo withSize(int cols, int rows) {
return new ImageInfo(cols > 0 ? cols : this.cols, rows > 0 ? rows : this.rows, this.bitDepth,
this.alpha, this.greyscale, this.indexed);
}
public long getTotalPixels() {
if (totalPixels < 0)
totalPixels = cols * (long) rows;
return totalPixels;
}
/**
 * Total uncompressed bytes in IDAT, including filter byte. This is not valid for interlaced.
 */
public long getTotalRawBytes() {
if (totalRawBytes < 0)
totalRawBytes = (bytesPerRow + 1) * (long) rows;
return totalRawBytes;
}
@Override
public String toString() {
return "ImageInfo [cols=" + cols + ", rows=" + rows + ", bitDepth=" + bitDepth + ", channels="
+ channels + ", alpha=" + alpha + ", greyscale=" + greyscale + ", indexed=" + indexed + "]";
}
/**
 * Brief info: COLSxROWS[dBITDEPTH][a][p][g] ( the default dBITDEPTH='d8' is omitted)
 **/
public String toStringBrief() {
return String.valueOf(cols) + "x" + rows + (bitDepth != 8 ? ("d" + bitDepth) : "")
+ (alpha ? "a" : "") + (indexed ? "p" : "") + (greyscale ? "g" : "");
}
public String toStringDetail() {
return "ImageInfo [cols=" + cols + ", rows=" + rows + ", bitDepth=" + bitDepth + ", channels="
+ channels + ", bitspPixel=" + bitspPixel + ", bytesPixel=" + bytesPixel + ", bytesPerRow="
+ bytesPerRow + ", samplesPerRow=" + samplesPerRow + ", samplesPerRowP="
+ samplesPerRowPacked + ", alpha=" + alpha + ", greyscale=" + greyscale + ", indexed="
+ indexed + ", packed=" + packed + "]";
}
// folds the identifying image parameters into the given checksum (internal fingerprint, not a PNG CRC)
void updateCrc(Checksum crc) {
crc.update((byte) rows);
crc.update((byte) (rows >> 8));
crc.update((byte) (rows >> 16));
crc.update((byte) cols);
crc.update((byte) (cols >> 8));
crc.update((byte) (cols >> 16));
crc.update((byte) (bitDepth));
crc.update((byte) (indexed ? 1 : 2));
crc.update((byte) (greyscale ? 3 : 4));
crc.update((byte) (alpha ? 3 : 4));
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (alpha ? 1231 : 1237);
result = prime * result + bitDepth;
result = prime * result + cols;
result = prime * result + (greyscale ? 1231 : 1237);
result = prime * result + (indexed ? 1231 : 1237);
result = prime * result + rows;
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
ImageInfo other = (ImageInfo) obj;
if (alpha != other.alpha)
return false;
if (bitDepth != other.bitDepth)
return false;
if (cols != other.cols)
return false;
if (greyscale != other.greyscale)
return false;
if (indexed != other.indexed)
return false;
if (rows != other.rows)
return false;
return true;
}
}

View File

@ -0,0 +1,186 @@
package ar.com.hjg.pngj;
/**
 * Lightweight wrapper for an image scanline, used for read and write.
 * <p>
 * This object can be (usually it is) reused while iterating over the image lines.
 * <p>
 * See <code>scanline</code> field, to understand the format.
 *
 * Format: byte (one byte per sample) (for 16bpp the extra byte is placed in an extra array)
 */
public class ImageLineByte implements IImageLine, IImageLineArray {
public final ImageInfo imgInfo;
final byte[] scanline;
final byte[] scanline2; // only used for 16 bpp (less significant byte) Normally you'd prefer
// ImageLineInt in this case
protected FilterType filterType; // informational ; only filled by the reader. not significant for
// interlaced
final int size; // always imgInfo.samplesPerRow: packed formats (bitdepth 1-2-4) are unpacked to one byte per sample
public ImageLineByte(ImageInfo imgInfo) {
this(imgInfo, null);
}
// sci: optional preallocated buffer, used only if large enough
public ImageLineByte(ImageInfo imgInfo, byte[] sci) {
this.imgInfo = imgInfo;
filterType = FilterType.FILTER_UNKNOWN;
size = imgInfo.samplesPerRow;
scanline = sci != null && sci.length >= size ? sci : new byte[size];
scanline2 = imgInfo.bitDepth == 16 ? new byte[size] : null;
}
/**
 * Returns a factory for this object
 */
public static IImageLineFactory<ImageLineByte> getFactory() {
return new IImageLineFactory<ImageLineByte>() {
public ImageLineByte createImageLine(ImageInfo iminfo) {
return new ImageLineByte(iminfo);
}
};
}
public FilterType getFilterUsed() {
return filterType;
}
/**
 * One byte per sample. This can be used also for 16bpp images, but in this case this loses the less significant
 * 8-bits ; see also getScanlineByte2 and getElem.
 */
public byte[] getScanlineByte() {
return scanline;
}
/**
 * only for 16bpp (less significant byte)
 *
 * @return null for less than 16bpp
 */
public byte[] getScanlineByte2() {
return scanline2;
}
/**
 * Basic info
 */
public String toString() {
return " cols=" + imgInfo.cols + " bpc=" + imgInfo.bitDepth + " size=" + scanline.length;
}
public void readFromPngRaw(byte[] raw, final int len, final int offset, final int step) {
filterType = FilterType.getByVal(raw[0]); // only for a non-interlaced line is the filter type meaningful
int len1 = len - 1;
int step1 = (step - 1) * imgInfo.channels; // samples to skip between pixels when interlaced
if (imgInfo.bitDepth == 8) {
if (step == 1) {// 8 bits per sample non-interlaced: most important case, should be optimized
System.arraycopy(raw, 1, scanline, 0, len1);
} else {// 8 bits per sample interlaced
for (int s = 1, c = 0, i = offset * imgInfo.channels; s <= len1; s++, i++) {
scanline[i] = raw[s];
c++;
if (c == imgInfo.channels) {
c = 0;
i += step1;
}
}
}
} else if (imgInfo.bitDepth == 16) {
if (step == 1) {// 16 bits per sample non-interlaced
for (int i = 0, s = 1; i < imgInfo.samplesPerRow; i++) {
scanline[i] = raw[s++]; // most significant byte
scanline2[i] = raw[s++]; // less significant byte
}
} else {
for (int s = 1, c = 0, i = offset != 0 ? offset * imgInfo.channels : 0; s <= len1; i++) {
scanline[i] = raw[s++];
scanline2[i] = raw[s++];
c++;
if (c == imgInfo.channels) {
c = 0;
i += step1;
}
}
}
} else { // packed formats (bitdepth 1-2-4): unpack each byte into one sample per array element
int mask0, mask, shi, bd;
bd = imgInfo.bitDepth;
mask0 = ImageLineHelper.getMaskForPackedFormats(bd);
for (int i = offset * imgInfo.channels, r = 1, c = 0; r < len; r++) {
mask = mask0;
shi = 8 - bd;
do {
scanline[i] = (byte) ((raw[r] & mask) >> shi);
mask >>= bd;
shi -= bd;
i++;
c++;
if (c == imgInfo.channels) {
c = 0;
i += step1;
}
} while (mask != 0 && i < size);
}
}
}
public void writeToPngRaw(byte[] raw) {
raw[0] = (byte) filterType.val;
if (imgInfo.bitDepth == 8) {
System.arraycopy(scanline, 0, raw, 1, size);
} else if (imgInfo.bitDepth == 16) {
for (int i = 0, s = 1; i < size; i++) {
raw[s++] = scanline[i];
raw[s++] = scanline2[i];
}
} else { // packed formats: repack several samples into each output byte
int shi, bd, v;
bd = imgInfo.bitDepth;
shi = 8 - bd;
v = 0;
for (int i = 0, r = 1; i < size; i++) {
v |= (scanline[i] << shi);
shi -= bd;
if (shi < 0 || i == size - 1) {
raw[r++] = (byte) v;
shi = 8 - bd;
v = 0;
}
}
}
}
// nothing to do: this implementation needs no postprocessing
public void endReadFromPngRaw() {}
public int getSize() {
return size;
}
public int getElem(int i) {
return scanline2 == null ? scanline[i] & 0xFF : ((scanline[i] & 0xFF) << 8)
| (scanline2[i] & 0xFF);
}
public byte[] getScanline() {
return scanline;
}
public ImageInfo getImageInfo() {
return imgInfo;
}
public FilterType getFilterType() {
return filterType;
}
/**
 * This should rarely be used by client code. Only relevant if FilterPreserve==true
 */
public void setFilterType(FilterType ft) {
filterType = ft;
}
}

View File

@ -0,0 +1,470 @@
package ar.com.hjg.pngj;
import java.util.Arrays;
import ar.com.hjg.pngj.chunks.PngChunkPLTE;
import ar.com.hjg.pngj.chunks.PngChunkTRNS;
/**
 * Bunch of utility static methods to process an image line at the pixel level.
 * <p>
 * WARNING: this has little testing/optimizing, and this API is not stable. Some methods will probably be changed or
 * removed in future releases.
 * <p>
 * WARNING: most methods for getting/setting values currently work only for ImageLine or ImageLineByte
 */
public class ImageLineHelper {
// Lookup tables mapping packed samples (bit depth 1/2/4) to the 0-255 range: value * 255 / (2^depth - 1)
static int[] DEPTH_UNPACK_1;
static int[] DEPTH_UNPACK_2;
static int[] DEPTH_UNPACK_4;
// Indexed by bit depth: slots 1, 2 and 4 hold the tables above; slots 0 and 3 are null
static int[][] DEPTH_UNPACK;
static {
initDepthScale();
}
// Builds the DEPTH_UNPACK tables above; run once from the static initializer
private static void initDepthScale() {
DEPTH_UNPACK_1 = new int[2];
for (int i = 0; i < 2; i++)
DEPTH_UNPACK_1[i] = i * 255;
DEPTH_UNPACK_2 = new int[4];
for (int i = 0; i < 4; i++)
DEPTH_UNPACK_2[i] = (i * 255) / 3;
DEPTH_UNPACK_4 = new int[16];
for (int i = 0; i < 16; i++)
DEPTH_UNPACK_4[i] = (i * 255) / 15;
DEPTH_UNPACK = new int[][] {null, DEPTH_UNPACK_1, DEPTH_UNPACK_2, null, DEPTH_UNPACK_4};
}
/**
 * When the bitdepth is less than 8, the imageLine is usually returned/expected unscaled. This method upscales it in
 * place. Eg, if bitdepth=1, values 0-1 will be converted to 0-255.
 * <p>
 * No-op for indexed images or bit depth >= 8. Throws PngjException for line types other than
 * ImageLineInt/ImageLineByte.
 */
public static void scaleUp(IImageLineArray line) {
if (line.getImageInfo().indexed || line.getImageInfo().bitDepth >= 8)
return;
final int[] scaleArray = DEPTH_UNPACK[line.getImageInfo().bitDepth];
if (line instanceof ImageLineInt) {
ImageLineInt iline = (ImageLineInt) line;
for (int i = 0; i < iline.getSize(); i++)
iline.scanline[i] = scaleArray[iline.scanline[i]];
} else if (line instanceof ImageLineByte) {
ImageLineByte iline = (ImageLineByte) line;
for (int i = 0; i < iline.getSize(); i++)
iline.scanline[i] = (byte) scaleArray[iline.scanline[i]];
} else
throw new PngjException("not implemented");
}
/**
* Reverse of {@link #scaleUp(IImageLineArray)}
*/
public static void scaleDown(IImageLineArray line) {
if (line.getImageInfo().indexed || line.getImageInfo().bitDepth >= 8)
return;
if (line instanceof ImageLineInt) {
final int scalefactor = 8 - line.getImageInfo().bitDepth;
if (line instanceof ImageLineInt) {
ImageLineInt iline = (ImageLineInt) line;
for (int i = 0; i < line.getSize(); i++)
iline.scanline[i] = iline.scanline[i] >> scalefactor;
} else if (line instanceof ImageLineByte) {
ImageLineByte iline = (ImageLineByte) line;
for (int i = 0; i < line.getSize(); i++)
iline.scanline[i] = (byte) ((iline.scanline[i] & 0xFF) >> scalefactor);
}
} else
throw new PngjException("not implemented");
}
public static byte scaleUp(int bitdepth, byte v) {
return bitdepth < 8 ? (byte) DEPTH_UNPACK[bitdepth][v] : v;
}
public static byte scaleDown(int bitdepth, byte v) {
return bitdepth < 8 ? (byte) (v >> (8 - bitdepth)) : v;
}
/**
* Given an indexed line with a palette, unpacks as a RGB array, or RGBA if a non nul PngChunkTRNS chunk is passed
*
* @param line ImageLine as returned from PngReader
* @param pal Palette chunk
* @param trns Transparency chunk, can be null (absent)
* @param buf Preallocated array, optional
* @return R G B (A), one sample 0-255 per array element. Ready for pngw.writeRowInt()
*/
public static int[] palette2rgb(ImageLineInt line, PngChunkPLTE pal, PngChunkTRNS trns, int[] buf) {
return palette2rgb(line, pal, trns, buf, false);
}
/**
* Warning: the line should be upscaled, see {@link #scaleUp(IImageLineArray)}
*/
static int[] lineToARGB32(ImageLineByte line, PngChunkPLTE pal, PngChunkTRNS trns, int[] buf) {
boolean alphachannel = line.imgInfo.alpha;
int cols = line.getImageInfo().cols;
if (buf == null || buf.length < cols)
buf = new int[cols];
int index, rgb, alpha, ga, g;
if (line.getImageInfo().indexed) {// palette
int nindexesWithAlpha = trns != null ? trns.getPalletteAlpha().length : 0;
for (int c = 0; c < cols; c++) {
index = line.scanline[c] & 0xFF;
rgb = pal.getEntry(index);
alpha = index < nindexesWithAlpha ? trns.getPalletteAlpha()[index] : 255;
buf[c] = (alpha << 24) | rgb;
}
} else if (line.imgInfo.greyscale) { // gray
ga = trns != null ? trns.getGray() : -1;
for (int c = 0, c2 = 0; c < cols; c++) {
g = (line.scanline[c2++] & 0xFF);
alpha = alphachannel ? line.scanline[c2++] & 0xFF : (g != ga ? 255 : 0);
buf[c] = (alpha << 24) | g | (g << 8) | (g << 16);
}
} else { // true color
ga = trns != null ? trns.getRGB888() : -1;
for (int c = 0, c2 = 0; c < cols; c++) {
rgb =
((line.scanline[c2++] & 0xFF) << 16) | ((line.scanline[c2++] & 0xFF) << 8)
| (line.scanline[c2++] & 0xFF);
alpha = alphachannel ? line.scanline[c2++] & 0xFF : (rgb != ga ? 255 : 0);
buf[c] = (alpha << 24) | rgb;
}
}
return buf;
}
/**
* Warning: the line should be upscaled, see {@link #scaleUp(IImageLineArray)}
*/
static byte[] lineToRGBA8888(ImageLineByte line, PngChunkPLTE pal, PngChunkTRNS trns, byte[] buf) {
boolean alphachannel = line.imgInfo.alpha;
int cols = line.imgInfo.cols;
int bytes = cols * 4;
if (buf == null || buf.length < bytes)
buf = new byte[bytes];
int index, rgb, ga;
byte val;
if (line.imgInfo.indexed) {// palette
int nindexesWithAlpha = trns != null ? trns.getPalletteAlpha().length : 0;
for (int c = 0, b = 0; c < cols; c++) {
index = line.scanline[c] & 0xFF;
rgb = pal.getEntry(index);
buf[b++] = (byte) ((rgb >> 16) & 0xFF);
buf[b++] = (byte) ((rgb >> 8) & 0xFF);
buf[b++] = (byte) (rgb & 0xFF);
buf[b++] = (byte) (index < nindexesWithAlpha ? trns.getPalletteAlpha()[index] : 255);
}
} else if (line.imgInfo.greyscale) { //
ga = trns != null ? trns.getGray() : -1;
for (int c = 0, b = 0; b < bytes;) {
val = line.scanline[c++];
buf[b++] = val;
buf[b++] = val;
buf[b++] = val;
buf[b++] =
alphachannel ? line.scanline[c++] : ((int) (val & 0xFF) == ga) ? (byte) 0 : (byte) 255;
}
} else { // true color
if (alphachannel) // same format!
System.arraycopy(line.scanline, 0, buf, 0, bytes);
else {
for (int c = 0, b = 0; b < bytes;) {
buf[b++] = line.scanline[c++];
buf[b++] = line.scanline[c++];
buf[b++] = line.scanline[c++];
buf[b++] = (byte) (255); // tentative (probable)
if (trns != null && buf[b - 3] == (byte) trns.getRGB()[0]
&& buf[b - 2] == (byte) trns.getRGB()[1] && buf[b - 1] == (byte) trns.getRGB()[2]) // not
// very
// efficient,
// but
// not
// frecuent
buf[b - 1] = 0;
}
}
}
return buf;
}
static byte[] lineToRGB888(ImageLineByte line, PngChunkPLTE pal, byte[] buf) {
boolean alphachannel = line.imgInfo.alpha;
int cols = line.imgInfo.cols;
int bytes = cols * 3;
if (buf == null || buf.length < bytes)
buf = new byte[bytes];
byte val;
int[] rgb = new int[3];
if (line.imgInfo.indexed) {// palette
for (int c = 0, b = 0; c < cols; c++) {
pal.getEntryRgb(line.scanline[c] & 0xFF, rgb);
buf[b++] = (byte) rgb[0];
buf[b++] = (byte) rgb[1];
buf[b++] = (byte) rgb[2];
}
} else if (line.imgInfo.greyscale) { //
for (int c = 0, b = 0; b < bytes;) {
val = line.scanline[c++];
buf[b++] = val;
buf[b++] = val;
buf[b++] = val;
if (alphachannel)
c++; // skip alpha
}
} else { // true color
if (!alphachannel) // same format!
System.arraycopy(line.scanline, 0, buf, 0, bytes);
else {
for (int c = 0, b = 0; b < bytes;) {
buf[b++] = line.scanline[c++];
buf[b++] = line.scanline[c++];
buf[b++] = line.scanline[c++];
c++;// skip alpha
}
}
}
return buf;
}
/**
* Same as palette2rgbx , but returns rgba always, even if trns is null
*
* @param line ImageLine as returned from PngReader
* @param pal Palette chunk
* @param trns Transparency chunk, can be null (absent)
* @param buf Preallocated array, optional
* @return R G B (A), one sample 0-255 per array element. Ready for pngw.writeRowInt()
*/
public static int[] palette2rgba(ImageLineInt line, PngChunkPLTE pal, PngChunkTRNS trns, int[] buf) {
return palette2rgb(line, pal, trns, buf, true);
}
public static int[] palette2rgb(ImageLineInt line, PngChunkPLTE pal, int[] buf) {
return palette2rgb(line, pal, null, buf, false);
}
/** this is not very efficient, only for tests and troubleshooting */
public static int[] convert2rgba(IImageLineArray line, PngChunkPLTE pal, PngChunkTRNS trns,
int[] buf) {
ImageInfo imi = line.getImageInfo();
int nsamples = imi.cols * 4;
if (buf == null || buf.length < nsamples)
buf = new int[nsamples];
int maxval = imi.bitDepth == 16 ? (1 << 16) - 1 : 255;
Arrays.fill(buf, maxval);
if (imi.indexed) {
int tlen = trns != null ? trns.getPalletteAlpha().length : 0;
for (int s = 0; s < imi.cols; s++) {
int index = line.getElem(s);
pal.getEntryRgb(index, buf, s * 4);
if (index < tlen) {
buf[s * 4 + 3] = trns.getPalletteAlpha()[index];
}
}
} else if (imi.greyscale) {
int[] unpack = null;
if (imi.bitDepth < 8)
unpack = ImageLineHelper.DEPTH_UNPACK[imi.bitDepth];
for (int s = 0, i = 0, p = 0; p < imi.cols; p++) {
buf[s++] = unpack != null ? unpack[line.getElem(i++)] : line.getElem(i++);
buf[s] = buf[s - 1];
s++;
buf[s] = buf[s - 1];
s++;
if (imi.channels == 2)
buf[s++] = unpack != null ? unpack[line.getElem(i++)] : line.getElem(i++);
else
buf[s++] = maxval;
}
} else {
for (int s = 0, i = 0, p = 0; p < imi.cols; p++) {
buf[s++] = line.getElem(i++);
buf[s++] = line.getElem(i++);
buf[s++] = line.getElem(i++);
buf[s++] = imi.alpha ? line.getElem(i++) : maxval;
}
}
return buf;
}
private static int[] palette2rgb(IImageLine line, PngChunkPLTE pal, PngChunkTRNS trns, int[] buf,
boolean alphaForced) {
boolean isalpha = trns != null;
int channels = isalpha ? 4 : 3;
ImageLineInt linei = (ImageLineInt) (line instanceof ImageLineInt ? line : null);
ImageLineByte lineb = (ImageLineByte) (line instanceof ImageLineByte ? line : null);
boolean isbyte = lineb != null;
int cols = linei != null ? linei.imgInfo.cols : lineb.imgInfo.cols;
int nsamples = cols * channels;
if (buf == null || buf.length < nsamples)
buf = new int[nsamples];
int nindexesWithAlpha = trns != null ? trns.getPalletteAlpha().length : 0;
for (int c = 0; c < cols; c++) {
int index = isbyte ? (lineb.scanline[c] & 0xFF) : linei.scanline[c];
pal.getEntryRgb(index, buf, c * channels);
if (isalpha) {
int alpha = index < nindexesWithAlpha ? trns.getPalletteAlpha()[index] : 255;
buf[c * channels + 3] = alpha;
}
}
return buf;
}
/**
* what follows is pretty uninteresting/untested/obsolete, subject to change
*/
/**
* Just for basic info or debugging. Shows values for first and last pixel. Does not include alpha
*/
public static String infoFirstLastPixels(ImageLineInt line) {
return line.imgInfo.channels == 1 ? String.format("first=(%d) last=(%d)", line.scanline[0],
line.scanline[line.scanline.length - 1]) : String.format(
"first=(%d %d %d) last=(%d %d %d)", line.scanline[0], line.scanline[1], line.scanline[2],
line.scanline[line.scanline.length - line.imgInfo.channels],
line.scanline[line.scanline.length - line.imgInfo.channels + 1],
line.scanline[line.scanline.length - line.imgInfo.channels + 2]);
}
/**
* integer packed R G B only for bitdepth=8! (does not check!)
*
**/
public static int getPixelRGB8(IImageLine line, int column) {
if (line instanceof ImageLineInt) {
int offset = column * ((ImageLineInt) line).imgInfo.channels;
int[] scanline = ((ImageLineInt) line).getScanline();
return (scanline[offset] << 16) | (scanline[offset + 1] << 8) | (scanline[offset + 2]);
} else if (line instanceof ImageLineByte) {
int offset = column * ((ImageLineByte) line).imgInfo.channels;
byte[] scanline = ((ImageLineByte) line).getScanline();
return ((scanline[offset] & 0xff) << 16) | ((scanline[offset + 1] & 0xff) << 8)
| ((scanline[offset + 2] & 0xff));
} else
throw new PngjException("Not supported " + line.getClass());
}
public static int getPixelARGB8(IImageLine line, int column) {
if (line instanceof ImageLineInt) {
int offset = column * ((ImageLineInt) line).imgInfo.channels;
int[] scanline = ((ImageLineInt) line).getScanline();
return (scanline[offset + 3] << 24) | (scanline[offset] << 16) | (scanline[offset + 1] << 8)
| (scanline[offset + 2]);
} else if (line instanceof ImageLineByte) {
int offset = column * ((ImageLineByte) line).imgInfo.channels;
byte[] scanline = ((ImageLineByte) line).getScanline();
return (((scanline[offset + 3] & 0xff) << 24) | ((scanline[offset] & 0xff) << 16)
| ((scanline[offset + 1] & 0xff) << 8) | ((scanline[offset + 2] & 0xff)));
} else
throw new PngjException("Not supported " + line.getClass());
}
public static void setPixelsRGB8(ImageLineInt line, int[] rgb) {
for (int i = 0, j = 0; i < line.imgInfo.cols; i++) {
line.scanline[j++] = ((rgb[i] >> 16) & 0xFF);
line.scanline[j++] = ((rgb[i] >> 8) & 0xFF);
line.scanline[j++] = ((rgb[i] & 0xFF));
}
}
public static void setPixelRGB8(ImageLineInt line, int col, int r, int g, int b) {
col *= line.imgInfo.channels;
line.scanline[col++] = r;
line.scanline[col++] = g;
line.scanline[col] = b;
}
public static void setPixelRGB8(ImageLineInt line, int col, int rgb) {
setPixelRGB8(line, col, (rgb >> 16) & 0xFF, (rgb >> 8) & 0xFF, rgb & 0xFF);
}
public static void setPixelsRGBA8(ImageLineInt line, int[] rgb) {
for (int i = 0, j = 0; i < line.imgInfo.cols; i++) {
line.scanline[j++] = ((rgb[i] >> 16) & 0xFF);
line.scanline[j++] = ((rgb[i] >> 8) & 0xFF);
line.scanline[j++] = ((rgb[i] & 0xFF));
line.scanline[j++] = ((rgb[i] >> 24) & 0xFF);
}
}
public static void setPixelRGBA8(ImageLineInt line, int col, int r, int g, int b, int a) {
col *= line.imgInfo.channels;
line.scanline[col++] = r;
line.scanline[col++] = g;
line.scanline[col++] = b;
line.scanline[col] = a;
}
public static void setPixelRGBA8(ImageLineInt line, int col, int rgb) {
setPixelRGBA8(line, col, (rgb >> 16) & 0xFF, (rgb >> 8) & 0xFF, rgb & 0xFF, (rgb >> 24) & 0xFF);
}
public static void setValD(ImageLineInt line, int i, double d) {
line.scanline[i] = double2int(line, d);
}
public static int interpol(int a, int b, int c, int d, double dx, double dy) {
// a b -> x (0-1)
// c d
double e = a * (1.0 - dx) + b * dx;
double f = c * (1.0 - dx) + d * dx;
return (int) (e * (1 - dy) + f * dy + 0.5);
}
public static double int2double(ImageLineInt line, int p) {
return line.imgInfo.bitDepth == 16 ? p / 65535.0 : p / 255.0;
// TODO: replace my multiplication? check for other bitdepths
}
public static double int2doubleClamped(ImageLineInt line, int p) {
// TODO: replace my multiplication?
double d = line.imgInfo.bitDepth == 16 ? p / 65535.0 : p / 255.0;
return d <= 0.0 ? 0 : (d >= 1.0 ? 1.0 : d);
}
public static int double2int(ImageLineInt line, double d) {
d = d <= 0.0 ? 0 : (d >= 1.0 ? 1.0 : d);
return line.imgInfo.bitDepth == 16 ? (int) (d * 65535.0 + 0.5) : (int) (d * 255.0 + 0.5); //
}
public static int double2intClamped(ImageLineInt line, double d) {
d = d <= 0.0 ? 0 : (d >= 1.0 ? 1.0 : d);
return line.imgInfo.bitDepth == 16 ? (int) (d * 65535.0 + 0.5) : (int) (d * 255.0 + 0.5); //
}
public static int clampTo_0_255(int i) {
return i > 255 ? 255 : (i < 0 ? 0 : i);
}
public static int clampTo_0_65535(int i) {
return i > 65535 ? 65535 : (i < 0 ? 0 : i);
}
public static int clampTo_128_127(int x) {
return x > 127 ? 127 : (x < -128 ? -128 : x);
}
public static int getMaskForPackedFormats(int bitDepth) { // Utility function for pack/unpack
if (bitDepth == 4)
return 0xf0;
else if (bitDepth == 2)
return 0xc0;
else
return 0x80; // bitDepth == 1
}
public static int getMaskForPackedFormatsLs(int bitDepth) { // Utility function for pack/unpack
if (bitDepth == 4)
return 0x0f;
else if (bitDepth == 2)
return 0x03;
else
return 0x01; // bitDepth == 1
}
}

View File

@ -0,0 +1,193 @@
package ar.com.hjg.pngj;
/**
 * Represents an image line, integer format (one integer by sample). See {@link #scanline} to understand the format.
 */
public class ImageLineInt implements IImageLine, IImageLineArray {
  public final ImageInfo imgInfo;

  /**
   * The 'scanline' is an array of integers, corresponds to an image line (row).
   * <p>
   * Each <code>int</code> is a "sample" (one for channel), (0-255 or 0-65535) in the corresponding PNG sequence:
   * <code>R G B R G B...</code> or <code>R G B A R G B A...</code>
   * or <code>g g g ...</code> or <code>i i i</code> (palette index)
   * <p>
   * For bitdepth=1/2/4 the value is not scaled (hence, eg, if bitdepth=2 the range will be 0-3)
   * <p>
   * To convert a indexed line to RGB values, see
   * {@link ImageLineHelper#palette2rgb(ImageLineInt, ar.com.hjg.pngj.chunks.PngChunkPLTE, int[])} (you can't do the
   * reverse)
   */
  protected final int[] scanline;

  /**
   * number of elements in the scanline
   */
  protected final int size;

  /**
   * informational ; only filled by the reader. not meaningful for interlaced
   */
  protected FilterType filterType = FilterType.FILTER_UNKNOWN;

  /**
   * @param imgInfo Immutable ImageInfo, basic parameters of the image we are reading or writing
   */
  public ImageLineInt(ImageInfo imgInfo) {
    this(imgInfo, null);
  }

  /**
   * @param imgInfo Immutable ImageInfo, basic parameters of the image we are reading or writing
   * @param sci preallocated buffer (can be null); reused only if large enough (samplesPerRow)
   */
  public ImageLineInt(ImageInfo imgInfo, int[] sci) {
    this.imgInfo = imgInfo;
    filterType = FilterType.FILTER_UNKNOWN;
    size = imgInfo.samplesPerRow;
    scanline = sci != null && sci.length >= size ? sci : new int[size];
  }

  /**
   * Helper method, returns a default factory for this object
   *
   */
  public static IImageLineFactory<ImageLineInt> getFactory() {
    return new IImageLineFactory<ImageLineInt>() {
      public ImageLineInt createImageLine(ImageInfo iminfo) {
        return new ImageLineInt(iminfo);
      }
    };
  }

  /** PNG filter type of this row, as recorded by the reader (informational). */
  public FilterType getFilterType() {
    return filterType;
  }

  /**
   * This should rarely be used by client code. Only relevant if FilterPreserve==true
   */
  public void setFilterType(FilterType ft) {
    filterType = ft;
  }

  /**
   * Basic info
   */
  public String toString() {
    return " cols=" + imgInfo.cols + " bpc=" + imgInfo.bitDepth + " size=" + scanline.length;
  }

  /**
   * Fills the scanline from an unfiltered raw PNG row: raw[0] is the filter-type byte, samples
   * start at raw[1].
   *
   * @param raw row bytes (filter byte included)
   * @param len number of valid bytes in raw, filter byte included
   * @param offset pixel offset of the first sample (nonzero only for interlaced passes)
   * @param step pixel step between stored samples (1 = non-interlaced)
   */
  public void readFromPngRaw(byte[] raw, final int len, final int offset, final int step) {
    setFilterType(FilterType.getByVal(raw[0]));
    int len1 = len - 1;
    // samples to skip between pixels when step > 1 (interlaced passes)
    int step1 = (step - 1) * imgInfo.channels;
    if (imgInfo.bitDepth == 8) {
      if (step == 1) {// 8 bits per sample, non-interlaced: most important case, should be optimized
        for (int i = 0; i < size; i++) {
          scanline[i] = (raw[i + 1] & 0xff);
        }
      } else {// 8 bits per sample, interlaced
        for (int s = 1, c = 0, i = offset * imgInfo.channels; s <= len1; s++, i++) {
          scanline[i] = (raw[s] & 0xff);
          c++;
          if (c == imgInfo.channels) { // finished one pixel: jump to the next target pixel
            c = 0;
            i += step1;
          }
        }
      }
    } else if (imgInfo.bitDepth == 16) {
      if (step == 1) {// 16 bits per sample, non-interlaced
        for (int i = 0, s = 1; i < size; i++) {
          scanline[i] = ((raw[s++] & 0xFF) << 8) | (raw[s++] & 0xFF); // 16 bitspc
        }
      } else {
        // NOTE: s is advanced both by the loop header and by raw[s++] in the body (2 bytes/sample)
        for (int s = 1, c = 0, i = offset != 0 ? offset * imgInfo.channels : 0; s <= len1; s++, i++) {
          scanline[i] = ((raw[s++] & 0xFF) << 8) | (raw[s] & 0xFF); // 16 bitspc
          c++;
          if (c == imgInfo.channels) {
            c = 0;
            i += step1;
          }
        }
      }
    } else { // packed formats (bitdepth 1/2/4: several samples per byte)
      int mask0, mask, shi, bd;
      bd = imgInfo.bitDepth;
      mask0 = ImageLineHelper.getMaskForPackedFormats(bd);
      for (int i = offset * imgInfo.channels, r = 1, c = 0; r < len; r++) {
        mask = mask0;
        shi = 8 - bd;
        // extract samples from the current byte, most-significant first
        do {
          scanline[i++] = (raw[r] & mask) >> shi;
          mask >>= bd;
          shi -= bd;
          c++;
          if (c == imgInfo.channels) {
            c = 0;
            i += step1;
          }
        } while (mask != 0 && i < size);
      }
    }
  }

  /**
   * Serializes the scanline into raw PNG row format (inverse of {@link #readFromPngRaw}):
   * raw[0] gets the filter-type byte, samples follow from raw[1].
   */
  public void writeToPngRaw(byte[] raw) {
    raw[0] = (byte) filterType.val;
    if (imgInfo.bitDepth == 8) {
      for (int i = 0; i < size; i++) {
        raw[i + 1] = (byte) scanline[i];
      }
    } else if (imgInfo.bitDepth == 16) {
      for (int i = 0, s = 1; i < size; i++) {
        raw[s++] = (byte) (scanline[i] >> 8);
        raw[s++] = (byte) (scanline[i] & 0xff);
      }
    } else { // packed formats: accumulate samples into v, flush when the byte is full
      int shi, bd, v;
      bd = imgInfo.bitDepth;
      shi = 8 - bd;
      v = 0;
      for (int i = 0, r = 1; i < size; i++) {
        v |= (scanline[i] << shi);
        shi -= bd;
        if (shi < 0 || i == size - 1) { // byte complete (or last sample): emit it
          raw[r++] = (byte) v;
          shi = 8 - bd;
          v = 0;
        }
      }
    }
  }

  /**
   * Does nothing in this implementation
   */
  public void endReadFromPngRaw() {
  }

  /**
   * @see #size
   */
  public int getSize() {
    return size;
  }

  /** Returns sample i (no masking needed: samples are stored as ints). */
  public int getElem(int i) {
    return scanline[i];
  }

  /**
   * @return see {@link #scanline}
   */
  public int[] getScanline() {
    return scanline;
  }

  public ImageInfo getImageInfo() {
    return imgInfo;
  }
}

View File

@ -0,0 +1,151 @@
package ar.com.hjg.pngj;
import java.util.ArrayList;
import java.util.List;
/**
 * Default implementation of {@link IImageLineSet}.
 * <P>
 * Supports all modes: single cursor, full rows, or partial. This should not be used for (sic —
 * sentence truncated in the original source).
 */
public abstract class ImageLineSetDefault<T extends IImageLine> implements IImageLineSet<T> {

  protected final ImageInfo imgInfo;
  // true = only one line is stored and reused for every row (cursor mode)
  private final boolean singleCursor;
  // nlines: stored rows; offset/step: mapping from image rows to stored rows
  private final int nlines, offset, step;
  protected List<T> imageLines; // null if single cursor
  protected T imageLine; // null unless single cursor
  protected int currentRow = -1; // only relevant (and not much) for cursor

  /**
   * @param imgInfo image parameters
   * @param singleCursor if true, a single reusable line is kept (nlinesx/noffsetx/stepx ignored)
   * @param nlinesx number of lines to store (non-cursor mode)
   * @param noffsetx image row of the first stored line (non-cursor mode)
   * @param stepx image-row step between stored lines (non-cursor mode)
   */
  public ImageLineSetDefault(ImageInfo imgInfo, final boolean singleCursor, final int nlinesx,
      final int noffsetx, final int stepx) {
    this.imgInfo = imgInfo;
    this.singleCursor = singleCursor;
    if (singleCursor) {
      this.nlines = 1; // we store only one line, no matter how many will be read
      offset = 0;
      this.step = 1;// don't matter
    } else {
      this.nlines = nlinesx; // note that it can also be 1
      offset = noffsetx;
      this.step = stepx;// don't matter
    }
    createImageLines();
  }

  // allocates the single cursor line, or the full list of nlines lines
  private void createImageLines() {
    if (singleCursor)
      imageLine = createImageLine();
    else {
      imageLines = new ArrayList<T>();
      for (int i = 0; i < nlines; i++)
        imageLines.add(createImageLine());
    }
  }

  /** Factory hook: creates one (empty) line of the concrete type. */
  protected abstract T createImageLine();

  /**
   * Retrieves the image line
   * <p>
   * Warning: the argument is the row number in the original image
   * <p>
   * If this is a cursor, no check is done, always the same row is returned
   */
  public T getImageLine(int n) {
    currentRow = n;
    if (singleCursor)
      return imageLine;
    else {
      int r = imageRowToMatrixRowStrict(n);
      if (r < 0)
        throw new PngjException("Invalid row number");
      return imageLines.get(r);
    }
  }

  /**
   * does not check for valid range
   */
  public T getImageLineRawNum(int r) {
    if (singleCursor)
      return imageLine;
    else
      return imageLines.get(r);
  }

  /**
   * True if the set contains this image line
   * <p>
   * Warning: the argument is the row number in the original image
   * <p>
   * If this works as cursor, this returns true only if that is the number of its "current" line
   */
  public boolean hasImageLine(int n) {
    return singleCursor ? currentRow == n : imageRowToMatrixRowStrict(n) >= 0;
  }

  /**
   * How many lines does this object contain?
   */
  public int size() {
    return nlines;
  }

  /**
   * Same as {@link #imageRowToMatrixRow(int)}, but returns negative if invalid
   */
  public int imageRowToMatrixRowStrict(int imrow) {
    imrow -= offset;
    // valid only if the (shifted) row lands exactly on a step multiple and within range
    int mrow = imrow >= 0 && (step == 1 || imrow % step == 0) ? imrow / step : -1;
    return mrow < nlines ? mrow : -1;
  }

  /**
   * Converts from matrix row number (0 : nRows-1) to image row number
   *
   * @param mrow Matrix row number
   * @return Image row number. Returns trash if mrow is invalid
   */
  public int matrixRowToImageRow(int mrow) {
    return mrow * step + offset;
  }

  /**
   * Converts from real image row to this object row number.
   * <p>
   * Warning: this always returns a valid matrix row (clamping on 0 : nrows-1, and rounding down)
   * <p>
   * Eg: rowOffset=4,rowStep=2 imageRowToMatrixRow(17) returns 6 , imageRowToMatrixRow(1) returns 0
   */
  public int imageRowToMatrixRow(int imrow) {
    int r = (imrow - offset) / step;
    return r < 0 ? 0 : (r < nlines ? r : nlines - 1);
  }

  /** utility function, given a factory for one line, returns a factory for a set */
  public static <T extends IImageLine> IImageLineSetFactory<T> createImageLineSetFactoryFromImageLineFactory(
      final IImageLineFactory<T> ifactory) { // ugly method must have ugly name. don't let this intimidate you
    return new IImageLineSetFactory<T>() {
      public IImageLineSet<T> create(final ImageInfo iminfo, boolean singleCursor, int nlines,
          int noffset, int step) {
        // anonymous subclass: delegates line creation to the supplied per-line factory
        return new ImageLineSetDefault<T>(iminfo, singleCursor, nlines, noffset, step) {
          @Override
          protected T createImageLine() {
            return ifactory.createImageLine(iminfo);
          }
        };
      };
    };
  }

  /** utility function, returns default factory for {@link ImageLineInt} */
  public static IImageLineSetFactory<ImageLineInt> getFactoryInt() {
    return createImageLineSetFactoryFromImageLineFactory(ImageLineInt.getFactory());
  }

  /** utility function, returns default factory for {@link ImageLineByte} */
  public static IImageLineSetFactory<ImageLineByte> getFactoryByte() {
    return createImageLineSetFactoryFromImageLineFactory(ImageLineByte.getFactory());
  }
}

View File

@ -0,0 +1,329 @@
package ar.com.hjg.pngj;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.logging.Logger;
/**
 * Some utility static methods for internal use.
 * <p>
 * Client code should not normally use this class
 * <p>
 */
public final class PngHelperInternal {

  public static final String KEY_LOGGER = "ar.com.pngj";
  public static final Logger LOGGER = Logger.getLogger(KEY_LOGGER);

  /**
   * Default charset, used internally by PNG for several things
   */
  public static String charsetLatin1name = "ISO-8859-1";
  public static Charset charsetLatin1 = Charset.forName(charsetLatin1name);
  /**
   * UTF-8 is only used for some chunks
   */
  public static String charsetUTF8name = "UTF-8";
  public static Charset charsetUTF8 = Charset.forName(charsetUTF8name);

  // per-thread debug flag, see setDebug()/isDebug()
  private static ThreadLocal<Boolean> DEBUG = new ThreadLocal<Boolean>() {
    protected Boolean initialValue() {
      return Boolean.FALSE;
    }
  };

  /**
   * PNG magic bytes
   */
  public static byte[] getPngIdSignature() {
    return new byte[] {-119, 80, 78, 71, 13, 10, 26, 10};
  }

  /** Scales a double by 100000 and rounds to int (used for fixed-point chunk fields). */
  public static int doubleToInt100000(double d) {
    return (int) (d * 100000.0 + 0.5);
  }

  /** Inverse of {@link #doubleToInt100000(double)}. */
  public static double intToDouble100000(int i) {
    return i / 100000.0;
  }

  /** Reads one byte; -1 on EOF. Wraps IOException as PngjInputException. */
  public static int readByte(InputStream is) {
    try {
      return is.read();
    } catch (IOException e) {
      throw new PngjInputException("error reading byte", e);
    }
  }

  /**
   * -1 if eof
   *
   * PNG uses "network byte order" (big-endian)
   */
  public static int readInt2(InputStream is) {
    try {
      int b1 = is.read();
      int b2 = is.read();
      if (b1 == -1 || b2 == -1)
        return -1;
      return (b1 << 8) | b2;
    } catch (IOException e) {
      throw new PngjInputException("error reading Int2", e);
    }
  }

  /**
   * Reads a big-endian 4-byte int; -1 if eof
   */
  public static int readInt4(InputStream is) {
    try {
      int b1 = is.read();
      int b2 = is.read();
      int b3 = is.read();
      int b4 = is.read();
      if (b1 == -1 || b2 == -1 || b3 == -1 || b4 == -1)
        return -1;
      // FIX: was "(b3 << 8) + b4" - '+' binds tighter than '|', which happened to be
      // value-equivalent only because the bit ranges are disjoint; normalized to '|'
      // for clarity and consistency with readInt4fromBytes().
      return (b1 << 24) | (b2 << 16) | (b3 << 8) | b4;
    } catch (IOException e) {
      throw new PngjInputException("error reading Int4", e);
    }
  }

  public static int readInt1fromByte(byte[] b, int offset) {
    return (b[offset] & 0xff);
  }

  public static int readInt2fromBytes(byte[] b, int offset) {
    return ((b[offset] & 0xff) << 8) | ((b[offset + 1] & 0xff));
  }

  public static final int readInt4fromBytes(byte[] b, int offset) {
    return ((b[offset] & 0xff) << 24) | ((b[offset + 1] & 0xff) << 16)
        | ((b[offset + 2] & 0xff) << 8) | (b[offset + 3] & 0xff);
  }

  /** Writes one byte. Wraps IOException as PngjOutputException. */
  public static void writeByte(OutputStream os, byte b) {
    try {
      os.write(b);
    } catch (IOException e) {
      throw new PngjOutputException(e);
    }
  }

  /** Writes a byte array (same behavior as {@link #writeBytes(OutputStream, byte[])}). */
  public static void writeByte(OutputStream os, byte[] bs) {
    try {
      os.write(bs);
    } catch (IOException e) {
      throw new PngjOutputException(e);
    }
  }

  /** Writes a 2-byte big-endian int. */
  public static void writeInt2(OutputStream os, int n) {
    byte[] temp = {(byte) ((n >> 8) & 0xff), (byte) (n & 0xff)};
    writeBytes(os, temp);
  }

  /** Writes a 4-byte big-endian int. */
  public static void writeInt4(OutputStream os, int n) {
    byte[] temp = new byte[4];
    writeInt4tobytes(n, temp, 0);
    writeBytes(os, temp);
  }

  public static void writeInt2tobytes(int n, byte[] b, int offset) {
    b[offset] = (byte) ((n >> 8) & 0xff);
    b[offset + 1] = (byte) (n & 0xff);
  }

  public static void writeInt4tobytes(int n, byte[] b, int offset) {
    b[offset] = (byte) ((n >> 24) & 0xff);
    b[offset + 1] = (byte) ((n >> 16) & 0xff);
    b[offset + 2] = (byte) ((n >> 8) & 0xff);
    b[offset + 3] = (byte) (n & 0xff);
  }

  /**
   * guaranteed to read exactly len bytes. throws error if it can't
   */
  public static void readBytes(InputStream is, byte[] b, int offset, int len) {
    if (len == 0)
      return;
    try {
      int read = 0;
      while (read < len) {
        int n = is.read(b, offset + read, len - read);
        if (n < 1)
          throw new PngjInputException("error reading bytes, " + n + " !=" + len);
        read += n;
      }
    } catch (IOException e) {
      throw new PngjInputException("error reading", e);
    }
  }

  /** Skips exactly len bytes (or up to EOF), compensating for short skip() returns. */
  public static void skipBytes(InputStream is, long len) {
    try {
      while (len > 0) {
        long n1 = is.skip(len);
        if (n1 > 0) {
          len -= n1;
        } else if (n1 == 0) { // should we retry? lets read one byte
          if (is.read() == -1) // EOF
            break;
          else
            len--;
        } else
          // negative? this should never happen but...
          throw new IOException("skip() returned a negative value ???");
      }
    } catch (IOException e) {
      throw new PngjInputException(e);
    }
  }

  public static void writeBytes(OutputStream os, byte[] b) {
    try {
      os.write(b);
    } catch (IOException e) {
      throw new PngjOutputException(e);
    }
  }

  public static void writeBytes(OutputStream os, byte[] b, int offset, int n) {
    try {
      os.write(b, offset, n);
    } catch (IOException e) {
      throw new PngjOutputException(e);
    }
  }

  /** Prints to stderr only when the per-thread debug flag is set. */
  public static void logdebug(String msg) {
    if (isDebug())
      System.err.println("logdebug: " + msg);
  }

  // / filters (see PNG spec, filter algorithms; all operate on unsigned byte values)
  public static int filterRowNone(int r) {
    return r & 0xFF;
  }

  public static int filterRowSub(int r, int left) {
    return (r - left) & 0xFF;
  }

  public static int filterRowUp(int r, int up) {
    return (r - up) & 0xFF;
  }

  public static int filterRowAverage(int r, int left, int up) {
    return (r - (left + up) / 2) & 0xFF;
  }

  public static int filterRowPaeth(int r, int left, int up, int upleft) { // a = left, b = above, c
    // = upper left
    return (r - filterPaethPredictor(left, up, upleft)) & 0xFF;
  }

  final static int filterPaethPredictor(final int a, final int b, final int c) { // a = left, b =
    // above, c = upper
    // left
    // from http://www.libpng.org/pub/png/spec/1.2/PNG-Filters.html
    final int p = a + b - c;// ; initial estimate
    final int pa = p >= a ? p - a : a - p;
    final int pb = p >= b ? p - b : b - p;
    final int pc = p >= c ? p - c : c - p;
    // ; return nearest of a,b,c,
    // ; breaking ties in order a,b,c.
    if (pa <= pb && pa <= pc)
      return a;
    else if (pb <= pc)
      return b;
    else
      return c;
  }

  /**
   * Prints a debug message (prints class name, method and line number)
   *
   * @param obj : Object to print
   */
  public static void debug(Object obj) {
    debug(obj, 1, true);
  }

  /**
   * Prints a debug message (prints class name, method and line number)
   *
   * @param obj : Object to print
   * @param offset : Offset N lines from stacktrace
   */
  static void debug(Object obj, int offset) {
    debug(obj, offset, true);
  }

  /** Opens a file for reading; wraps failures as PngjInputException. */
  public static InputStream istreamFromFile(File f) {
    FileInputStream is;
    try {
      is = new FileInputStream(f);
    } catch (Exception e) {
      throw new PngjInputException("Could not open " + f, e);
    }
    return is;
  }

  static OutputStream ostreamFromFile(File f) {
    return ostreamFromFile(f, true);
  }

  // delegated to PngHelperInternal2 to keep GAE-forbidden classes out of this class
  static OutputStream ostreamFromFile(File f, boolean overwrite) {
    return PngHelperInternal2.ostreamFromFile(f, overwrite);
  }

  /**
   * Prints a debug message (prints class name, method and line number) to stderr and logFile
   *
   * @param obj : Object to print
   * @param offset : Offset N lines from stacktrace
   * @param newLine : Print a newline char at the end ('\n')
   */
  static void debug(Object obj, int offset, boolean newLine) {
    // NOTE(review): the newLine parameter is not used here (println always appends one) - confirm
    // against the upstream implementation before relying on it.
    StackTraceElement ste = new Exception().getStackTrace()[1 + offset];
    String steStr = ste.getClassName();
    int ind = steStr.lastIndexOf('.');
    steStr = steStr.substring(ind + 1);
    steStr +=
        "." + ste.getMethodName() + "(" + ste.getLineNumber() + "): "
            + (obj == null ? null : obj.toString());
    System.err.println(steStr);
  }

  /**
   * Sets a global debug flag. This is bound to a thread.
   */
  public static void setDebug(boolean b) {
    DEBUG.set(b);
  }

  public static boolean isDebug() {
    return DEBUG.get().booleanValue();
  }

  public static long getDigest(PngReader pngr) {
    return pngr.getSimpleDigest();
  }

  public static void initCrcForTests(PngReader pngr) {
    pngr.prepareSimpleDigestComputation();
  }

  /** In case of image with frames, returns the raw IDAT byte count of the current one. */
  public static long getRawIdatBytes(PngReader r) {
    return r.interlaced ? r.getChunkseq().getDeinterlacer().getTotalRawBytes() : r.getCurImgInfo()
        .getTotalRawBytes();
  }
}

View File

@ -0,0 +1,33 @@
package ar.com.hjg.pngj;
import java.io.File;
import java.io.OutputStream;
/**
 * For organization purposes, this class is the only one that uses classes not in the GAE (Google App Engine) white
 * list
 * <p>
 * You should not use this class in GAE
 */
final class PngHelperInternal2 {

  /**
   * WARNING: this uses FileOutputStream which is not allowed in GoogleAppEngine
   *
   * In GAE, dont use this
   *
   * @param f target file
   * @param allowoverwrite if false and the file exists, a PngjOutputException is thrown
   * @return an open output stream for f
   */
  static OutputStream ostreamFromFile(File f, boolean allowoverwrite) {
    java.io.FileOutputStream os = null; // this will fail in GAE!
    if (f.exists() && !allowoverwrite)
      throw new PngjOutputException("File already exists: " + f);
    try {
      os = new java.io.FileOutputStream(f);
    } catch (Exception e) {
      // NOTE(review): this is a write path, yet it throws PngjInputException; kept unchanged so
      // existing catch clauses still work, but PngjOutputException looks like the intended type.
      // FIX: error message previously read "Could not open for write<file>" with no separator.
      throw new PngjInputException("Could not open for write: " + f, e);
    }
    return os;
  }
}

View File

@ -0,0 +1,586 @@
package ar.com.hjg.pngj;
import java.io.File;
import java.io.InputStream;
import java.util.zip.Adler32;
import java.util.zip.CRC32;
import ar.com.hjg.pngj.chunks.ChunkLoadBehaviour;
import ar.com.hjg.pngj.chunks.ChunksList;
import ar.com.hjg.pngj.chunks.PngChunkFCTL;
import ar.com.hjg.pngj.chunks.PngChunkFDAT;
import ar.com.hjg.pngj.chunks.PngChunkIDAT;
import ar.com.hjg.pngj.chunks.PngMetadata;
/**
* Reads a PNG image (pixels and/or metadata) from a file or stream.
* <p>
* Each row is read as an {@link ImageLineInt} object (one int per sample), but this can be changed by setting a
* different ImageLineFactory
* <p>
* Internally, this wraps a {@link ChunkSeqReaderPng} with a {@link BufferedStreamFeeder}
* <p>
* The reading sequence is as follows: <br>
* 1. At construction time, the header and IHDR chunk are read (basic image info) <br>
* 2. Afterwards you can set some additional global options. Eg. {@link #setCrcCheckDisabled()}.<br>
 * 3. Optional: If you call getMetadata() or getChunksList() before starting to read the rows, all the chunks before IDAT
* are then loaded and available <br>
* 4a. The rows are read in order by calling {@link #readRow()}. You can also call {@link #readRow(int)} to skip rows
* -but you can't go backwards, at least not with this implementation. This method returns a {@link IImageLine} object
* which can be casted to the concrete class. This class returns by default a {@link ImageLineInt}, but this can be
* changed.<br>
* 4b. Alternatively, you can read all rows, or a subset, in a single call: {@link #readRows()},
* {@link #readRows(int, int, int)} ,etc. In general this consumes more memory, but for interlaced images this is
* equally efficient, and more so if reading a small subset of rows.<br>
* 5. Reading of the last row automatically loads the trailing chunks, and ends the reader.<br>
* 6. end() also loads the trailing chunks, if not done, and finishes cleanly the reading and closes the stream.
* <p>
 * See also {@link PngReaderInt} (essentially the same as this, and slightly preferred) and {@link PngReaderByte} (uses
* byte instead of int to store the samples).
*/
public class PngReader {

    // some performance/defensive limits

    /**
     * Defensive limit: refuse to read more than 900MB, can be changed with {@link #setMaxTotalBytesRead(long)}
     */
    public static final long MAX_TOTAL_BYTES_READ_DEFAULT = 901001001L; // ~ 900MB

    /**
     * Defensive limit: refuse to load more than 5MB of ancillary metadata, see {@link #setMaxBytesMetadata(long)} and
     * also {@link #addChunkToSkip(String)}
     */
    public static final long MAX_BYTES_METADATA_DEFAULT = 5024024; // for ancillary chunks

    /**
     * Skip ancillary chunks greater than 2MB, see {@link #setSkipChunkMaxSize(long)}
     */
    public static final long MAX_CHUNK_SIZE_SKIP = 2024024; // chunks exceeding this size will be skipped (not even CRC
    // checked)

    /**
     * Basic image info - final and immutable.
     */
    public final ImageInfo imgInfo; // People always told me: be careful what you do, and don't go around declaring public
    // fields...

    /**
     * flag: image was in interlaced format
     */
    public final boolean interlaced;

    /**
     * This object has most of the intelligence to parse the chunks and decompress the IDAT stream
     */
    protected final ChunkSeqReaderPng chunkseq;

    /**
     * Takes bytes from the InputStream and passes it to the ChunkSeqReaderPng. Never null.
     */
    protected final BufferedStreamFeeder streamFeeder;

    /**
     * @see #getMetadata()
     */
    protected final PngMetadata metadata; // this is a wrapper over chunks

    /**
     * Current row number (reading or read), numbered from 0
     */
    protected int rowNum = -1;

    /**
     * Represents the set of lines (rows) being read. Normally this works as a cursor, storing only one (the current)
     * row. This stores several (perhaps all) rows only if calling {@link #readRows()} or for interlaced images (the
     * latter is transparent to the user)
     */
    protected IImageLineSet<? extends IImageLine> imlinesSet;

    /**
     * This factory decides the concrete type of the ImageLine that will be used. See {@link ImageLineSetDefault} for
     * examples
     */
    private IImageLineSetFactory<? extends IImageLine> imageLineSetFactory;

    CRC32 idatCrca;// for internal testing
    Adler32 idatCrcb;// for internal testing

    /**
     * Constructs a PngReader object from a stream, with default options. This reads the signature and the first IHDR
     * chunk only.
     * <p>
     * Warning: In case of exception the stream is NOT closed.
     * <p>
     * Warning: By default the stream will be closed when this object is {@link #close()}d. See
     * {@link #PngReader(InputStream,boolean)} or {@link #setShouldCloseStream(boolean)}
     * <p>
     *
     * @param inputStream PNG stream
     */
    public PngReader(InputStream inputStream) {
        this(inputStream, true);
    }

    /**
     * Same as {@link #PngReader(InputStream)} but allows to specify early if the stream must be closed
     *
     * @param inputStream
     * @param shouldCloseStream The stream will be closed in case of exception (constructor included) or normal
     *        termination.
     */
    public PngReader(InputStream inputStream, boolean shouldCloseStream) {
        streamFeeder = new BufferedStreamFeeder(inputStream);
        streamFeeder.setCloseStream(shouldCloseStream);
        chunkseq = createChunkSeqReader();
        try {
            streamFeeder.setFailIfNoFeed(true);
            // 36 bytes: PNG signature (8) plus the complete IHDR chunk, and the start of the next
            // chunk header -- enough for chunkseq to parse the basic image info
            if (!streamFeeder.feedFixed(chunkseq, 36))
                throw new PngjInputException("error reading first 36 bytes");
            imgInfo = chunkseq.getImageInfo();
            interlaced = chunkseq.getDeinterlacer() != null;
            setMaxBytesMetadata(MAX_BYTES_METADATA_DEFAULT);
            setMaxTotalBytesRead(MAX_TOTAL_BYTES_READ_DEFAULT);
            setSkipChunkMaxSize(MAX_CHUNK_SIZE_SKIP);
            chunkseq.addChunkToSkip(PngChunkFDAT.ID);// default: skip fdAT chunks!
            chunkseq.addChunkToSkip(PngChunkFCTL.ID);// default: skip fcTL chunks!
            this.metadata = new PngMetadata(chunkseq.chunksList);
            // sets a default factory (with ImageLineInt),
            // this can be overwritten by an extended constructor, or by a setter
            setLineSetFactory(ImageLineSetDefault.getFactoryInt());
            rowNum = -1;
        } catch (RuntimeException e) {
            // free resources before propagating; the caller never gets a usable object
            streamFeeder.close();
            chunkseq.close();
            throw e;
        }
    }

    /**
     * Constructs a PngReader opening a file. Sets <tt>shouldCloseStream=true</tt>, so that the stream will be closed
     * with this object.
     *
     * @param file PNG image file
     */
    public PngReader(File file) {
        this(PngHelperInternal.istreamFromFile(file), true);
    }

    /**
     * Reads chunks before first IDAT. Normally this is called automatically
     * <p>
     * Position before: after IHDR (crc included) Position after: just after the first IDAT chunk id
     * <P>
     * This can be called several times (tentatively), it does nothing if already run
     * <p>
     * (Note: when should this be called? in the constructor? hardly, because we lose the opportunity to call
     * setChunkLoadBehaviour() and perhaps other settings before reading the first row? but sometimes we want to access
     * some metadata (plte, phys) before. Because of this, this method can be called explicitly but is also called
     * implicitly in some methods (getMetadata(), getChunksList())
     */
    protected void readFirstChunks() {
        while (chunkseq.currentChunkGroup < ChunksList.CHUNK_GROUP_4_IDAT)
            if (streamFeeder.feed(chunkseq) <= 0)
                throw new PngjInputException("premature ending reading first chunks");
    }

    /**
     * Determines which ancillary chunks (metadata) are to be loaded and which skipped.
     * <p>
     * Additional restrictions may apply. See also {@link #setChunksToSkip(String...)}, {@link #addChunkToSkip(String)},
     * {@link #setMaxBytesMetadata(long)}, {@link #setSkipChunkMaxSize(long)}
     *
     * @param chunkLoadBehaviour {@link ChunkLoadBehaviour}
     */
    public void setChunkLoadBehaviour(ChunkLoadBehaviour chunkLoadBehaviour) {
        this.chunkseq.setChunkLoadBehaviour(chunkLoadBehaviour);
    }

    /**
     * All loaded chunks (metadata). If we have not yet ended reading the image, this will include only the chunks
     * before the pixels data (IDAT)
     * <p>
     * Critical chunks are included, except that all IDAT chunks appearances are replaced by a single dummy-marker IDAT
     * chunk. These might be copied to the PngWriter
     * <p>
     *
     * @see #getMetadata()
     */
    public ChunksList getChunksList() {
        return getChunksList(true);
    }

    public ChunksList getChunksList(boolean forceLoadingOfFirstChunks) {
        if (forceLoadingOfFirstChunks && chunkseq.firstChunksNotYetRead())
            readFirstChunks();
        return chunkseq.chunksList;
    }

    int getCurrentChunkGroup() {
        return chunkseq.currentChunkGroup;
    }

    /**
     * High level wrapper over chunksList
     *
     * @see #getChunksList()
     */
    public PngMetadata getMetadata() {
        if (chunkseq.firstChunksNotYetRead())
            readFirstChunks();
        return metadata;
    }

    /**
     * Reads next row.
     *
     * The caller must know that there are more rows to read.
     *
     * @return Never null. Throws PngjInputException if no more
     */
    public IImageLine readRow() {
        return readRow(rowNum + 1);
    }

    /**
     * True if last row has not yet been read
     */
    public boolean hasMoreRows() {
        return rowNum < getCurImgInfo().rows - 1;
    }

    /**
     * The row number is mostly meant as a check, the rows must be called in ascending order (not necessarily
     * consecutive)
     */
    public IImageLine readRow(int nrow) {
        if (chunkseq.firstChunksNotYetRead())
            readFirstChunks();
        if (!interlaced) {
            if (imlinesSet == null)
                imlinesSet = createLineSet(true, -1, 0, 1); // cursor mode: single current row
            IImageLine line = imlinesSet.getImageLine(nrow);
            if (nrow == rowNum)
                return line; // already read??
            else if (nrow < rowNum)
                throw new PngjInputException("rows must be read in increasing order: " + nrow);
            while (rowNum < nrow) {
                // feed until the IDAT decompressor has a full row available
                while (!chunkseq.getIdatSet().isRowReady())
                    if (streamFeeder.feed(chunkseq) < 1)
                        throw new PngjInputException("premature ending");
                rowNum++;
                chunkseq.getIdatSet().updateCrcs(idatCrca, idatCrcb);
                if (rowNum == nrow) {
                    // +1: the raw row includes the leading filter-type byte
                    line.readFromPngRaw(chunkseq.getIdatSet().getUnfilteredRow(),
                            getCurImgInfo().bytesPerRow + 1, 0, 1);
                    line.endReadFromPngRaw();
                }
                chunkseq.getIdatSet().advanceToNextRow();
            }
            return line;
        } else { // and now, for something completely different (interlaced!)
            if (imlinesSet == null) {
                // interlaced: all rows are loaded eagerly on the first read
                imlinesSet = createLineSet(false, getCurImgInfo().rows, 0, 1);
                loadAllInterlaced(getCurImgInfo().rows, 0, 1);
            }
            rowNum = nrow;
            return imlinesSet.getImageLine(nrow);
        }
    }

    /**
     * Reads all rows in a ImageLineSet This is handy, but less memory-efficient (except for interlaced)
     */
    public IImageLineSet<? extends IImageLine> readRows() {
        return readRows(getCurImgInfo().rows, 0, 1);
    }

    /**
     * Reads a subset of rows.
     * <p>
     * This method should called once, and not be mixed with {@link #readRow()}
     *
     * @param nRows how many rows to read (default: imageInfo.rows; negative: autocompute)
     * @param rowOffset rows to skip (default:0)
     * @param rowStep step between rows to load( default:1)
     */
    public IImageLineSet<? extends IImageLine> readRows(int nRows, int rowOffset, int rowStep) {
        if (chunkseq.firstChunksNotYetRead())
            readFirstChunks();
        if (nRows < 0)
            nRows = (getCurImgInfo().rows - rowOffset) / rowStep;
        if (rowStep < 1 || rowOffset < 0 || nRows == 0
                || nRows * rowStep + rowOffset > getCurImgInfo().rows)
            throw new PngjInputException("bad args");
        if (rowNum >= rowOffset)
            throw new PngjInputException("readRows cannot be mixed with readRow");
        imlinesSet = createLineSet(false, nRows, rowOffset, rowStep);
        if (!interlaced) {
            int m = -1; // last row already read in
            while (m < nRows - 1) {
                while (!chunkseq.getIdatSet().isRowReady())
                    if (streamFeeder.feed(chunkseq) < 1)
                        throw new PngjInputException("Premature ending");
                rowNum++;
                chunkseq.getIdatSet().updateCrcs(idatCrca, idatCrcb);
                m = (rowNum - rowOffset) / rowStep;
                // only store rows that fall exactly on the offset+step grid
                if (rowNum >= rowOffset && rowStep * m + rowOffset == rowNum) {
                    IImageLine line = imlinesSet.getImageLine(rowNum);
                    line.readFromPngRaw(chunkseq.getIdatSet().getUnfilteredRow(),
                            getCurImgInfo().bytesPerRow + 1, 0, 1);
                    line.endReadFromPngRaw();
                }
                chunkseq.getIdatSet().advanceToNextRow();
            }
        } else { // and now, for something completely different (interlaced)
            loadAllInterlaced(nRows, rowOffset, rowStep);
        }
        chunkseq.getIdatSet().done();
        return imlinesSet;
    }

    /**
     * Sets the factory that creates the ImageLine. By default, this implementation uses ImageLineInt but this can be
     * changed (at construction time or later) by calling this method.
     * <p>
     * See also {@link #createLineSet(boolean, int, int, int)}
     *
     * @param factory
     */
    public void setLineSetFactory(IImageLineSetFactory<? extends IImageLine> factory) {
        imageLineSetFactory = factory;
    }

    /**
     * By default this uses the factory (which, by default creates ImageLineInt). You should rarely override this.
     * <p>
     * See doc in {@link IImageLineSetFactory#create(ImageInfo, boolean, int, int, int)}
     */
    protected IImageLineSet<? extends IImageLine> createLineSet(boolean singleCursor, int nlines,
            int noffset, int step) {
        return imageLineSetFactory.create(getCurImgInfo(), singleCursor, nlines, noffset, step);
    }

    /**
     * Decodes the whole interlaced image, storing into {@link #imlinesSet} the rows selected by
     * offset/step. Rows are produced by the deinterlacer in pass order, not top-to-bottom.
     */
    protected void loadAllInterlaced(int nRows, int rowOffset, int rowStep) {
        IdatSet idat = chunkseq.getIdatSet();
        int nread = 0;
        do {
            while (!chunkseq.getIdatSet().isRowReady())
                if (streamFeeder.feed(chunkseq) <= 0)
                    break;
            if (!chunkseq.getIdatSet().isRowReady())
                throw new PngjInputException("Premature ending?");
            chunkseq.getIdatSet().updateCrcs(idatCrca, idatCrcb);
            int rowNumreal = idat.rowinfo.rowNreal; // real image row of the current sub-row
            boolean inset = imlinesSet.hasImageLine(rowNumreal);
            if (inset) {
                imlinesSet.getImageLine(rowNumreal).readFromPngRaw(idat.getUnfilteredRow(),
                        idat.rowinfo.buflen, idat.rowinfo.oX, idat.rowinfo.dX);
                nread++;
            }
            idat.advanceToNextRow();
        } while (nread < nRows || !idat.isDone());
        idat.done();
        // all passes consumed: finalize each stored row
        for (int i = 0, j = rowOffset; i < nRows; i++, j += rowStep) {
            imlinesSet.getImageLine(j).endReadFromPngRaw();
        }
    }

    /**
     * Reads all the (remaining) file, skipping the pixels data. This is much more efficient that calling
     * {@link #readRow()}, specially for big files (about 10 times faster!), because it doesn't even decompress the
     * IDAT stream and disables CRC check. Use this if you are not interested in reading pixels, only metadata.
     */
    public void readSkippingAllRows() {
        chunkseq.addChunkToSkip(PngChunkIDAT.ID);
        chunkseq.addChunkToSkip(PngChunkFDAT.ID);
        if (chunkseq.firstChunksNotYetRead())
            readFirstChunks();
        end();
    }

    /**
     * Set total maximum bytes to read (0: unlimited; default: 200MB). <br>
     * These are the bytes read (not loaded) in the input stream. If exceeded, an exception will be thrown.
     */
    public void setMaxTotalBytesRead(long maxTotalBytesToRead) {
        chunkseq.setMaxTotalBytesRead(maxTotalBytesToRead);
    }

    /**
     * Set total maximum bytes to load from ancillary chunks (0: unlimited; default: 5Mb).<br>
     * If exceeded, some chunks will be skipped
     */
    public void setMaxBytesMetadata(long maxBytesMetadata) {
        chunkseq.setMaxBytesMetadata(maxBytesMetadata);
    }

    /**
     * Set maximum size in bytes for individual ancillary chunks (0: unlimited; default: 2MB). <br>
     * Chunks exceeding this length will be skipped (the CRC will not be checked) and the chunk will be saved as a
     * PngChunkSkipped object. See also setSkipChunkIds
     */
    public void setSkipChunkMaxSize(long skipChunkMaxSize) {
        chunkseq.setSkipChunkMaxSize(skipChunkMaxSize);
    }

    /**
     * Chunks ids to be skipped. <br>
     * These chunks will be skipped (the CRC will not be checked) and the chunk will be saved as a PngChunkSkipped
     * object. See also setSkipChunkMaxSize
     */
    public void setChunksToSkip(String... chunksToSkip) {
        chunkseq.setChunksToSkip(chunksToSkip);
    }

    public void addChunkToSkip(String chunkToSkip) {
        chunkseq.addChunkToSkip(chunkToSkip);
    }

    public void dontSkipChunk(String chunkToSkip) {
        chunkseq.dontSkipChunk(chunkToSkip);
    }

    /**
     * if true, input stream will be closed after ending read
     * <p>
     * default=true
     */
    public void setShouldCloseStream(boolean shouldCloseStream) {
        streamFeeder.setCloseStream(shouldCloseStream);
    }

    /**
     * Reads till end of PNG stream and call <tt>close()</tt>
     *
     * This should normally be called after reading the pixel data, to read the trailing chunks and close the stream.
     * But it can be called at anytime. This will also read the first chunks if not still read, and skip pixels (IDAT)
     * if still pending.
     *
     * If you want to read all metadata skipping pixels, readSkippingAllRows() is a little more efficient.
     *
     * If you want to abort immediately, call instead <tt>close()</tt>
     */
    public void end() {
        try {
            if (chunkseq.firstChunksNotYetRead())
                readFirstChunks();
            if (chunkseq.getIdatSet() != null && !chunkseq.getIdatSet().isDone())
                chunkseq.getIdatSet().done();
            while (!chunkseq.isDone())
                if (streamFeeder.feed(chunkseq) <= 0)
                    break;
        } finally {
            close(); // always release resources, even on exception
        }
    }

    /**
     * Releases resources, and closes stream if corresponds. Idempotent, secure, no exceptions.
     *
     * This can be also called for abort. It is recommended to call this in case of exceptions
     */
    public void close() {
        try {
            if (chunkseq != null)
                chunkseq.close();
        } catch (Exception e) {
            // deliberately swallowed: close() must never throw
            PngHelperInternal.LOGGER.warning("error closing chunk sequence:" + e.getMessage());
        }
        if (streamFeeder != null)
            streamFeeder.close();
    }

    /**
     * Interlaced PNG is accepted -though not welcomed- now...
     */
    public boolean isInterlaced() {
        return interlaced;
    }

    /**
     * Disables the CRC integrity check in IDAT chunks and ancillary chunks, this gives a slight increase in reading
     * speed for big files
     */
    public void setCrcCheckDisabled() {
        chunkseq.setCheckCrc(false);
    }

    /**
     * Gets wrapped {@link ChunkSeqReaderPng} object
     */
    public ChunkSeqReaderPng getChunkseq() {
        return chunkseq;
    }

    /** called on construction time. Override if you want an alternative class */
    protected ChunkSeqReaderPng createChunkSeqReader() {
        return new ChunkSeqReaderPng(false);
    }

    /**
     * Enables and prepare the simple digest computation. Must be called before reading the pixels. See
     * {@link #getSimpleDigestHex()}
     */
    public void prepareSimpleDigestComputation() {
        if (idatCrca == null)
            idatCrca = new CRC32();
        else
            idatCrca.reset();
        if (idatCrcb == null)
            idatCrcb = new Adler32();
        else
            idatCrcb.reset();
        imgInfo.updateCrc(idatCrca);
        idatCrcb.update((byte) imgInfo.rows); // not important
    }

    long getSimpleDigest() {
        if (idatCrca == null)
            return 0;
        else
            // combines CRC32 and Adler32 into a pseudo 64-bit value
            // (note: shift is 31, not 32 -- kept as-is for digest stability across versions)
            return (idatCrca.getValue() ^ (idatCrcb.getValue() << 31));
    }

    /**
     * Pseudo 64-bits digest computed over the basic image properties and the raw pixels data: it should coincide for
     * equivalent images encoded with different filters and compressors; but will not coincide for
     * interlaced/non-interlaced; also, this does not take into account the palette info. This will be valid only if
     * {@link #prepareSimpleDigestComputation()} has been called, and all rows have been read. Not fool-proof, not
     * cryptographically secure, only for informal testing and duplicates detection.
     *
     * @return A 64-bit digest in hexadecimal
     */
    public String getSimpleDigestHex() {
        return String.format("%016X", getSimpleDigest());
    }

    /**
     * Basic info, for debugging.
     */
    public String toString() { // basic info
        return imgInfo.toString() + " interlaced=" + interlaced;
    }

    /**
     * Basic info, in a compact format, apt for scripting COLSxROWS[dBITDEPTH][a][p][g][i] ( the default dBITDEPTH='d8'
     * is omitted)
     *
     */
    public String toStringCompact() {
        return imgInfo.toStringBrief() + (interlaced ? "i" : "");
    }

    public ImageInfo getImgInfo() {
        return imgInfo;
    }

    /**
     * Current image info: same as {@link #imgInfo} except for APNG-like streams where frames can change dimensions.
     */
    public ImageInfo getCurImgInfo() {
        return chunkseq.getCurImgInfo();
    }
}

View File

@ -0,0 +1,213 @@
package ar.com.hjg.pngj;
import java.io.File;
import java.io.InputStream;
import java.util.List;
import ar.com.hjg.pngj.chunks.PngChunk;
import ar.com.hjg.pngj.chunks.PngChunkACTL;
import ar.com.hjg.pngj.chunks.PngChunkFCTL;
import ar.com.hjg.pngj.chunks.PngChunkFDAT;
import ar.com.hjg.pngj.chunks.PngChunkIDAT;
/**
 * Reader for APNG (animated PNG) streams: exposes animation info (acTL) and forward-only frame
 * navigation via {@code advanceToFrame(int)}.
 */
public class PngReaderApng extends PngReaderByte {

    public PngReaderApng(File file) {
        super(file);
        dontSkipChunk(PngChunkFCTL.ID); // fcTL chunks drive frame navigation; never skip them
    }

    public PngReaderApng(InputStream inputStream) {
        super(inputStream);
        dontSkipChunk(PngChunkFCTL.ID); // fcTL chunks drive frame navigation; never skip them
    }

    private Boolean apngKind = null; // lazily computed by isApng(); null = not yet determined

    private boolean firstIdatApngFrame = false; // true if the default image (IDAT) is also the first animation frame

    protected PngChunkACTL actlChunk; // null if not APNG

    private PngChunkFCTL fctlChunk; // current (null for the pseudo still frame)

    /**
     * Current frame number (reading or read). First animated frame is 0. Frame -1 represents the IDAT (default image)
     * when it's not part of the animation
     */
    protected int frameNum = -1; // incremented after each fcTL finding

    /**
     * True if this stream is an APNG animation (an acTL chunk is present). The first call triggers
     * the loading of the pre-IDAT chunks.
     */
    public boolean isApng() {
        if (apngKind == null) {
            // this triggers the loading of first chunks;
            actlChunk = (PngChunkACTL) getChunksList().getById1(PngChunkACTL.ID); // null if not apng
            apngKind = actlChunk != null;
            // an fcTL seen before IDAT means the default image is frame 0 of the animation
            firstIdatApngFrame = fctlChunk != null;
        }
        return apngKind.booleanValue();
    }

    /**
     * Advances (forward only) to the given animation frame and prepares it for row reading.
     *
     * @param frame target frame index, in [current frame .. getApngNumFrames()-1]
     */
    public void advanceToFrame(int frame) {
        if (frame < frameNum)
            throw new PngjInputException("Cannot go backwards");
        if (frame >= getApngNumFrames())
            throw new PngjInputException("Frame out of range " + frame);
        if (frame > frameNum) {
            // skip the pixel data of the intermediate frames
            addChunkToSkip(PngChunkIDAT.ID);
            addChunkToSkip(PngChunkFDAT.ID);
            if (chunkseq.getIdatSet() != null && !chunkseq.getIdatSet().isDone())
                chunkseq.getIdatSet().done(); // seems to be necessary sometimes (we should check this)
            while (frameNum < frame && !chunkseq.isDone())
                if (streamFeeder.feed(chunkseq) <= 0)
                    break;
        }
        if (frame == frameNum) { // prepare to read rows; at this point we are at the target frame
            dontSkipChunk(PngChunkIDAT.ID);
            dontSkipChunk(PngChunkFDAT.ID);
            rowNum = -1;
            imlinesSet = null;// force recreation (this is slightly dirty)
            // seek the next IDAT/fDAT - TODO: set the expected sequence number
            while (!chunkseq.isDone() && !chunkseq.getCurChunkReader().isFromDeflatedSet())
                if (streamFeeder.feed(chunkseq) <= 0)
                    break;
        } else {
            throw new PngjInputException("unexpected error seeking from frame " + frame);
        }
    }

    /**
     * True if it has a default image (IDAT) that is not part of the animation. In that case, we consider it as a
     * pseudo-frame (number -1)
     */
    public boolean hasExtraStillImage() {
        return isApng() && !firstIdatApngFrame;
    }

    /**
     * Only counts true animation frames.
     */
    public int getApngNumFrames() {
        if (isApng())
            return actlChunk.getNumFrames();
        else
            return 0;
    }

    /**
     * 0 if it's to be played infinitely. -1 if not APNG
     */
    public int getApngNumPlays() {
        if (isApng())
            return actlChunk.getNumPlays();
        else
            return -1;
    }

    /**
     * Customized chunk sequence reader: fdAT chunks are treated as pixel data, and fcTL chunks
     * update the current-frame state (frame number, fcTL reference, per-frame image info) as found.
     */
    @Override
    protected ChunkSeqReaderPng createChunkSeqReader() {
        ChunkSeqReaderPng cr = new ChunkSeqReaderPng(false) {
            @Override
            protected boolean isIdatKind(String id) {
                // both IDAT and fdAT carry (deflated) pixel data in an APNG
                return id.equals(PngChunkIDAT.ID) || id.equals(PngChunkFDAT.ID);
            }

            @Override
            protected DeflatedChunksSet createIdatSet(String id) {
                IdatSet ids = new IdatSet(id, getCurImgInfo(), deinterlacer);
                ids.setCallbackMode(callbackMode);
                return ids;
            }

            @Override
            protected void postProcessChunk(ChunkReader chunkR) {
                super.postProcessChunk(chunkR);
                if (chunkR.getChunkRaw().id.equals(PngChunkFCTL.ID)) {
                    frameNum++;
                    List<PngChunk> chunkslist = chunkseq.getChunks();
                    fctlChunk = (PngChunkFCTL) chunkslist.get(chunkslist.size() - 1);
                    // as this is slightly dirty, we check
                    if (chunkR.getChunkRaw().getOffset() != fctlChunk.getRaw().getOffset())
                        throw new PngjInputException("something went wrong");
                    // each frame can have its own dimensions/offset
                    ImageInfo frameInfo = fctlChunk.getEquivImageInfo();
                    getChunkseq().updateCurImgInfo(frameInfo);
                }
            }

            @Override
            protected boolean countChunkTypeAsAncillary(String id) {
                // we don't count fdAT as ancillary data
                // (fixed: the original compared the String id against a Boolean, which made the
                // negated term always true and the fdAT exclusion a no-op)
                return super.countChunkTypeAsAncillary(id) && !id.equals(PngChunkFDAT.ID);
            }
        };
        return cr;
    }

    /**
     * @see #frameNum
     */
    public int getFrameNum() {
        return frameNum;
    }

    /** Returns the fcTL chunk of the current frame (null for the pseudo still frame). */
    public PngChunkFCTL getFctl() {
        return fctlChunk;
    }
}

View File

@ -0,0 +1,30 @@
package ar.com.hjg.pngj;
import java.io.File;
import java.io.InputStream;
/**
* Trivial extension of {@link PngReader} that uses {@link ImageLineByte}
* <p>
* The factory is set at construction time. Remember that this could still be changed at runtime.
*/
public class PngReaderByte extends PngReader {

    public PngReaderByte(File file) {
        super(file);
        // install the byte-per-sample line factory right after base construction
        setLineSetFactory(ImageLineSetDefault.getFactoryByte());
    }

    public PngReaderByte(InputStream inputStream) {
        super(inputStream);
        // install the byte-per-sample line factory right after base construction
        setLineSetFactory(ImageLineSetDefault.getFactoryByte());
    }

    /**
     * Convenience wrapper around {@link #readRow()} that casts the result to {@link ImageLineByte}.
     */
    public ImageLineByte readRowByte() {
        IImageLine row = readRow();
        return (ImageLineByte) row;
    }
}

View File

@ -0,0 +1,99 @@
package ar.com.hjg.pngj;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import ar.com.hjg.pngj.chunks.PngChunk;
/**
* This class allows to use a simple PNG reader as an input filter, wrapping a ChunkSeqReaderPng in callback mode.
*
* In this sample implementation, all IDAT chunks are skipped and the rest are stored. An example of use, that lets us
* grab the Metadata and let the pixels go towards a BufferedImage:
*
*
* <pre class="code">
* PngReaderFilter reader = new PngReaderFilter(new FileInputStream(&quot;image.png&quot;));
* BufferedImage image1 = ImageIO.read(reader);
 * reader.readUntilEndAndClose(); // in case ImageIO.read() does not read the trailing chunks (it happens)
* System.out.println(reader.getChunksList());
* </pre>
*
*/
public class PngReaderFilter extends FilterInputStream {

    private ChunkSeqReaderPng chunkseq;

    public PngReaderFilter(InputStream arg0) {
        super(arg0);
        chunkseq = createChunkSequenceReader();
    }

    /**
     * Creates the wrapped chunk sequence reader (callback mode). IDAT content is skipped and CRCs
     * are not checked, since the pixel bytes flow through this filter untouched to the consumer.
     */
    protected ChunkSeqReaderPng createChunkSequenceReader() {
        return new ChunkSeqReaderPng(true) {
            @Override
            public boolean shouldSkipContent(int len, String id) {
                return super.shouldSkipContent(len, id) || id.equals("IDAT");
            }

            @Override
            protected boolean shouldCheckCrc(int len, String id) {
                return false;
            }

            @Override
            protected void postProcessChunk(ChunkReader chunkR) {
                super.postProcessChunk(chunkR);
                // System.out.println("processed chunk " + chunkR.getChunkRaw().id);
            }
        };
    }

    @Override
    public void close() throws IOException {
        super.close();
        chunkseq.close();
    }

    @Override
    public int read() throws IOException {
        int r = super.read();
        // read() returns 0-255 for data and -1 only at EOF, so the guard must be >= 0;
        // the previous (r > 0) silently dropped 0x00 bytes from the chunk parser's view
        if (r >= 0)
            chunkseq.feedAll(new byte[] {(byte) r}, 0, 1);
        return r;
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
        int res = super.read(b, off, len);
        if (res > 0)
            chunkseq.feedAll(b, off, res);
        return res;
    }

    @Override
    public int read(byte[] b) throws IOException {
        int res = super.read(b);
        if (res > 0)
            chunkseq.feedAll(b, 0, res);
        return res;
    }

    /**
     * Drains the rest of the PNG stream (so the trailing chunks are parsed) and closes this filter.
     */
    public void readUntilEndAndClose() throws IOException {
        BufferedStreamFeeder br = new BufferedStreamFeeder(this.in);
        while ((!chunkseq.isDone()) && br.hasMoreToFeed())
            br.feed(chunkseq);
        close();
    }

    public List<PngChunk> getChunksList() {
        return chunkseq.getChunks();
    }

    public ChunkSeqReaderPng getChunkseq() {
        return chunkseq;
    }
}

View File

@ -0,0 +1,37 @@
package ar.com.hjg.pngj;
import java.io.File;
import java.io.InputStream;
/**
* Trivial extension of {@link PngReader} that uses {@link ImageLineInt}.
* <p>
* In the current implementation this is quite dummy/redundant, because (for backward compatibility) PngReader already
* uses a {@link ImageLineInt}.
* <p>
* The factory is set at construction time. Remember that this could still be changed at runtime.
*/
public class PngReaderInt extends PngReader {

    public PngReaderInt(File file) {
        super(file); // not necessary to set factory, PngReader already does that
    }

    public PngReaderInt(InputStream inputStream) {
        super(inputStream);
    }

    /**
     * Reads the next row and casts it to {@link ImageLineInt}, failing fast if the configured line
     * factory produced some other implementation. Only meaningful for this concrete class.
     */
    public ImageLineInt readRowInt() {
        IImageLine line = readRow();
        if (!(line instanceof ImageLineInt))
            throw new PngjException("This is not a ImageLineInt : " + line.getClass());
        return (ImageLineInt) line;
    }
}

View File

@ -0,0 +1,427 @@
package ar.com.hjg.pngj;
import java.io.File;
import java.io.OutputStream;
import java.util.List;
import ar.com.hjg.pngj.chunks.ChunkCopyBehaviour;
import ar.com.hjg.pngj.chunks.ChunkPredicate;
import ar.com.hjg.pngj.chunks.ChunksList;
import ar.com.hjg.pngj.chunks.ChunksListForWrite;
import ar.com.hjg.pngj.chunks.PngChunk;
import ar.com.hjg.pngj.chunks.PngChunkIEND;
import ar.com.hjg.pngj.chunks.PngChunkIHDR;
import ar.com.hjg.pngj.chunks.PngChunkPLTE;
import ar.com.hjg.pngj.chunks.PngMetadata;
import ar.com.hjg.pngj.pixels.PixelsWriter;
import ar.com.hjg.pngj.pixels.PixelsWriterDefault;
/**
* Writes a PNG image, line by line.
*/
public class PngWriter {
public final ImageInfo imgInfo;
/**
 * last written row number, starting from 0
*/
protected int rowNum = -1;
private final ChunksListForWrite chunksList;
private final PngMetadata metadata;
/**
 * Current chunk group, (0-6) already written or currently writing (this is advanced when just starting to write the
* new group, not when finalizing the previous)
* <p>
* see {@link ChunksList}
*/
protected int currentChunkGroup = -1;
private int passes = 1; // Some writes might require two passes (NOT USED STILL)
private int currentpass = 0; // numbered from 1
private boolean shouldCloseStream = true;
private int idatMaxSize = 0; // 0=use default (PngIDatChunkOutputStream 64k)
// private PngIDatChunkOutputStream datStream;
protected PixelsWriter pixelsWriter;
private final OutputStream os;
private ChunkPredicate copyFromPredicate = null;
private ChunksList copyFromList = null;
protected StringBuilder debuginfo = new StringBuilder();
/**
* Opens a file for writing.
* <p>
* Sets shouldCloseStream=true. For more info see {@link #PngWriter(OutputStream, ImageInfo)}
*
* @param file
* @param imgInfo
* @param allowoverwrite If false and file exists, an {@link PngjOutputException} is thrown
*/
public PngWriter(File file, ImageInfo imgInfo, boolean allowoverwrite) {
    // delegates to the stream constructor; the stream is created here, so this writer owns it
    this(PngHelperInternal.ostreamFromFile(file, allowoverwrite), imgInfo);
    setShouldCloseStream(true);
}
/**
* @see #PngWriter(File, ImageInfo, boolean) (overwrite=true)
*/
public PngWriter(File file, ImageInfo imgInfo) {
    this(file, imgInfo, true); // overwrite allowed by default
}
/**
 * Constructs a new PngWriter from an output stream. After construction nothing is written yet. You still can set some
* parameters (compression, filters) and queue chunks before start writing the pixels.
* <p>
*
* @param outputStream Open stream for binary writing
* @param imgInfo Basic image parameters
*/
public PngWriter(OutputStream outputStream, ImageInfo imgInfo) {
    this.os = outputStream;
    this.imgInfo = imgInfo;
    // prealloc: chunk list, metadata wrapper and pixels writer are created up front,
    // but nothing is written to the stream until the first row triggers initIdat()
    chunksList = new ChunksListForWrite(imgInfo);
    metadata = new PngMetadata(chunksList);
    pixelsWriter = createPixelsWriter(imgInfo);
    setCompLevel(9); // deflate level 9 (best compression) by default
}
private void initIdat() { // this triggers the writing of first chunks
    // hand the output stream (and the optional IDAT size cap) over to the pixels writer
    pixelsWriter.setOs(this.os);
    pixelsWriter.setIdatMaxSize(idatMaxSize);
    writeSignatureAndIHDR();
    writeFirstChunks();
}
/** Writes the final IEND chunk and records it in the chunk list. */
private void writeEndChunk() {
    currentChunkGroup = ChunksList.CHUNK_GROUP_6_END;
    PngChunkIEND c = new PngChunkIEND(imgInfo);
    c.createRawChunk().writeChunk(os);
    chunksList.getChunks().add(c);
}
/**
 * Writes all queued chunks belonging to the pre-IDAT groups (after-IHDR, PLTE, after-PLTE).
 * Idempotent: does nothing if the IDAT group was already reached.
 */
private void writeFirstChunks() {
    if (currentChunkGroup >= ChunksList.CHUNK_GROUP_4_IDAT)
        return; // already past the pre-pixel groups
    int nw = 0;
    currentChunkGroup = ChunksList.CHUNK_GROUP_1_AFTERIDHR;
    queueChunksFromOther();
    nw = chunksList.writeChunks(os, currentChunkGroup);
    currentChunkGroup = ChunksList.CHUNK_GROUP_2_PLTE;
    nw = chunksList.writeChunks(os, currentChunkGroup);
    // nw counts PLTE-group chunks written: a palette is forbidden for greyscale
    // and mandatory for indexed images
    if (nw > 0 && imgInfo.greyscale)
        throw new PngjOutputException("cannot write palette for this format");
    if (nw == 0 && imgInfo.indexed)
        throw new PngjOutputException("missing palette");
    currentChunkGroup = ChunksList.CHUNK_GROUP_3_AFTERPLTE;
    nw = chunksList.writeChunks(os, currentChunkGroup);
}
/** Writes the after-IDAT chunks (not including IEND); fails if any queued chunk was left unwritten. */
private void writeLastChunks() { // not including end
    currentChunkGroup = ChunksList.CHUNK_GROUP_5_AFTERIDAT;
    queueChunksFromOther();
    chunksList.writeChunks(os, currentChunkGroup);
    // there should be no unwritten queued chunks left at this point
    List<PngChunk> pending = chunksList.getQueuedChunks();
    if (!pending.isEmpty())
        throw new PngjOutputException(pending.size() + " chunks were not written! Eg: "
                + pending.get(0).toString());
}
/**
 * Writes the 8-byte PNG id signature and the "IHDR" chunk, and records the chunk
 * in the chunks list. Sets the current chunk group to 0 (IHDR).
 */
private void writeSignatureAndIHDR() {
PngHelperInternal.writeBytes(os, PngHelperInternal.getPngIdSignature()); // signature
currentChunkGroup = ChunksList.CHUNK_GROUP_0_IDHR;
PngChunkIHDR ihdr = new PngChunkIHDR(imgInfo);
// chunk layout reference: http://www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html
ihdr.createRawChunk().writeChunk(os);
chunksList.getChunks().add(ihdr);
}
/**
 * Queues for writing the chunks coming from the optional source list set via
 * copyChunksFrom(), filtered by the copy predicate. Skips chunks without data,
 * critical chunks other than PLTE, chunks on the wrong side of the IDAT relative
 * to the current writing phase, and chunks for which an "equivalent" is already
 * queued or written.
 */
private void queueChunksFromOther() {
if (copyFromList == null || copyFromPredicate == null)
return;
boolean idatDone = currentChunkGroup >= ChunksList.CHUNK_GROUP_4_IDAT; // we assume this method is called either before
// or after the IDAT writing, not in the
// middle!
for (PngChunk chunk : copyFromList.getChunks()) {
if (chunk.getRaw().data == null)
continue; // we cannot copy skipped chunks (no raw data available)
int groupOri = chunk.getChunkGroup();
// only copy chunks whose original position (before/after IDAT) matches the current phase
if (groupOri <= ChunksList.CHUNK_GROUP_4_IDAT && idatDone)
continue;
if (groupOri >= ChunksList.CHUNK_GROUP_4_IDAT && !idatDone)
continue;
if (chunk.crit && !chunk.id.equals(PngChunkPLTE.ID))
continue; // critical chunks (except perhaps PLTE) are never
// copied
boolean copy = copyFromPredicate.match(chunk);
if (copy) {
// but if the chunk is already queued or written, it's omitted!
if (chunksList.getEquivalent(chunk).isEmpty()
&& chunksList.getQueuedEquivalent(chunk).isEmpty()) {
chunksList.queue(chunk);
}
}
}
}
/**
 * Queues an ancillary chunk for writing.
 * <p>
 * If an "equivalent" chunk is already queued (see {@link ChunkHelper#equivalent(PngChunk, PngChunk)}), this overwrites it.
 * <p>
 * The chunk will be written as late as possible, unless the priority is set.
 *
 * @param chunk chunk to queue
 */
public void queueChunk(PngChunk chunk) {
for (PngChunk other : chunksList.getQueuedEquivalent(chunk)) {
getChunksList().removeChunk(other);
}
chunksList.queue(chunk);
}
/**
 * Sets an origin (typically from a {@link PngReader}) of Chunks to be copied. This should be called only once, before
 * starting writing the rows. It doesn't matter the current state of the PngReader reading, this is a live object and
 * what matters is that when the writer writes the pixels (IDAT) the reader has already read them, and that when the
 * writer ends, the reader is already ended (all this is very natural).
 * <p>
 * Apart from the copyMask, there is some additional heuristics:
 * <p>
 * - The chunks will be queued, but will be written as late as possible (unless you explicitly set priority=true)
 * <p>
 * - The chunk will not be queued if an "equivalent" chunk was already queued explicitly. And it will be overwritten if
 * another is queued explicitly.
 *
 * @param chunks origin chunk list (typically from a reader)
 * @param copyMask Some bitmask from {@link ChunkCopyBehaviour}
 *
 * @see #copyChunksFrom(ChunksList, ChunkPredicate)
 */
public void copyChunksFrom(ChunksList chunks, int copyMask) {
copyChunksFrom(chunks, ChunkCopyBehaviour.createPredicate(copyMask, imgInfo));
}
/**
 * Copy all chunks from origin (mask {@link ChunkCopyBehaviour#COPY_ALL}).
 * See {@link #copyChunksFrom(ChunksList, int)} for more info
 *
 * @param chunks origin chunk list
 */
public void copyChunksFrom(ChunksList chunks) {
copyChunksFrom(chunks, ChunkCopyBehaviour.COPY_ALL);
}
/**
 * Copy chunks from origin depending on some {@link ChunkPredicate}
 *
 * @param chunks origin chunk list; a warning is logged if this method is called more than once
 * @param predicate The chunks (ancillary or PLTE) will be copied if and only if predicate matches
 * @throws PngjOutputException if predicate is null
 *
 * @see #copyChunksFrom(ChunksList, int) for more info
 */
public void copyChunksFrom(ChunksList chunks, ChunkPredicate predicate) {
if (copyFromList != null && chunks != null)
PngHelperInternal.LOGGER.warning("copyChunksFrom should only be called once");
if (predicate == null)
throw new PngjOutputException("copyChunksFrom requires a predicate");
this.copyFromList = chunks;
this.copyFromPredicate = predicate;
}
/**
 * Computes compressed size/raw size, approximate.
 * <p>
 * Actually: compressed size = total size of IDAT data , raw size = uncompressed pixel bytes = rows * (bytesPerRow +
 * 1).
 *
 * This must be called after pngw.end()
 *
 * @return compression ratio (compressed/raw)
 * @throws PngjOutputException if writing has not yet reached the post-IDAT stage
 */
public double computeCompressionRatio() {
if (currentChunkGroup < ChunksList.CHUNK_GROUP_5_AFTERIDAT)
throw new PngjOutputException("must be called after end()");
return pixelsWriter.getCompression();
}
/**
 * Finalizes all the steps and closes the stream. This must be called after writing the lines. Idempotent
 *
 * @throws PngjOutputException if not all image rows have been written yet
 */
public void end() {
if (rowNum != imgInfo.rows - 1 || !pixelsWriter.isDone())
throw new PngjOutputException("all rows have not been written");
try {
// flush remaining IDAT data, then the trailing chunks and IEND (each step skipped if already done)
if (pixelsWriter != null)
pixelsWriter.close();
if (currentChunkGroup < ChunksList.CHUNK_GROUP_5_AFTERIDAT)
writeLastChunks();
if (currentChunkGroup < ChunksList.CHUNK_GROUP_6_END)
writeEndChunk();
} finally {
close();
}
}
/**
 * Closes and releases resources.
 * <p>
 * This is normally called internally from {@link #end()}; call it directly only to abort
 * the writing and release resources (closes the stream, if so configured).
 * <p>
 * Idempotent and secure - never throws exceptions
 */
public void close() {
if (pixelsWriter != null)
pixelsWriter.close();
// close the underlying stream only when configured to do so
if (!shouldCloseStream || os == null)
return;
try {
os.close();
} catch (Exception e) {
PngHelperInternal.LOGGER.warning("Error closing writer " + e.toString());
}
}
/**
 * Returns the chunks list (queued and written chunks).
 *
 * @return the internal chunks list for writing
 */
public ChunksListForWrite getChunksList() {
return chunksList;
}
/**
 * Returns a high level wrapper over for metadata handling
 *
 * @return the metadata wrapper bound to this writer's chunks list
 */
public PngMetadata getMetadata() {
return metadata;
}
/**
 * Sets internal prediction filter type, or strategy to choose it.
 * <p>
 * This must be called just after constructor, before starting writing.
 *
 * @param filterType filter type or selection strategy, delegated to the pixels writer
 */
public void setFilterType(FilterType filterType) {
pixelsWriter.setFilterType(filterType);
}
/**
 * This is kept for backwards compatibility, now the PixelsWriter object should be used for setting
 * compression/filtering options
 *
 * @see PixelsWriter#setCompressionFactor(double)
 * @param complevel between 0 (no compression, max speed) and 9 (max compression)
 */
public void setCompLevel(int complevel) {
pixelsWriter.setDeflaterCompLevel(complevel);
}
/**
 * If true, forces FILTER_PRESERVE (keep the filters of an original image); otherwise
 * falls back to FILTER_DEFAULT when no filter type has been set yet.
 *
 * @param filterPreserve whether to preserve original row filters
 */
public void setFilterPreserve(boolean filterPreserve) {
if (filterPreserve) {
pixelsWriter.setFilterType(FilterType.FILTER_PRESERVE);
return;
}
if (pixelsWriter.getFilterType() == null) {
pixelsWriter.setFilterType(FilterType.FILTER_DEFAULT);
}
}
/**
 * Sets maximum size of IDAT fragments. Incrementing this from the default has very little effect on compression and
 * increments memory usage. You should rarely change this.
 * <p>
 *
 * @param idatMaxSize default=0 : use defaultSize (32K)
 */
public void setIdatMaxSize(int idatMaxSize) {
this.idatMaxSize = idatMaxSize;
}
/**
 * If true, output stream will be closed after ending write
 * <p>
 * default=true
 *
 * @param shouldCloseStream whether {@link #close()} should close the underlying stream
 */
public void setShouldCloseStream(boolean shouldCloseStream) {
this.shouldCloseStream = shouldCloseStream;
}
/**
 * Writes next row, does not check row number.
 *
 * @param imgline the row to write
 */
public void writeRow(IImageLine imgline) {
writeRow(imgline, rowNum + 1);
}
/**
 * Writes the full set of rows. The ImageLineSet should contain (allow access to) imgInfo.rows rows.
 *
 * @param imglines set providing one line per row index
 */
public void writeRows(IImageLineSet<? extends IImageLine> imglines) {
int row = 0;
while (row < imgInfo.rows) {
writeRow(imglines.getImageLineRawNum(row));
row++;
}
}
/**
 * Writes one row, checking it against the internal row counter.
 * <p>
 * On the first row of each pass (and on the last pass) this triggers the actual start of the
 * IDAT writing (signature, IHDR and pre-IDAT chunks).
 *
 * @param imgline row data to write
 * @param rownumber expected row number (wraps at imgInfo.rows); pass a negative value to skip the check
 */
public void writeRow(IImageLine imgline, int rownumber) {
rowNum++;
// both counters wrap to 0 after the last row (relevant for interlaced/multi-pass writing)
if (rowNum == imgInfo.rows)
rowNum = 0;
if (rownumber == imgInfo.rows)
rownumber = 0;
if (rownumber >= 0 && rowNum != rownumber)
throw new PngjOutputException("rows must be written in order: expected:" + rowNum
+ " passed:" + rownumber);
if (rowNum == 0)
currentpass++;
if (rownumber == 0 && currentpass == passes) {
initIdat();
currentChunkGroup = ChunksList.CHUNK_GROUP_4_IDAT; // we just begin writing IDAT
}
byte[] rowb = pixelsWriter.getRowb();
imgline.writeToPngRaw(rowb);
pixelsWriter.processRow(rowb);
}
/**
 * Utility method, uses internally a ImageLineInt
 *
 * @param buf raw row samples, wrapped in an {@link ImageLineInt}
 */
public void writeRowInt(int[] buf) {
writeRow(new ImageLineInt(imgInfo, buf));
}
/**
 * Factory method for pixels writer. This will be called once at the moment at start writing a set of IDAT chunks
 * (typically once in a normal PNG)
 *
 * This should be overridden if custom filtering strategies are desired. Remember to release this with close()
 *
 * @param imginfo Might be different than that of this object (eg: APNG with subimages)
 * @return new PixelsWriter. Don't forget to call close() when discarding it
 */
protected PixelsWriter createPixelsWriter(ImageInfo imginfo) {
PixelsWriterDefault pw = new PixelsWriterDefault(imginfo);
return pw;
}
/** Returns the pixels writer in use (created via {@link #createPixelsWriter(ImageInfo)}). */
public final PixelsWriter getPixelsWriter() {
return pixelsWriter;
}
/** Returns accumulated debug information as a string. */
public String getDebuginfo() {
return debuginfo.toString();
}
}

View File

@ -0,0 +1,35 @@
package ar.com.hjg.pngj;
import java.io.File;
import java.io.OutputStream;
import ar.com.hjg.pngj.pixels.PixelsWriter;
import ar.com.hjg.pngj.pixels.PixelsWriterMultiple;
/**
 * PngWriter with high compression, EXPERIMENTAL.
 * <p>
 * Uses a {@link PixelsWriterMultiple} instead of the default pixels writer, and (for the
 * file-with-overwrite constructor) a super-adaptive filter strategy.
 */
public class PngWriterHc extends PngWriter {
/** Writes to a file, optionally overwriting; sets the super-adaptive filter strategy. */
public PngWriterHc(File file, ImageInfo imgInfo, boolean allowoverwrite) {
super(file, imgInfo, allowoverwrite);
setFilterType(FilterType.FILTER_SUPER_ADAPTIVE);
}
/** Writes to a file, overwriting if it exists. */
public PngWriterHc(File file, ImageInfo imgInfo) {
super(file, imgInfo);
}
/** Writes to an already-open output stream. */
public PngWriterHc(OutputStream outputStream, ImageInfo imgInfo) {
super(outputStream, imgInfo);
}
@Override
protected PixelsWriter createPixelsWriter(ImageInfo imginfo) {
PixelsWriterMultiple pw = new PixelsWriterMultiple(imginfo);
return pw;
}
/** Returns the pixels writer, downcast to its concrete multiple-strategies type. */
public PixelsWriterMultiple getPixelWriterMultiple() {
return (PixelsWriterMultiple) pixelsWriter;
}
}

View File

@ -0,0 +1,20 @@
package ar.com.hjg.pngj;
/**
 * Exception thrown by bad CRC check
 */
public class PngjBadCrcException extends PngjInputException {
private static final long serialVersionUID = 1L;
/** Creates the exception with a message and an underlying cause. */
public PngjBadCrcException(String message, Throwable cause) {
super(message, cause);
}
/** Creates the exception with a detail message only. */
public PngjBadCrcException(String message) {
super(message);
}
/** Creates the exception wrapping an underlying cause. */
public PngjBadCrcException(Throwable cause) {
super(cause);
}
}

View File

@ -0,0 +1,20 @@
package ar.com.hjg.pngj;
/**
 * Base unchecked exception for this library.
 * <p>
 * Extends RuntimeException, so callers are not forced to declare or catch it.
 */
public class PngjException extends RuntimeException {
private static final long serialVersionUID = 1L;

/** Creates an exception with a detail message only. */
public PngjException(String message) {
super(message);
}

/** Creates an exception wrapping an underlying cause. */
public PngjException(Throwable cause) {
super(cause);
}

/** Creates an exception with both a detail message and a cause. */
public PngjException(String message, Throwable cause) {
super(message, cause);
}
}

View File

@ -0,0 +1,23 @@
package ar.com.hjg.pngj;
/**
 * Exception for anomalous internal problems (sort of asserts) that point to some issue with the library itself.
 *
 * @author Hernan J Gonzalez
 */
public class PngjExceptionInternal extends RuntimeException {
private static final long serialVersionUID = 1L;

/** Creates an exception with a detail message only. */
public PngjExceptionInternal(String message) {
super(message);
}

/** Creates an exception wrapping an underlying cause. */
public PngjExceptionInternal(Throwable cause) {
super(cause);
}

/** Creates an exception with both a detail message and a cause. */
public PngjExceptionInternal(String message, Throwable cause) {
super(message, cause);
}
}

View File

@ -0,0 +1,20 @@
package ar.com.hjg.pngj;
/**
 * Exception thrown when reading a PNG.
 */
public class PngjInputException extends PngjException {
private static final long serialVersionUID = 1L;
/** Creates the exception with a message and an underlying cause. */
public PngjInputException(String message, Throwable cause) {
super(message, cause);
}
/** Creates the exception with a detail message only. */
public PngjInputException(String message) {
super(message);
}
/** Creates the exception wrapping an underlying cause. */
public PngjInputException(Throwable cause) {
super(cause);
}
}

View File

@ -0,0 +1,20 @@
package ar.com.hjg.pngj;
/**
 * Exception thrown by writing process
 */
public class PngjOutputException extends PngjException {
private static final long serialVersionUID = 1L;
/** Creates the exception with a message and an underlying cause. */
public PngjOutputException(String message, Throwable cause) {
super(message, cause);
}
/** Creates the exception with a detail message only. */
public PngjOutputException(String message) {
super(message);
}
/** Creates the exception wrapping an underlying cause. */
public PngjOutputException(Throwable cause) {
super(cause);
}
}

View File

@ -0,0 +1,24 @@
package ar.com.hjg.pngj;
/**
 * Exception thrown because of some valid feature of the PNG standard that this library does not support.
 */
public class PngjUnsupportedException extends RuntimeException {
private static final long serialVersionUID = 1L;

/** Creates an exception with no message or cause. */
public PngjUnsupportedException() {
super();
}

/** Creates an exception with a detail message only. */
public PngjUnsupportedException(String message) {
super(message);
}

/** Creates an exception wrapping an underlying cause. */
public PngjUnsupportedException(Throwable cause) {
super(cause);
}

/** Creates an exception with both a detail message and a cause. */
public PngjUnsupportedException(String message, Throwable cause) {
super(message, cause);
}
}

View File

@ -0,0 +1,55 @@
package ar.com.hjg.pngj;
/**
 * Packs information of current row. Only used internally.
 * <p>
 * Tracks, for each row read, its position both in sequential read order and in the real
 * image, plus the subimage geometry when deinterlacing (Adam7 has 7 passes).
 */
class RowInfo {
public final ImageInfo imgInfo;
public final Deinterlacer deinterlacer; // null when the image is not interlaced
public final boolean imode; // Interlaced
int dY, dX, oY, oX; // current step and offset (in pixels)
int rowNseq; // row number (from 0) in sequential read order
int rowNreal; // row number in the real image
int rowNsubImg; // current row in the virtual subsampled image; this increments (by 1) from 0 to
// rows/dy 7 times
int rowsSubImg, colsSubImg; // size of current subimage , in pixels
int bytesRow; // bytes per row of the current (sub)image, not counting the filter byte
int pass; // 1-7
byte[] buf; // non-deep copy
int buflen; // valid bytes in buffer (include filter byte)
/**
 * @param imgInfo image parameters
 * @param deinterlacer pass null for non-interlaced images
 */
public RowInfo(ImageInfo imgInfo, Deinterlacer deinterlacer) {
this.imgInfo = imgInfo;
this.deinterlacer = deinterlacer;
this.imode = deinterlacer != null;
}
/**
 * Recomputes all derived fields for the given sequential row number, delegating to the
 * deinterlacer when interlaced, or using trivial full-image values otherwise.
 */
void update(int rowseq) {
rowNseq = rowseq;
if (imode) {
pass = deinterlacer.getPass();
dX = deinterlacer.dX;
dY = deinterlacer.dY;
oX = deinterlacer.oX;
oY = deinterlacer.oY;
rowNreal = deinterlacer.getCurrRowReal();
rowNsubImg = deinterlacer.getCurrRowSubimg();
rowsSubImg = deinterlacer.getRows();
colsSubImg = deinterlacer.getCols();
bytesRow = (imgInfo.bitspPixel * colsSubImg + 7) / 8;
} else {
pass = 1;
dX = dY = 1;
oX = oY = 0;
rowNreal = rowNsubImg = rowseq;
rowsSubImg = imgInfo.rows;
colsSubImg = imgInfo.cols;
bytesRow = imgInfo.bytesPerRow;
}
}
/** Attaches (without copying) the buffer holding the current row bytes. */
void updateBuf(byte[] buf, int buflen) {
this.buf = buf;
this.buflen = buflen;
}
}

View File

@ -0,0 +1,101 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngReader;
import ar.com.hjg.pngj.PngWriter;
/**
 * Chunk copy policy to apply when copying from a {@link PngReader} to a {@link PngWriter}.
 * <p>
 * The constants are bit-masks, they can be OR-ed
 * <p>
 * Reference: <a href="http://www.w3.org/TR/PNG/#14">http://www.w3.org/TR/PNG/#14</a> <br>
 */
public class ChunkCopyBehaviour {
/** Don't copy anything */
public static final int COPY_NONE = 0;
/** copy the palette */
public static final int COPY_PALETTE = 1;
/** copy all 'safe to copy' chunks */
public static final int COPY_ALL_SAFE = 1 << 2;
/**
 * copy all, including palette
 */
public static final int COPY_ALL = 1 << 3; // includes palette!
/**
 * Copy PHYS chunk (physical resolution)
 */
public static final int COPY_PHYS = 1 << 4; // dpi
/**
 * Copy all textual chunks.
 */
public static final int COPY_TEXTUAL = 1 << 5; // all textual types
/**
 * Copy TRNS chunk
 */
public static final int COPY_TRANSPARENCY = 1 << 6; //
/**
 * Copy unknown chunks (unknown by our factory)
 */
public static final int COPY_UNKNOWN = 1 << 7; // all unknown (by the factory!)
/**
 * Copy almost all: excludes only HIST (histogram), TIME and TEXTUAL chunks
 */
public static final int COPY_ALMOSTALL = 1 << 8;
/** True if the given bit(s) of the mask are set. */
private static boolean maskMatch(int v, int mask) {
return (v & mask) != 0;
}
/**
 * Creates a predicate equivalent to the copy mask
 * <p>
 * Given a copy mask (see static fields) and the ImageInfo of the target PNG, returns a predicate that tells if a
 * chunk should be copied.
 * <p>
 * This is a handy helper method, you can also create and set your own predicate
 */
public static ChunkPredicate createPredicate(final int copyFromMask, final ImageInfo imgInfo) {
return new ChunkPredicate() {
public boolean match(PngChunk chunk) {
if (chunk.crit) {
// the only critical chunk ever copied is the palette
if (chunk.id.equals(ChunkHelper.PLTE)) {
if (imgInfo.indexed && maskMatch(copyFromMask, ChunkCopyBehaviour.COPY_PALETTE))
return true;
if (!imgInfo.greyscale && maskMatch(copyFromMask, ChunkCopyBehaviour.COPY_ALL))
return true;
}
} else { // ancillary
boolean text = (chunk instanceof PngChunkTextVar);
boolean safe = chunk.safe;
// notice that these if are not exclusive
if (maskMatch(copyFromMask, ChunkCopyBehaviour.COPY_ALL))
return true;
if (safe && maskMatch(copyFromMask, ChunkCopyBehaviour.COPY_ALL_SAFE))
return true;
if (chunk.id.equals(ChunkHelper.tRNS)
&& maskMatch(copyFromMask, ChunkCopyBehaviour.COPY_TRANSPARENCY))
return true;
if (chunk.id.equals(ChunkHelper.pHYs)
&& maskMatch(copyFromMask, ChunkCopyBehaviour.COPY_PHYS))
return true;
if (text && maskMatch(copyFromMask, ChunkCopyBehaviour.COPY_TEXTUAL))
return true;
if (maskMatch(copyFromMask, ChunkCopyBehaviour.COPY_ALMOSTALL)
&& !(ChunkHelper.isUnknown(chunk) || text || chunk.id.equals(ChunkHelper.hIST) || chunk.id
.equals(ChunkHelper.tIME)))
return true;
if (maskMatch(copyFromMask, ChunkCopyBehaviour.COPY_UNKNOWN)
&& ChunkHelper.isUnknown(chunk))
return true;
}
return false;
}
};
}
}

View File

@ -0,0 +1,107 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.IChunkFactory;
import ar.com.hjg.pngj.ImageInfo;
/**
 * Default chunk factory.
 * <p>
 * The user that wants to parse custom chunks can extend {@link #createEmptyChunkExtended(String, ImageInfo)}
 */
public class ChunkFactory {
// if true, createChunk() also parses the raw bytes into the chunk's fields
boolean parse;
public ChunkFactory() {
this(true);
}
public ChunkFactory(boolean parse) {
this.parse = parse;
}
/**
 * Creates a chunk for the given raw data, trying in order: known standard chunks,
 * extended (custom) chunks, and finally a generic "unknown" chunk. Optionally parses the content.
 */
public final PngChunk createChunk(ChunkRaw chunkRaw, ImageInfo imgInfo) {
PngChunk c = createEmptyChunkKnown(chunkRaw.id, imgInfo);
if (c == null)
c = createEmptyChunkExtended(chunkRaw.id, imgInfo);
if (c == null)
c = createEmptyChunkUnknown(chunkRaw.id, imgInfo);
c.setRaw(chunkRaw);
if (parse && chunkRaw.data != null)
c.parseFromRaw(chunkRaw);
return c;
}
/** Creates an empty chunk of a known standard (or APNG) type; null if the id is not recognized. */
protected final PngChunk createEmptyChunkKnown(String id, ImageInfo imgInfo) {
if (id.equals(ChunkHelper.IDAT))
return new PngChunkIDAT(imgInfo);
if (id.equals(ChunkHelper.IHDR))
return new PngChunkIHDR(imgInfo);
if (id.equals(ChunkHelper.PLTE))
return new PngChunkPLTE(imgInfo);
if (id.equals(ChunkHelper.IEND))
return new PngChunkIEND(imgInfo);
if (id.equals(ChunkHelper.tEXt))
return new PngChunkTEXT(imgInfo);
if (id.equals(ChunkHelper.iTXt))
return new PngChunkITXT(imgInfo);
if (id.equals(ChunkHelper.zTXt))
return new PngChunkZTXT(imgInfo);
if (id.equals(ChunkHelper.bKGD))
return new PngChunkBKGD(imgInfo);
if (id.equals(ChunkHelper.gAMA))
return new PngChunkGAMA(imgInfo);
if (id.equals(ChunkHelper.pHYs))
return new PngChunkPHYS(imgInfo);
if (id.equals(ChunkHelper.iCCP))
return new PngChunkICCP(imgInfo);
if (id.equals(ChunkHelper.tIME))
return new PngChunkTIME(imgInfo);
if (id.equals(ChunkHelper.tRNS))
return new PngChunkTRNS(imgInfo);
if (id.equals(ChunkHelper.cHRM))
return new PngChunkCHRM(imgInfo);
if (id.equals(ChunkHelper.sBIT))
return new PngChunkSBIT(imgInfo);
if (id.equals(ChunkHelper.sRGB))
return new PngChunkSRGB(imgInfo);
if (id.equals(ChunkHelper.hIST))
return new PngChunkHIST(imgInfo);
if (id.equals(ChunkHelper.sPLT))
return new PngChunkSPLT(imgInfo);
// apng
if (id.equals(PngChunkFDAT.ID))
return new PngChunkFDAT(imgInfo);
if (id.equals(PngChunkACTL.ID))
return new PngChunkACTL(imgInfo);
if (id.equals(PngChunkFCTL.ID))
return new PngChunkFCTL(imgInfo);
return null;
}
/**
 * This is used as last resort factory method.
 * <p>
 * It creates a {@link PngChunkUNKNOWN} chunk.
 */
protected final PngChunk createEmptyChunkUnknown(String id, ImageInfo imgInfo) {
return new PngChunkUNKNOWN(id, imgInfo);
}
/**
 * Factory for chunks that are not in the original PNG standard. This can be overridden (but don't forget to call
 * this also)
 *
 * @param id Chunk id , 4 letters
 * @param imgInfo Usually not needed
 * @return null if chunk id not recognized
 */
protected PngChunk createEmptyChunkExtended(String id, ImageInfo imgInfo) {
if (id.equals(PngChunkOFFS.ID))
return new PngChunkOFFS(imgInfo);
if (id.equals(PngChunkSTER.ID))
return new PngChunkSTER(imgInfo);
return null; // extend!
}
}

View File

@ -0,0 +1,290 @@
package ar.com.hjg.pngj.chunks;
// see http://www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html
// http://www.w3.org/TR/PNG/#5Chunk-naming-conventions
// http://www.w3.org/TR/PNG/#table53
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * Helper methods and constants related to Chunk processing.
 * <p>
 * This should only be of interest to developers doing special chunk processing or extending the ChunkFactory
 */
public class ChunkHelper {
ChunkHelper() {}
// standard chunk ids (PNG spec, http://www.w3.org/TR/PNG/#table53)
public static final String IHDR = "IHDR";
public static final String PLTE = "PLTE";
public static final String IDAT = "IDAT";
public static final String IEND = "IEND";
public static final String cHRM = "cHRM";
public static final String gAMA = "gAMA";
public static final String iCCP = "iCCP";
public static final String sBIT = "sBIT";
public static final String sRGB = "sRGB";
public static final String bKGD = "bKGD";
public static final String hIST = "hIST";
public static final String tRNS = "tRNS";
public static final String pHYs = "pHYs";
public static final String sPLT = "sPLT";
public static final String tIME = "tIME";
public static final String iTXt = "iTXt";
public static final String tEXt = "tEXt";
public static final String zTXt = "zTXt";
// byte (Latin1) versions of the critical chunk ids, precomputed
public static final byte[] b_IHDR = toBytes(IHDR);
public static final byte[] b_PLTE = toBytes(PLTE);
public static final byte[] b_IDAT = toBytes(IDAT);
public static final byte[] b_IEND = toBytes(IEND);
/*
 * static auxiliary buffer. any method that uses this should synchronize against this
 */
private static byte[] tmpbuffer = new byte[4096];
/**
 * Converts to bytes using Latin1 (ISO-8859-1)
 */
public static byte[] toBytes(String x) {
try {
return x.getBytes(PngHelperInternal.charsetLatin1name);
} catch (UnsupportedEncodingException e) {
throw new PngBadCharsetException(e);
}
}
/**
 * Converts to String using Latin1 (ISO-8859-1)
 */
public static String toString(byte[] x) {
try {
return new String(x, PngHelperInternal.charsetLatin1name);
} catch (UnsupportedEncodingException e) {
throw new PngBadCharsetException(e);
}
}
/**
 * Converts to String using Latin1 (ISO-8859-1)
 */
public static String toString(byte[] x, int offset, int len) {
try {
return new String(x, offset, len, PngHelperInternal.charsetLatin1name);
} catch (UnsupportedEncodingException e) {
throw new PngBadCharsetException(e);
}
}
/**
 * Converts to bytes using UTF-8
 */
public static byte[] toBytesUTF8(String x) {
try {
return x.getBytes(PngHelperInternal.charsetUTF8name);
} catch (UnsupportedEncodingException e) {
throw new PngBadCharsetException(e);
}
}
/**
 * Converts to string using UTF-8
 */
public static String toStringUTF8(byte[] x) {
try {
return new String(x, PngHelperInternal.charsetUTF8name);
} catch (UnsupportedEncodingException e) {
throw new PngBadCharsetException(e);
}
}
/**
 * Converts to string using UTF-8
 */
public static String toStringUTF8(byte[] x, int offset, int len) {
try {
return new String(x, offset, len, PngHelperInternal.charsetUTF8name);
} catch (UnsupportedEncodingException e) {
throw new PngBadCharsetException(e);
}
}
/**
 * critical chunk : first letter is uppercase
 */
public static boolean isCritical(String id) {
return (Character.isUpperCase(id.charAt(0)));
}
/**
 * public chunk: second letter is uppercase
 */
public static boolean isPublic(String id) { //
return (Character.isUpperCase(id.charAt(1)));
}
/**
 * Safe to copy chunk: fourth letter is lower case
 */
public static boolean isSafeToCopy(String id) {
return (!Character.isUpperCase(id.charAt(3)));
}
/**
 * "Unknown" just means that our chunk factory (even when it has been augmented by client code) did not recognize its
 * id
 */
public static boolean isUnknown(PngChunk c) {
return c instanceof PngChunkUNKNOWN;
}
/**
 * Finds position of null byte in array
 *
 * @param b array to scan
 * @return -1 if not found
 */
public static int posNullByte(byte[] b) {
for (int i = 0; i < b.length; i++)
if (b[i] == 0)
return i;
return -1;
}
/**
 * Decides if a chunk should be loaded, according to a ChunkLoadBehaviour
 *
 * @param id chunk id (4 letters)
 * @param behav load policy
 * @return true/false (critical chunks are always loaded)
 */
public static boolean shouldLoad(String id, ChunkLoadBehaviour behav) {
if (isCritical(id))
return true;
switch (behav) {
case LOAD_CHUNK_ALWAYS:
return true;
case LOAD_CHUNK_IF_SAFE:
return isSafeToCopy(id);
case LOAD_CHUNK_NEVER:
return false;
case LOAD_CHUNK_MOST_IMPORTANT:
return id.equals(PngChunkTRNS.ID);
}
return false; // should not reach here
}
/** Compresses (or decompresses, if compress=false) the whole array. */
public final static byte[] compressBytes(byte[] ori, boolean compress) {
return compressBytes(ori, 0, ori.length, compress);
}
/** Compresses (or decompresses, if compress=false) a slice of the array, using zlib deflate/inflate. */
public static byte[] compressBytes(byte[] ori, int offset, int len, boolean compress) {
try {
ByteArrayInputStream inb = new ByteArrayInputStream(ori, offset, len);
InputStream in = compress ? inb : new InflaterInputStream(inb);
ByteArrayOutputStream outb = new ByteArrayOutputStream();
OutputStream out = compress ? new DeflaterOutputStream(outb) : outb;
shovelInToOut(in, out);
in.close();
out.close();
return outb.toByteArray();
} catch (Exception e) {
throw new PngjException(e);
}
}
/**
 * Shovels all data from an input stream to an output stream.
 */
private static void shovelInToOut(InputStream in, OutputStream out) throws IOException {
synchronized (tmpbuffer) {
int len;
while ((len = in.read(tmpbuffer)) > 0) {
out.write(tmpbuffer, 0, len);
}
}
}
/**
 * Returns only the chunks that "match" the predicate
 *
 * See also trimList()
 */
public static List<PngChunk> filterList(List<PngChunk> target, ChunkPredicate predicateKeep) {
List<PngChunk> result = new ArrayList<PngChunk>();
for (PngChunk element : target) {
if (predicateKeep.match(element)) {
result.add(element);
}
}
return result;
}
/**
 * Remove (in place) the chunks that "match" the predicate
 *
 * See also filterList
 *
 * @return number of chunks removed
 */
public static int trimList(List<PngChunk> target, ChunkPredicate predicateRemove) {
Iterator<PngChunk> it = target.iterator();
int cont = 0;
while (it.hasNext()) {
PngChunk c = it.next();
if (predicateRemove.match(c)) {
it.remove();
cont++;
}
}
return cont;
}
/**
 * Adhoc criteria: two ancillary chunks are "equivalent" ("practically same type") if they have same id and (perhaps,
 * if multiple are allowed) if the match also in some "internal key" (eg: key for string values, palette for sPLT,
 * etc)
 *
 * When we use this method, we implicitly assume that we don't allow/expect two "equivalent" chunks in a single PNG
 *
 * Notice that the use of this is optional, and that the PNG standard actually allows text chunks that have same key
 *
 * @return true if "equivalent"
 */
public static final boolean equivalent(PngChunk c1, PngChunk c2) {
if (c1 == c2)
return true;
if (c1 == null || c2 == null || !c1.id.equals(c2.id))
return false;
if (c1.crit)
return false;
// same id
if (c1.getClass() != c2.getClass())
return false; // should not happen
if (!c2.allowsMultiple())
return true;
if (c1 instanceof PngChunkTextVar) {
return ((PngChunkTextVar) c1).getKey().equals(((PngChunkTextVar) c2).getKey());
}
if (c1 instanceof PngChunkSPLT) {
return ((PngChunkSPLT) c1).getPalName().equals(((PngChunkSPLT) c2).getPalName());
}
// unknown chunks that allow multiple? consider they don't match
return false;
}
/** True if the chunk is one of the textual types (tEXt, zTXt, iTXt). */
public static boolean isText(PngChunk c) {
return c instanceof PngChunkTextVar;
}
}

View File

@ -0,0 +1,26 @@
package ar.com.hjg.pngj.chunks;
/**
 * Policy that decides what to do with ancillary (non-critical) chunks when reading.
 */
public enum ChunkLoadBehaviour {
/** Skip every non-critical chunk. */
LOAD_CHUNK_NEVER,
/** Load a chunk only if it is flagged "safe to copy". */
LOAD_CHUNK_IF_SAFE,
/** Load only the most important ancillary chunk: TRNS. */
LOAD_CHUNK_MOST_IMPORTANT,
/**
 * Load all chunks. <br>
 * Notice that other restrictions might apply, see PngReader.skipChunkMaxSize PngReader.skipChunkIds
 */
LOAD_CHUNK_ALWAYS
}

View File

@ -0,0 +1,14 @@
package ar.com.hjg.pngj.chunks;
/**
 * Decides if another chunk "matches", according to some criterion
 */
public interface ChunkPredicate {
/**
 * Tells whether the given chunk matches this predicate's criterion.
 *
 * @param chunk chunk to test
 * @return true if match
 */
boolean match(PngChunk chunk);
}

View File

@ -0,0 +1,169 @@
package ar.com.hjg.pngj.chunks;
import java.io.ByteArrayInputStream;
import java.io.OutputStream;
import java.util.zip.CRC32;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjBadCrcException;
import ar.com.hjg.pngj.PngjException;
import ar.com.hjg.pngj.PngjOutputException;
/**
* Raw (physical) chunk.
* <p>
* Short lived object, to be created while serialing/deserializing Do not reuse it for different chunks. <br>
* See http://www.libpng.org/pub/png/spec/1.2/PNG-Structure.html
*/
public class ChunkRaw {
/**
* The length counts only the data field, not itself, the chunk type code, or the CRC. Zero is a valid length.
* Although encoders and decoders should treat the length as unsigned, its value must not exceed 231-1 bytes.
*/
public final int len;
/**
* A 4-byte chunk type code. uppercase and lowercase ASCII letters
*/
public final byte[] idbytes;
public final String id;
/**
* The data bytes appropriate to the chunk type, if any. This field can be of zero length. Does not include crc. If
* it's null, it means that the data is ot available
*/
public byte[] data = null;
/**
* @see ChunkRaw#getOffset()
*/
private long offset = 0;
/**
* A 4-byte CRC (Cyclic Redundancy Check) calculated on the preceding bytes in the chunk, including the chunk type
* code and chunk data fields, but not including the length field.
*/
public byte[] crcval = new byte[4];
private CRC32 crcengine; // lazily instantiated
public ChunkRaw(int len, String id, boolean alloc) {
this.len = len;
this.id = id;
this.idbytes = ChunkHelper.toBytes(id);
for (int i = 0; i < 4; i++) {
if (idbytes[i] < 65 || idbytes[i] > 122 || (idbytes[i] > 90 && idbytes[i] < 97))
throw new PngjException("Bad id chunk: must be ascii letters " + id);
}
if (alloc)
allocData();
}
public ChunkRaw(int len, byte[] idbytes, boolean alloc) {
this(len, ChunkHelper.toString(idbytes), alloc);
}
public void allocData() { // TODO: not public
if (data == null || data.length < len)
data = new byte[len];
}
/**
* this is called after setting data, before writing to os
*/
private void computeCrcForWriting() {
crcengine = new CRC32();
crcengine.update(idbytes, 0, 4);
if (len > 0)
crcengine.update(data, 0, len); //
PngHelperInternal.writeInt4tobytes((int) crcengine.getValue(), crcval, 0);
}
/**
* Computes the CRC and writes to the stream. If error, a PngjOutputException is thrown
*
* Note that this is only used for non idat chunks
*/
public void writeChunk(OutputStream os) {
writeChunkHeader(os);
if (len > 0) {
if (data == null)
throw new PngjOutputException("cannot write chunk, raw chunk data is null [" + id + "]");
PngHelperInternal.writeBytes(os, data, 0, len);
}
computeCrcForWriting();
writeChunkCrc(os);
}
public void writeChunkHeader(OutputStream os) {
if (idbytes.length != 4)
throw new PngjOutputException("bad chunkid [" + id + "]");
PngHelperInternal.writeInt4(os, len);
PngHelperInternal.writeBytes(os, idbytes);
}
public void writeChunkCrc(OutputStream os) {
PngHelperInternal.writeBytes(os, crcval, 0, 4);
}
public void checkCrc() {
int crcComputed = (int) crcengine.getValue();
int crcExpected = PngHelperInternal.readInt4fromBytes(crcval, 0);
if (crcComputed != crcExpected)
throw new PngjBadCrcException("chunk: " + this.toString() + " expected=" + crcExpected
+ " read=" + crcComputed);
}
public void updateCrc(byte[] buf, int off, int len) {
if (crcengine == null)
crcengine = new CRC32();
crcengine.update(buf, off, len);
}
ByteArrayInputStream getAsByteStream() { // only the data
return new ByteArrayInputStream(data);
}
/**
 * offset in the full PNG stream, in bytes. only informational, for read chunks (0=NA)
 *
 * @return byte offset of this chunk in the stream; 0 when not applicable
 */
public long getOffset() {
return offset;
}
/**
 * @see #getOffset()
 */
public void setOffset(long offset) {
this.offset = offset;
}
/** Short debug representation: chunk id and data length. */
public String toString() {
return "chunkid=" + ChunkHelper.toString(idbytes) + " len=" + len;
}
@Override
public int hashCode() {
// consistent with equals(): based on id and offset only
final int prime = 31;
int result = 1;
result = prime * result + ((id == null) ? 0 : id.hashCode());
result = prime * result + (int) (offset ^ (offset >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
    // Two raw chunks are equal when they have the same id and the same stream offset.
    if (this == obj)
        return true;
    if (obj == null || getClass() != obj.getClass())
        return false;
    final ChunkRaw other = (ChunkRaw) obj;
    if (offset != other.offset)
        return false;
    return id == null ? other.id == null : id.equals(other.id);
}
}

View File

@ -0,0 +1,167 @@
package ar.com.hjg.pngj.chunks;
import java.util.ArrayList;
import java.util.List;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngjException;
/**
 * All chunks that form an image, read or to be written.
 * <p>
 * "chunks" includes all chunks; IDAT is represented by a single pseudo chunk without data
 **/
public class ChunksList {
// ref: http://www.w3.org/TR/PNG/#table53
// NOTE(review): "IDHR" in the constant names below is a historical typo for IHDR; kept for compatibility
public static final int CHUNK_GROUP_0_IDHR = 0; // required - single
public static final int CHUNK_GROUP_1_AFTERIDHR = 1; // optional - multiple
public static final int CHUNK_GROUP_2_PLTE = 2; // optional - single
public static final int CHUNK_GROUP_3_AFTERPLTE = 3; // optional - multiple
public static final int CHUNK_GROUP_4_IDAT = 4; // required (single pseudo chunk)
public static final int CHUNK_GROUP_5_AFTERIDAT = 5; // optional - multiple
public static final int CHUNK_GROUP_6_END = 6; // only 1 chunk - required
/**
 * All chunks, read (or written)
 *
 * But IDAT is a single pseudo chunk without data
 */
List<PngChunk> chunks = new ArrayList<PngChunk>();
// protected HashMap<String, List<PngChunk>> chunksById = new HashMap<String, List<PngChunk>>();
// // does not include IDAT
final ImageInfo imageInfo; // only required for writing
boolean withPlte = false; // set to true when a PLTE chunk is appended
public ChunksList(ImageInfo imfinfo) {
this.imageInfo = imfinfo;
}
/**
 * WARNING: this does NOT return a copy, but the list itself. The caller should not modify this directly! Don't use
 * this to manipulate the chunks.
 */
public List<PngChunk> getChunks() {
return chunks;
}
/**
 * Filters the given list by chunk id; when innerid is non-null, text chunks are additionally
 * matched by key and SPLT chunks by palette name.
 */
protected static List<PngChunk> getXById(final List<PngChunk> list, final String id,
final String innerid) {
if (innerid == null)
return ChunkHelper.filterList(list, new ChunkPredicate() {
public boolean match(PngChunk c) {
return c.id.equals(id);
}
});
else
return ChunkHelper.filterList(list, new ChunkPredicate() {
public boolean match(PngChunk c) {
if (!c.id.equals(id))
return false;
if (c instanceof PngChunkTextVar && !((PngChunkTextVar) c).getKey().equals(innerid))
return false;
if (c instanceof PngChunkSPLT && !((PngChunkSPLT) c).getPalName().equals(innerid))
return false;
return true;
}
});
}
/**
 * Adds chunk in next position. This is used only by the pngReader
 */
public void appendReadChunk(PngChunk chunk, int chunkGroup) {
chunk.setChunkGroup(chunkGroup);
chunks.add(chunk);
// remember that a palette was seen; ordering checks for other chunks depend on this
if (chunk.id.equals(PngChunkPLTE.ID))
withPlte = true;
}
/**
 * All chunks with this ID
 *
 * @param id
 * @return List, empty if none
 */
public List<? extends PngChunk> getById(final String id) {
return getById(id, null);
}
/**
 * If innerid!=null and the chunk is PngChunkTextVar or PngChunkSPLT, it's filtered by that id
 *
 * @param id
 * @param innerid Only used for text and SPLT chunks
 * @return List, empty if none
 */
public List<? extends PngChunk> getById(final String id, final String innerid) {
return getXById(chunks, id, innerid);
}
/**
 * Returns only one chunk
 *
 * @param id
 * @return First chunk found, null if not found
 */
public PngChunk getById1(final String id) {
return getById1(id, false);
}
/**
 * Returns only one chunk or null if nothing found - does not include queued
 * <p>
 * If more than one chunk is found, then an exception is thrown (failifMultiple=true or chunk is single) or the last
 * one is returned (failifMultiple=false)
 **/
public PngChunk getById1(final String id, final boolean failIfMultiple) {
return getById1(id, null, failIfMultiple);
}
/**
 * Returns only one chunk or null if nothing found - does not include queued
 * <p>
 * If more than one chunk (after filtering by inner id) is found, then an exception is thrown (failifMultiple=true or
 * chunk is single) or the last one is returned (failifMultiple=false)
 **/
public PngChunk getById1(final String id, final String innerid, final boolean failIfMultiple) {
List<? extends PngChunk> list = getById(id, innerid);
if (list.isEmpty())
return null;
// multiple matches are only tolerated for chunk types that allow multiple instances
if (list.size() > 1 && (failIfMultiple || !list.get(0).allowsMultiple()))
throw new PngjException("unexpected multiple chunks id=" + id);
return list.get(list.size() - 1);
}
/**
 * Finds all chunks "equivalent" to this one
 *
 * @param c2
 * @return Empty if nothing found
 */
public List<PngChunk> getEquivalent(final PngChunk c2) {
return ChunkHelper.filterList(chunks, new ChunkPredicate() {
public boolean match(PngChunk c) {
return ChunkHelper.equivalent(c, c2);
}
});
}
public String toString() {
return "ChunkList: read: " + chunks.size();
}
/**
 * for debugging
 */
public String toStringFull() {
StringBuilder sb = new StringBuilder(toString());
sb.append("\n Read:\n");
for (PngChunk chunk : chunks) {
sb.append(chunk).append(" G=" + chunk.getChunkGroup() + "\n");
}
return sb.toString();
}
}

View File

@ -0,0 +1,189 @@
package ar.com.hjg.pngj.chunks;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngjException;
import ar.com.hjg.pngj.PngjOutputException;
public class ChunksListForWrite extends ChunksList {
/**
 * chunks not yet written - does not include IHDR, IDAT, END, perhaps yes PLTE
 */
private final List<PngChunk> queuedChunks = new ArrayList<PngChunk>();
// redundant, just for efficiency: counts how many chunks of each id were already written
private HashMap<String, Integer> alreadyWrittenKeys = new HashMap<String, Integer>();
public ChunksListForWrite(ImageInfo imfinfo) {
super(imfinfo);
}
/**
 * Same as getById(), but looking in the queued chunks
 */
public List<? extends PngChunk> getQueuedById(final String id) {
return getQueuedById(id, null);
}
/**
 * Same as getById(), but looking in the queued chunks
 */
public List<? extends PngChunk> getQueuedById(final String id, final String innerid) {
return getXById(queuedChunks, id, innerid);
}
/**
 * Same as getById1(), but looking in the queued chunks
 **/
public PngChunk getQueuedById1(final String id, final String innerid, final boolean failIfMultiple) {
List<? extends PngChunk> list = getQueuedById(id, innerid);
if (list.isEmpty())
return null;
if (list.size() > 1 && (failIfMultiple || !list.get(0).allowsMultiple()))
throw new PngjException("unexpected multiple chunks id=" + id);
return list.get(list.size() - 1);
}
/**
 * Same as getById1(), but looking in the queued chunks
 **/
public PngChunk getQueuedById1(final String id, final boolean failIfMultiple) {
return getQueuedById1(id, null, failIfMultiple);
}
/**
 * Same as getById1(), but looking in the queued chunks
 **/
public PngChunk getQueuedById1(final String id) {
return getQueuedById1(id, false);
}
/**
 * Finds all chunks "equivalent" to this one
 *
 * @param c2
 * @return Empty if nothing found
 */
public List<PngChunk> getQueuedEquivalent(final PngChunk c2) {
return ChunkHelper.filterList(queuedChunks, new ChunkPredicate() {
public boolean match(PngChunk c) {
return ChunkHelper.equivalent(c, c2);
}
});
}
/**
 * Remove Chunk: only from queued
 *
 * WARNING: this depends on c.equals() implementation, which is straightforward for SingleChunks. For MultipleChunks,
 * it will normally check for reference equality!
 */
public boolean removeChunk(PngChunk c) {
if (c == null)
return false;
return queuedChunks.remove(c);
}
/**
 * Adds chunk to queue. Always returns true; duplicates are only detected later, at
 * write time (see writeChunks).
 *
 * @param c
 */
public boolean queue(PngChunk c) {
queuedChunks.add(c);
return true;
}
/**
 * this should be called only for ancillary chunks and PLTE (groups 1 - 3 - 5)
 **/
private static boolean shouldWrite(PngChunk c, int currentGroup) {
if (currentGroup == CHUNK_GROUP_2_PLTE)
return c.id.equals(ChunkHelper.PLTE);
// even-numbered groups (other than PLTE) are IHDR/IDAT/END: never written from the queue
if (currentGroup % 2 == 0)
throw new PngjOutputException("bad chunk group?");
// derive the valid [min,max] group window from the chunk's ordering constraint
int minChunkGroup, maxChunkGroup;
if (c.getOrderingConstraint().mustGoBeforePLTE())
minChunkGroup = maxChunkGroup = ChunksList.CHUNK_GROUP_1_AFTERIDHR;
else if (c.getOrderingConstraint().mustGoBeforeIDAT()) {
maxChunkGroup = ChunksList.CHUNK_GROUP_3_AFTERPLTE;
minChunkGroup =
c.getOrderingConstraint().mustGoAfterPLTE() ? ChunksList.CHUNK_GROUP_3_AFTERPLTE
: ChunksList.CHUNK_GROUP_1_AFTERIDHR;
} else {
maxChunkGroup = ChunksList.CHUNK_GROUP_5_AFTERIDAT;
minChunkGroup = ChunksList.CHUNK_GROUP_1_AFTERIDHR;
}
// by default write as late as allowed; high-priority chunks go as early as allowed;
// unknown chunks keep the group where they were originally read
int preferred = maxChunkGroup;
if (c.hasPriority())
preferred = minChunkGroup;
if (ChunkHelper.isUnknown(c) && c.getChunkGroup() > 0)
preferred = c.getChunkGroup();
if (currentGroup == preferred)
return true;
// also write if we already passed the preferred group but are still inside the window
if (currentGroup > preferred && currentGroup <= maxChunkGroup)
return true;
return false;
}
/**
 * Writes the queued chunks that belong to the given group; each written chunk is moved
 * from the queue to the "written" list.
 *
 * @return number of chunks written
 */
public int writeChunks(OutputStream os, int currentGroup) {
int cont = 0;
Iterator<PngChunk> it = queuedChunks.iterator();
while (it.hasNext()) {
PngChunk c = it.next();
if (!shouldWrite(c, currentGroup))
continue;
// only ancillary chunks and PLTE may be queued; other critical chunks are written elsewhere
if (ChunkHelper.isCritical(c.id) && !c.id.equals(ChunkHelper.PLTE))
throw new PngjOutputException("bad chunk queued: " + c);
if (alreadyWrittenKeys.containsKey(c.id) && !c.allowsMultiple())
throw new PngjOutputException("duplicated chunk does not allow multiple: " + c);
c.write(os);
chunks.add(c);
alreadyWrittenKeys.put(c.id,
alreadyWrittenKeys.containsKey(c.id) ? alreadyWrittenKeys.get(c.id) + 1 : 1);
c.setChunkGroup(currentGroup);
it.remove();
cont++;
}
return cont;
}
/**
 * warning: this is NOT a copy, do not modify
 */
public List<PngChunk> getQueuedChunks() {
return queuedChunks;
}
public String toString() {
return "ChunkList: written: " + getChunks().size() + " queue: " + queuedChunks.size();
}
/**
 * for debugging
 */
public String toStringFull() {
StringBuilder sb = new StringBuilder(toString());
sb.append("\n Written:\n");
for (PngChunk chunk : getChunks()) {
sb.append(chunk).append(" G=" + chunk.getChunkGroup() + "\n");
}
if (!queuedChunks.isEmpty()) {
sb.append(" Queued:\n");
for (PngChunk chunk : queuedChunks) {
sb.append(chunk).append("\n");
}
}
return sb.toString();
}
}

View File

@ -0,0 +1,20 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.PngjException;
/**
 * Exception thrown when a chunk carries text in an invalid or unsupported charset.
 */
public class PngBadCharsetException extends PngjException {
    private static final long serialVersionUID = 1L;

    public PngBadCharsetException(String message) {
        super(message);
    }

    public PngBadCharsetException(String message, Throwable cause) {
        super(message, cause);
    }

    public PngBadCharsetException(Throwable cause) {
        super(cause);
    }
}

View File

@ -0,0 +1,216 @@
package ar.com.hjg.pngj.chunks;
import java.io.OutputStream;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngjExceptionInternal;
/**
 * Represents an instance of a PNG chunk.
 * <p>
 * See <a href="http://www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html">http://www
 * .libpng.org/pub/png/spec/1.2/PNG-Chunks .html</a>
 * <p>
 * Concrete classes should extend {@link PngChunkSingle} or {@link PngChunkMultiple}
 * <p>
 * Note that some methods/fields are type-specific (getOrderingConstraint(), allowsMultiple()),<br>
 * some are 'almost' type-specific (id,crit,pub,safe; the exception is the unknown-chunk type), <br>
 * and the rest are instance-specific
 */
public abstract class PngChunk {
/**
 * Chunk-id: 4 letters
 */
public final String id;
/**
 * Autocomputed at creation time from the case of the id letters (PNG chunk naming rules)
 */
public final boolean crit, pub, safe;
protected final ImageInfo imgInfo;
protected ChunkRaw raw;
private boolean priority = false; // For writing. Queued chunks with high priority will be written
// as soon as
// possible
protected int chunkGroup = -1; // chunk group where it was read or written
/**
 * Possible ordering constraint for a PngChunk type -only relevant for ancillary chunks. Theoretically, there could be
 * more general constraints, but these cover the constraints for standard chunks.
 */
public enum ChunkOrderingConstraint {
/**
 * no ordering constraint
 */
NONE,
/**
 * Must go before PLTE (and hence, also before IDAT)
 */
BEFORE_PLTE_AND_IDAT,
/**
 * Must go after PLTE (if exists) but before IDAT
 */
AFTER_PLTE_BEFORE_IDAT,
/**
 * Must go after PLTE (and it must exist) but before IDAT
 */
AFTER_PLTE_BEFORE_IDAT_PLTE_REQUIRED,
/**
 * Must go before IDAT (before or after PLTE)
 */
BEFORE_IDAT,
/**
 * After IDAT (this restriction does not apply to the standard PNG chunks)
 */
AFTER_IDAT,
/**
 * Does not apply
 */
NA;
public boolean mustGoBeforePLTE() {
return this == BEFORE_PLTE_AND_IDAT;
}
public boolean mustGoBeforeIDAT() {
return this == BEFORE_IDAT || this == BEFORE_PLTE_AND_IDAT || this == AFTER_PLTE_BEFORE_IDAT;
}
/**
 * after palette, if it exists
 */
public boolean mustGoAfterPLTE() {
return this == AFTER_PLTE_BEFORE_IDAT || this == AFTER_PLTE_BEFORE_IDAT_PLTE_REQUIRED;
}
public boolean mustGoAfterIDAT() {
return this == AFTER_IDAT;
}
/**
 * Checks whether a chunk with this constraint may legally appear in the given chunk group
 * (see ChunksList CHUNK_GROUP_* constants).
 */
public boolean isOk(int currentChunkGroup, boolean hasplte) {
if (this == NONE)
return true;
else if (this == BEFORE_IDAT)
return currentChunkGroup < ChunksList.CHUNK_GROUP_4_IDAT;
else if (this == BEFORE_PLTE_AND_IDAT)
return currentChunkGroup < ChunksList.CHUNK_GROUP_2_PLTE;
else if (this == AFTER_PLTE_BEFORE_IDAT)
return hasplte ? currentChunkGroup < ChunksList.CHUNK_GROUP_4_IDAT
: (currentChunkGroup < ChunksList.CHUNK_GROUP_4_IDAT && currentChunkGroup > ChunksList.CHUNK_GROUP_2_PLTE);
else if (this == AFTER_IDAT)
return currentChunkGroup > ChunksList.CHUNK_GROUP_4_IDAT;
return false;
}
}
public PngChunk(String id, ImageInfo imgInfo) {
this.id = id;
this.imgInfo = imgInfo;
// critical/public/safe-to-copy flags are encoded in the case of the id letters
this.crit = ChunkHelper.isCritical(id);
this.pub = ChunkHelper.isPublic(id);
this.safe = ChunkHelper.isSafeToCopy(id);
}
/**
 * Creates a ChunkRaw with this chunk's id and the given data length.
 */
protected final ChunkRaw createEmptyChunk(int len, boolean alloc) {
ChunkRaw c = new ChunkRaw(len, ChunkHelper.toBytes(id), alloc);
return c;
}
/**
 * In which "chunkGroup" (see {@link ChunksList}for definition) this chunks instance was read or written.
 * <p>
 * -1 if not read or written (eg, queued)
 */
final public int getChunkGroup() {
return chunkGroup;
}
/**
 * @see #getChunkGroup()
 */
final void setChunkGroup(int chunkGroup) {
this.chunkGroup = chunkGroup;
}
public boolean hasPriority() {
return priority;
}
public void setPriority(boolean priority) {
this.priority = priority;
}
/**
 * Serializes this chunk to the stream, lazily (re)creating the raw representation if needed.
 */
final void write(OutputStream os) {
if (raw == null || raw.data == null)
raw = createRawChunk();
if (raw == null)
throw new PngjExceptionInternal("null chunk ! creation failed for " + this);
raw.writeChunk(os);
}
/**
 * Creates the physical chunk. This is used when writing (serialization). Each particular chunk class implements its
 * own logic.
 *
 * @return A newly allocated and filled raw chunk
 */
public abstract ChunkRaw createRawChunk();
/**
 * Parses raw chunk and fill inside data. This is used when reading (deserialization). Each particular chunk class
 * implements its own logic.
 */
protected abstract void parseFromRaw(ChunkRaw c);
/**
 * See {@link PngChunkMultiple} and {@link PngChunkSingle}
 *
 * @return true if PNG accepts multiple chunks of this class
 */
protected abstract boolean allowsMultiple();
public ChunkRaw getRaw() {
return raw;
}
void setRaw(ChunkRaw raw) {
this.raw = raw;
}
/**
 * @see ChunkRaw#len
 */
public int getLen() {
return raw != null ? raw.len : -1;
}
/**
 * @see ChunkRaw#getOffset()
 */
public long getOffset() {
return raw != null ? raw.getOffset() : -1;
}
/**
 * This signals that the raw chunk (serialized data) is invalid, so that it's regenerated on write. This should be
 * called for the (infrequent) case of chunks that were copied from a PngReader and we want to manually modify it.
 */
public void invalidateRawData() {
raw = null;
}
/**
 * see {@link ChunkOrderingConstraint}
 */
public abstract ChunkOrderingConstraint getOrderingConstraint();
@Override
public String toString() {
return "chunk id= " + id + " (len=" + getLen() + " offset=" + getOffset() + ")";
}
}

View File

@ -0,0 +1,59 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
/**
 * acTL chunk. For APNG, not part of the PNG standard.
 * <p>
 * Declares the animation: total number of frames and number of plays.
 * <p>
 * see https://wiki.mozilla.org/APNG_Specification#.60acTL.60:_The_Animation_Control_Chunk
 */
public class PngChunkACTL extends PngChunkSingle {
    public final static String ID = "acTL";
    private int numFrames; // total frames in the animation
    private int numPlays; // number of times to loop; 0 means infinite

    public PngChunkACTL(ImageInfo info) {
        super(ID, info);
    }

    @Override
    public ChunkOrderingConstraint getOrderingConstraint() {
        // acTL must appear before the first IDAT
        return ChunkOrderingConstraint.BEFORE_IDAT;
    }

    @Override
    public ChunkRaw createRawChunk() {
        // fixed layout: num_frames (4 bytes) + num_plays (4 bytes)
        ChunkRaw c = createEmptyChunk(8, true);
        // fields are already int; the previous redundant (int) casts were removed
        PngHelperInternal.writeInt4tobytes(numFrames, c.data, 0);
        PngHelperInternal.writeInt4tobytes(numPlays, c.data, 4);
        return c;
    }

    @Override
    public void parseFromRaw(ChunkRaw chunk) {
        numFrames = PngHelperInternal.readInt4fromBytes(chunk.data, 0);
        numPlays = PngHelperInternal.readInt4fromBytes(chunk.data, 4);
    }

    public int getNumFrames() {
        return numFrames;
    }

    public void setNumFrames(int numFrames) {
        this.numFrames = numFrames;
    }

    public int getNumPlays() {
        return numPlays;
    }

    public void setNumPlays(int numPlays) {
        this.numPlays = numPlays;
    }
}

View File

@ -0,0 +1,112 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * bKGD chunk: default background colour.
 * <p>
 * see http://www.w3.org/TR/PNG/#11bKGD
 * <p>
 * The chunk structure depends on the image type: greyscale images store a single 2-byte
 * gray sample, indexed images a 1-byte palette index, truecolour images three 2-byte
 * samples (r, g, b).
 */
public class PngChunkBKGD extends PngChunkSingle {
    public final static String ID = ChunkHelper.bKGD;
    // only one of these three representations is meaningful, depending on the colour model
    private int gray;
    private int red, green, blue;
    private int paletteIndex;

    public PngChunkBKGD(ImageInfo info) {
        super(ChunkHelper.bKGD, info);
    }

    @Override
    public ChunkOrderingConstraint getOrderingConstraint() {
        return ChunkOrderingConstraint.AFTER_PLTE_BEFORE_IDAT;
    }

    @Override
    public ChunkRaw createRawChunk() {
        ChunkRaw c = null;
        if (imgInfo.greyscale) {
            c = createEmptyChunk(2, true);
            PngHelperInternal.writeInt2tobytes(gray, c.data, 0);
        } else if (imgInfo.indexed) {
            c = createEmptyChunk(1, true);
            c.data[0] = (byte) paletteIndex;
        } else {
            c = createEmptyChunk(6, true);
            // BUGFIX: green and blue were previously written at offset 0, clobbering red;
            // offsets now match parseFromRaw and the bKGD spec layout (0, 2, 4)
            PngHelperInternal.writeInt2tobytes(red, c.data, 0);
            PngHelperInternal.writeInt2tobytes(green, c.data, 2);
            PngHelperInternal.writeInt2tobytes(blue, c.data, 4);
        }
        return c;
    }

    @Override
    public void parseFromRaw(ChunkRaw c) {
        if (imgInfo.greyscale) {
            gray = PngHelperInternal.readInt2fromBytes(c.data, 0);
        } else if (imgInfo.indexed) {
            paletteIndex = (int) (c.data[0] & 0xff);
        } else {
            red = PngHelperInternal.readInt2fromBytes(c.data, 0);
            green = PngHelperInternal.readInt2fromBytes(c.data, 2);
            blue = PngHelperInternal.readInt2fromBytes(c.data, 4);
        }
    }

    /**
     * Set gray value (0-255 if bitdepth=8)
     *
     * @param gray
     * @throws PngjException if the image is not greyscale
     */
    public void setGray(int gray) {
        if (!imgInfo.greyscale)
            throw new PngjException("only gray images support this");
        this.gray = gray;
    }

    public int getGray() {
        if (!imgInfo.greyscale)
            throw new PngjException("only gray images support this");
        return gray;
    }

    /**
     * Set palette index
     *
     * @throws PngjException if the image is not indexed
     */
    public void setPaletteIndex(int i) {
        if (!imgInfo.indexed)
            throw new PngjException("only indexed (pallete) images support this");
        this.paletteIndex = i;
    }

    public int getPaletteIndex() {
        if (!imgInfo.indexed)
            throw new PngjException("only indexed (pallete) images support this");
        return paletteIndex;
    }

    /**
     * Set rgb values
     *
     * @throws PngjException if the image is greyscale or indexed
     */
    public void setRGB(int r, int g, int b) {
        if (imgInfo.greyscale || imgInfo.indexed)
            throw new PngjException("only rgb or rgba images support this");
        red = r;
        green = g;
        blue = b;
    }

    public int[] getRGB() {
        if (imgInfo.greyscale || imgInfo.indexed)
            throw new PngjException("only rgb or rgba images support this");
        return new int[] {red, green, blue};
    }
}

View File

@ -0,0 +1,75 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * cHRM chunk: primary chromaticities and white point.
 * <p>
 * see http://www.w3.org/TR/PNG/#11cHRM
 */
public class PngChunkCHRM extends PngChunkSingle {
public final static String ID = ChunkHelper.cHRM;
// http://www.w3.org/TR/PNG/#11cHRM
// each coordinate is stored on the wire as round(value * 100000), 4 bytes each
private double whitex, whitey;
private double redx, redy;
private double greenx, greeny;
private double bluex, bluey;
public PngChunkCHRM(ImageInfo info) {
super(ID, info);
}
@Override
public ChunkOrderingConstraint getOrderingConstraint() {
return ChunkOrderingConstraint.AFTER_PLTE_BEFORE_IDAT;
}
@Override
public ChunkRaw createRawChunk() {
// fixed layout: 8 coordinates x 4 bytes = 32 bytes
ChunkRaw c = null;
c = createEmptyChunk(32, true);
PngHelperInternal.writeInt4tobytes(PngHelperInternal.doubleToInt100000(whitex), c.data, 0);
PngHelperInternal.writeInt4tobytes(PngHelperInternal.doubleToInt100000(whitey), c.data, 4);
PngHelperInternal.writeInt4tobytes(PngHelperInternal.doubleToInt100000(redx), c.data, 8);
PngHelperInternal.writeInt4tobytes(PngHelperInternal.doubleToInt100000(redy), c.data, 12);
PngHelperInternal.writeInt4tobytes(PngHelperInternal.doubleToInt100000(greenx), c.data, 16);
PngHelperInternal.writeInt4tobytes(PngHelperInternal.doubleToInt100000(greeny), c.data, 20);
PngHelperInternal.writeInt4tobytes(PngHelperInternal.doubleToInt100000(bluex), c.data, 24);
PngHelperInternal.writeInt4tobytes(PngHelperInternal.doubleToInt100000(bluey), c.data, 28);
return c;
}
@Override
public void parseFromRaw(ChunkRaw c) {
if (c.len != 32)
throw new PngjException("bad chunk " + c);
whitex = PngHelperInternal.intToDouble100000(PngHelperInternal.readInt4fromBytes(c.data, 0));
whitey = PngHelperInternal.intToDouble100000(PngHelperInternal.readInt4fromBytes(c.data, 4));
redx = PngHelperInternal.intToDouble100000(PngHelperInternal.readInt4fromBytes(c.data, 8));
redy = PngHelperInternal.intToDouble100000(PngHelperInternal.readInt4fromBytes(c.data, 12));
greenx = PngHelperInternal.intToDouble100000(PngHelperInternal.readInt4fromBytes(c.data, 16));
greeny = PngHelperInternal.intToDouble100000(PngHelperInternal.readInt4fromBytes(c.data, 20));
bluex = PngHelperInternal.intToDouble100000(PngHelperInternal.readInt4fromBytes(c.data, 24));
bluey = PngHelperInternal.intToDouble100000(PngHelperInternal.readInt4fromBytes(c.data, 28));
}
/**
 * Sets all 8 coordinates at once (white point, red, green, blue primaries).
 */
public void setChromaticities(double whitex, double whitey, double redx, double redy,
double greenx, double greeny, double bluex, double bluey) {
this.whitex = whitex;
this.redx = redx;
this.greenx = greenx;
this.bluex = bluex;
this.whitey = whitey;
this.redy = redy;
this.greeny = greeny;
this.bluey = bluey;
}
/**
 * @return coordinates in order: whitex, whitey, redx, redy, greenx, greeny, bluex, bluey
 */
public double[] getChromaticities() {
return new double[] {whitex, whitey, redx, redy, greenx, greeny, bluex, bluey};
}
}

View File

@ -0,0 +1,158 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
/**
 * fcTL chunk. For APNG, not part of the PNG standard.
 * <p>
 * Frame control: declares the geometry, delay and compositing mode of one animation frame.
 * <p>
 * see https://wiki.mozilla.org/APNG_Specification#.60fcTL.60:_The_Frame_Control_Chunk
 */
public class PngChunkFCTL extends PngChunkMultiple {
    public final static String ID = "fcTL";
    public final static byte APNG_DISPOSE_OP_NONE = 0;
    public final static byte APNG_DISPOSE_OP_BACKGROUND = 1;
    public final static byte APNG_DISPOSE_OP_PREVIOUS = 2;
    public final static byte APNG_BLEND_OP_SOURCE = 0;
    public final static byte APNG_BLEND_OP_OVER = 1;

    private int seqNum;
    private int width, height, xOff, yOff;
    private int delayNum, delayDen; // frame delay as a fraction delayNum/delayDen seconds
    private byte disposeOp, blendOp;

    public PngChunkFCTL(ImageInfo info) {
        super(ID, info);
    }

    /**
     * Builds an ImageInfo describing this frame's dimensions, inheriting the colour model
     * from the main image.
     */
    public ImageInfo getEquivImageInfo() {
        return new ImageInfo(width, height, imgInfo.bitDepth, imgInfo.alpha, imgInfo.greyscale,
                imgInfo.indexed);
    }

    @Override
    public ChunkOrderingConstraint getOrderingConstraint() {
        return ChunkOrderingConstraint.NONE;
    }

    @Override
    public ChunkRaw createRawChunk() {
        // BUGFIX: this previously allocated only 8 bytes but writes 26
        // (5 ints + 2 shorts + 2 bytes), causing ArrayIndexOutOfBoundsException.
        // The fcTL chunk is exactly 26 bytes per the APNG specification.
        ChunkRaw c = createEmptyChunk(26, true);
        int off = 0;
        PngHelperInternal.writeInt4tobytes(seqNum, c.data, off);
        off += 4;
        PngHelperInternal.writeInt4tobytes(width, c.data, off);
        off += 4;
        PngHelperInternal.writeInt4tobytes(height, c.data, off);
        off += 4;
        PngHelperInternal.writeInt4tobytes(xOff, c.data, off);
        off += 4;
        PngHelperInternal.writeInt4tobytes(yOff, c.data, off);
        off += 4;
        PngHelperInternal.writeInt2tobytes(delayNum, c.data, off);
        off += 2;
        PngHelperInternal.writeInt2tobytes(delayDen, c.data, off);
        off += 2;
        c.data[off] = disposeOp;
        off += 1;
        c.data[off] = blendOp;
        return c;
    }

    @Override
    public void parseFromRaw(ChunkRaw chunk) {
        int off = 0;
        seqNum = PngHelperInternal.readInt4fromBytes(chunk.data, off);
        off += 4;
        width = PngHelperInternal.readInt4fromBytes(chunk.data, off);
        off += 4;
        height = PngHelperInternal.readInt4fromBytes(chunk.data, off);
        off += 4;
        xOff = PngHelperInternal.readInt4fromBytes(chunk.data, off);
        off += 4;
        yOff = PngHelperInternal.readInt4fromBytes(chunk.data, off);
        off += 4;
        delayNum = PngHelperInternal.readInt2fromBytes(chunk.data, off);
        off += 2;
        delayDen = PngHelperInternal.readInt2fromBytes(chunk.data, off);
        off += 2;
        disposeOp = chunk.data[off];
        off += 1;
        blendOp = chunk.data[off];
    }

    public int getSeqNum() {
        return seqNum;
    }

    public void setSeqNum(int seqNum) {
        this.seqNum = seqNum;
    }

    public int getWidth() {
        return width;
    }

    public void setWidth(int width) {
        this.width = width;
    }

    public int getHeight() {
        return height;
    }

    public void setHeight(int height) {
        this.height = height;
    }

    public int getxOff() {
        return xOff;
    }

    public void setxOff(int xOff) {
        this.xOff = xOff;
    }

    public int getyOff() {
        return yOff;
    }

    public void setyOff(int yOff) {
        this.yOff = yOff;
    }

    public int getDelayNum() {
        return delayNum;
    }

    public void setDelayNum(int delayNum) {
        this.delayNum = delayNum;
    }

    public int getDelayDen() {
        return delayDen;
    }

    public void setDelayDen(int delayDen) {
        this.delayDen = delayDen;
    }

    public byte getDisposeOp() {
        return disposeOp;
    }

    public void setDisposeOp(byte disposeOp) {
        this.disposeOp = disposeOp;
    }

    public byte getBlendOp() {
        return blendOp;
    }

    public void setBlendOp(byte blendOp) {
        this.blendOp = blendOp;
    }
}

View File

@ -0,0 +1,70 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * fdAT chunk. For APNG, not part of the PNG standard.
 * <p>
 * see https://wiki.mozilla.org/APNG_Specification#.60fdAT.60:_The_Frame_Data_Chunk
 * <p>
 * This implementation does not support streaming/buffered handling; normally this chunk
 * should be managed similarly to an IDAT chunk rather than fully materialized.
 */
public class PngChunkFDAT extends PngChunkMultiple {
public final static String ID = "fdAT";
private int seqNum;
private byte[] buffer; // normally not allocated - if so, it's the raw data, so it includes the 4bytes seqNum
int datalen; // length of idat data, excluding seqNum (= chunk.len-4)
public PngChunkFDAT(ImageInfo info) {
super(ID, info);
}
@Override
public ChunkOrderingConstraint getOrderingConstraint() {
return ChunkOrderingConstraint.AFTER_IDAT;
}
@Override
public ChunkRaw createRawChunk() {
if (buffer == null)
throw new PngjException("not buffered");
// buffer already contains the 4-byte seqNum prefix, hence datalen + 4
ChunkRaw c = createEmptyChunk(datalen + 4, false);
c.data = buffer; // shallow copy!
return c;
}
@Override
public void parseFromRaw(ChunkRaw chunk) {
seqNum = PngHelperInternal.readInt4fromBytes(chunk.data, 0);
datalen = chunk.len - 4;
buffer = chunk.data; // shallow reference, not a copy
}
public int getSeqNum() {
return seqNum;
}
public void setSeqNum(int seqNum) {
this.seqNum = seqNum;
}
public byte[] getBuffer() {
return buffer;
}
public void setBuffer(byte[] buffer) {
this.buffer = buffer;
}
public int getDatalen() {
return datalen;
}
public void setDatalen(int datalen) {
this.datalen = datalen;
}
}

View File

@ -0,0 +1,51 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * gAMA chunk: image gamma.
 * <p>
 * see http://www.w3.org/TR/PNG/#11gAMA
 */
public class PngChunkGAMA extends PngChunkSingle {
public final static String ID = ChunkHelper.gAMA;
// http://www.w3.org/TR/PNG/#11gAMA
// stored on the wire as round(gamma * 100000), 4 bytes
private double gamma;
public PngChunkGAMA(ImageInfo info) {
super(ID, info);
}
@Override
public ChunkOrderingConstraint getOrderingConstraint() {
return ChunkOrderingConstraint.BEFORE_PLTE_AND_IDAT;
}
@Override
public ChunkRaw createRawChunk() {
ChunkRaw c = createEmptyChunk(4, true);
// scale and round to nearest integer as required by the spec encoding
int g = (int) (gamma * 100000 + 0.5);
PngHelperInternal.writeInt4tobytes(g, c.data, 0);
return c;
}
@Override
public void parseFromRaw(ChunkRaw chunk) {
if (chunk.len != 4)
throw new PngjException("bad chunk " + chunk);
int g = PngHelperInternal.readInt4fromBytes(chunk.data, 0);
gamma = ((double) g) / 100000.0;
}
public double getGamma() {
return gamma;
}
public void setGamma(double gamma) {
this.gamma = gamma;
}
}

View File

@ -0,0 +1,58 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * hIST chunk: palette histogram.
 * <p>
 * see http://www.w3.org/TR/PNG/#11hIST <br>
 * only for palette images
 */
public class PngChunkHIST extends PngChunkSingle {
public final static String ID = ChunkHelper.hIST;
private int[] hist = new int[0]; // should have same length as palette
public PngChunkHIST(ImageInfo info) {
super(ID, info);
}
@Override
public ChunkOrderingConstraint getOrderingConstraint() {
return ChunkOrderingConstraint.AFTER_PLTE_BEFORE_IDAT;
}
@Override
public void parseFromRaw(ChunkRaw c) {
if (!imgInfo.indexed)
throw new PngjException("only indexed images accept a HIST chunk");
// each histogram entry is a 2-byte unsigned value
int nentries = c.data.length / 2;
hist = new int[nentries];
for (int i = 0; i < hist.length; i++) {
hist[i] = PngHelperInternal.readInt2fromBytes(c.data, i * 2);
}
}
@Override
public ChunkRaw createRawChunk() {
if (!imgInfo.indexed)
throw new PngjException("only indexed images accept a HIST chunk");
ChunkRaw c = null;
c = createEmptyChunk(hist.length * 2, true);
for (int i = 0; i < hist.length; i++) {
PngHelperInternal.writeInt2tobytes(hist[i], c.data, i * 2);
}
return c;
}
public int[] getHist() {
return hist;
}
/**
 * NOTE(review): stores the array by reference, no defensive copy — callers share state
 */
public void setHist(int[] hist) {
this.hist = hist;
}
}

View File

@ -0,0 +1,77 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngjException;
/**
 * iCCP chunk: embedded ICC colour profile.
 * <p>
 * see http://www.w3.org/TR/PNG/#11iCCP
 */
public class PngChunkICCP extends PngChunkSingle {
public final static String ID = ChunkHelper.iCCP;
// http://www.w3.org/TR/PNG/#11iCCP
private String profileName;
private byte[] compressedProfile; // compression/decompression is done in getter/setter
public PngChunkICCP(ImageInfo info) {
super(ID, info);
}
@Override
public ChunkOrderingConstraint getOrderingConstraint() {
return ChunkOrderingConstraint.BEFORE_PLTE_AND_IDAT;
}
@Override
public ChunkRaw createRawChunk() {
// layout: profile name + null separator + compression method byte (0) + compressed profile
ChunkRaw c = createEmptyChunk(profileName.length() + compressedProfile.length + 2, true);
System.arraycopy(ChunkHelper.toBytes(profileName), 0, c.data, 0, profileName.length());
c.data[profileName.length()] = 0;
c.data[profileName.length() + 1] = 0;
System.arraycopy(compressedProfile, 0, c.data, profileName.length() + 2,
compressedProfile.length);
return c;
}
@Override
public void parseFromRaw(ChunkRaw chunk) {
int pos0 = ChunkHelper.posNullByte(chunk.data);
profileName = ChunkHelper.toString(chunk.data, 0, pos0);
// only compression method 0 (zlib deflate) is defined by the spec
int comp = (chunk.data[pos0 + 1] & 0xff);
if (comp != 0)
throw new PngjException("bad compression for ChunkTypeICCP");
int compdatasize = chunk.data.length - (pos0 + 2);
compressedProfile = new byte[compdatasize];
System.arraycopy(chunk.data, pos0 + 2, compressedProfile, 0, compdatasize);
}
/**
 * The profile should be uncompressed bytes; it is compressed here before storing
 */
public void setProfileNameAndContent(String name, byte[] profile) {
profileName = name;
compressedProfile = ChunkHelper.compressBytes(profile, true);
}
public void setProfileNameAndContent(String name, String profile) {
setProfileNameAndContent(name, ChunkHelper.toBytes(profile));
}
public String getProfileName() {
return profileName;
}
/**
 * uncompressed (compressBytes with compress=false performs decompression)
 **/
public byte[] getProfile() {
return ChunkHelper.compressBytes(compressedProfile, false);
}
public String getProfileAsString() {
return ChunkHelper.toString(getProfile());
}
}

View File

@ -0,0 +1,34 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
/**
 * IDAT chunk.
 * <p>
 * see http://www.w3.org/TR/PNG/#11IDAT
 * <p>
 * Dummy placeholder: the (usually several) IDAT chunks are read and written by
 * dedicated streaming code, not through this class.
 */
public class PngChunkIDAT extends PngChunkMultiple {
  public final static String ID = ChunkHelper.IDAT;
  // http://www.w3.org/TR/PNG/#11IDAT

  public PngChunkIDAT(ImageInfo i) {
    super(ID, i);
  }

  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.NA;
  }

  /** Not used; see class comment. */
  @Override
  public ChunkRaw createRawChunk() {
    return null;
  }

  /** Not used; see class comment. */
  @Override
  public void parseFromRaw(ChunkRaw c) {
    // intentionally empty
  }
}

View File

@ -0,0 +1,35 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
/**
 * IEND chunk.
 * <p>
 * see http://www.w3.org/TR/PNG/#11IEND
 * <p>
 * Dummy placeholder: IEND carries no payload.
 */
public class PngChunkIEND extends PngChunkSingle {
  public final static String ID = ChunkHelper.IEND;
  // http://www.w3.org/TR/PNG/#11IEND

  public PngChunkIEND(ImageInfo info) {
    super(ID, info);
  }

  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.NA;
  }

  /** An IEND chunk is just the id with a zero-length body. */
  @Override
  public ChunkRaw createRawChunk() {
    return new ChunkRaw(0, ChunkHelper.b_IEND, false);
  }

  @Override
  public void parseFromRaw(ChunkRaw c) {
    // nothing to parse
  }
}

View File

@ -0,0 +1,185 @@
package ar.com.hjg.pngj.chunks;
import java.io.ByteArrayInputStream;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
import ar.com.hjg.pngj.PngjInputException;
/**
 * IHDR chunk.
 * <p>
 * see http://www.w3.org/TR/PNG/#11IHDR
 * <p>
 * This is a special critical Chunk.
 */
public class PngChunkIHDR extends PngChunkSingle {
  public final static String ID = ChunkHelper.IHDR;
  // http://www.w3.org/TR/PNG/#11IHDR

  private int cols;
  private int rows;
  private int bitspc; // bit depth: number of bits per channel
  private int colormodel; // color type: bitmask alpha(0x04) | color(0x02) | palette(0x01)
  private int compmeth; // compression method; 0 = deflate is the only valid value
  private int filmeth; // filter method; 0 is the only valid value
  private int interlaced; // 0: none, 1: Adam7

  /**
   * Constructor. The argument is normally null here; if not null, it is used to fill the fields.
   */
  public PngChunkIHDR(ImageInfo info) {
    super(ID, info);
    if (info != null)
      fillFromInfo(info);
  }

  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.NA;
  }

  /** Serializes the fixed 13-byte IHDR layout: width, height, then five single bytes. */
  @Override
  public ChunkRaw createRawChunk() {
    ChunkRaw c = new ChunkRaw(13, ChunkHelper.b_IHDR, true);
    int offset = 0;
    PngHelperInternal.writeInt4tobytes(cols, c.data, offset);
    offset += 4;
    PngHelperInternal.writeInt4tobytes(rows, c.data, offset);
    offset += 4;
    c.data[offset++] = (byte) bitspc;
    c.data[offset++] = (byte) colormodel;
    c.data[offset++] = (byte) compmeth;
    c.data[offset++] = (byte) filmeth;
    c.data[offset++] = (byte) interlaced;
    return c;
  }

  /**
   * Parses the 13-byte IHDR payload.
   *
   * @throws PngjException if the chunk length is not 13
   */
  @Override
  public void parseFromRaw(ChunkRaw c) {
    if (c.len != 13)
      throw new PngjException("Bad IHDR len " + c.len);
    ByteArrayInputStream st = c.getAsByteStream();
    cols = PngHelperInternal.readInt4(st);
    rows = PngHelperInternal.readInt4(st);
    // bit depth: number of bits per channel
    bitspc = PngHelperInternal.readByte(st);
    colormodel = PngHelperInternal.readByte(st);
    compmeth = PngHelperInternal.readByte(st);
    filmeth = PngHelperInternal.readByte(st);
    interlaced = PngHelperInternal.readByte(st);
  }

  public int getCols() {
    return cols;
  }

  public void setCols(int cols) {
    this.cols = cols;
  }

  public int getRows() {
    return rows;
  }

  public void setRows(int rows) {
    this.rows = rows;
  }

  public int getBitspc() {
    return bitspc;
  }

  public void setBitspc(int bitspc) {
    this.bitspc = bitspc;
  }

  public int getColormodel() {
    return colormodel;
  }

  public void setColormodel(int colormodel) {
    this.colormodel = colormodel;
  }

  public int getCompmeth() {
    return compmeth;
  }

  public void setCompmeth(int compmeth) {
    this.compmeth = compmeth;
  }

  public int getFilmeth() {
    return filmeth;
  }

  public void setFilmeth(int filmeth) {
    this.filmeth = filmeth;
  }

  public int getInterlaced() {
    return interlaced;
  }

  public void setInterlaced(int interlaced) {
    this.interlaced = interlaced;
  }

  public boolean isInterlaced() {
    return getInterlaced() == 1;
  }

  /** Fills all fields from the given image info (previously ignored the parameter). */
  public void fillFromInfo(ImageInfo info) {
    setCols(info.cols);
    setRows(info.rows);
    setBitspc(info.bitDepth);
    int colormodel = 0;
    if (info.alpha)
      colormodel += 0x04;
    if (info.indexed)
      colormodel += 0x01;
    if (!info.greyscale)
      colormodel += 0x02;
    setColormodel(colormodel);
    setCompmeth(0); // compression method 0=deflate
    setFilmeth(0); // filter method (0)
    setInterlaced(0); // we never interlace
  }

  /**
   * Builds an ImageInfo from the parsed fields.
   *
   * @throws PngjInputException if the fields hold unexpected values
   */
  public ImageInfo createImageInfo() {
    check();
    boolean alpha = (getColormodel() & 0x04) != 0;
    boolean palette = (getColormodel() & 0x01) != 0;
    boolean grayscale = (getColormodel() == 0 || getColormodel() == 4);
    // creates ImgInfo and imgLine, and allocates buffers
    return new ImageInfo(getCols(), getRows(), getBitspc(), alpha, grayscale, palette);
  }

  /**
   * Validates field combinations per the PNG spec (table 11.1).
   *
   * @throws PngjInputException on any invalid value or combination
   */
  public void check() {
    if (cols < 1 || rows < 1 || compmeth != 0 || filmeth != 0)
      throw new PngjInputException("bad IHDR: col/row/compmethod/filmethod invalid");
    if (bitspc != 1 && bitspc != 2 && bitspc != 4 && bitspc != 8 && bitspc != 16)
      throw new PngjInputException("bad IHDR: bitdepth invalid");
    if (interlaced < 0 || interlaced > 1)
      throw new PngjInputException("bad IHDR: interlace invalid");
    switch (colormodel) {
      case 0:
        break;
      case 3: // indexed: bit depth up to 8
        if (bitspc == 16)
          throw new PngjInputException("bad IHDR: bitdepth invalid");
        break;
      case 2: // truecolour / grey+alpha / truecolour+alpha: 8 or 16 only
      case 4:
      case 6:
        if (bitspc != 8 && bitspc != 16)
          throw new PngjInputException("bad IHDR: bitdepth invalid");
        break;
      default:
        throw new PngjInputException("bad IHDR: invalid colormodel");
    }
  }
}

View File

@ -0,0 +1,111 @@
package ar.com.hjg.pngj.chunks;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngjException;
/**
 * iTXt chunk: international (UTF-8) textual data.
 * <p>
 * Wire layout: keyword, 0, compressionFlag, compressionMethod, languageTag, 0,
 * translatedKeyword, 0, text.
 * <p>
 * see http://www.w3.org/TR/PNG/#11iTXt
 */
public class PngChunkITXT extends PngChunkTextVar {
public final static String ID = ChunkHelper.iTXt;
// whether the text payload is deflate-compressed when serialized
private boolean compressed = false;
// language tag (Latin-1); may be empty
private String langTag = "";
// keyword translated to the tag's language (UTF-8); may be empty
private String translatedTag = "";
// http://www.w3.org/TR/PNG/#11iTXt
public PngChunkITXT(ImageInfo info) {
super(ID, info);
}
/**
 * Serializes key, compression flag/method, language tag, translated tag and the
 * (optionally compressed) UTF-8 text.
 *
 * @throws PngjException if the key is null/blank, or on internal I/O failure
 */
@Override
public ChunkRaw createRawChunk() {
if (key == null || key.trim().length() == 0)
throw new PngjException("Text chunk key must be non empty");
try {
ByteArrayOutputStream ba = new ByteArrayOutputStream();
ba.write(ChunkHelper.toBytes(key));
ba.write(0); // separator
ba.write(compressed ? 1 : 0); // compression flag
ba.write(0); // compression method (always 0)
ba.write(ChunkHelper.toBytes(langTag));
ba.write(0); // separator
ba.write(ChunkHelper.toBytesUTF8(translatedTag));
ba.write(0); // separator
byte[] textbytes = ChunkHelper.toBytesUTF8(val);
if (compressed) {
textbytes = ChunkHelper.compressBytes(textbytes, true);
}
ba.write(textbytes);
byte[] b = ba.toByteArray();
ChunkRaw chunk = createEmptyChunk(b.length, false);
chunk.data = b;
return chunk;
} catch (IOException e) {
throw new PngjException(e);
}
}
/**
 * Locates the three null separators (skipping the two flag bytes after the first,
 * since a zero compression flag would otherwise be mistaken for a separator),
 * then decodes the fields and — if flagged — inflates the text.
 *
 * @throws PngjException if fewer than three separators are found, or the
 *         compression method byte is nonzero
 */
@Override
public void parseFromRaw(ChunkRaw c) {
int nullsFound = 0;
int[] nullsIdx = new int[3];
for (int i = 0; i < c.data.length; i++) {
if (c.data[i] != 0)
continue;
nullsIdx[nullsFound] = i;
nullsFound++;
if (nullsFound == 1)
i += 2; // jump over compression flag + method bytes
if (nullsFound == 3)
break;
}
if (nullsFound != 3)
throw new PngjException("Bad formed PngChunkITXT chunk");
key = ChunkHelper.toString(c.data, 0, nullsIdx[0]);
int i = nullsIdx[0] + 1;
compressed = c.data[i] == 0 ? false : true; // compression flag
i++;
if (compressed && c.data[i] != 0) // compression method must be 0 (deflate)
throw new PngjException("Bad formed PngChunkITXT chunk - bad compression method ");
langTag = ChunkHelper.toString(c.data, i, nullsIdx[1] - i);
translatedTag =
ChunkHelper.toStringUTF8(c.data, nullsIdx[1] + 1, nullsIdx[2] - nullsIdx[1] - 1);
i = nullsIdx[2] + 1;
if (compressed) {
// compressBytes with compress=false inflates
byte[] bytes = ChunkHelper.compressBytes(c.data, i, c.data.length - i, false);
val = ChunkHelper.toStringUTF8(bytes);
} else {
val = ChunkHelper.toStringUTF8(c.data, i, c.data.length - i);
}
}
public boolean isCompressed() {
return compressed;
}
public void setCompressed(boolean compressed) {
this.compressed = compressed;
}
public String getLangtag() {
return langTag;
}
public void setLangtag(String langtag) {
this.langTag = langtag;
}
public String getTranslatedTag() {
return translatedTag;
}
public void setTranslatedTag(String translatedTag) {
this.translatedTag = translatedTag;
}
}

View File

@ -0,0 +1,27 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
/**
 * PNG chunk type (abstract) that allows multiple instances in same image.
 */
public abstract class PngChunkMultiple extends PngChunk {

  protected PngChunkMultiple(String id, ImageInfo imgInfo) {
    super(id, imgInfo);
  }

  @Override
  public final boolean allowsMultiple() {
    return true;
  }

  /**
   * NOTE: this chunk class deliberately keeps Object's default equals()/hashCode()
   * implementation, so two instances are equal only when they are the same object.
   *
   * This is the right thing to do, normally, and it is important — e.g. see
   * ChunkList.removeFromList().
   */
}

View File

@ -0,0 +1,81 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * oFFs chunk: image position offset.
 * <p>
 * see http://www.libpng.org/pub/png/spec/register/pngext-1.3.0-pdg.html#C.oFFs
 */
public class PngChunkOFFS extends PngChunkSingle {
  public final static String ID = "oFFs";
  // http://www.libpng.org/pub/png/spec/register/pngext-1.3.0-pdg.html#C.oFFs

  // Offsets are stored as unsigned 32-bit values in a long.
  private long posX;
  private long posY;
  private int units; // 0: pixel 1:micrometer

  public PngChunkOFFS(ImageInfo info) {
    super(ID, info);
  }

  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.BEFORE_IDAT;
  }

  /** Serializes as 4-byte X, 4-byte Y, 1-byte unit specifier. */
  @Override
  public ChunkRaw createRawChunk() {
    ChunkRaw raw = createEmptyChunk(9, true);
    PngHelperInternal.writeInt4tobytes((int) posX, raw.data, 0);
    PngHelperInternal.writeInt4tobytes((int) posY, raw.data, 4);
    raw.data[8] = (byte) units;
    return raw;
  }

  @Override
  public void parseFromRaw(ChunkRaw chunk) {
    if (chunk.len != 9)
      throw new PngjException("bad chunk length " + chunk);
    long x = PngHelperInternal.readInt4fromBytes(chunk.data, 0);
    long y = PngHelperInternal.readInt4fromBytes(chunk.data, 4);
    // promote signed 32-bit reads to the unsigned range
    posX = x < 0 ? x + 0x100000000L : x;
    posY = y < 0 ? y + 0x100000000L : y;
    units = PngHelperInternal.readInt1fromByte(chunk.data, 8);
  }

  /**
   * 0: pixel, 1:micrometer
   */
  public int getUnits() {
    return units;
  }

  /**
   * 0: pixel, 1:micrometer
   */
  public void setUnits(int units) {
    this.units = units;
  }

  public long getPosX() {
    return posX;
  }

  public void setPosX(long posX) {
    this.posX = posX;
  }

  public long getPosY() {
    return posY;
  }

  public void setPosY(long posY) {
    this.posY = posY;
  }
}

View File

@ -0,0 +1,107 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * pHYs chunk: intended pixel density.
 * <p>
 * see http://www.w3.org/TR/PNG/#11pHYs
 */
public class PngChunkPHYS extends PngChunkSingle {
  public final static String ID = ChunkHelper.pHYs;
  // http://www.w3.org/TR/PNG/#11pHYs

  // Densities are stored as unsigned 32-bit values in a long.
  private long pixelsxUnitX;
  private long pixelsxUnitY;
  private int units; // 0: unknown 1:metre

  public PngChunkPHYS(ImageInfo info) {
    super(ID, info);
  }

  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.BEFORE_IDAT;
  }

  /** Serializes as 4-byte X density, 4-byte Y density, 1-byte unit specifier. */
  @Override
  public ChunkRaw createRawChunk() {
    ChunkRaw raw = createEmptyChunk(9, true);
    PngHelperInternal.writeInt4tobytes((int) pixelsxUnitX, raw.data, 0);
    PngHelperInternal.writeInt4tobytes((int) pixelsxUnitY, raw.data, 4);
    raw.data[8] = (byte) units;
    return raw;
  }

  @Override
  public void parseFromRaw(ChunkRaw chunk) {
    if (chunk.len != 9)
      throw new PngjException("bad chunk length " + chunk);
    long px = PngHelperInternal.readInt4fromBytes(chunk.data, 0);
    long py = PngHelperInternal.readInt4fromBytes(chunk.data, 4);
    // promote signed 32-bit reads to the unsigned range
    pixelsxUnitX = px < 0 ? px + 0x100000000L : px;
    pixelsxUnitY = py < 0 ? py + 0x100000000L : py;
    units = PngHelperInternal.readInt1fromByte(chunk.data, 8);
  }

  public long getPixelsxUnitX() {
    return pixelsxUnitX;
  }

  public void setPixelsxUnitX(long pixelsxUnitX) {
    this.pixelsxUnitX = pixelsxUnitX;
  }

  public long getPixelsxUnitY() {
    return pixelsxUnitY;
  }

  public void setPixelsxUnitY(long pixelsxUnitY) {
    this.pixelsxUnitY = pixelsxUnitY;
  }

  public int getUnits() {
    return units;
  }

  public void setUnits(int units) {
    this.units = units;
  }

  // special getters / setters

  /**
   * returns -1 if the physicial unit is unknown, or X-Y are not equal
   */
  public double getAsDpi() {
    boolean known = units == 1 && pixelsxUnitX == pixelsxUnitY;
    // pixels-per-metre to dots-per-inch: multiply by 0.0254 m/inch
    return known ? ((double) pixelsxUnitX) * 0.0254 : -1;
  }

  /**
   * returns -1 if the physicial unit is unknown
   */
  public double[] getAsDpi2() {
    if (units != 1)
      return new double[] {-1, -1};
    return new double[] {((double) pixelsxUnitX) * 0.0254, ((double) pixelsxUnitY) * 0.0254};
  }

  public void setAsDpi(double dpi) {
    units = 1;
    pixelsxUnitX = (long) (dpi / 0.0254 + 0.5);
    pixelsxUnitY = pixelsxUnitX;
  }

  public void setAsDpi2(double dpix, double dpiy) {
    units = 1;
    pixelsxUnitX = (long) (dpix / 0.0254 + 0.5);
    pixelsxUnitY = (long) (dpiy / 0.0254 + 0.5);
  }
}

View File

@ -0,0 +1,98 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngjException;
/**
 * PLTE chunk: the palette.
 * <p>
 * see http://www.w3.org/TR/PNG/#11PLTE
 * <p>
 * Critical chunk
 */
public class PngChunkPLTE extends PngChunkSingle {
  public final static String ID = ChunkHelper.PLTE;
  // http://www.w3.org/TR/PNG/#11PLTE

  private int nentries = 0;
  /**
   * RGB8 packed in one integer (0xRRGGBB)
   */
  private int[] entries;

  public PngChunkPLTE(ImageInfo info) {
    super(ID, info);
  }

  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.NA;
  }

  /** Serializes as 3 bytes (R,G,B) per palette entry. */
  @Override
  public ChunkRaw createRawChunk() {
    ChunkRaw c = createEmptyChunk(3 * nentries, true);
    int[] rgb = new int[3];
    int pos = 0;
    for (int n = 0; n < nentries; n++) {
      getEntryRgb(n, rgb);
      c.data[pos++] = (byte) rgb[0];
      c.data[pos++] = (byte) rgb[1];
      c.data[pos++] = (byte) rgb[2];
    }
    return c;
  }

  @Override
  public void parseFromRaw(ChunkRaw chunk) {
    setNentries(chunk.len / 3);
    int pos = 0;
    for (int n = 0; n < nentries; n++) {
      int r = chunk.data[pos++] & 0xff;
      int g = chunk.data[pos++] & 0xff;
      int b = chunk.data[pos++] & 0xff;
      setEntry(n, r, g, b);
    }
  }

  /**
   * Sets the palette size, (re)allocating the entry table if needed.
   *
   * @throws PngjException if n is outside 1..256
   */
  public void setNentries(int n) {
    nentries = n;
    if (nentries < 1 || nentries > 256)
      throw new PngjException("invalid pallette - nentries=" + nentries);
    if (entries == null || entries.length != nentries) { // alloc
      entries = new int[nentries];
    }
  }

  public int getNentries() {
    return nentries;
  }

  /** Packs r,g,b into entry n as 0xRRGGBB. */
  public void setEntry(int n, int r, int g, int b) {
    entries[n] = ((r << 16) | (g << 8) | b);
  }

  public int getEntry(int n) {
    return entries[n];
  }

  public void getEntryRgb(int n, int[] rgb) {
    getEntryRgb(n, rgb, 0);
  }

  /** Unpacks entry n into rgb[offset..offset+2]. */
  public void getEntryRgb(int n, int[] rgb, int offset) {
    int packed = entries[n];
    rgb[offset + 0] = ((packed & 0xff0000) >> 16);
    rgb[offset + 1] = ((packed & 0xff00) >> 8);
    rgb[offset + 2] = (packed & 0xff);
  }

  /** Smallest bit depth able to index every entry of this palette. */
  public int minBitDepth() {
    if (nentries <= 2)
      return 1;
    if (nentries <= 4)
      return 2;
    if (nentries <= 16)
      return 4;
    return 8;
  }
}

View File

@ -0,0 +1,114 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * sBIT chunk: significant bits per sample.
 * <p>
 * see http://www.w3.org/TR/PNG/#11sBIT
 * <p>
 * this chunk structure depends on the image type
 */
public class PngChunkSBIT extends PngChunkSingle {
  public final static String ID = ChunkHelper.sBIT;
  // http://www.w3.org/TR/PNG/#11sBIT

  // significant bits per channel; only the fields matching the colour model are used
  private int graysb, alphasb;
  private int redsb, greensb, bluesb;

  public PngChunkSBIT(ImageInfo info) {
    super(ID, info);
  }

  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.BEFORE_PLTE_AND_IDAT;
  }

  /** Expected chunk length: one byte per channel of the image. */
  private int getCLen() {
    int channels = imgInfo.greyscale ? 1 : 3;
    return imgInfo.alpha ? channels + 1 : channels;
  }

  @Override
  public void parseFromRaw(ChunkRaw c) {
    if (c.len != getCLen())
      throw new PngjException("bad chunk length " + c);
    if (imgInfo.greyscale) {
      graysb = PngHelperInternal.readInt1fromByte(c.data, 0);
      if (imgInfo.alpha)
        alphasb = PngHelperInternal.readInt1fromByte(c.data, 1);
    } else {
      redsb = PngHelperInternal.readInt1fromByte(c.data, 0);
      greensb = PngHelperInternal.readInt1fromByte(c.data, 1);
      bluesb = PngHelperInternal.readInt1fromByte(c.data, 2);
      if (imgInfo.alpha)
        alphasb = PngHelperInternal.readInt1fromByte(c.data, 3);
    }
  }

  @Override
  public ChunkRaw createRawChunk() {
    ChunkRaw c = createEmptyChunk(getCLen(), true);
    if (imgInfo.greyscale) {
      c.data[0] = (byte) graysb;
      if (imgInfo.alpha)
        c.data[1] = (byte) alphasb;
    } else {
      c.data[0] = (byte) redsb;
      c.data[1] = (byte) greensb;
      c.data[2] = (byte) bluesb;
      if (imgInfo.alpha)
        c.data[3] = (byte) alphasb;
    }
    return c;
  }

  public void setGraysb(int gray) {
    if (!imgInfo.greyscale)
      throw new PngjException("only greyscale images support this");
    graysb = gray;
  }

  public int getGraysb() {
    if (!imgInfo.greyscale)
      throw new PngjException("only greyscale images support this");
    return graysb;
  }

  public void setAlphasb(int a) {
    if (!imgInfo.alpha)
      throw new PngjException("only images with alpha support this");
    alphasb = a;
  }

  public int getAlphasb() {
    if (!imgInfo.alpha)
      throw new PngjException("only images with alpha support this");
    return alphasb;
  }

  /**
   * Set rgb values
   *
   */
  public void setRGB(int r, int g, int b) {
    if (imgInfo.greyscale || imgInfo.indexed)
      throw new PngjException("only rgb or rgba images support this");
    redsb = r;
    greensb = g;
    bluesb = b;
  }

  public int[] getRGB() {
    if (imgInfo.greyscale || imgInfo.indexed)
      throw new PngjException("only rgb or rgba images support this");
    return new int[] {redsb, greensb, bluesb};
  }
}

View File

@ -0,0 +1,131 @@
package ar.com.hjg.pngj.chunks;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * sPLT chunk: suggested palette.
 * <p>
 * see http://www.w3.org/TR/PNG/#11sPLT
 */
public class PngChunkSPLT extends PngChunkMultiple {
public final static String ID = ChunkHelper.sPLT;
// http://www.w3.org/TR/PNG/#11sPLT
// palette name (Latin-1), terminated on the wire by a null byte
private String palName;
private int sampledepth; // 8/16
// Entries packed flat as 5 ints each: r, g, b, alpha, frequency.
private int[] palette; // 5 elements per entry
public PngChunkSPLT(ImageInfo info) {
super(ID, info);
}
@Override
public ChunkOrderingConstraint getOrderingConstraint() {
return ChunkOrderingConstraint.BEFORE_IDAT;
}
/**
 * Serializes: name, null separator, sample depth byte, then per entry the four
 * samples (1 or 2 bytes each, per sampledepth) plus a 2-byte frequency.
 *
 * @throws PngjException on internal I/O failure
 */
@Override
public ChunkRaw createRawChunk() {
try {
ByteArrayOutputStream ba = new ByteArrayOutputStream();
ba.write(ChunkHelper.toBytes(palName));
ba.write(0); // separator
ba.write((byte) sampledepth);
int nentries = getNentries();
for (int n = 0; n < nentries; n++) {
for (int i = 0; i < 4; i++) { // r, g, b, alpha
if (sampledepth == 8)
PngHelperInternal.writeByte(ba, (byte) palette[n * 5 + i]);
else
PngHelperInternal.writeInt2(ba, palette[n * 5 + i]);
}
PngHelperInternal.writeInt2(ba, palette[n * 5 + 4]); // frequency, always 2 bytes
}
byte[] b = ba.toByteArray();
ChunkRaw chunk = createEmptyChunk(b.length, false);
chunk.data = b;
return chunk;
} catch (IOException e) {
throw new PngjException(e);
}
}
/**
 * Parses name, sample depth and entries; t acts as a byte cursor throughout.
 * Entry size on the wire is 6 bytes (depth 8) or 10 bytes (depth 16).
 *
 * @throws PngjException if no null separator is found
 */
@Override
public void parseFromRaw(ChunkRaw c) {
int t = -1;
for (int i = 0; i < c.data.length; i++) { // look for first zero
if (c.data[i] == 0) {
t = i;
break;
}
}
if (t <= 0 || t > c.data.length - 2)
throw new PngjException("bad sPLT chunk: no separator found");
palName = ChunkHelper.toString(c.data, 0, t);
sampledepth = PngHelperInternal.readInt1fromByte(c.data, t + 1);
t += 2; // advance past separator and depth byte
int nentries = (c.data.length - t) / (sampledepth == 8 ? 6 : 10);
palette = new int[nentries * 5];
int r, g, b, a, f, ne;
ne = 0;
for (int i = 0; i < nentries; i++) {
if (sampledepth == 8) {
r = PngHelperInternal.readInt1fromByte(c.data, t++);
g = PngHelperInternal.readInt1fromByte(c.data, t++);
b = PngHelperInternal.readInt1fromByte(c.data, t++);
a = PngHelperInternal.readInt1fromByte(c.data, t++);
} else {
r = PngHelperInternal.readInt2fromBytes(c.data, t);
t += 2;
g = PngHelperInternal.readInt2fromBytes(c.data, t);
t += 2;
b = PngHelperInternal.readInt2fromBytes(c.data, t);
t += 2;
a = PngHelperInternal.readInt2fromBytes(c.data, t);
t += 2;
}
f = PngHelperInternal.readInt2fromBytes(c.data, t); // frequency, always 2 bytes
t += 2;
palette[ne++] = r;
palette[ne++] = g;
palette[ne++] = b;
palette[ne++] = a;
palette[ne++] = f;
}
}
public int getNentries() {
return palette.length / 5;
}
public String getPalName() {
return palName;
}
public void setPalName(String palName) {
this.palName = palName;
}
public int getSampledepth() {
return sampledepth;
}
public void setSampledepth(int sampledepth) {
this.sampledepth = sampledepth;
}
// WARNING: returns the internal array, not a deep copy
public int[] getPalette() {
return palette;
}
// WARNING: keeps the given array, not a deep copy
public void setPalette(int[] palette) {
this.palette = palette;
}
}

View File

@ -0,0 +1,55 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * sRGB chunk: the image is in the sRGB colour space, with the given rendering intent.
 * <p>
 * see http://www.w3.org/TR/PNG/#11sRGB
 */
public class PngChunkSRGB extends PngChunkSingle {
  public final static String ID = ChunkHelper.sRGB;
  // http://www.w3.org/TR/PNG/#11sRGB

  // rendering intent values
  public static final int RENDER_INTENT_Perceptual = 0;
  public static final int RENDER_INTENT_Relative_colorimetric = 1;
  public static final int RENDER_INTENT_Saturation = 2;
  public static final int RENDER_INTENT_Absolute_colorimetric = 3;

  private int intent;

  public PngChunkSRGB(ImageInfo info) {
    super(ID, info);
  }

  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.BEFORE_PLTE_AND_IDAT;
  }

  @Override
  public void parseFromRaw(ChunkRaw c) {
    if (c.len != 1)
      throw new PngjException("bad chunk length " + c);
    intent = PngHelperInternal.readInt1fromByte(c.data, 0);
  }

  /** A single byte carrying the rendering intent. */
  @Override
  public ChunkRaw createRawChunk() {
    ChunkRaw chunk = createEmptyChunk(1, true);
    chunk.data[0] = (byte) intent;
    return chunk;
  }

  public int getIntent() {
    return intent;
  }

  public void setIntent(int intent) {
    this.intent = intent;
  }
}

View File

@ -0,0 +1,54 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngjException;
/**
 * sTER chunk: stereo image indicator.
 * <p>
 * see http://www.libpng.org/pub/png/spec/register/pngext-1.3.0-pdg.html#C.sTER
 */
public class PngChunkSTER extends PngChunkSingle {
  public final static String ID = "sTER";
  // http://www.libpng.org/pub/png/spec/register/pngext-1.3.0-pdg.html#C.sTER

  private byte mode; // 0: cross-fuse layout 1: diverging-fuse layout

  public PngChunkSTER(ImageInfo info) {
    super(ID, info);
  }

  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.BEFORE_IDAT;
  }

  /** A single byte carrying the layout mode. */
  @Override
  public ChunkRaw createRawChunk() {
    ChunkRaw chunk = createEmptyChunk(1, true);
    chunk.data[0] = mode;
    return chunk;
  }

  @Override
  public void parseFromRaw(ChunkRaw chunk) {
    if (chunk.len != 1)
      throw new PngjException("bad chunk length " + chunk);
    mode = chunk.data[0];
  }

  /**
   * 0: cross-fuse layout 1: diverging-fuse layout
   */
  public byte getMode() {
    return mode;
  }

  /**
   * 0: cross-fuse layout 1: diverging-fuse layout
   */
  public void setMode(byte mode) {
    this.mode = mode;
  }
}

View File

@ -0,0 +1,43 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
/**
 * PNG chunk type (abstract) that does not allow multiple instances in same image.
 */
public abstract class PngChunkSingle extends PngChunk {

  protected PngChunkSingle(String id, ImageInfo imgInfo) {
    super(id, imgInfo);
  }

  // fix: @Override was missing (cf. PngChunkMultiple.allowsMultiple())
  @Override
  public final boolean allowsMultiple() {
    return false;
  }

  /**
   * Identity is determined by the chunk id alone, so a chunk collection can hold at
   * most one instance of each single-instance chunk type. (Contrast with
   * PngChunkMultiple, which keeps Object's default identity semantics.)
   */
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((id == null) ? 0 : id.hashCode());
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null)
      return false;
    if (getClass() != obj.getClass())
      return false;
    PngChunkSingle other = (PngChunkSingle) obj;
    if (id == null) {
      if (other.id != null)
        return false;
    } else if (!id.equals(other.id))
      return false;
    return true;
  }
}

View File

@ -0,0 +1,44 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngjException;
/**
 * tEXt chunk: plain (Latin-1) textual data.
 * <p>
 * see http://www.w3.org/TR/PNG/#11tEXt
 */
public class PngChunkTEXT extends PngChunkTextVar {
  public final static String ID = ChunkHelper.tEXt;

  public PngChunkTEXT(ImageInfo info) {
    super(ID, info);
  }

  public PngChunkTEXT(ImageInfo info, String key, String val) {
    super(ID, info);
    setKeyVal(key, val);
  }

  /**
   * Serializes as: key, null separator, value.
   *
   * @throws PngjException if the key is null or blank
   */
  @Override
  public ChunkRaw createRawChunk() {
    if (key == null || key.trim().length() == 0)
      throw new PngjException("Text chunk key must be non empty");
    byte[] payload = ChunkHelper.toBytes(key + "\0" + val);
    ChunkRaw chunk = createEmptyChunk(payload.length, false);
    chunk.data = payload;
    return chunk;
  }

  /** Splits at the first null byte; a missing value yields the empty string. */
  @Override
  public void parseFromRaw(ChunkRaw c) {
    int sep = 0;
    while (sep < c.data.length && c.data[sep] != 0)
      sep++;
    key = ChunkHelper.toString(c.data, 0, sep);
    int valStart = sep + 1;
    val = valStart < c.data.length
        ? ChunkHelper.toString(c.data, valStart, c.data.length - valStart)
        : "";
  }
}

View File

@ -0,0 +1,82 @@
package ar.com.hjg.pngj.chunks;
import java.util.Calendar;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * tIME chunk: image last-modification time.
 * <p>
 * see http://www.w3.org/TR/PNG/#11tIME
 */
public class PngChunkTIME extends PngChunkSingle {
  public final static String ID = ChunkHelper.tIME;
  // http://www.w3.org/TR/PNG/#11tIME

  private int year, mon, day, hour, min, sec;

  public PngChunkTIME(ImageInfo info) {
    super(ID, info);
  }

  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.NONE;
  }

  /** Serializes as 2-byte year followed by one byte each for mon/day/hour/min/sec. */
  @Override
  public ChunkRaw createRawChunk() {
    ChunkRaw chunk = createEmptyChunk(7, true);
    PngHelperInternal.writeInt2tobytes(year, chunk.data, 0);
    int[] fields = {mon, day, hour, min, sec};
    for (int k = 0; k < fields.length; k++)
      chunk.data[2 + k] = (byte) fields[k];
    return chunk;
  }

  @Override
  public void parseFromRaw(ChunkRaw chunk) {
    if (chunk.len != 7)
      throw new PngjException("bad chunk " + chunk);
    year = PngHelperInternal.readInt2fromBytes(chunk.data, 0);
    mon = PngHelperInternal.readInt1fromByte(chunk.data, 2);
    day = PngHelperInternal.readInt1fromByte(chunk.data, 3);
    hour = PngHelperInternal.readInt1fromByte(chunk.data, 4);
    min = PngHelperInternal.readInt1fromByte(chunk.data, 5);
    sec = PngHelperInternal.readInt1fromByte(chunk.data, 6);
  }

  /** Fills the fields with the current time minus {@code secsAgo} seconds. */
  public void setNow(int secsAgo) {
    Calendar cal = Calendar.getInstance();
    cal.setTimeInMillis(System.currentTimeMillis() - 1000 * (long) secsAgo);
    year = cal.get(Calendar.YEAR);
    mon = cal.get(Calendar.MONTH) + 1; // Calendar months are 0-based
    day = cal.get(Calendar.DAY_OF_MONTH);
    hour = cal.get(Calendar.HOUR_OF_DAY);
    min = cal.get(Calendar.MINUTE);
    sec = cal.get(Calendar.SECOND);
  }

  public void setYMDHMS(int yearx, int monx, int dayx, int hourx, int minx, int secx) {
    year = yearx;
    mon = monx;
    day = dayx;
    hour = hourx;
    min = minx;
    sec = secx;
  }

  public int[] getYMDHMS() {
    return new int[] {year, mon, day, hour, min, sec};
  }

  /** format YYYY/MM/DD HH:mm:SS */
  public String getAsString() {
    return String.format("%04d/%02d/%02d %02d:%02d:%02d", year, mon, day, hour, min, sec);
  }
}

View File

@ -0,0 +1,149 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjException;
/**
 * tRNS chunk: transparency information.
 * <p>
 * see http://www.w3.org/TR/PNG/#11tRNS
 * <p>
 * this chunk structure depends on the image type
 */
public class PngChunkTRNS extends PngChunkSingle {
  public final static String ID = ChunkHelper.tRNS;
  // http://www.w3.org/TR/PNG/#11tRNS

  // only one of these groups is meaningful, depending on the image type
  private int gray; // greyscale: the transparent grey level
  private int red, green, blue; // truecolour: the transparent RGB value
  private int[] paletteAlpha = new int[] {}; // indexed: alpha per palette entry

  public PngChunkTRNS(ImageInfo info) {
    super(ID, info);
  }

  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.AFTER_PLTE_BEFORE_IDAT;
  }

  /**
   * Serializes per colour model: 2 bytes (grey), N bytes (indexed), or 6 bytes (RGB).
   */
  @Override
  public ChunkRaw createRawChunk() {
    ChunkRaw c = null;
    if (imgInfo.greyscale) {
      c = createEmptyChunk(2, true);
      PngHelperInternal.writeInt2tobytes(gray, c.data, 0);
    } else if (imgInfo.indexed) {
      c = createEmptyChunk(paletteAlpha.length, true);
      for (int n = 0; n < c.len; n++) {
        c.data[n] = (byte) paletteAlpha[n];
      }
    } else {
      c = createEmptyChunk(6, true);
      PngHelperInternal.writeInt2tobytes(red, c.data, 0);
      // bugfix: green and blue were previously written at offset 0, clobbering red;
      // parseFromRaw (and the PNG spec) use offsets 0/2/4
      PngHelperInternal.writeInt2tobytes(green, c.data, 2);
      PngHelperInternal.writeInt2tobytes(blue, c.data, 4);
    }
    return c;
  }

  @Override
  public void parseFromRaw(ChunkRaw c) {
    if (imgInfo.greyscale) {
      gray = PngHelperInternal.readInt2fromBytes(c.data, 0);
    } else if (imgInfo.indexed) {
      int nentries = c.data.length;
      paletteAlpha = new int[nentries];
      for (int n = 0; n < nentries; n++) {
        paletteAlpha[n] = (int) (c.data[n] & 0xff);
      }
    } else {
      red = PngHelperInternal.readInt2fromBytes(c.data, 0);
      green = PngHelperInternal.readInt2fromBytes(c.data, 2);
      blue = PngHelperInternal.readInt2fromBytes(c.data, 4);
    }
  }

  /**
   * Set rgb values
   *
   */
  public void setRGB(int r, int g, int b) {
    if (imgInfo.greyscale || imgInfo.indexed)
      throw new PngjException("only rgb or rgba images support this");
    red = r;
    green = g;
    blue = b;
  }

  public int[] getRGB() {
    if (imgInfo.greyscale || imgInfo.indexed)
      throw new PngjException("only rgb or rgba images support this");
    return new int[] {red, green, blue};
  }

  /** Transparent colour packed as 0xRRGGBB (low 8 bits of each sample). */
  public int getRGB888() {
    if (imgInfo.greyscale || imgInfo.indexed)
      throw new PngjException("only rgb or rgba images support this");
    return (red << 16) | (green << 8) | blue;
  }

  public void setGray(int g) {
    if (!imgInfo.greyscale)
      throw new PngjException("only grayscale images support this");
    gray = g;
  }

  public int getGray() {
    if (!imgInfo.greyscale)
      throw new PngjException("only grayscale images support this");
    return gray;
  }

  /**
   * Sets the alpha value of one palette entry. Allocate the table first with
   * {@link #setNentriesPalAlpha(int)}.
   *
   * @param idx index inside the table
   * @param val alpha value (0-255)
   */
  public void setEntryPalAlpha(int idx, int val) {
    paletteAlpha[idx] = val;
  }

  /** Allocates (zero-filled) the palette alpha table with the given length. */
  public void setNentriesPalAlpha(int len) {
    paletteAlpha = new int[len];
  }

  /**
   * WARNING: non deep copy. See also {@link #setNentriesPalAlpha(int)} {@link #setEntryPalAlpha(int, int)}
   */
  public void setPalAlpha(int[] palAlpha) {
    if (!imgInfo.indexed)
      throw new PngjException("only indexed images support this");
    paletteAlpha = palAlpha;
  }

  /**
   * WARNING: non deep copy
   */
  public int[] getPalletteAlpha() {
    return paletteAlpha;
  }

  /**
   * to use when only one pallete index is set as totally transparent: entries below it
   * become fully opaque (255), the given index becomes 0.
   */
  public void setIndexEntryAsTransparent(int palAlphaIndex) {
    if (!imgInfo.indexed)
      throw new PngjException("only indexed images support this");
    // bugfix: was "new int[] {palAlphaIndex + 1}", a 1-element array holding the value
    // palAlphaIndex+1, which threw ArrayIndexOutOfBounds for any index > 0
    paletteAlpha = new int[palAlphaIndex + 1];
    for (int i = 0; i < palAlphaIndex; i++)
      paletteAlpha[i] = 255;
    paletteAlpha[palAlphaIndex] = 0;
  }
}

View File

@ -0,0 +1,60 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
/**
 * Superclass (abstract) for the three textual chunk types (tEXt, iTXt, zTXt).
 */
public abstract class PngChunkTextVar extends PngChunkMultiple {
  // key/val pair; only lazily computed for tEXt
  protected String key;
  protected String val;

  // Predefined keywords, see http://www.w3.org/TR/PNG/#11keywords
  public final static String KEY_Title = "Title"; // Short (one line) title or caption for image
  public final static String KEY_Author = "Author"; // Name of image's creator
  public final static String KEY_Description = "Description"; // Description of image (possibly
  // long)
  public final static String KEY_Copyright = "Copyright"; // Copyright notice
  public final static String KEY_Creation_Time = "Creation Time"; // Time of original image creation
  public final static String KEY_Software = "Software"; // Software used to create the image
  public final static String KEY_Disclaimer = "Disclaimer"; // Legal disclaimer
  public final static String KEY_Warning = "Warning"; // Warning of nature of content
  public final static String KEY_Source = "Source"; // Device used to create the image
  public final static String KEY_Comment = "Comment"; // Miscellaneous comment

  protected PngChunkTextVar(String id, ImageInfo info) {
    super(id, info);
  }

  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.NONE;
  }

  /** Convenience holder for the standard keyword values. */
  public static class PngTxtInfo {
    public String title;
    public String author;
    public String description;
    public String creation_time;// = (new Date()).toString();
    public String software;
    public String disclaimer;
    public String warning;
    public String source;
    public String comment;
  }

  public String getKey() {
    return key;
  }

  public String getVal() {
    return val;
  }

  public void setKeyVal(String key, String val) {
    this.key = key;
    this.val = val;
  }
}

View File

@ -0,0 +1,40 @@
package ar.com.hjg.pngj.chunks;
import ar.com.hjg.pngj.ImageInfo;
/**
 * Placeholder for UNKNOWN (custom or not) chunks.
 * <p>
 * For PngReader, a chunk is unknown if it's not registered in the chunk factory.
 */
public class PngChunkUNKNOWN extends PngChunkMultiple { // unknown, custom or not
  public PngChunkUNKNOWN(String id, ImageInfo info) {
    super(id, info);
  }

  /** Unknown chunks carry no ordering constraint. */
  @Override
  public ChunkOrderingConstraint getOrderingConstraint() {
    return ChunkOrderingConstraint.NONE;
  }

  /** The raw chunk is kept verbatim; nothing to build. */
  @Override
  public ChunkRaw createRawChunk() {
    return raw;
  }

  /** Nothing to parse: the payload is opaque to us. */
  @Override
  public void parseFromRaw(ChunkRaw c) {}

  /** Returns the raw payload (WARNING: does not do deep copy). */
  public byte[] getData() {
    return raw.data;
  }

  /** Sets the raw payload (WARNING: does not do deep copy). */
  public void setData(byte[] data) {
    raw.data = data;
  }
}

View File

@ -0,0 +1,62 @@
package ar.com.hjg.pngj.chunks;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngjException;
/**
 * zTXt chunk: latin-1 text with a deflate-compressed value.
 * <p>
 * see http://www.w3.org/TR/PNG/#11zTXt
 */
public class PngChunkZTXT extends PngChunkTextVar {
  public final static String ID = ChunkHelper.zTXt;

  // http://www.w3.org/TR/PNG/#11zTXt
  public PngChunkZTXT(ImageInfo info) {
    super(ID, info);
  }

  /**
   * Serializes as: keyword, null separator, compression method (0), deflated text.
   *
   * @throws PngjException if the key is null or blank
   */
  @Override
  public ChunkRaw createRawChunk() {
    if (key == null || key.trim().length() == 0)
      throw new PngjException("Text chunk key must be non empty");
    try {
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      out.write(ChunkHelper.toBytes(key));
      out.write(0); // separator
      out.write(0); // compression method: 0
      out.write(ChunkHelper.compressBytes(ChunkHelper.toBytes(val), true));
      byte[] payload = out.toByteArray();
      ChunkRaw chunk = createEmptyChunk(payload.length, false);
      chunk.data = payload;
      return chunk;
    } catch (IOException e) {
      throw new PngjException(e);
    }
  }

  /**
   * Parses: keyword up to the first zero byte, then the compression method byte (must be 0),
   * then the deflated text which is decompressed into {@code val}.
   */
  @Override
  public void parseFromRaw(ChunkRaw c) {
    int sep = -1;
    // locate the first zero byte (keyword/value separator)
    for (int i = 0; sep < 0 && i < c.data.length; i++) {
      if (c.data[i] == 0)
        sep = i;
    }
    // need at least the compression-method byte after the separator
    if (sep < 0 || sep > c.data.length - 2)
      throw new PngjException("bad zTXt chunk: no separator found");
    key = ChunkHelper.toString(c.data, 0, sep);
    int compmet = (int) c.data[sep + 1];
    if (compmet != 0)
      throw new PngjException("bad zTXt chunk: unknown compression method");
    byte[] plain = ChunkHelper.compressBytes(c.data, sep + 2, c.data.length - sep - 2, false); // uncompress
    val = ChunkHelper.toString(plain);
  }
}

View File

@ -0,0 +1,230 @@
package ar.com.hjg.pngj.chunks;
import java.util.ArrayList;
import java.util.List;
import ar.com.hjg.pngj.PngjException;
/**
 * We consider "image metadata" every info inside the image except for the most basic image info
 * (IHDR chunk - ImageInfo class) and the pixels values.
 * <p>
 * This includes the palette (if present) and all the ancillary chunks.
 * <p>
 * This class provides a wrapper over the collection of chunks of a image (read or to write) and
 * provides some high level methods to access them.
 */
public class PngMetadata {
  private final ChunksList chunkList; // underlying chunk collection
  private final boolean readonly; // true when wrapping a reader-side (non writable) list

  public PngMetadata(ChunksList chunks) {
    this.chunkList = chunks;
    // only a ChunksListForWrite accepts queued chunks
    this.readonly = !(chunks instanceof ChunksListForWrite);
  }

  /**
   * Queues the chunk at the writer.
   * <p>
   * lazyOverwrite: if true, checks if there is a queued "equivalent" chunk and if so, overwrites
   * it. However it does not check for already written chunks.
   *
   * @throws PngjException if this metadata is read-only
   */
  public void queueChunk(final PngChunk c, boolean lazyOverwrite) {
    // BUGFIX: check readonly BEFORE casting to ChunksListForWrite, so a read-only metadata
    // fails with the intended PngjException instead of a ClassCastException in getChunkListW()
    if (readonly)
      throw new PngjException("cannot set chunk : readonly metadata");
    ChunksListForWrite cl = getChunkListW();
    if (lazyOverwrite) {
      ChunkHelper.trimList(cl.getQueuedChunks(), new ChunkPredicate() {
        public boolean match(PngChunk c2) {
          return ChunkHelper.equivalent(c, c2);
        }
      });
    }
    cl.queue(c);
  }

  /** Queues the chunk, overwriting any queued equivalent chunk. */
  public void queueChunk(final PngChunk c) {
    queueChunk(c, true);
  }

  private ChunksListForWrite getChunkListW() {
    return (ChunksListForWrite) chunkList;
  }

  // ///// high level utility methods follow ////////////

  // //////////// DPI

  /**
   * returns {-1,-1} if not found or dimension unknown
   */
  public double[] getDpi() {
    PngChunk c = chunkList.getById1(ChunkHelper.pHYs, true);
    if (c == null)
      return new double[] {-1, -1};
    else
      return ((PngChunkPHYS) c).getAsDpi2();
  }

  /** Sets the same DPI for both axes. */
  public void setDpi(double x) {
    setDpi(x, x);
  }

  /** Queues a pHYs chunk with the given horizontal/vertical DPI. */
  public void setDpi(double x, double y) {
    PngChunkPHYS c = new PngChunkPHYS(chunkList.imageInfo);
    c.setAsDpi2(x, y);
    queueChunk(c);
  }

  // //////////// TIME

  /**
   * Creates a time chunk with current time, less secsAgo seconds
   * <p>
   *
   * @return Returns the created-queued chunk, just in case you want to examine or modify it
   */
  public PngChunkTIME setTimeNow(int secsAgo) {
    PngChunkTIME c = new PngChunkTIME(chunkList.imageInfo);
    c.setNow(secsAgo);
    queueChunk(c);
    return c;
  }

  /** Creates a time chunk with the current time. */
  public PngChunkTIME setTimeNow() {
    return setTimeNow(0);
  }

  /**
   * Creates a time chunk with given date-time
   * <p>
   *
   * @return Returns the created-queued chunk, just in case you want to examine or modify it
   */
  public PngChunkTIME setTimeYMDHMS(int yearx, int monx, int dayx, int hourx, int minx, int secx) {
    PngChunkTIME c = new PngChunkTIME(chunkList.imageInfo);
    c.setYMDHMS(yearx, monx, dayx, hourx, minx, secx);
    queueChunk(c, true);
    return c;
  }

  /**
   * null if not found
   */
  public PngChunkTIME getTime() {
    return (PngChunkTIME) chunkList.getById1(ChunkHelper.tIME);
  }

  /** Returns the time chunk formatted as a string, or "" if absent. */
  public String getTimeAsString() {
    PngChunkTIME c = getTime();
    return c == null ? "" : c.getAsString();
  }

  // //////////// TEXT

  /**
   * Creates a text chunk and queues it.
   * <p>
   *
   * @param k : key (latin1)
   * @param val (arbitrary, should be latin1 if useLatin1)
   * @param useLatin1 if true a tEXt/zTXt chunk is used, else iTXt
   * @param compress only valid with useLatin1 (zTXt)
   * @return Returns the created-queued chunks, just in case you want to examine, touch it
   * @throws PngjException if compress is requested for non-latin1 text
   */
  public PngChunkTextVar setText(String k, String val, boolean useLatin1, boolean compress) {
    if (compress && !useLatin1)
      throw new PngjException("cannot compress non latin text");
    PngChunkTextVar c;
    if (useLatin1) {
      if (compress) {
        c = new PngChunkZTXT(chunkList.imageInfo);
      } else {
        c = new PngChunkTEXT(chunkList.imageInfo);
      }
    } else {
      c = new PngChunkITXT(chunkList.imageInfo);
      ((PngChunkITXT) c).setLangtag(k); // we use the same orig tag (this is not quite right)
    }
    c.setKeyVal(k, val);
    queueChunk(c, true);
    return c;
  }

  /** Creates an uncompressed iTXt chunk and queues it. */
  public PngChunkTextVar setText(String k, String val) {
    return setText(k, val, false, false);
  }

  /**
   * gets all text chunks with a given key
   * <p>
   * returns an empty list if not found
   * <p>
   * Warning: this does not check the "lang" key of iTxt
   */
  public List<? extends PngChunkTextVar> getTxtsForKey(String k) {
    // typed list instead of the previous raw-type List/ArrayList
    List<PngChunkTextVar> li = new ArrayList<PngChunkTextVar>();
    for (PngChunk c : chunkList.getById(ChunkHelper.tEXt, k))
      li.add((PngChunkTextVar) c);
    for (PngChunk c : chunkList.getById(ChunkHelper.zTXt, k))
      li.add((PngChunkTextVar) c);
    for (PngChunk c : chunkList.getById(ChunkHelper.iTXt, k))
      li.add((PngChunkTextVar) c);
    return li;
  }

  /**
   * Returns empty if not found, concatenated (with newlines) if multiple! - and trimmed
   * <p>
   * Use getTxtsForKey() if you don't want this behaviour
   */
  public String getTxtForKey(String k) {
    List<? extends PngChunkTextVar> li = getTxtsForKey(k);
    if (li.isEmpty())
      return "";
    StringBuilder t = new StringBuilder();
    for (PngChunkTextVar c : li)
      t.append(c.getVal()).append("\n");
    return t.toString().trim();
  }

  /**
   * Returns the palette chunk, if present
   *
   * @return null if not present
   */
  public PngChunkPLTE getPLTE() {
    return (PngChunkPLTE) chunkList.getById1(PngChunkPLTE.ID);
  }

  /**
   * Creates a new empty palette chunk, queues it for write and return it to the caller, who should
   * fill its entries
   */
  public PngChunkPLTE createPLTEChunk() {
    PngChunkPLTE plte = new PngChunkPLTE(chunkList.imageInfo);
    queueChunk(plte);
    return plte;
  }

  /**
   * Returns the TRNS chunk, if present
   *
   * @return null if not present
   */
  public PngChunkTRNS getTRNS() {
    return (PngChunkTRNS) chunkList.getById1(PngChunkTRNS.ID);
  }

  /**
   * Creates a new empty TRNS chunk, queues it for write and return it to the caller, who should
   * fill its entries
   */
  public PngChunkTRNS createTRNSChunk() {
    PngChunkTRNS trns = new PngChunkTRNS(chunkList.imageInfo);
    queueChunk(trns);
    return trns;
  }
}

View File

@ -0,0 +1,9 @@
<html>
<body bgcolor="white">
<p>
Contains the code related to chunk management for the PNGJ library.</p>
<p>
Only needed by client code if some special chunk handling is required.
</p>
</body>
</html>

View File

@ -0,0 +1,49 @@
<html>
<body bgcolor="white">
<p>
PNGJ main package
</p>
<p>
Users of this library should rarely need more than the public members of this package.<br>
Newcomers: start with <a href="PngReader.html">PngReader</a> and <a href="PngWriter.html">PngWriter</a>.
</p>
<p>
Example of use: this code reads a true colour PNG image (RGB8 or RGBA8)
and reduces the red channel by half, increasing the green by 20.
It copies all the "safe" metadata from the original image, and adds a textual metadata.
<pre class="code">
public static void convert(String origFilename, String destFilename) {
// you can also use PngReader (essentially the same) or PngReaderByte
PngReaderInt pngr = new PngReaderInt(new File(origFilename));
System.out.println(pngr.toString());
int channels = pngr.imgInfo.channels;
if (channels &lt; 3 || pngr.imgInfo.bitDepth != 8)
throw new RuntimeException("For simplicity this supports only RGB8/RGBA8 images");
// writer with same image properties as original
PngWriter pngw = new PngWriter(new File(destFilename), pngr.imgInfo, true);
// instruct the writer to grab all ancillary chunks from the original
pngw.copyChunksFrom(pngr.getChunksList(), ChunkCopyBehaviour.COPY_ALL_SAFE);
// add a textual chunk to writer
pngw.getMetadata().setText(PngChunkTextVar.KEY_Description, "Decreased red and increased green");
// also: while(pngr.hasMoreRows())
for (int row = 0; row &lt; pngr.imgInfo.rows; row++) {
ImageLineInt l1 = pngr.readRowInt(); // each element is a sample
int[] scanline = l1.getScanline(); // to save typing
for (int j = 0; j &lt; pngr.imgInfo.cols; j++) {
scanline[j * channels] /= 2;
scanline[j * channels + 1] = ImageLineHelper.clampTo_0_255(scanline[j * channels + 1] + 20);
}
pngw.writeRow(l1);
}
pngr.end(); // it's recommended to end the reader first, in case there are trailing chunks to read
pngw.end();
}
</pre>
For more examples, see the tests and samples.
</p>
</body>
</html>

View File

@ -0,0 +1,160 @@
package ar.com.hjg.pngj.pixels;
import java.io.OutputStream;
import ar.com.hjg.pngj.IDatChunkWriter;
/**
 * This is an OutputStream that compresses (via Deflater or a deflater-like object), and optionally
 * passes the compressed stream to another output stream.
 *
 * It allows to compute in/out/ratio stats.
 *
 * It works as a stream (similar to DeflaterOutputStream), but it's peculiar in that it expects
 * that each write has a fixed length (other lengths are accepted, but it's less efficient) and
 * that the total amount of bytes is known (so it can close itself, but it can also be closed on
 * demand). In PNGJ use, the block is typically a row (including filter byte).
 *
 * We use this to do the real compression (with Deflate) but also to compute tentative estimators.
 *
 * If not closed, it can be recycled via reset().
 */
public abstract class CompressorStream extends OutputStream {
  protected IDatChunkWriter idatChunkWriter; // optional sink for compressed bytes (can be null)
  public final int blockLen; // expected (maximum) length of each write
  public final long totalbytes; // expected total raw bytes to be fed
  boolean closed = false;
  protected boolean done = false;
  protected long bytesIn = 0; // raw bytes fed so far
  protected long bytesOut = 0; // compressed bytes produced so far
  protected int block = -1; // index of the current block (row); incremented per write() call
  /** optionally stores the first byte of each block (row) */
  private byte[] firstBytes;
  protected boolean storeFirstByte = false;

  /**
   * @param idatCw Can be null (if we are only interested in compute compression ratio)
   * @param blockLen Estimated maximum block length. If unknown, use -1.
   * @param totalbytes Expected total bytes to be fed. If unknown, use -1.
   */
  public CompressorStream(IDatChunkWriter idatCw, int blockLen, long totalbytes) {
    this.idatChunkWriter = idatCw;
    if (blockLen < 0)
      blockLen = 4096; // default block length when unknown
    if (totalbytes < 0)
      totalbytes = Long.MAX_VALUE; // effectively "never auto-close on total"
    if (blockLen < 1 || totalbytes < 1)
      throw new RuntimeException(" maxBlockLen or totalLen invalid");
    this.blockLen = blockLen;
    this.totalbytes = totalbytes;
  }

  /** Releases resources. Idempotent. */
  @Override
  public void close() {
    done();
    if (idatChunkWriter != null)
      idatChunkWriter.close();
    closed = true;
  }

  /**
   * Will be called automatically when the number of bytes reaches the total expected. Can also be
   * called from outside. This should set the flag done=true
   */
  public abstract void done();

  @Override
  public final void write(byte[] data) {
    write(data, 0, data.length);
  }

  @Override
  public final void write(byte[] data, int off, int len) {
    block++;
    if (len <= blockLen) { // normal case: one write == one block (row)
      mywrite(data, off, len);
      if (storeFirstByte && block < firstBytes.length) {
        firstBytes[block] = data[off]; // only makes sense in this case
      }
    } else {
      // oversized write: feed it in blockLen-sized pieces
      while (len > 0) {
        // BUGFIX: clamp the piece to the remaining length; the previous code always passed
        // blockLen, reading past off+len whenever len was not an exact multiple of blockLen
        int n = len < blockLen ? len : blockLen;
        mywrite(data, off, n);
        off += n;
        len -= n;
      }
    }
    if (bytesIn >= totalbytes)
      done();
  }

  /**
   * same as write, but guaranteed to not exceed blockLen. The implementation should update
   * bytesOut and bytesIn but not check for totalBytes
   */
  public abstract void mywrite(byte[] data, int off, int len);

  /**
   * compressed/raw. This should be called only when done
   */
  public final double getCompressionRatio() {
    return bytesOut == 0 ? 1.0 : bytesOut / (double) bytesIn;
  }

  /**
   * raw (input) bytes. This should be called only when done
   */
  public final long getBytesRaw() {
    return bytesIn;
  }

  /**
   * compressed (out) bytes. This should be called only when done
   */
  public final long getBytesCompressed() {
    return bytesOut;
  }

  public boolean isClosed() {
    return closed;
  }

  public boolean isDone() {
    return done;
  }

  /** First byte of each block (row), if enabled via setStoreFirstByte(); else null. */
  public byte[] getFirstBytes() {
    return firstBytes;
  }

  /** Enables/disables recording of the first byte of each of the next nblocks blocks. */
  public void setStoreFirstByte(boolean storeFirstByte, int nblocks) {
    this.storeFirstByte = storeFirstByte;
    if (this.storeFirstByte) {
      if (firstBytes == null || firstBytes.length < nblocks)
        firstBytes = new byte[nblocks];
    } else
      firstBytes = null;
  }

  /** Finishes the current stream (calls done()) and resets counters so it can be reused. */
  public void reset() {
    done();
    bytesIn = 0;
    bytesOut = 0;
    block = -1;
    done = false;
  }

  @Override
  public void write(int i) { // should not be used
    write(new byte[] {(byte) i});
  }
}

View File

@ -0,0 +1,104 @@
package ar.com.hjg.pngj.pixels;
import java.util.zip.Deflater;
import ar.com.hjg.pngj.IDatChunkWriter;
import ar.com.hjg.pngj.PngjOutputException;
/**
 * CompressorStream backed by a Deflater.
 *
 * Note that the Deflater is not disposed after done: you should either recycle this with reset()
 * or dispose it with close().
 */
public class CompressorStreamDeflater extends CompressorStream {
  protected Deflater deflater;
  protected byte[] buf1; // temporary storage of compressed bytes: only used if idatWriter is null
  protected boolean deflaterIsOwn = true; // true if we created the deflater (and must end() it)

  /** if a deflater is passed, it must be already reset. It will not be released on close */
  public CompressorStreamDeflater(IDatChunkWriter idatCw, int maxBlockLen, long totalLen,
      Deflater def) {
    super(idatCw, maxBlockLen, totalLen);
    this.deflater = def == null ? new Deflater() : def;
    this.deflaterIsOwn = def == null;
  }

  public CompressorStreamDeflater(IDatChunkWriter idatCw, int maxBlockLen, long totalLen) {
    this(idatCw, maxBlockLen, totalLen, null);
  }

  public CompressorStreamDeflater(IDatChunkWriter idatCw, int maxBlockLen, long totalLen,
      int deflaterCompLevel, int deflaterStrategy) {
    this(idatCw, maxBlockLen, totalLen, new Deflater(deflaterCompLevel));
    this.deflaterIsOwn = true; // we created it above, so we own it
    deflater.setStrategy(deflaterStrategy);
  }

  @Override
  public void mywrite(byte[] data, int off, final int len) {
    if (deflater.finished() || done || closed)
      throw new PngjOutputException("write beyond end of stream");
    deflater.setInput(data, off, len);
    bytesIn += len;
    while (!deflater.needsInput())
      deflate();
  }

  /** Drains one buffer's worth of compressed output from the deflater. */
  protected void deflate() {
    byte[] buf;
    int off, n;
    if (idatChunkWriter != null) {
      // compress directly into the IDAT writer's buffer
      buf = idatChunkWriter.getBuf();
      off = idatChunkWriter.getOffset();
      n = idatChunkWriter.getAvailLen();
    } else {
      // no sink: use a scratch buffer just to count compressed bytes
      if (buf1 == null)
        buf1 = new byte[4096];
      buf = buf1;
      off = 0;
      n = buf1.length;
    }
    int len = deflater.deflate(buf, off, n);
    if (len > 0) {
      if (idatChunkWriter != null)
        idatChunkWriter.incrementOffset(len);
      bytesOut += len;
    }
  }

  /** automatically called when done */
  @Override
  public void done() {
    if (done)
      return;
    if (!deflater.finished()) {
      deflater.finish();
      while (!deflater.finished())
        deflate();
    }
    done = true;
    if (idatChunkWriter != null)
      idatChunkWriter.close();
  }

  @Override // FIX: annotation was missing; this overrides CompressorStream.close()
  public void close() {
    done();
    try {
      if (deflaterIsOwn) {
        deflater.end();
      }
    } catch (Exception ignored) {
      // best-effort cleanup: failing to end() the deflater must not prevent closing the stream
    }
    super.close();
  }

  @Override
  public void reset() {
    deflater.reset();
    super.reset();
  }
}

View File

@ -0,0 +1,94 @@
package ar.com.hjg.pngj.pixels;
import java.util.zip.Deflater;
import ar.com.hjg.pngj.IDatChunkWriter;
import ar.com.hjg.pngj.PngjOutputException;
/**
 * This class uses a quick compressor to get a rough estimate of deflate compression ratio.
 *
 * This just ignores the outputStream, and the deflater related parameters.
 */
public class CompressorStreamLz4 extends CompressorStream {
  private final DeflaterEstimatorLz4 lz4;
  private byte[] buf; // lazily allocated, only if needed
  private final int buffer_size;
  // inbuf = bytes in buffer not yet compressed (bytesIn includes these)
  private int inbuf = 0;
  private static final int MAX_BUFFER_SIZE = 16000;

  public CompressorStreamLz4(IDatChunkWriter os, int maxBlockLen, long totalLen) {
    super(os, maxBlockLen, totalLen);
    lz4 = new DeflaterEstimatorLz4();
    buffer_size = (int) (totalLen > MAX_BUFFER_SIZE ? MAX_BUFFER_SIZE : totalLen);
  }

  public CompressorStreamLz4(IDatChunkWriter os, int maxBlockLen, long totalLen, Deflater def) {
    this(os, maxBlockLen, totalLen); // deflater ignored
  }

  public CompressorStreamLz4(IDatChunkWriter os, int maxBlockLen, long totalLen,
      int deflaterCompLevel, int deflaterStrategy) {
    this(os, maxBlockLen, totalLen); // parameters ignored
  }

  @Override
  public void mywrite(byte[] b, int off, int len) {
    if (len == 0)
      return;
    if (done || closed)
      throw new PngjOutputException("write beyond end of stream");
    bytesIn += len;
    while (len > 0) {
      if (inbuf == 0 && (len >= MAX_BUFFER_SIZE || bytesIn == totalbytes)) {
        // direct estimation, bypassing the buffer (buffer might be null or empty)
        bytesOut += lz4.compressEstim(b, off, len);
        len = 0;
      } else {
        // accumulate into the buffer; run the estimator when it fills up
        if (buf == null)
          buf = new byte[buffer_size];
        int len1 = inbuf + len <= buffer_size ? len : buffer_size - inbuf; // to copy
        if (len1 > 0)
          System.arraycopy(b, off, buf, inbuf, len1);
        inbuf += len1;
        len -= len1;
        off += len1;
        if (inbuf == buffer_size)
          compressFromBuffer();
      }
    }
  }

  /** Estimates the compressed size of the buffered bytes and empties the buffer. */
  void compressFromBuffer() {
    if (inbuf > 0) {
      bytesOut += lz4.compressEstim(buf, 0, inbuf);
      inbuf = 0;
    }
  }

  @Override
  public void done() {
    if (!done) {
      compressFromBuffer();
      done = true;
    }
  }

  @Override
  public void close() {
    done();
    if (!closed) {
      super.close();
      buf = null;
    }
  }

  @Override // FIX: annotation was missing on this override
  public void reset() {
    super.reset();
  }
}

View File

@ -0,0 +1,258 @@
package ar.com.hjg.pngj.pixels;
/**
 * Deflate-compression-ratio estimator based on an LZ4-style match search.
 * <p>
 * NOTE(review): this appears to be a variant of {@code DeflaterEstimatorLz4}; the visible
 * difference is that readInt() here always reads big-endian regardless of native byte order.
 */
final public class DeflaterEstimatorHjg {
/**
 * This object is stateless, it's thread safe and can be reused
 */
public DeflaterEstimatorHjg() {}
/**
 * Estimates the length of the compressed bytes, as compressed by Lz4. WARNING: if larger than
 * LZ4_64K_LIMIT it cuts the input in fragments.
 *
 * WARNING: if some part of the input is discarded, this should return the proportional (so that
 * returnValue/srcLen=compressionRatio)
 *
 * @param src source buffer
 * @param srcOff offset of the first byte to consider
 * @param srcLen number of bytes to consider
 * @return estimated length of the compressed bytes
 */
public int compressEstim(byte[] src, int srcOff, final int srcLen) {
if (srcLen < 10)
return srcLen; // too small
// split the input into `segments` fragments of roughly equal size, each below the 64k limit
int stride = LZ4_64K_LIMIT - 1;
int segments = (srcLen + stride - 1) / stride;
stride = srcLen / segments;
if (stride >= LZ4_64K_LIMIT - 1 || stride * segments > srcLen || segments < 1 || stride < 1)
throw new RuntimeException("?? " + srcLen);
int bytesIn = 0;
int bytesOut = 0;
int len = srcLen;
while (len > 0) {
if (len > stride)
len = stride;
bytesOut += compress64k(src, srcOff, len);
srcOff += len;
bytesIn += len;
len = srcLen - bytesIn;
}
// if any tail was discarded, extrapolate proportionally to the full source length
double ratio = bytesOut / (double) bytesIn;
return bytesIn == srcLen ? bytesOut : (int) (ratio * srcLen + 0.5);
}
/** Convenience overload over the whole array. */
public int compressEstim(byte[] src) {
return compressEstim(src, 0, src.length);
}
// LZ4-style tuning constants (see the LZ4 block format)
static final int MEMORY_USAGE = 14;
static final int NOT_COMPRESSIBLE_DETECTION_LEVEL = 6; // see SKIP_STRENGTH
static final int MIN_MATCH = 4;
static final int HASH_LOG = MEMORY_USAGE - 2;
static final int HASH_TABLE_SIZE = 1 << HASH_LOG;
static final int SKIP_STRENGTH = Math.max(NOT_COMPRESSIBLE_DETECTION_LEVEL, 2); // 6 findMatchAttempts =
// 2^SKIP_STRENGTH+3
static final int COPY_LENGTH = 8;
static final int LAST_LITERALS = 5;
static final int MF_LIMIT = COPY_LENGTH + MIN_MATCH;
static final int MIN_LENGTH = MF_LIMIT + 1;
static final int MAX_DISTANCE = 1 << 16;
static final int ML_BITS = 4;
static final int ML_MASK = (1 << ML_BITS) - 1;
static final int RUN_BITS = 8 - ML_BITS;
static final int RUN_MASK = (1 << RUN_BITS) - 1;
static final int LZ4_64K_LIMIT = (1 << 16) + (MF_LIMIT - 1);
static final int HASH_LOG_64K = HASH_LOG + 1;
static final int HASH_TABLE_SIZE_64K = 1 << HASH_LOG_64K;
static final int HASH_LOG_HC = 15;
static final int HASH_TABLE_SIZE_HC = 1 << HASH_LOG_HC;
static final int OPTIMAL_ML = ML_MASK - 1 + MIN_MATCH;
/**
 * Computes the length an LZ4-style compressor would produce for a block of up to ~64k bytes.
 * Only the output *length* is accumulated (in dOff); no compressed bytes are emitted.
 */
static int compress64k(byte[] src, final int srcOff, final int srcLen) {
final int srcEnd = srcOff + srcLen;
final int srcLimit = srcEnd - LAST_LITERALS;
final int mflimit = srcEnd - MF_LIMIT;
int sOff = srcOff, dOff = 0;
int anchor = sOff;
if (srcLen >= MIN_LENGTH) {
final short[] hashTable = new short[HASH_TABLE_SIZE_64K];
++sOff;
main: while (true) {
// find a match
int forwardOff = sOff;
int ref;
int findMatchAttempts1 = (1 << SKIP_STRENGTH) + 3; // 64+3=67
do {
sOff = forwardOff;
forwardOff += findMatchAttempts1++ >>> SKIP_STRENGTH;
if (forwardOff > mflimit) {
break main; // ends all
}
final int h = hash64k(readInt(src, sOff));
ref = srcOff + readShort(hashTable, h);
writeShort(hashTable, h, sOff - srcOff);
} while (!readIntEquals(src, ref, sOff));
// catch up
final int excess = commonBytesBackward(src, ref, sOff, srcOff, anchor);
sOff -= excess;
ref -= excess;
// sequence == refsequence
final int runLen = sOff - anchor;
dOff++;
if (runLen >= RUN_MASK) {
if (runLen > RUN_MASK)
dOff += (runLen - RUN_MASK) / 0xFF;
dOff++;
}
dOff += runLen;
while (true) {
// encode offset
dOff += 2;
// count nb matches
sOff += MIN_MATCH;
ref += MIN_MATCH;
final int matchLen = commonBytes(src, ref, sOff, srcLimit);
sOff += matchLen;
// encode match len
if (matchLen >= ML_MASK) {
if (matchLen >= ML_MASK + 0xFF)
dOff += (matchLen - ML_MASK) / 0xFF;
dOff++;
}
// test end of chunk
if (sOff > mflimit) {
anchor = sOff;
break main;
}
// fill table
writeShort(hashTable, hash64k(readInt(src, sOff - 2)), sOff - 2 - srcOff);
// test next position
final int h = hash64k(readInt(src, sOff));
ref = srcOff + readShort(hashTable, h);
writeShort(hashTable, h, sOff - srcOff);
if (!readIntEquals(src, sOff, ref)) {
break;
}
dOff++;
}
// prepare next loop
anchor = sOff++;
}
}
// account for the last run of literals
int runLen = srcEnd - anchor;
if (runLen >= RUN_MASK + 0xFF) {
dOff += (runLen - RUN_MASK) / 0xFF;
}
dOff++;
dOff += runLen;
return dOff;
}
/** Worst-case LZ4 output size for the given input length. */
static final int maxCompressedLength(int length) {
if (length < 0) {
throw new IllegalArgumentException("length must be >= 0, got " + length);
}
return length + length / 255 + 16;
}
static int hash(int i) {
return (i * -1640531535) >>> ((MIN_MATCH * 8) - HASH_LOG);
}
static int hash64k(int i) {
return (i * -1640531535) >>> ((MIN_MATCH * 8) - HASH_LOG_64K);
}
static int readShortLittleEndian(byte[] buf, int i) {
return (buf[i] & 0xFF) | ((buf[i + 1] & 0xFF) << 8);
}
/** True if the 4 bytes at i equal the 4 bytes at j. */
static boolean readIntEquals(byte[] buf, int i, int j) {
return buf[i] == buf[j] && buf[i + 1] == buf[j + 1] && buf[i + 2] == buf[j + 2]
&& buf[i + 3] == buf[j + 3];
}
/** Length of the common prefix of b[o1..] and b[o2..], scanning forward up to limit. */
static int commonBytes(byte[] b, int o1, int o2, int limit) {
int count = 0;
while (o2 < limit && b[o1++] == b[o2++]) {
++count;
}
return count;
}
/** Length of the common suffix, scanning backward down to the l1/l2 bounds. */
static int commonBytesBackward(byte[] b, int o1, int o2, int l1, int l2) {
int count = 0;
while (o1 > l1 && o2 > l2 && b[--o1] == b[--o2]) {
++count;
}
return count;
}
/** Reads a hash-table entry as an unsigned 16-bit value. */
static int readShort(short[] buf, int off) {
return buf[off] & 0xFFFF;
}
static byte readByte(byte[] buf, int i) {
return buf[i];
}
static void checkRange(byte[] buf, int off) {
if (off < 0 || off >= buf.length) {
throw new ArrayIndexOutOfBoundsException(off);
}
}
static void checkRange(byte[] buf, int off, int len) {
checkLength(len);
if (len > 0) {
checkRange(buf, off);
checkRange(buf, off + len - 1);
}
}
static void checkLength(int len) {
if (len < 0) {
throw new IllegalArgumentException("lengths must be >= 0");
}
}
static int readIntBE(byte[] buf, int i) {
return ((buf[i] & 0xFF) << 24) | ((buf[i + 1] & 0xFF) << 16) | ((buf[i + 2] & 0xFF) << 8)
| (buf[i + 3] & 0xFF);
}
static int readIntLE(byte[] buf, int i) {
return (buf[i] & 0xFF) | ((buf[i + 1] & 0xFF) << 8) | ((buf[i + 2] & 0xFF) << 16)
| ((buf[i + 3] & 0xFF) << 24);
}
// NOTE(review): always big-endian here; DeflaterEstimatorLz4 uses the native byte order instead
static int readInt(byte[] buf, int i) {
return readIntBE(buf, i);
}
static void writeShort(short[] buf, int off, int v) {
buf[off] = (short) v;
}
}

View File

@ -0,0 +1,272 @@
package ar.com.hjg.pngj.pixels;
import java.nio.ByteOrder;
/**
* This estimator actually uses the LZ4 compression algorithm, and hopes that it's well correlated with Deflater. It's
* about 3 to 4 times faster than Deflater.
*
* This is a modified heavily trimmed version of the net.jpountz.lz4.LZ4JavaSafeCompressor class plus some methods from
* other classes from LZ4 Java library: https://github.com/jpountz/lz4-java , originally licensed under the Apache
* License 2.0
*/
final public class DeflaterEstimatorLz4 {
/**
* This object is stateless, it's thread safe and can be reused
*/
public DeflaterEstimatorLz4() {}
/**
* Estimates the length of the compressed bytes, as compressed by Lz4 WARNING: if larger than LZ4_64K_LIMIT it cuts it
* in fragments
*
* WARNING: if some part of the input is discarded, this should return the proportional (so that
* returnValue/srcLen=compressionRatio)
*
* @param src
* @param srcOff
* @param srcLen
* @return length of the compressed bytes
*/
public int compressEstim(byte[] src, int srcOff, final int srcLen) {
if (srcLen < 10)
return srcLen; // too small
int stride = LZ4_64K_LIMIT - 1;
int segments = (srcLen + stride - 1) / stride;
stride = srcLen / segments;
if (stride >= LZ4_64K_LIMIT - 1 || stride * segments > srcLen || segments < 1 || stride < 1)
throw new RuntimeException("?? " + srcLen);
int bytesIn = 0;
int bytesOut = 0;
int len = srcLen;
while (len > 0) {
if (len > stride)
len = stride;
bytesOut += compress64k(src, srcOff, len);
srcOff += len;
bytesIn += len;
len = srcLen - bytesIn;
}
double ratio = bytesOut / (double) bytesIn;
return bytesIn == srcLen ? bytesOut : (int) (ratio * srcLen + 0.5);
}
public int compressEstim(byte[] src) {
return compressEstim(src, 0, src.length);
}
static final ByteOrder NATIVE_BYTE_ORDER = ByteOrder.nativeOrder();
static final int MEMORY_USAGE = 14;
static final int NOT_COMPRESSIBLE_DETECTION_LEVEL = 6;
static final int MIN_MATCH = 4;
static final int HASH_LOG = MEMORY_USAGE - 2;
static final int HASH_TABLE_SIZE = 1 << HASH_LOG;
static final int SKIP_STRENGTH = Math.max(NOT_COMPRESSIBLE_DETECTION_LEVEL, 2);
static final int COPY_LENGTH = 8;
static final int LAST_LITERALS = 5;
static final int MF_LIMIT = COPY_LENGTH + MIN_MATCH;
static final int MIN_LENGTH = MF_LIMIT + 1;
static final int MAX_DISTANCE = 1 << 16;
static final int ML_BITS = 4;
static final int ML_MASK = (1 << ML_BITS) - 1;
static final int RUN_BITS = 8 - ML_BITS;
static final int RUN_MASK = (1 << RUN_BITS) - 1;
static final int LZ4_64K_LIMIT = (1 << 16) + (MF_LIMIT - 1);
static final int HASH_LOG_64K = HASH_LOG + 1;
static final int HASH_TABLE_SIZE_64K = 1 << HASH_LOG_64K;
static final int HASH_LOG_HC = 15;
static final int HASH_TABLE_SIZE_HC = 1 << HASH_LOG_HC;
static final int OPTIMAL_ML = ML_MASK - 1 + MIN_MATCH;
static int compress64k(byte[] src, int srcOff, int srcLen) {
final int srcEnd = srcOff + srcLen;
final int srcLimit = srcEnd - LAST_LITERALS;
final int mflimit = srcEnd - MF_LIMIT;
int sOff = srcOff, dOff = 0;
int anchor = sOff;
if (srcLen >= MIN_LENGTH) {
final short[] hashTable = new short[HASH_TABLE_SIZE_64K];
++sOff;
main: while (true) {
// find a match
int forwardOff = sOff;
int ref;
int findMatchAttempts = (1 << SKIP_STRENGTH) + 3;
do {
sOff = forwardOff;
forwardOff += findMatchAttempts++ >>> SKIP_STRENGTH;
if (forwardOff > mflimit) {
break main;
}
final int h = hash64k(readInt(src, sOff));
ref = srcOff + readShort(hashTable, h);
writeShort(hashTable, h, sOff - srcOff);
} while (!readIntEquals(src, ref, sOff));
// catch up
final int excess = commonBytesBackward(src, ref, sOff, srcOff, anchor);
sOff -= excess;
ref -= excess;
// sequence == refsequence
final int runLen = sOff - anchor;
dOff++;
if (runLen >= RUN_MASK) {
if (runLen > RUN_MASK)
dOff += (runLen - RUN_MASK) / 0xFF;
dOff++;
}
dOff += runLen;
while (true) {
// encode offset
dOff += 2;
// count nb matches
sOff += MIN_MATCH;
ref += MIN_MATCH;
final int matchLen = commonBytes(src, ref, sOff, srcLimit);
sOff += matchLen;
// encode match len
if (matchLen >= ML_MASK) {
if (matchLen >= ML_MASK + 0xFF)
dOff += (matchLen - ML_MASK) / 0xFF;
dOff++;
}
// test end of chunk
if (sOff > mflimit) {
anchor = sOff;
break main;
}
// fill table
writeShort(hashTable, hash64k(readInt(src, sOff - 2)), sOff - 2 - srcOff);
// test next position
final int h = hash64k(readInt(src, sOff));
ref = srcOff + readShort(hashTable, h);
writeShort(hashTable, h, sOff - srcOff);
if (!readIntEquals(src, sOff, ref)) {
break;
}
dOff++;
}
// prepare next loop
anchor = sOff++;
}
}
int runLen = srcEnd - anchor;
if (runLen >= RUN_MASK + 0xFF) {
dOff += (runLen - RUN_MASK) / 0xFF;
}
dOff++;
dOff += runLen;
return dOff;
}
static final int maxCompressedLength(int length) {
if (length < 0) {
throw new IllegalArgumentException("length must be >= 0, got " + length);
}
return length + length / 255 + 16;
}
static int hash(int i) {
return (i * -1640531535) >>> ((MIN_MATCH * 8) - HASH_LOG);
}
static int hash64k(int i) {
return (i * -1640531535) >>> ((MIN_MATCH * 8) - HASH_LOG_64K);
}
static int readShortLittleEndian(byte[] buf, int i) {
return (buf[i] & 0xFF) | ((buf[i + 1] & 0xFF) << 8);
}
static boolean readIntEquals(byte[] buf, int i, int j) {
return buf[i] == buf[j] && buf[i + 1] == buf[j + 1] && buf[i + 2] == buf[j + 2]
&& buf[i + 3] == buf[j + 3];
}
static int commonBytes(byte[] b, int o1, int o2, int limit) {
int count = 0;
while (o2 < limit && b[o1++] == b[o2++]) {
++count;
}
return count;
}
static int commonBytesBackward(byte[] b, int o1, int o2, int l1, int l2) {
int count = 0;
while (o1 > l1 && o2 > l2 && b[--o1] == b[--o2]) {
++count;
}
return count;
}
static int readShort(short[] buf, int off) {
return buf[off] & 0xFFFF;
}
static byte readByte(byte[] buf, int i) {
return buf[i];
}
static void checkRange(byte[] buf, int off) {
if (off < 0 || off >= buf.length) {
throw new ArrayIndexOutOfBoundsException(off);
}
}
static void checkRange(byte[] buf, int off, int len) {
checkLength(len);
if (len > 0) {
checkRange(buf, off);
checkRange(buf, off + len - 1);
}
}
static void checkLength(int len) {
if (len < 0) {
throw new IllegalArgumentException("lengths must be >= 0");
}
}
static int readIntBE(byte[] buf, int i) {
return ((buf[i] & 0xFF) << 24) | ((buf[i + 1] & 0xFF) << 16) | ((buf[i + 2] & 0xFF) << 8)
| (buf[i + 3] & 0xFF);
}
static int readIntLE(byte[] buf, int i) {
return (buf[i] & 0xFF) | ((buf[i + 1] & 0xFF) << 8) | ((buf[i + 2] & 0xFF) << 16)
| ((buf[i + 3] & 0xFF) << 24);
}
/**
 * Reads a 4-byte int at {@code i} using the platform's native byte order
 * (dispatches to {@code readIntBE}/{@code readIntLE} based on NATIVE_BYTE_ORDER).
 */
static int readInt(byte[] buf, int i) {
    if (NATIVE_BYTE_ORDER == ByteOrder.BIG_ENDIAN) {
        return readIntBE(buf, i);
    } else {
        return readIntLE(buf, i);
    }
}
/** Stores the low 16 bits of {@code v} into the hash table entry at {@code off}. */
static void writeShort(short[] buf, int off, int v) {
    buf[off] = (short) v;
}
}

View File

@ -0,0 +1,203 @@
package ar.com.hjg.pngj.pixels;
import java.util.Arrays;
import ar.com.hjg.pngj.FilterType;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjExceptionInternal;
/**
 * Estimates the relative cost of each of the five standard PNG filter types
 * for the rows it is fed, keeping an exponentially-decayed running cost per
 * filter. Used by the adaptive filter-selection strategies.
 */
public class FiltersPerformance {

    /** image geometry/format; fixed at construction */
    private final ImageInfo iminfo;
    /** exponential-decay factor for the per-filter running cost (higher = longer memory) */
    private double memoryA = 0.7; // empirical (not very critical: 0.72)
    /** last row number seen, to detect the start of a new row cycle */
    private int lastrow = -1;
    private double[] absum = new double[5];// depending on the strategy not all values might be
                                           // computed for all
    /** per-filter entropy estimate for the current row (NaN if not computed) */
    private double[] entropy = new double[5];
    /** per-filter decayed running cost; lower is better */
    private double[] cost = new double[5];
    private int[] histog = new int[256]; // temporary, not normalized
    /** filter index returned by the last call to getPreferred() */
    private int lastprefered = -1;
    private boolean initdone = false;
    private double preferenceForNone = 1.0; // higher gives more preference to NONE
    // these values are empirical (montecarlo), for RGB8 images with entropy estimator for NONE and
    // memory=0.7
    // DONT MODIFY THIS
    public static final double[] FILTER_WEIGHTS_DEFAULT = {0.73, 1.03, 0.97, 1.11, 1.22}; // lower is
                                                                                          // better!
    /** effective per-filter weights; -1 sentinel means "not yet initialized / not set externally" */
    private double[] filter_weights = new double[] {-1, -1, -1, -1, -1};

    private final static double LOG2NI = -1.0 / Math.log(2.0);

    public FiltersPerformance(ImageInfo imgInfo) {
        this.iminfo = imgInfo;
    }

    /**
     * Lazy initialization: derives the weight for FILTER_NONE from the image
     * properties (bit depth, alpha, palette) unless weights were set from
     * outside, then resets all running costs to 1.0. Idempotent in effect.
     */
    private void init() {
        if (filter_weights[0] < 0) {// has not been set from outside
            System.arraycopy(FILTER_WEIGHTS_DEFAULT, 0, filter_weights, 0, 5);
            double wNone = filter_weights[0];
            if (iminfo.bitDepth == 16)
                wNone = 1.2;
            else if (iminfo.alpha)
                wNone = 0.8;
            else if (iminfo.indexed || iminfo.bitDepth < 8)
                wNone = 0.4; // we prefer NONE strongly
            wNone /= preferenceForNone;
            filter_weights[0] = wNone;
        }
        Arrays.fill(cost, 1.0);
        initdone = true;
    }

    /** Updates the statistics for {@code ftype} from an already-filtered row. */
    public void updateFromFiltered(FilterType ftype, byte[] rowff, int rown) {
        updateFromRawOrFiltered(ftype, rowff, null, null, rown);
    }

    /** alternative: computes statistic without filtering */
    public void updateFromRaw(FilterType ftype, byte[] rowb, byte[] rowbprev, int rown) {
        updateFromRawOrFiltered(ftype, null, rowb, rowbprev, rown);
    }

    /**
     * Common path for the two update methods. Exactly one of rowff (filtered)
     * or rowb/rowbprev (raw) is non-null. Resets per-row estimates when the
     * row number changes; FILTER_NONE uses the entropy estimator, all other
     * filters use the absolute-sum estimator.
     */
    private void updateFromRawOrFiltered(FilterType ftype, byte[] rowff, byte[] rowb,
            byte[] rowbprev, int rown) {
        if (!initdone)
            init();
        if (rown != lastrow) {
            Arrays.fill(absum, Double.NaN);
            Arrays.fill(entropy, Double.NaN);
        }
        lastrow = rown;
        if (rowff != null)
            computeHistogram(rowff);
        else
            computeHistogramForFilter(ftype, rowb, rowbprev);
        if (ftype == FilterType.FILTER_NONE)
            entropy[ftype.val] = computeEntropyFromHistogram();
        else
            absum[ftype.val] = computeAbsFromHistogram();
    }

    /* WARNING: this is not idempotent, call it just once per cycle (sigh) */
    public FilterType getPreferred() {
        int fi = 0;
        double vali = Double.MAX_VALUE, val = 0; // lower wins
        for (int i = 0; i < 5; i++) {
            if (!Double.isNaN(absum[i])) {
                val = absum[i];
            } else if (!Double.isNaN(entropy[i])) {
                // map the entropy estimate onto the same scale as the abs-sum estimate
                val = (Math.pow(2.0, entropy[i]) - 1.0) * 0.5;
            } else
                continue;
            val *= filter_weights[i];
            // exponential decay: blend new estimate into the running cost
            val = cost[i] * memoryA + (1 - memoryA) * val;
            cost[i] = val;
            if (val < vali) {
                vali = val;
                fi = i;
            }
        }
        lastprefered = fi;
        return FilterType.getByVal(lastprefered);
    }

    /**
     * Fills {@code histog} with the byte histogram the given filter would
     * produce on this raw row, without materializing the filtered row.
     * Row buffers are 1-based: index 0 is the filter byte slot.
     */
    public final void computeHistogramForFilter(FilterType filterType, byte[] rowb, byte[] rowbprev) {
        Arrays.fill(histog, 0);
        int i, j, imax = iminfo.bytesPerRow;
        switch (filterType) {
            case FILTER_NONE:
                for (i = 1; i <= imax; i++)
                    histog[rowb[i] & 0xFF]++;
                break;
            case FILTER_PAETH:
                // first pixel: no left neighbor, left/upper-left are 0
                for (i = 1; i <= imax; i++)
                    histog[PngHelperInternal.filterRowPaeth(rowb[i], 0, rowbprev[i] & 0xFF, 0)]++;
                for (j = 1, i = iminfo.bytesPixel + 1; i <= imax; i++, j++)
                    histog[PngHelperInternal.filterRowPaeth(rowb[i], rowb[j] & 0xFF, rowbprev[i] & 0xFF,
                            rowbprev[j] & 0xFF)]++;
                break;
            case FILTER_SUB:
                for (i = 1; i <= iminfo.bytesPixel; i++)
                    histog[rowb[i] & 0xFF]++;
                for (j = 1, i = iminfo.bytesPixel + 1; i <= imax; i++, j++)
                    histog[(rowb[i] - rowb[j]) & 0xFF]++;
                break;
            case FILTER_UP:
                for (i = 1; i <= iminfo.bytesPerRow; i++)
                    histog[(rowb[i] - rowbprev[i]) & 0xFF]++;
                break;
            case FILTER_AVERAGE:
                for (i = 1; i <= iminfo.bytesPixel; i++)
                    histog[((rowb[i] & 0xFF) - ((rowbprev[i] & 0xFF)) / 2) & 0xFF]++;
                for (j = 1, i = iminfo.bytesPixel + 1; i <= imax; i++, j++)
                    histog[((rowb[i] & 0xFF) - ((rowbprev[i] & 0xFF) + (rowb[j] & 0xFF)) / 2) & 0xFF]++;
                break;
            default:
                throw new PngjExceptionInternal("Bad filter:" + filterType);
        }
    }

    // NOTE(review): this loop uses i < bytesPerRow while computeHistogramForFilter
    // uses i <= imax; confirm the off-by-one difference is intentional
    public void computeHistogram(byte[] rowff) {
        Arrays.fill(histog, 0);
        for (int i = 1; i < iminfo.bytesPerRow; i++)
            histog[rowff[i] & 0xFF]++;
    }

    /**
     * Sum of absolute values of the histogrammed (signed) filtered bytes,
     * normalized per row byte; values 128..255 are folded as 128..1.
     */
    public double computeAbsFromHistogram() {
        int s = 0;
        for (int i = 1; i < 128; i++)
            s += histog[i] * i;
        for (int i = 128, j = 128; j > 0; i++, j--)
            s += histog[i] * j;
        return s / (double) iminfo.bytesPerRow;
    }

    /** Shannon entropy (bits per byte) of the current histogram; clamped at 0. */
    public final double computeEntropyFromHistogram() {
        double s = 1.0 / iminfo.bytesPerRow;
        double ls = Math.log(s);
        double h = 0;
        for (int x : histog) {
            if (x > 0)
                h += (Math.log(x) + ls) * x;
        }
        h *= s * LOG2NI;
        if (h < 0.0)
            h = 0.0;
        return h;
    }

    /**
     * If larger than 1.0, NONE will be more preferred. This must be called before init
     *
     * @param preferenceForNone around 1.0 (default: 1.0)
     */
    public void setPreferenceForNone(double preferenceForNone) {
        this.preferenceForNone = preferenceForNone;
    }

    /**
     * Values greater than 1.0 (towards infinite) increase the memory towards 1. Values smaller than 1.0 (towards zero)
     * decrease the memory.
     */
    public void tuneMemory(double m) {
        if (m == 0)
            memoryA = 0.0;
        else
            memoryA = Math.pow(memoryA, 1.0 / m);
    }

    /**
     * To set manually the filter weights. This is not recommended, unless you know what you are doing. Setting this
     * ignores preferenceForNone and omits some heuristics
     *
     * @param weights Five doubles around 1.0, one for each filter type. Lower is preferred
     */
    public void setFilterWeights(double[] weights) {
        System.arraycopy(weights, 0, filter_weights, 0, 5);
    }
}

View File

@ -0,0 +1,263 @@
package ar.com.hjg.pngj.pixels;
import java.io.OutputStream;
import java.util.zip.Deflater;
import ar.com.hjg.pngj.FilterType;
import ar.com.hjg.pngj.IDatChunkWriter;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngHelperInternal;
import ar.com.hjg.pngj.PngjOutputException;
/**
 * Encodes a set of rows (pixels) as a continuous deflated stream (does not know about IDAT chunk segmentation).
 * <p>
 * This includes the filter selection strategy, plus the filtering itself and the deflating. Only supports fixed length
 * rows (no interlaced writing).
 * <p>
 * Typically an instance of this is held by a PngWriter - but more instances could be used (for APNG)
 */
public abstract class PixelsWriter {

    private static final int IDAT_MAX_SIZE_DEFAULT = 32000;

    protected final ImageInfo imgInfo;
    /**
     * row buffer length, including filter byte (imgInfo.bytesPerRow + 1)
     */
    protected final int buflen;

    protected final int bytesPixel;
    protected final int bytesRow;

    private CompressorStream compressorStream; // to compress the idat stream

    protected int deflaterCompLevel = 6;
    protected int deflaterStrategy = Deflater.DEFAULT_STRATEGY;

    protected boolean initdone = false;

    /**
     * This is the globally configured filter type - it can be a concrete type or a pseudo type (hint or strategy)
     */
    protected FilterType filterType;

    // counts the filters used - just for stats
    private int[] filtersUsed = new int[5];

    // this is the raw underlying os (shared with the PngWriter)
    private OutputStream os;

    private int idatMaxSize = IDAT_MAX_SIZE_DEFAULT;

    /**
     * row being processed, counting from zero
     */
    protected int currentRow;

    public PixelsWriter(ImageInfo imgInfo) {
        this.imgInfo = imgInfo;
        bytesRow = imgInfo.bytesPerRow;
        buflen = bytesRow + 1;
        bytesPixel = imgInfo.bytesPixel;
        currentRow = -1;
        filterType = FilterType.FILTER_DEFAULT;
    }

    /**
     * main internal point for external call. It does the lazy initialization if necessary, sets current row, and calls
     * {@link #filterAndWrite(byte[])}
     */
    public final void processRow(final byte[] rowb) {
        if (!initdone)
            init();
        currentRow++;
        filterAndWrite(rowb);
    }

    /**
     * Writes an already-filtered row (first byte is the filter code) to the
     * compressor and updates the per-filter usage stats.
     */
    protected void sendToCompressedStream(byte[] rowf) {
        compressorStream.write(rowf, 0, rowf.length);
        filtersUsed[rowf[0]]++;
    }

    /**
     * This does the filtering and send to stream. Typically should decide the filtering, call
     * {@link #filterRowWithFilterType(FilterType, byte[], byte[], byte[])} and
     * {@link #sendToCompressedStream(byte[])}
     *
     * @param rowb raw row, 1-based (index 0 reserved for the filter byte)
     */
    protected abstract void filterAndWrite(final byte[] rowb);

    /**
     * Does the real filtering. This must be called with the real (standard) filterType. This should rarely be overriden.
     * <p>
     * WARNING: look out the contract
     *
     * @param _filterType a concrete (standard) filter type
     * @param _rowb current row (the first byte might be modified)
     * @param _rowbprev previous row (should be all zero the first time)
     * @param _rowf tentative buffer to store the filtered bytes. might not be used!
     * @return normally _rowf, but eventually _rowb. This MUST NOT BE MODIFIED nor reused by caller
     */
    final protected byte[] filterRowWithFilterType(FilterType _filterType, byte[] _rowb,
            byte[] _rowbprev, byte[] _rowf) {
        // warning: some filters rely on: "previous row" (rowbprev) it must be initialized to 0 the
        // first time
        if (_filterType == FilterType.FILTER_NONE)
            _rowf = _rowb;
        _rowf[0] = (byte) _filterType.val;
        int i, j;
        switch (_filterType) {
            case FILTER_NONE:
                // we return the same original (be careful!)
                break;
            case FILTER_PAETH:
                // first pixel has no left neighbor: left and upper-left are taken as 0
                for (i = 1; i <= bytesPixel; i++)
                    _rowf[i] = (byte) PngHelperInternal.filterRowPaeth(_rowb[i], 0, _rowbprev[i] & 0xFF, 0);
                for (j = 1, i = bytesPixel + 1; i <= bytesRow; i++, j++)
                    _rowf[i] =
                            (byte) PngHelperInternal.filterRowPaeth(_rowb[i], _rowb[j] & 0xFF,
                                    _rowbprev[i] & 0xFF, _rowbprev[j] & 0xFF);
                break;
            case FILTER_SUB:
                for (i = 1; i <= bytesPixel; i++)
                    _rowf[i] = (byte) _rowb[i];
                for (j = 1, i = bytesPixel + 1; i <= bytesRow; i++, j++)
                    _rowf[i] = (byte) (_rowb[i] - _rowb[j]);
                break;
            case FILTER_AVERAGE:
                for (i = 1; i <= bytesPixel; i++)
                    _rowf[i] = (byte) (_rowb[i] - (_rowbprev[i] & 0xFF) / 2);
                for (j = 1, i = bytesPixel + 1; i <= bytesRow; i++, j++)
                    _rowf[i] = (byte) (_rowb[i] - ((_rowbprev[i] & 0xFF) + (_rowb[j] & 0xFF)) / 2);
                break;
            case FILTER_UP:
                for (i = 1; i <= bytesRow; i++)
                    _rowf[i] = (byte) (_rowb[i] - _rowbprev[i]);
                break;
            default:
                throw new PngjOutputException("Filter type not recognized: " + _filterType);
        }
        return _rowf;
    }

    /**
     * This will be called by the PngWrite to fill the raw pixels for each row. This can change from call to call.
     * Warning: this can be called before the object is init, implementations should call init() to be sure
     */
    public abstract byte[] getRowb();

    /**
     * This will be called lazily just before writing row 0. Idempotent.
     */
    protected final void init() {
        if (!initdone) {
            initParams();
            initdone = true;
        }
    }

    /** called by init(); override (calling this first) to do additional initialization */
    protected void initParams() {
        IDatChunkWriter idatWriter = new IDatChunkWriter(os, idatMaxSize);
        if (compressorStream == null) { // if not set, use the deflater
            compressorStream =
                    new CompressorStreamDeflater(idatWriter, buflen, imgInfo.getTotalRawBytes(),
                            deflaterCompLevel, deflaterStrategy);
        }
    }

    /** cleanup. This should be called explicitly. Idempotent and secure */
    public void close() {
        if (compressorStream != null) {
            compressorStream.close();
        }
    }

    /**
     * Deflater (ZLIB) strategy. You should rarely change this from the default (Deflater.DEFAULT_STRATEGY) to
     * Deflater.FILTERED (Deflater.HUFFMAN_ONLY is fast but compress poorly)
     */
    public void setDeflaterStrategy(Integer deflaterStrategy) {
        this.deflaterStrategy = deflaterStrategy;
    }

    /**
     * Deflater (ZLIB) compression level, between 0 (no compression) and 9
     */
    public void setDeflaterCompLevel(Integer deflaterCompLevel) {
        this.deflaterCompLevel = deflaterCompLevel;
    }

    public Integer getDeflaterCompLevel() {
        return deflaterCompLevel;
    }

    /** Sets the raw output stream (shared with the PngWriter); must be set before init. */
    public final void setOs(OutputStream datStream) {
        this.os = datStream;
    }

    public OutputStream getOs() {
        return os;
    }

    /** @see #filterType */
    final public FilterType getFilterType() {
        return filterType;
    }

    /** @see #filterType */
    final public void setFilterType(FilterType filterType) {
        this.filterType = filterType;
    }

    /* out/in This should be called only after end() to get reliable results */
    public double getCompression() {
        return compressorStream.isDone() ? compressorStream.getCompressionRatio() : 1.0;
    }

    public void setCompressorStream(CompressorStream compressorStream) {
        this.compressorStream = compressorStream;
    }

    public long getTotalBytesToWrite() {
        return imgInfo.getTotalRawBytes();
    }

    /** True when the last row has been processed. */
    public boolean isDone() {
        return currentRow == imgInfo.rows - 1;
    }

    /**
     * computed default fixed filter type to use, if specified DEFAULT; wild guess based on image properties
     *
     * @return One of the five concrete filter types
     */
    protected FilterType getDefaultFilter() {
        if (imgInfo.indexed || imgInfo.bitDepth < 8)
            return FilterType.FILTER_NONE;
        else if (imgInfo.getTotalPixels() < 1024)
            return FilterType.FILTER_NONE;
        else if (imgInfo.rows == 1)
            return FilterType.FILTER_SUB;
        else if (imgInfo.cols == 1)
            return FilterType.FILTER_UP;
        else
            return FilterType.FILTER_PAETH;
    }

    /** informational stats : filter used, in percentages */
    final public String getFiltersUsed() {
        return String.format("%d,%d,%d,%d,%d", (int) (filtersUsed[0] * 100.0 / imgInfo.rows + 0.5),
                (int) (filtersUsed[1] * 100.0 / imgInfo.rows + 0.5), (int) (filtersUsed[2] * 100.0
                        / imgInfo.rows + 0.5), (int) (filtersUsed[3] * 100.0 / imgInfo.rows + 0.5),
                (int) (filtersUsed[4] * 100.0 / imgInfo.rows + 0.5));
    }

    /** Maximum size of each IDAT chunk; must be set before init to take effect. */
    public void setIdatMaxSize(int idatMaxSize) {
        this.idatMaxSize = idatMaxSize;
    }
}

View File

@ -0,0 +1,158 @@
package ar.com.hjg.pngj.pixels;
import java.util.Arrays;
import ar.com.hjg.pngj.FilterType;
import ar.com.hjg.pngj.ImageInfo;
import ar.com.hjg.pngj.PngjOutputException;
/**
 * Default implementation of PixelsWriter, with fixed filters and also adaptive strategies.
 */
public class PixelsWriterDefault extends PixelsWriter {
    /** current raw row */
    protected byte[] rowb;
    /** previous raw row */
    protected byte[] rowbprev;
    /** buffer for filtered row */
    protected byte[] rowbfilter;

    /** evaluates different filters, for adaptive strategy */
    protected FiltersPerformance filtersPerformance;
    /** currently concrete selected filter type */
    protected FilterType curfilterType;
    /** parameters for adaptive strategy */
    protected int adaptMaxSkip; // set in initParams, does not change
    protected int adaptSkipIncreaseSinceRow; // set in initParams, does not change
    protected double adaptSkipIncreaseFactor; // set in initParams, does not change
    // next row at which a full filter re-evaluation will be done
    protected int adaptNextRow = 0;

    public PixelsWriterDefault(ImageInfo imgInfo) {
        super(imgInfo);
        filtersPerformance = new FiltersPerformance(imgInfo);
    }

    /**
     * Allocates (or reuses) the row buffers and resolves the adaptive-strategy
     * parameters (skip schedule) from the configured filter type.
     */
    @Override
    protected void initParams() {
        super.initParams();
        if (rowb == null || rowb.length < buflen)
            rowb = new byte[buflen];
        if (rowbfilter == null || rowbfilter.length < buflen)
            rowbfilter = new byte[buflen];
        if (rowbprev == null || rowbprev.length < buflen)
            rowbprev = new byte[buflen];
        else
            Arrays.fill(rowbprev, (byte) 0);
        // if adaptive but too few rows or columns, use default
        if (imgInfo.cols < 3 && !FilterType.isValidStandard(filterType))
            filterType = FilterType.FILTER_DEFAULT;
        if (imgInfo.rows < 3 && !FilterType.isValidStandard(filterType))
            filterType = FilterType.FILTER_DEFAULT;
        if (imgInfo.getTotalPixels() <= 1024 && !FilterType.isValidStandard(filterType))
            filterType = getDefaultFilter();
        if (FilterType.isAdaptive(filterType)) {
            // adaptCurSkip = 0;
            adaptNextRow = 0;
            if (filterType == FilterType.FILTER_ADAPTIVE_FAST) {
                adaptMaxSkip = 200;
                adaptSkipIncreaseSinceRow = 3;
                adaptSkipIncreaseFactor = 1 / 4.0; // skip ~ row/3
            } else if (filterType == FilterType.FILTER_ADAPTIVE_MEDIUM) {
                adaptMaxSkip = 8;
                adaptSkipIncreaseSinceRow = 32;
                adaptSkipIncreaseFactor = 1 / 80.0;
            } else if (filterType == FilterType.FILTER_ADAPTIVE_FULL) {
                adaptMaxSkip = 0;
                adaptSkipIncreaseSinceRow = 128;
                adaptSkipIncreaseFactor = 1 / 120.0;
            } else
                throw new PngjOutputException("bad filter " + filterType);
        }
    }

    /**
     * Chooses the concrete filter, filters the row, sends it to the compressor
     * and swaps the current/previous row buffers.
     */
    @Override
    protected void filterAndWrite(final byte[] rowb) {
        if (rowb != this.rowb)
            throw new RuntimeException("??"); // we rely on this
        decideCurFilterType();
        byte[] filtered = filterRowWithFilterType(curfilterType, rowb, rowbprev, rowbfilter);
        sendToCompressedStream(filtered);
        // swap rowb <-> rowbprev
        byte[] aux = this.rowb;
        this.rowb = rowbprev;
        rowbprev = aux;
    }

    /**
     * Resolves the configured (possibly pseudo) filter type into a concrete one
     * and stores it in curfilterType. For adaptive strategies, re-evaluates all
     * standard filters only at scheduled rows (adaptNextRow).
     */
    protected void decideCurFilterType() {
        // decide the real filter and store in curfilterType
        if (FilterType.isValidStandard(getFilterType())) {
            curfilterType = getFilterType();
        } else if (getFilterType() == FilterType.FILTER_PRESERVE) {
            curfilterType = FilterType.getByVal(rowb[0]);
        } else if (getFilterType() == FilterType.FILTER_CYCLIC) {
            curfilterType = FilterType.getByVal(currentRow % 5);
        } else if (getFilterType() == FilterType.FILTER_DEFAULT) {
            setFilterType(getDefaultFilter());
            curfilterType = getFilterType(); // this could be done once
        } else if (FilterType.isAdaptive(getFilterType())) {// adaptive
            if (currentRow == adaptNextRow) {
                for (FilterType ftype : FilterType.getAllStandard())
                    filtersPerformance.updateFromRaw(ftype, rowb, rowbprev, currentRow);
                curfilterType = filtersPerformance.getPreferred();
                int skip =
                        (currentRow >= adaptSkipIncreaseSinceRow ? (int) Math
                                .round((currentRow - adaptSkipIncreaseSinceRow) * adaptSkipIncreaseFactor) : 0);
                if (skip > adaptMaxSkip)
                    skip = adaptMaxSkip;
                if (currentRow == 0)
                    skip = 0;
                adaptNextRow = currentRow + 1 + skip;
            }
        } else {
            throw new PngjOutputException("not implemented filter: " + getFilterType());
        }
        if (currentRow == 0 && curfilterType != FilterType.FILTER_NONE
                && curfilterType != FilterType.FILTER_SUB)
            curfilterType = FilterType.FILTER_SUB; // first row should always be none or sub
    }

    @Override
    public byte[] getRowb() {
        if (!initdone)
            init();
        return rowb;
    }

    @Override
    public void close() {
        super.close();
    }

    /**
     * Only for adaptive strategies. See {@link FiltersPerformance#setPreferenceForNone(double)}
     */
    public void setPreferenceForNone(double preferenceForNone) {
        filtersPerformance.setPreferenceForNone(preferenceForNone);
    }

    /**
     * Only for adaptive strategies. See {@link FiltersPerformance#tuneMemory(double)}
     */
    public void tuneMemory(double m) {
        filtersPerformance.tuneMemory(m);
    }

    /**
     * Only for adaptive strategies. See {@link FiltersPerformance#setFilterWeights(double[])}
     */
    public void setFilterWeights(double[] weights) {
        filtersPerformance.setFilterWeights(weights);
    }
}

View File

@ -0,0 +1,241 @@
package ar.com.hjg.pngj.pixels;
import java.util.LinkedList;
import java.util.zip.Deflater;
import ar.com.hjg.pngj.FilterType;
import ar.com.hjg.pngj.ImageInfo;
/**
 * Special pixels writer for an experimental "super adaptive" strategy: rows are
 * grouped in bands; every standard filter (plus an adaptive pick) is trial-compressed
 * for the whole band and the cheapest one wins per band.
 */
public class PixelsWriterMultiple extends PixelsWriter {
    /**
     * unfiltered rowsperband elements, 0 is the current (rowb). This should include all rows of current band, plus one
     */
    protected LinkedList<byte[]> rows;
    /**
     * bank of compressor estimators, one for each filter and (perhaps) an adaptive strategy
     */
    protected CompressorStream[] filterBank = new CompressorStream[6];
    /**
     * stored filtered rows, one for each filter (0=none is not allocated but linked)
     */
    protected byte[][] filteredRows = new byte[5][];
    protected byte[] filteredRowTmp; // scratch buffer for re-filtering at band end
    protected FiltersPerformance filtersPerf;
    protected int rowsPerBand = 0; // This is a 'nominal' size
    protected int rowsPerBandCurrent = 0; // lastRowInThisBand-firstRowInThisBand +1 : might be
                                          // smaller than rowsPerBand
    protected int rowInBand = -1;
    protected int bandNum = -1;
    protected int firstRowInThisBand, lastRowInThisBand;
    private boolean tryAdaptive = true;
    protected static final int HINT_MEMORY_DEFAULT_KB = 100;
    // we will consume about (not more than) this memory (in buffers, not counting the compressors)
    protected int hintMemoryKb = HINT_MEMORY_DEFAULT_KB;
    private int hintRowsPerBand = 1000; // default: very large number, can be changed
    private boolean useLz4 = true;

    public PixelsWriterMultiple(ImageInfo imgInfo) {
        super(imgInfo);
        filtersPerf = new FiltersPerformance(imgInfo);
        rows = new LinkedList<byte[]>();
        for (int i = 0; i < 2; i++)
            rows.add(new byte[buflen]); // we preallocate 2 rows (rowb and rowbprev)
        filteredRowTmp = new byte[buflen];
    }

    /**
     * Feeds the current row to every per-filter trial compressor; at the last
     * row of a band, picks the cheapest compressor and emits the whole band
     * (re-filtering rows as needed), then rotates the row buffers.
     */
    @Override
    protected void filterAndWrite(byte[] rowb) {
        if (!initdone)
            init();
        if (rowb != rows.get(0))
            throw new RuntimeException("?");
        setBandFromNewRown();
        byte[] rowbprev = rows.get(1);
        for (FilterType ftype : FilterType.getAllStandardNoneLast()) {
            // this has a special behaviour for NONE: filteredRows[0] is null, and the returned value is
            // rowb
            if (currentRow == 0 && ftype != FilterType.FILTER_NONE && ftype != FilterType.FILTER_SUB)
                continue;
            byte[] filtered = filterRowWithFilterType(ftype, rowb, rowbprev, filteredRows[ftype.val]);
            filterBank[ftype.val].write(filtered);
            if (currentRow == 0 && ftype == FilterType.FILTER_SUB) { // little lie, only for first row
                filterBank[FilterType.FILTER_PAETH.val].write(filtered);
                filterBank[FilterType.FILTER_AVERAGE.val].write(filtered);
                filterBank[FilterType.FILTER_UP.val].write(filtered);
            }
            // adaptive: report each filtered row
            if (tryAdaptive) {
                filtersPerf.updateFromFiltered(ftype, filtered, currentRow);
            }
        }
        filteredRows[0] = rowb;
        if (tryAdaptive) {
            FilterType preferredAdaptive = filtersPerf.getPreferred();
            filterBank[5].write(filteredRows[preferredAdaptive.val]);
        }
        if (currentRow == lastRowInThisBand) {
            int best = getBestCompressor();
            // PngHelperInternal.debug("won: " + best + " (rows: " + firstRowInThisBand + ":" + lastRowInThisBand + ")");
            // if(currentRow>90&&currentRow<100)
            // PngHelperInternal.debug(String.format("row=%d ft=%s",currentRow,FilterType.getByVal(best)));
            byte[] filtersAdapt = filterBank[best].getFirstBytes();
            for (int r = firstRowInThisBand, i = 0, j = lastRowInThisBand - firstRowInThisBand; r <= lastRowInThisBand; r++, j--, i++) {
                int fti = filtersAdapt[i];
                byte[] filtered = null;
                if (r != lastRowInThisBand) {
                    filtered =
                            filterRowWithFilterType(FilterType.getByVal(fti), rows.get(j), rows.get(j + 1),
                                    filteredRowTmp);
                } else { // no need to do this filtering, we already have it
                    filtered = filteredRows[fti];
                }
                sendToCompressedStream(filtered);
            }
        }
        // rotate
        if (rows.size() > rowsPerBandCurrent) {
            rows.addFirst(rows.removeLast());
        } else
            rows.addFirst(new byte[buflen]);
    }

    @Override
    public byte[] getRowb() {
        return rows.get(0);
    }

    /**
     * Updates the band bookkeeping when the current row starts a new band:
     * computes first/last rows (the last band is merged in if it would be too
     * small), decides whether adaptive estimation is worthwhile, and rebuilds
     * the compressor bank.
     */
    private void setBandFromNewRown() {
        boolean newBand = currentRow == 0 || currentRow > lastRowInThisBand;
        if (currentRow == 0)
            bandNum = -1;
        if (newBand) {
            bandNum++;
            rowInBand = 0;
        } else {
            rowInBand++;
        }
        if (newBand) {
            firstRowInThisBand = currentRow;
            lastRowInThisBand = firstRowInThisBand + rowsPerBand - 1;
            int lastRowInNextBand = firstRowInThisBand + 2 * rowsPerBand - 1;
            if (lastRowInNextBand >= imgInfo.rows) // hack:make this band bigger, so we don't have a small
                                                   // last band
                lastRowInThisBand = imgInfo.rows - 1;
            rowsPerBandCurrent = 1 + lastRowInThisBand - firstRowInThisBand;
            tryAdaptive =
                    rowsPerBandCurrent <= 3 || (rowsPerBandCurrent < 10 && imgInfo.bytesPerRow < 64) ? false
                            : true;
            // rebuild bank
            rebuildFiltersBank();
        }
    }

    /**
     * (Re)creates or resets the six trial compressors (five filters + adaptive)
     * sized for the current band.
     */
    private void rebuildFiltersBank() {
        long bytesPerBandCurrent = rowsPerBandCurrent * (long) buflen;
        final int DEFLATER_COMP_LEVEL = 4;
        for (int i = 0; i <= 5; i++) {// one for each filter plus one adaptive
            CompressorStream cp = filterBank[i];
            if (cp == null || cp.totalbytes != bytesPerBandCurrent) {
                if (cp != null)
                    cp.close();
                if (useLz4)
                    cp = new CompressorStreamLz4(null, buflen, bytesPerBandCurrent);
                else
                    cp =
                            new CompressorStreamDeflater(null, buflen, bytesPerBandCurrent, DEFLATER_COMP_LEVEL,
                                    Deflater.DEFAULT_STRATEGY);
                filterBank[i] = cp;
            } else {
                cp.reset();
            }
            cp.setStoreFirstByte(true, rowsPerBandCurrent); // TODO: only for adaptive?
        }
    }

    /**
     * Derives the initial band size from the memory hint, clamped by
     * hintRowsPerBand and the image height, then evened out across the image.
     */
    private int computeInitialRowsPerBand() {
        // memory (only buffers) ~ (r+1+5) * bytesPerRow
        int r = (int) ((hintMemoryKb * 1024.0) / (imgInfo.bytesPerRow + 1) - 5);
        if (r < 1)
            r = 1;
        if (hintRowsPerBand > 0 && r > hintRowsPerBand)
            r = hintRowsPerBand;
        if (r > imgInfo.rows)
            r = imgInfo.rows;
        if (r > 2 && r > imgInfo.rows / 8) { // redistribute more evenly
            int k = (imgInfo.rows + (r - 1)) / r;
            r = (imgInfo.rows + k / 2) / k;
        }
        // PngHelperInternal.debug("rows :" + r + "/" + imgInfo.rows);
        return r;
    }

    /** Index (0-5) of the trial compressor with the best (lowest) compression ratio. */
    private int getBestCompressor() {
        double bestcr = Double.MAX_VALUE;
        int bestb = -1;
        for (int i = tryAdaptive ? 5 : 4; i >= 0; i--) {
            CompressorStream fb = filterBank[i];
            double cr = fb.getCompressionRatio();
            if (cr <= bestcr) { // dirty trick, here the equality gains for row 0, so that SUB is preferred
                                // over PAETH, UP, AVE...
                bestb = i;
                bestcr = cr;
            }
        }
        return bestb;
    }

    @Override
    protected void initParams() {
        super.initParams();
        // if adaptive but too few rows or columns, use default
        if (imgInfo.cols < 3 && !FilterType.isValidStandard(filterType))
            filterType = FilterType.FILTER_DEFAULT;
        if (imgInfo.rows < 3 && !FilterType.isValidStandard(filterType))
            filterType = FilterType.FILTER_DEFAULT;
        for (int i = 1; i <= 4; i++) { // element 0 is not allocated
            if (filteredRows[i] == null || filteredRows[i].length < buflen)
                filteredRows[i] = new byte[buflen];
        }
        if (rowsPerBand == 0)
            rowsPerBand = computeInitialRowsPerBand();
    }

    @Override
    public void close() {
        super.close();
        rows.clear();
        for (CompressorStream f : filterBank) {
            f.close();
        }
    }

    /** Caps the buffer memory used (in KB); values outside (0, 10000] are clamped. */
    public void setHintMemoryKb(int hintMemoryKb) {
        this.hintMemoryKb =
                hintMemoryKb <= 0 ? HINT_MEMORY_DEFAULT_KB : (hintMemoryKb > 10000 ? 10000 : hintMemoryKb);
    }

    public void setHintRowsPerBand(int hintRowsPerBand) {
        this.hintRowsPerBand = hintRowsPerBand;
    }

    /** Chooses LZ4 (fast estimation) vs Deflate for the trial compressors. */
    public void setUseLz4(boolean lz4) {
        this.useLz4 = lz4;
    }

    /** for tuning memory or other parameters */
    public FiltersPerformance getFiltersPerf() {
        return filtersPerf;
    }

    public void setTryAdaptive(boolean tryAdaptive) {
        this.tryAdaptive = tryAdaptive;
    }
}

View File

@ -0,0 +1,14 @@
<html>
<body bgcolor="white">
<p>
Mostly related to logic specific to reading/writing pixels.
</p>
<p>
Includes ImageLine related classes, and rows filtering
</p>
<p>
Some classes like ImageLineInt should belong here, but we keep them in the main package for backward compatibility.
</p>
</body>
</html>

View File

@ -2,6 +2,7 @@ package org.warp.picalculator;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.ref.WeakReference;
public final class PlatformUtils {
public static final boolean isJavascript = false;
@ -29,4 +30,22 @@ public final class PlatformUtils {
e.printStackTrace(pw);
return sw.toString().toUpperCase().replace("\t", " ").replace("\r", "").split("\n");
}
/** Platform hook for loading platform-specific rules; intentionally a no-op in this build. */
public static void loadPlatformRules() {
}
/**
 * Forces a garbage collection by creating a weakly-referenced object and
 * calling System.gc() until the reference is cleared.
 * NOTE(review): System.gc() is only a hint to the VM; if the object is never
 * collected this loop spins indefinitely — confirm this is acceptable here.
 */
public static void gc() {
    Object obj = new Object();
    final WeakReference<Object> ref = new WeakReference<>(obj);
    obj = null; // drop the strong reference so the object becomes collectable
    while (ref.get() != null) {
        System.gc();
    }
}
/** Platform hook invoked when the shift modifier state changes; no-op in this build. */
public static void shiftChanged(boolean alpha) {
}
/** Platform hook invoked when the alpha modifier state changes; no-op in this build. */
public static void alphaChanged(boolean alpha) {
}
}

View File

@ -24,4 +24,7 @@ public class DEngine {
/** Creates the framebuffer graphic engine (available on this platform). */
public static GraphicEngine newFBEngine() {
    return new FBEngine();
}
/** HTML (TeaVM) engine is not available on this platform; always returns null — callers must check. */
public static GraphicEngine newHtmlEngine() {
    return null;
}
}

View File

@ -0,0 +1,20 @@
package org.warp.picalculator.deps;
import java.util.ArrayList;
import java.util.Queue;
import java.util.concurrent.Semaphore;
/**
 * Platform-abstraction alias for {@link java.util.concurrent.Semaphore}; this
 * (JVM) variant adds no behavior and simply delegates to the superclass
 * constructors.
 */
public class DSemaphore extends Semaphore {

    private static final long serialVersionUID = -2362314723921013871L;

    /** Creates a semaphore with the given number of permits (non-fair). */
    public DSemaphore(int arg0) {
        super(arg0);
    }

    /** Creates a semaphore with the given number of permits and fairness setting. */
    public DSemaphore(int permits, boolean fair) {
        super(permits, fair);
    }
}

Some files were not shown because too many files have changed in this diff Show More