==== jcdf-1.2-3/.gitignore ====

jcdf.jar
jcdf_test.jar
index.html
cdflist.html
cdfdump.html
tmp/
javadocs/
.*.swp
data/local/*.cdf

==== jcdf-1.2-3/AttributeDescriptorRecord.java ====

package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
/**
* Field data for CDF record of type Attribute Descriptor Record.
*
* @author Mark Taylor
* @since 19 Jun 2013
*/
public class AttributeDescriptorRecord extends Record {
@CdfField @OffsetField public final long adrNext;
@CdfField @OffsetField public final long agrEdrHead;
@CdfField public final int scope;
@CdfField public final int num;
@CdfField public final int nGrEntries;
@CdfField public final int maxGrEntry;
@CdfField public final int rfuA;
@CdfField @OffsetField public final long azEdrHead;
@CdfField public final int nZEntries;
@CdfField public final int maxZEntry;
@CdfField public final int rfuE;
@CdfField public final String name;
/**
* Constructor.
*
* @param plan basic record info
* @param nameLeng number of characters used for attribute names
*/
public AttributeDescriptorRecord( RecordPlan plan, int nameLeng )
throws IOException {
super( plan, "ADR", 4 );
Buf buf = plan.getBuf();
Pointer ptr = plan.createContentPointer();
this.adrNext = buf.readOffset( ptr );
this.agrEdrHead = buf.readOffset( ptr );
this.scope = buf.readInt( ptr );
this.num = buf.readInt( ptr );
this.nGrEntries = buf.readInt( ptr );
this.maxGrEntry = buf.readInt( ptr );
this.rfuA = checkIntValue( buf.readInt( ptr ), 0 );
this.azEdrHead = buf.readOffset( ptr );
this.nZEntries = buf.readInt( ptr );
this.maxZEntry = buf.readInt( ptr );
this.rfuE = checkIntValue( buf.readInt( ptr ), -1 );
this.name = buf.readAsciiString( ptr, nameLeng );
checkEndRecord( ptr );
}
}

==== jcdf-1.2-3/AttributeEntry.java ====

package uk.ac.bristol.star.cdf;
/**
* Represents an entry in a global or variable attribute.
*
* @author Mark Taylor
* @since 28 Jun 2013
*/
public class AttributeEntry {
private final DataType dataType_;
private final Object rawValue_;
private final int nitem_;
/**
* Constructor.
*
* @param dataType data type
* @param rawValue array object storing original representation
* of the object in the CDF (array of primitives or
* Strings)
* @param nitem number of items represented by the array
*/
public AttributeEntry( DataType dataType, Object rawValue, int nitem ) {
dataType_ = dataType;
rawValue_ = rawValue;
nitem_ = nitem;
}
/**
* Returns the data type of this entry.
*
* @return data type
*/
public DataType getDataType() {
return dataType_;
}
/**
* Returns the array object storing the original representation
* of the object in the CDF. This is an array of either
* primitives or Strings.
*
* @return raw array value
*/
public Object getRawValue() {
return rawValue_;
}
/**
* Returns the value of this entry as a convenient object.
* If the item count is 1 it's the same as getItem(0),
* and if the item count is >1 it's the same as the raw value.
*
* @return shaped entry value
*/
public Object getShapedValue() {
if ( nitem_ == 0 ) {
return null;
}
else if ( nitem_ == 1 ) {
return dataType_.getScalar( rawValue_, 0 );
}
else {
return rawValue_;
}
}
/**
* Returns the number of items in this entry.
*
* @return item count
*/
public int getItemCount() {
return nitem_;
}
/**
* Returns an object representing one of the items in this entry.
* If the raw array is a primitive, the result is a wrapper object.
*
* @param itemIndex item index
* @return value of item
*/
public Object getItem( int itemIndex ) {
return dataType_.getScalar( rawValue_,
dataType_.getArrayIndex( itemIndex ) );
}
/**
* Formats the value of this entry as a string.
*/
@Override
public String toString() {
if ( rawValue_ == null || nitem_ == 0 ) {
return "";
}
else {
StringBuffer sbuf = new StringBuffer();
for ( int i = 0; i < nitem_; i++ ) {
if ( i > 0 ) {
sbuf.append( ", " );
}
sbuf.append( dataType_
.formatArrayValue( rawValue_,
dataType_.getArrayIndex( i ) ) );
}
return sbuf.toString();
}
}
}
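
A behaviour sketch of the shaping rules above (the entry value and its
source are illustrative):

    AttributeEntry entry = ...;   // e.g. from GlobalAttribute.getEntries()
    Object val = entry.getShapedValue();
    // if getItemCount() == 0, val is null;
    // if getItemCount() == 1, val equals getItem( 0 );
    // otherwise val is the raw array, the same object as getRawValue()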

==== jcdf-1.2-3/AttributeEntryDescriptorRecord.java ====

package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
/**
* Abstract superclass for CDF Attribute Entry Descriptor Records.
* Two concrete subclasses exist for AzEDRs and AgrEDRs.
*
* @author Mark Taylor
* @since 19 Jun 2013
*/
public abstract class AttributeEntryDescriptorRecord extends Record {
@CdfField @OffsetField public final long aedrNext;
@CdfField public final int attrNum;
@CdfField public final int dataType;
@CdfField public final int num;
@CdfField public final int numElems;
@CdfField public final int rfuA;
@CdfField public final int rfuB;
@CdfField public final int rfuC;
@CdfField public final int rfuD;
@CdfField public final int rfuE;
private final long valueOffset_;
/**
* Constructor.
*
* @param plan basic record info
* @param abbrev abbreviated name for record type
* @param recordType record type code
*/
private AttributeEntryDescriptorRecord( RecordPlan plan, String abbrev,
int recordType )
throws IOException {
super( plan, abbrev, recordType );
Buf buf = plan.getBuf();
Pointer ptr = plan.createContentPointer();
this.aedrNext = buf.readOffset( ptr );
this.attrNum = buf.readInt( ptr );
this.dataType = buf.readInt( ptr );
this.num = buf.readInt( ptr );
this.numElems = buf.readInt( ptr );
this.rfuA = checkIntValue( buf.readInt( ptr ), 0 );
this.rfuB = checkIntValue( buf.readInt( ptr ), 0 );
this.rfuC = checkIntValue( buf.readInt( ptr ), 0 );
this.rfuD = checkIntValue( buf.readInt( ptr ), -1 );
this.rfuE = checkIntValue( buf.readInt( ptr ), -1 );
valueOffset_ = ptr.get();
}
/**
* Returns the file offset at which this record's Value field starts.
*
* @return file offset of Value field
*/
public long getValueOffset() {
return valueOffset_;
}
/**
* Field data for CDF record of type Attribute g/rEntry Descriptor Record.
*/
public static class GrVariant extends AttributeEntryDescriptorRecord {
/**
* Constructor.
*
* @param plan basic record information
*/
public GrVariant( RecordPlan plan ) throws IOException {
super( plan, "AgrEDR", 5 );
}
}
/**
* Field data for CDF record of type Attribute zEntry Descriptor Record.
*/
public static class ZVariant extends AttributeEntryDescriptorRecord {
/**
* Constructor.
*
* @param plan basic record information
*/
public ZVariant( RecordPlan plan ) throws IOException {
super( plan, "AzEDR", 9 );
}
}
}

==== jcdf-1.2-3/BankBuf.java ====

package uk.ac.bristol.star.cdf.record;
import java.io.EOFException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.SequenceInputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.Arrays;
import java.util.Collections;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Abstract Buf implementation that divides the byte sequence into one
* or more contiguous data banks.
* Each bank contains a run of bytes short enough to be indexed by
* a 4-byte integer.
*
* @author Mark Taylor
* @since 18 Jun 2013
*/
public abstract class BankBuf implements Buf {
private final long size_;
private boolean isBit64_;
private boolean isBigendian_;
private static final Logger logger_ =
Logger.getLogger( BankBuf.class.getName() );
/**
* Constructor.
*
* @param size total size of buffer
* @param isBit64 64bit-ness of buf
* @param isBigendian true for big-endian data, false for little-endian
*/
protected BankBuf( long size, boolean isBit64, boolean isBigendian ) {
size_ = size;
isBit64_ = isBit64;
isBigendian_ = isBigendian;
}
/**
* Returns the bank which can read a given number of bytes starting
* at the given offset.
*
* Implementation: in most cases this will return one of the
* large banks that this object has allocated.
* However, in the case that the requested run straddles a bank
* boundary it may be necessary to generate a short-lived bank
* just to return from this method.
*
* @param offset start of required sequence
* @param count number of bytes in required sequence
* @return bank
*/
protected abstract Bank getBank( long offset, int count )
throws IOException;
/**
* Returns a list of active banks. Banks which have not been
* created yet do not need to be included.
*/
protected abstract List<Bank> getExistingBanks();
}

==== jcdf-1.2-3/CdfDump.java ====

package uk.ac.bristol.star.cdf.util;

import java.io.IOException;
import java.io.PrintStream;
import uk.ac.bristol.star.cdf.CdfReader;
import uk.ac.bristol.star.cdf.record.Buf;
import uk.ac.bristol.star.cdf.record.CdfDescriptorRecord;
import uk.ac.bristol.star.cdf.record.GlobalDescriptorRecord;
import uk.ac.bristol.star.cdf.record.RecordFactory;

/**
* Utility to dump the internal records of a CDF file.
* The output can optionally be written in HTML format.
* The point of this is so that field values which represent pointers
* to records can be displayed as hyperlinks, which makes it very easy
* to chase pointers around the CDF file in a web browser.
*
* @author Mark Taylor
* @since 21 Jun 2013
*/
public class CdfDump {
private final CdfReader crdr_;
private final PrintStream out_;
private final boolean writeFields_;
private final boolean html_;
/**
* Constructor.
*
* @param crdr CDF reader
* @param out output stream for listing
* @param writeFields true to write field data as well as record IDs
* @param html true to write output in HTML format
*/
public CdfDump( CdfReader crdr, PrintStream out, boolean writeFields,
boolean html ) {
crdr_ = crdr;
out_ = out;
writeFields_ = writeFields;
html_ = html;
}
/**
* Does the work, writing output.
*/
public void run() throws IOException {
Buf buf = crdr_.getBuf();
RecordFactory recFact = crdr_.getRecordFactory();
long offset = 8; // magic number
long leng = buf.getLength();
long eof = leng;
CdfDescriptorRecord cdr = null;
GlobalDescriptorRecord gdr = null;
long gdroff = -1;
if ( html_ ) {
out_.println( "<html>" );
}
}
}

==== jcdf-1.2-3/CdfField.java ====

package uk.ac.bristol.star.cdf.record;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
* Marks a field of a Record subclass as holding the value of a
* CDF record field.
* These fields are all public and final, and have names matching
* (apart perhaps from minor case tweaking)
* the fields documented in the relevant subsections of Section 2 of the
* CDF Internal Format Description document.
*
* See that document for a description of the meaning of these fields.
*
* @author Mark Taylor
* @since 25 Jun 2013
* @see
*    CDF Internal Format Description document
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface CdfField {
}

==== jcdf-1.2-3/CdfFormatException.java ====

package uk.ac.bristol.star.cdf;
import java.io.IOException;
/**
* Exception thrown during CDF parsing when the data stream appears either
* to be in contravention of the CDF format, or uses some feature of
* the CDF format which is unsupported by the current implementation.
*
* @author Mark Taylor
* @since 18 Jun 2013
*/
public class CdfFormatException extends IOException {
/**
* Constructs an exception with a message.
*
* @param msg message
*/
public CdfFormatException( String msg ) {
super( msg );
}
/**
* Constructs an exception with a message and a cause.
*
* @param msg message
* @param cause upstream exception
*/
public CdfFormatException( String msg, Throwable cause ) {
super( msg );
initCause( cause );
}
}

==== jcdf-1.2-3/CdfInfo.java ====

package uk.ac.bristol.star.cdf;
/**
* Encapsulates some global information about a CDF file.
*
* @author Mark Taylor
* @since 20 Jun 2013
*/
public class CdfInfo {
private final boolean rowMajor_;
private final int[] rDimSizes_;
private final int leapSecondLastUpdated_;
/**
* Constructor.
*
* @param rowMajor true for row majority, false for column majority
* @param rDimSizes array of dimension sizes for rVariables
* @param leapSecondLastUpdated value of the GDR LeapSecondLastUpdated
* field
*/
public CdfInfo( boolean rowMajor, int[] rDimSizes,
int leapSecondLastUpdated ) {
rowMajor_ = rowMajor;
rDimSizes_ = rDimSizes;
leapSecondLastUpdated_ = leapSecondLastUpdated;
}
/**
* Indicates majority of CDF arrays.
*
* @return true for row majority, false for column majority
*/
public boolean getRowMajor() {
return rowMajor_;
}
/**
* Returns array dimensions for rVariables.
*
* @return array of dimension sizes for rVariables
*/
public int[] getRDimSizes() {
return rDimSizes_;
}
/**
* Returns the date of the last leap second the CDF file knows about.
* This is the value of the LeapSecondLastUpdated field from the GDR
* (introduced at CDF v3.6). The value is an integer whose
* decimal representation is of the form YYYYMMDD.
* Values 0 and -1 have special meaning (no last leap second).
*
* @return last known leap second indicator
*/
public int getLeapSecondLastUpdated() {
return leapSecondLastUpdated_;
}
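
    // Example (illustrative): a CDF whose most recent known leap second
    // was introduced on 2017-01-01 reports the value 20170101 here.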
}

==== jcdf-1.2-3/CdfList.java ====

package uk.ac.bristol.star.cdf.util;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import uk.ac.bristol.star.cdf.AttributeEntry;
import uk.ac.bristol.star.cdf.CdfContent;
import uk.ac.bristol.star.cdf.CdfReader;
import uk.ac.bristol.star.cdf.DataType;
import uk.ac.bristol.star.cdf.GlobalAttribute;
import uk.ac.bristol.star.cdf.Variable;
import uk.ac.bristol.star.cdf.VariableAttribute;
/**
* Utility to describe a CDF file, optionally with record data.
* Intended to be used from the command line via the main method.
*/

==== jcdf-1.2-3/CdfReader.java ====

package uk.ac.bristol.star.cdf;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.util.logging.Logger;
import uk.ac.bristol.star.cdf.record.Buf;
import uk.ac.bristol.star.cdf.record.Bufs;
import uk.ac.bristol.star.cdf.record.CdfDescriptorRecord;
import uk.ac.bristol.star.cdf.record.CompressedCdfRecord;
import uk.ac.bristol.star.cdf.record.CompressedParametersRecord;
import uk.ac.bristol.star.cdf.record.Compression;
import uk.ac.bristol.star.cdf.record.NumericEncoding;
import uk.ac.bristol.star.cdf.record.Pointer;
import uk.ac.bristol.star.cdf.record.Record;
import uk.ac.bristol.star.cdf.record.RecordFactory;

/**
* Constructing an instance of this class reads enough of a file
* to identify it as a CDF and work out how to access its records.
* Most of the actual contents are only read from the data buffer
* as required.
* Although only the magic numbers and CDR are read during construction,
* in the case of a file-compressed CDF the whole thing is uncompressed,
* so it could still be an expensive operation.
*
* For low-level access to the CDF internal records, use the
* {@link #getCdr} method to get the CdfDescriptorRecord and use that
* in conjunction with knowledge of the internal format of CDF files
* as a starting point to chase pointers around the file constructing
* other records. When you have a pointer to another record, you can
* use the record factory got from {@link #getRecordFactory} to turn
* it into a typed Record object.
*
* @author Mark Taylor
* @since 19 Jun 2013
*/
public class CdfReader {
private final CdfDescriptorRecord cdr_;
private final Buf buf_;
private final RecordFactory recordFactory_;
private static final Logger logger_ =
Logger.getLogger( CdfReader.class.getName() );
/**
* Constructs a CdfReader from a buffer containing its byte data.
*
* @param buf buffer containing CDF file
*/
public CdfReader( Buf buf ) throws IOException {
Pointer ptr = new Pointer( 0 );
// Read the CDF magic number bytes.
int magic1 = buf.readInt( ptr );
int magic2 = buf.readInt( ptr );
int offsetRec0 = (int) ptr.get();
// Work out from that what variant (if any) of the CDF format
// this file implements.
CdfVariant variant = decodeMagic( magic1, magic2 );
if ( variant == null ) {
String msg = new StringBuffer()
.append( "Unrecognised magic numbers: " )
.append( "0x" )
.append( Integer.toHexString( magic1 ) )
.append( ", " )
.append( "0x" )
.append( Integer.toHexString( magic2 ) )
.toString();
throw new CdfFormatException( msg );
}
logger_.config( "CDF magic number for " + variant.label_ );
logger_.config( "Whole file compression: " + variant.compressed_ );
// The length of the pointers and sizes used in CDF files are
// dependent on the CDF file format version.
// Notify the buffer which regime is in force for this file.
// Note that no operations for which this makes a difference have
// yet taken place.
buf.setBit64( variant.bit64_ );
// The lengths of some fields differ according to CDF version.
// Construct a record factory that does it right.
recordFactory_ = new RecordFactory( variant.nameLeng_ );
// Read the CDF Descriptor Record. This may be the first record,
// or it may be in a compressed form along with the rest of
// the internal records.
if ( variant.compressed_ ) {
// Work out compression type and location of compressed data.
CompressedCdfRecord ccr =
recordFactory_.createRecord( buf, offsetRec0,
CompressedCdfRecord.class );
CompressedParametersRecord cpr =
recordFactory_.createRecord( buf, ccr.cprOffset,
CompressedParametersRecord.class );
final Compression compress =
Compression.getCompression( cpr.cType );
// Uncompress the compressed data into a new buffer.
// The compressed data is the data record of the CCR.
// When uncompressed it can be treated just like the whole of
// an uncompressed CDF file, except that it doesn't have the
// magic numbers (8 bytes) prepended to it.
// Note however that any file offsets recorded within the file
// are given as if the magic numbers are present - this is not
// very clear from the Internal Format Description document,
// but it appears to be the case from reverse engineering
// whole-file compressed files. To work round this, we hack
// the compression to prepend a dummy 8-byte block to the
// uncompressed stream it provides.
final int prepad = offsetRec0;
assert prepad == 8;
Compression padCompress =
new Compression( "Padded " + compress.getName() ) {
public InputStream uncompressStream( InputStream in )
throws IOException {
InputStream in1 =
new ByteArrayInputStream( new byte[ prepad ] );
InputStream in2 = compress.uncompressStream( in );
return new SequenceInputStream( in1, in2 );
}
};
buf = Bufs.uncompress( padCompress, buf, ccr.getDataOffset(),
ccr.uSize + prepad );
}
cdr_ = recordFactory_.createRecord( buf, offsetRec0,
CdfDescriptorRecord.class );
// Interrogate CDR for required information.
boolean isSingleFile = Record.hasBit( cdr_.flags, 1 );
if ( ! isSingleFile ) {
throw new CdfFormatException( "Multi-file CDFs not supported" );
}
NumericEncoding encoding =
NumericEncoding.getEncoding( cdr_.encoding );
Boolean bigEndian = encoding.isBigendian();
if ( bigEndian == null ) {
throw new CdfFormatException( "Unsupported encoding " + encoding );
}
buf.setEncoding( bigEndian.booleanValue() );
buf_ = buf;
}
/**
* Constructs a CdfReader from a readable file containing its byte data.
*
* @param file CDF file
*/
public CdfReader( File file ) throws IOException {
this( Bufs.createBuf( file, true, true ) );
}
/**
* Returns the buffer containing the uncompressed record stream for
* this reader's CDF file.
* This will be the buffer originally submitted at construction time
* only if the CDF does not use whole-file compression.
*
* @return buffer containing CDF records
*/
public Buf getBuf() {
return buf_;
}
/**
* Returns a RecordFactory that can be applied to this reader's Buf
* to construct CDF Record objects.
*
* @return record factory
*/
public RecordFactory getRecordFactory() {
return recordFactory_;
}
/**
* Returns the CDF Descriptor Record object for this reader's CDF.
*
* @return CDF Descriptor Record
*/
public CdfDescriptorRecord getCdr() {
return cdr_;
}
/**
* Examines a byte array to see if it looks like the start of a CDF file.
*
* @param intro byte array, at least 8 bytes if available
* @return true iff the first 8 bytes of intro look like the start
*         of a CDF file
*/
}
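
A usage sketch for the pointer-chasing approach described in the class
comment (the CdfDescriptorRecord field name gdrOffset is an assumption,
as is the file name):

    CdfReader crdr = new CdfReader( new File( "example.cdf" ) );
    CdfDescriptorRecord cdr = crdr.getCdr();
    Buf buf = crdr.getBuf();
    GlobalDescriptorRecord gdr =
        crdr.getRecordFactory()
            .createRecord( buf, cdr.gdrOffset, GlobalDescriptorRecord.class );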

==== jcdf-1.2-3/ExampleTest.java ====

package uk.ac.bristol.star.cdf.test;

import java.io.File;
import java.io.IOException;

/**
* Tests the content of example CDF files against expected values.
*
* Tests are made using java assertions, so this test must be
* run with java assertions enabled. If it's not, it will fail anyway.
*/
public class ExampleTest {
private static boolean assertionsOn_;

/**
* Sets a flag to record that assertions are enabled.
*
* @return true
*/
private static boolean checkAssertions() {
assertionsOn_ = true;
return true;
}

/**
* Main method.
*/
public static void main( String[] args ) throws IOException {
assert checkAssertions();
if ( ! assertionsOn_ ) {
throw new RuntimeException( "Assertions disabled - bit pointless" );
}
String usage = "Usage: " + ExampleTest.class.getName()
+ " example1.cdf example2.cdf test.cdf";
if ( args.length != 3 ) {
System.err.println( usage );
System.exit( 1 );
}
File ex1 = new File( args[ 0 ] );
File ex2 = new File( args[ 1 ] );
File test = new File( args[ 2 ] );
if ( ! ex1.canRead() || ! ex2.canRead() || ! test.canRead() ) {
System.err.println( usage );
System.exit( 1 );
}
ExampleTest extest = new ExampleTest();
extest.testExample1( ex1 );
extest.testExample2( ex2 );
extest.testTest( test );
}
}

==== jcdf-1.2-3/GlobalAttribute.java ====

package uk.ac.bristol.star.cdf;
/**
* Provides the description and entry values
* for CDF attribute with global scope.
*
* The gEntries and zEntries are combined in a single list,
* on the grounds that users are not likely to be much interested
* in the difference.
*
* @author Mark Taylor
* @since 20 Jun 2013
*/
public class GlobalAttribute {
private final String name_;
private final AttributeEntry[] entries_;
/**
* Constructor.
*
* @param name attribute name
* @param entries attribute entries
*/
public GlobalAttribute( String name, AttributeEntry[] entries ) {
name_ = name;
entries_ = entries;
}
/**
* Returns this attribute's name.
*
* @return attribute name
*/
public String getName() {
return name_;
}
/**
* Returns this attribute's entry values.
*
* @return entry values for this attribute
*/
public AttributeEntry[] getEntries() {
return entries_;
}
}

==== jcdf-1.2-3/GlobalDescriptorRecord.java ====

package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
/**
* Field data for CDF record of type Global Descriptor Record.
*
* @author Mark Taylor
* @since 19 Jun 2013
*/
public class GlobalDescriptorRecord extends Record {
@CdfField @OffsetField public final long rVdrHead;
@CdfField @OffsetField public final long zVdrHead;
@CdfField @OffsetField public final long adrHead;
@CdfField public final long eof;
@CdfField public final int nrVars;
@CdfField public final int numAttr;
@CdfField public final int rMaxRec;
@CdfField public final int rNumDims;
@CdfField public final int nzVars;
@CdfField @OffsetField public final long uirHead;
@CdfField public final int rfuC;
@CdfField public final int leapSecondLastUpdated;
@CdfField public final int rfuE;
@CdfField public final int[] rDimSizes;
/**
* Constructor.
*
* @param plan basic record information
*/
public GlobalDescriptorRecord( RecordPlan plan ) throws IOException {
super( plan, "GDR", 2 );
Buf buf = plan.getBuf();
Pointer ptr = plan.createContentPointer();
this.rVdrHead = buf.readOffset( ptr );
this.zVdrHead = buf.readOffset( ptr );
this.adrHead = buf.readOffset( ptr );
this.eof = buf.readOffset( ptr );
this.nrVars = buf.readInt( ptr );
this.numAttr = buf.readInt( ptr );
this.rMaxRec = buf.readInt( ptr );
this.rNumDims = buf.readInt( ptr );
this.nzVars = buf.readInt( ptr );
this.uirHead = buf.readOffset( ptr );
this.rfuC = checkIntValue( buf.readInt( ptr ), 0 );
this.leapSecondLastUpdated = buf.readInt( ptr );
this.rfuE = checkIntValue( buf.readInt( ptr ), -1 );
this.rDimSizes = readIntArray( buf, ptr, this.rNumDims );
checkEndRecord( ptr );
}
}

==== jcdf-1.2-3/LogUtil.java ====

package uk.ac.bristol.star.cdf.util;
import java.util.logging.ConsoleHandler;
import java.util.logging.Formatter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
/**
* Utilities for controlling logging level.
*
* @author Mark Taylor
* @since 21 Jun 2013
*/
public class LogUtil {
/**
* Private constructor prevents instantiation.
*/
private LogUtil() {
}
/**
* Sets the logging verbosity of the root logger and ensures that
* logging messages at that level are reported to the console.
* You'd think this would be simple, but it requires jumping through hoops.
*
* @param verbose 0 for normal, positive for more, negative for less
* (0=INFO, +1=CONFIG, -1=WARNING)
*/
public static void setVerbosity( int verbose ) {
// Set a level based on the given verbosity.
int ilevel = Level.INFO.intValue() - ( verbose * 100 );
Level level = Level.parse( Integer.toString( ilevel ) );
// Set the root logger's level to this value.
Logger rootLogger = Logger.getLogger( "" );
rootLogger.setLevel( level );
// Make sure that the root logger's console handler will actually
// emit these messages. By default it seems that anything below
// INFO is squashed.
Handler[] rootHandlers = rootLogger.getHandlers();
if ( rootHandlers.length > 0 &&
rootHandlers[ 0 ] instanceof ConsoleHandler ) {
rootHandlers[ 0 ].setLevel( level );
rootHandlers[ 0 ].setFormatter( new LineFormatter( false ) );
}
for ( int i = 0; i < rootHandlers.length; i++ ) {
rootHandlers[ i ].setLevel( level );
}
}
/**
* Compact log record formatter. Unlike the default
* {@link java.util.logging.SimpleFormatter} this generally uses only
* a single line for each record.
*/
public static class LineFormatter extends Formatter {
private final boolean debug_;
/**
* Constructor.
*
* @param debug iff true, provides more information per log message
*/
public LineFormatter( boolean debug ) {
debug_ = debug;
}
public String format( LogRecord record ) {
StringBuffer sbuf = new StringBuffer();
sbuf.append( record.getLevel().toString() )
.append( ": " )
.append( formatMessage( record ) );
if ( debug_ ) {
sbuf.append( ' ' )
.append( '(' )
.append( record.getSourceClassName() )
.append( '.' )
.append( record.getSourceMethodName() )
.append( ')' );
}
sbuf.append( '\n' );
return sbuf.toString();
}
}
}
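
A usage sketch: following the mapping documented above, a verbosity of +1
selects java.util.logging level 700, i.e. CONFIG.

    LogUtil.setVerbosity( 1 );    // INFO(800) - 1*100 = CONFIG(700)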

==== jcdf-1.2-3/NumericEncoding.java ====

package uk.ac.bristol.star.cdf.record;
import uk.ac.bristol.star.cdf.CdfFormatException;
/**
* Enumeration of numeric encoding values supported by CDF.
*
* @author Mark Taylor
* @since 20 Jun 2013
*/
public enum NumericEncoding {
NETWORK( Boolean.TRUE ),
SUN( Boolean.TRUE ),
NeXT( Boolean.TRUE ),
MAC( Boolean.TRUE ),
HP( Boolean.TRUE ),
SGi( Boolean.TRUE ),
IBMRS( Boolean.TRUE ),
DECSTATION( Boolean.FALSE ),
IBMPC( Boolean.FALSE ),
ALPHAOSF1( Boolean.FALSE ),
ALPHAVMSi( Boolean.FALSE ),
VAX( null ),
ALPHAVMSd( null ),
ALPHAVMSg( null );
private final Boolean isBigendian_;
/**
* Constructor.
*
* @param isBigendian TRUE for simple big-endian,
* FALSE for simple little-endian,
* null for something else
*/
NumericEncoding( Boolean isBigendian ) {
isBigendian_ = isBigendian;
}
/**
* Gives the big/little-endianness of this encoding, if that's all
* the work that has to be done.
* If the return value is non-null, then numeric values are
* encoded the same way that java does it (two's complement for
* integers and IEEE754 for floating point) with big- or little-endian
* byte ordering, according to the return value.
* Otherwise, some unspecified encoding is in operation.
*
* @return TRUE for simple big-endian, FALSE for simple little-endian,
* null for something weird
*/
public Boolean isBigendian() {
return isBigendian_;
}
/**
* Returns the encoding corresponding to the value of the
* encoding field in a CDF Descriptor Record.
*/
}

==== jcdf-1.2-3/RunLengthInputStream.java ====

package uk.ac.bristol.star.cdf.record;

import java.io.IOException;
import java.io.InputStream;
import uk.ac.bristol.star.cdf.CdfFormatException;

/**
* Decompresses an input stream compressed with the CDF
* Run Length Encoding scheme.
* The compressed stream is just like the uncompressed one,
* except that a byte with the special value V is followed by
* a byte giving the number of additional bytes V to consider present
* in the stream.
*
* This format was deduced from reading the cdfrle.c source file
* from the CDF distribution.
*
* @author Mark Taylor
* @since 17 May 2013
*/
class RunLengthInputStream extends InputStream {
private final InputStream base_;
private final int rleVal_;
private int vCount_;
/**
* Constructor.
*
* @param base input stream containing RLE-compressed data
* @param rleVal the byte value whose run lengths are compressed
* (always zero for CDF as far as I can tell)
*/
public RunLengthInputStream( InputStream base, byte rleVal ) {
base_ = base;
rleVal_ = rleVal & 0xff;
}
@Override
public int read() throws IOException {
if ( vCount_ > 0 ) {
vCount_--;
return rleVal_;
}
else {
int b = base_.read();
if ( b == rleVal_ ) {
int c = base_.read();
if ( c >= 0 ) {
vCount_ = c;
return rleVal_;
}
else {
throw new CdfFormatException( "Bad RLE data" );
}
}
else {
return b;
}
}
}
@Override
public int available() throws IOException {
return base_.available() + vCount_;
}
@Override
public void close() throws IOException {
base_.close();
}
@Override
public boolean markSupported() {
return false;
}
}
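
A worked sketch of the scheme described in the class comment: with
rleVal = 0, the compressed byte sequence { 0, 2, 5 } expands to
{ 0, 0, 0, 5 }, the 0 being emitted once plus two additional times.

    InputStream in =
        new RunLengthInputStream(
            new ByteArrayInputStream( new byte[] { 0, 2, 5 } ), (byte) 0 );
    // successive in.read() calls yield 0, 0, 0, 5, then -1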

==== jcdf-1.2-3/SameTest.java ====

package uk.ac.bristol.star.cdf.test;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
import java.util.logging.Level;
import java.util.logging.Logger;
import uk.ac.bristol.star.cdf.AttributeEntry;
import uk.ac.bristol.star.cdf.CdfContent;
import uk.ac.bristol.star.cdf.CdfReader;
import uk.ac.bristol.star.cdf.GlobalAttribute;
import uk.ac.bristol.star.cdf.Variable;
import uk.ac.bristol.star.cdf.VariableAttribute;
/**
* Tests that multiple specified CDF files have identical CDF content.
* The second, third, fourth, ... -named files are compared with the
* first-named one.
* Any discrepancies are reported with context.
* The error count can be obtained.
*
* @author Mark Taylor
* @since 25 Jun 2013
*/
public class SameTest {
private final File[] files_;
private final PrintStream out_;
private int nerror_;
private Stack<String> context_;
}

==== jcdf-1.2-3/TtScaler.java ====

package uk.ac.bristol.star.cdf;

import java.util.TimeZone;
import java.util.logging.Logger;

/**
* Handles conversions between TT milliseconds since J2000
* and Unix milliseconds.
* An external leap seconds table can be referenced with the
* {@value #LEAP_FILE_ENV} environment variable in exactly the same way
* as for the NASA library. Otherwise an internal leap seconds table
* will be used.
*
* @author Mark Taylor
* @since 8 Aug 2013
*/
public abstract class TtScaler {
private final double fixOffset_;
private final double scaleBase_;
private final double scaleFactor_;
private final long fromTt2kMillis_;
private final long toTt2kMillis_;
/** Number of milliseconds in a day. */
private static final double MILLIS_PER_DAY = 1000 * 60 * 60 * 24;
/** Date of the J2000 epoch as a Modified Julian Date. */
private static final double J2000_MJD = 51544.5;
/** Date of the Unix epoch (1970-01-01T00:00:00) as an MJD. */
private static final double UNIXEPOCH_MJD = 40587.0;
/** TT is ahead of TAI by approximately 32.184 seconds. */
private static final double TT_TAI_MILLIS = 32184;
/** Fixed time zone. */
private static final TimeZone UTC = TimeZone.getTimeZone( "UTC" );
/** Date of the J2000 epoch (2000-01-01T12:00:00) as a Unix time. */
public static final double J2000_UNIXMILLIS = 946728000000.0;
/**
* Environment variable to locate external leap seconds file ({@value}).
* The environment variable name and file format are just the same
* as for the NASA CDF library.
*/
public static final String LEAP_FILE_ENV = "CDF_LEAPSECONDSTABLE";
private static final Logger logger_ =
Logger.getLogger( TtScaler.class.getName() );
/**
* TT2000 coefficients:
* year, month (1=Jan), day_of_month (1-based),
* fix_offset, scale_base, scale_factor.
* year month day_of_month:
* TAI-UTC= fix_offset S + (MJD - scale_base) * scale_factor S
*
* Array initialiser lifted from gsfc.nssdc.cdf.util.CDFTT2000
* source code. That derives it from
* http://maia.usno.navy.mil/ser7/tai-utc.dat.
* See also http://cdf.gsfc.nasa.gov/html/CDFLeapSeconds.txt.
*/
private static final double[][] LTS = new double[][] {
{ 1960, 1, 1, 1.4178180, 37300.0, 0.0012960 },
{ 1961, 1, 1, 1.4228180, 37300.0, 0.0012960 },
{ 1961, 8, 1, 1.3728180, 37300.0, 0.0012960 },
{ 1962, 1, 1, 1.8458580, 37665.0, 0.0011232 },
{ 1963, 11, 1, 1.9458580, 37665.0, 0.0011232 },
{ 1964, 1, 1, 3.2401300, 38761.0, 0.0012960 },
{ 1964, 4, 1, 3.3401300, 38761.0, 0.0012960 },
{ 1964, 9, 1, 3.4401300, 38761.0, 0.0012960 },
{ 1965, 1, 1, 3.5401300, 38761.0, 0.0012960 },
{ 1965, 3, 1, 3.6401300, 38761.0, 0.0012960 },
{ 1965, 7, 1, 3.7401300, 38761.0, 0.0012960 },
{ 1965, 9, 1, 3.8401300, 38761.0, 0.0012960 },
{ 1966, 1, 1, 4.3131700, 39126.0, 0.0025920 },
{ 1968, 2, 1, 4.2131700, 39126.0, 0.0025920 },
{ 1972, 1, 1, 10.0, 0.0, 0.0 },
{ 1972, 7, 1, 11.0, 0.0, 0.0 },
{ 1973, 1, 1, 12.0, 0.0, 0.0 },
{ 1974, 1, 1, 13.0, 0.0, 0.0 },
{ 1975, 1, 1, 14.0, 0.0, 0.0 },
{ 1976, 1, 1, 15.0, 0.0, 0.0 },
{ 1977, 1, 1, 16.0, 0.0, 0.0 },
{ 1978, 1, 1, 17.0, 0.0, 0.0 },
{ 1979, 1, 1, 18.0, 0.0, 0.0 },
{ 1980, 1, 1, 19.0, 0.0, 0.0 },
{ 1981, 7, 1, 20.0, 0.0, 0.0 },
{ 1982, 7, 1, 21.0, 0.0, 0.0 },
{ 1983, 7, 1, 22.0, 0.0, 0.0 },
{ 1985, 7, 1, 23.0, 0.0, 0.0 },
{ 1988, 1, 1, 24.0, 0.0, 0.0 },
{ 1990, 1, 1, 25.0, 0.0, 0.0 },
{ 1991, 1, 1, 26.0, 0.0, 0.0 },
{ 1992, 7, 1, 27.0, 0.0, 0.0 },
{ 1993, 7, 1, 28.0, 0.0, 0.0 },
{ 1994, 7, 1, 29.0, 0.0, 0.0 },
{ 1996, 1, 1, 30.0, 0.0, 0.0 },
{ 1997, 7, 1, 31.0, 0.0, 0.0 },
{ 1999, 1, 1, 32.0, 0.0, 0.0 },
{ 2006, 1, 1, 33.0, 0.0, 0.0 },
{ 2009, 1, 1, 34.0, 0.0, 0.0 },
{ 2012, 7, 1, 35.0, 0.0, 0.0 },
{ 2015, 7, 1, 36.0, 0.0, 0.0 },
{ 2017, 1, 1, 37.0, 0.0, 0.0 },
};
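    // Reading a row (illustrative): { 1972, 1, 1, 10.0, 0.0, 0.0 } means
    // that from 1972-01-01, TAI-UTC = 10.0s exactly; the scale_factor
    // term only matters for the pre-1972 rows, where the offset drifts
    // with MJD.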
private static TtScaler[] ORDERED_INSTANCES;
/**
* Constructor.
*
* @param fixOffset fixed offset of UTC in seconds from TAI
* @param scaleBase MJD base for scaling
* @param scaleFactor factor for scaling
* @param fromTt2kMillis start of validity range
* in TT milliseconds since J2000
* @param toTt2kMillis end of validity range
* in TT milliseconds since J2000
*/
public TtScaler( double fixOffset, double scaleBase, double scaleFactor,
long fromTt2kMillis, long toTt2kMillis ) {
fixOffset_ = fixOffset;
scaleBase_ = scaleBase;
scaleFactor_ = scaleFactor;
fromTt2kMillis_ = fromTt2kMillis;
toTt2kMillis_ = toTt2kMillis;
}
/**
* Converts time in milliseconds from TT since J2000 to UTC since 1970
* for this scaler.
*
* @param tt2kMillis TT milliseconds since J2000
* @return UTC milliseconds since Unix epoch
*/
public double tt2kToUnixMillis( long tt2kMillis ) {
return tt2kToUnixMillis( tt2kMillis,
fixOffset_, scaleBase_, scaleFactor_ );
}
/**
* Returns the start of the validity range of this scaler
* in TT milliseconds since J2000.
*
* @return validity range start
*/
public long getFromTt2kMillis() {
return fromTt2kMillis_;
}
/**
* Returns the end of the validity range of this scaler
* in TT milliseconds since J2000.
*
* @return validity range end
*/
public long getToTt2kMillis() {
return toTt2kMillis_;
}
/**
* Assesses validity of this scaler for a given time.
* The result will be zero if this scaler is valid,
* negative if the given time is earlier than this scaler's range, and
* positive if the given time is later than this scaler's range.
*
* @param tt2kMillis TT milliseconds since J2000
* @return validity signum
*/
public int compareTt2kMillis( long tt2kMillis ) {
if ( tt2kMillis < fromTt2kMillis_ ) {
return -1;
}
else if ( tt2kMillis >= toTt2kMillis_ ) {
return +1;
}
else {
return 0;
}
}
/**
* Indicates whether and how far a given time is into the duration of
* a leap second. If the supplied time falls during a leap second,
* the number of milliseconds elapsed since the leap second's start
* is returned. Otherwise (i.e. nearly always) -1 is returned.
*
* @param tt2kMillis TT time in milliseconds since J2000
* @return a value in the range 0...1000 if in a leap second, otherwise -1
*/
public abstract int millisIntoLeapSecond( long tt2kMillis );
/**
* Searches an ordered array of scaler instances for one that is
* applicable to a supplied TT time.
* The supplied array of instances must be ordered and cover the
* supplied time value; the result of {@link #getTtScalers} is suitable
* and most likely what you want to use here.
*
* @param tt2kMillis TT time in milliseconds since J2000
* @param orderedScalers list of TtScaler instances ordered in time
* @param i0 initial guess at index of the right answer;
* if negative no best guess is assumed
* @return index into orderedScalers of a scaler covering tt2kMillis
*/
public static int getScalerIndex( long tt2kMillis,
TtScaler[] orderedScalers, int i0 ) {
int ns = orderedScalers.length;
return scalerBinarySearch( tt2kMillis, orderedScalers,
i0 >= 0 ? i0 : ns / 2, 0, ns - 1 );
}
/**
* Recursive binary search of an ordered array of scaler instances
* for one that covers a given point in time.
*
* @param tt2kMillis TT time in milliseconds since J2000
* @param orderedScalers list of TtScaler instances ordered in time
* @param i0 initial guess at index of the right answer
* @param imin minimum possible value of the right answer
* @param imax maximum possible value of the right answer
*/
private static int scalerBinarySearch( long tt2kMillis, TtScaler[] scalers,
int i0, int imin, int imax ) {
// If the guess is correct, return it directly.
int icmp = scalers[ i0 ].compareTt2kMillis( tt2kMillis );
if ( icmp == 0 ) {
return i0;
}
// Sanity check. This condition shouldn't happen, but could do
// for one of two reasons: a programming error in this code,
// or an improperly ordered scalers array.
if ( i0 < imin || i0 > imax ) {
return -1;
}
assert i0 >= imin && i0 <= imax;
// Bisect up or down and recurse.
if ( icmp < 0 ) {
return scalerBinarySearch( tt2kMillis, scalers,
i0 - ( i0 - imin + 1 ) / 2,
imin, i0 - 1 );
}
else {
assert icmp > 0;
return scalerBinarySearch( tt2kMillis, scalers,
i0 + ( imax - i0 + 1 ) / 2,
i0 + 1, imax );
}
}
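    // Usage sketch (illustrative; getTtScalers is the ordered-instance
    // factory referred to above, and -1 means no initial guess):
    //
    //    TtScaler[] scalers = TtScaler.getTtScalers();
    //    int i = TtScaler.getScalerIndex( tt2kMillis, scalers, -1 );
    //    double unixMillis = scalers[ i ].tt2kToUnixMillis( tt2kMillis );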
/**
* Converts time in milliseconds from TT since J2000 to UTC since 1970
* for given coefficients.
*
* @param tt2kMillis TT milliseconds since J2000
* @param fixOffset fixed offset of UTC in seconds from TAI
* @param scaleBase MJD base for scaling
* @param scaleFactor factor for scaling
* @return UTC milliseconds since Unix epoch
*/
private static double tt2kToUnixMillis( long tt2kMillis, double fixOffset,
double scaleBase,
double scaleFactor ) {
double mjd = ((double) tt2kMillis) / MILLIS_PER_DAY + J2000_MJD;
double utcOffsetSec = fixOffset + ( mjd - scaleBase ) * scaleFactor;
double utcOffsetMillis = utcOffsetSec * 1000;
return tt2kMillis - TT_TAI_MILLIS - utcOffsetMillis + J2000_UNIXMILLIS;
}
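    // Worked example (illustrative): for tt2kMillis = 0
    // (2000-01-01T12:00:00 TT), the { 1999, 1, 1, 32.0, 0.0, 0.0 } row
    // applies, so utcOffsetMillis = 32000 and the result is
    // 0 - 32184 - 32000 + 946728000000 = 946727935816,
    // i.e. 2000-01-01T11:58:55.816 UTC.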
/**
* Converts time in milliseconds from UTC since 1970 to TT since J2000
* for given coefficients.
*
* @param unixMillis UTC milliseconds since the Unix epoch
* @param fixOffset fixed offset of UTC in seconds from TAI
* @param scaleBase MJD base for scaling
* @param scaleFactor factor for scaling
* @return TT milliseconds since J2000
*/
private static double unixToTt2kMillis( long unixMillis, double fixOffset,
double scaleBase,
double scaleFactor ) {
double mjd = ((double) unixMillis) / MILLIS_PER_DAY + UNIXEPOCH_MJD;
double utcOffsetSec = fixOffset + ( mjd - scaleBase ) * scaleFactor;
double utcOffsetMillis = utcOffsetSec * 1000;
return unixMillis + TT_TAI_MILLIS + utcOffsetMillis - J2000_UNIXMILLIS;
}
/**
* Returns an ordered list of scalers covering the whole range of times.
* Ordering is by time, as per the {@link #compareTt2kMillis} method;
* every TT2000 time is covered by exactly one scaler.
*/
}

==== jcdf-1.2-3/Variable.java ====

package uk.ac.bristol.star.cdf;

import java.io.IOException;
import java.lang.reflect.Array;
import uk.ac.bristol.star.cdf.record.Buf;
import uk.ac.bristol.star.cdf.record.DataReader;
import uk.ac.bristol.star.cdf.record.Record;
import uk.ac.bristol.star.cdf.record.RecordFactory;
import uk.ac.bristol.star.cdf.record.Shaper;
import uk.ac.bristol.star.cdf.record.VariableDescriptorRecord;

/**
* Represents a variable in a CDF file and provides access to its records.
* At construction time, a map of where the records are stored is
* constructed, but the record data itself is not read unless or until
* one of the read methods is called.
*
* This interface does not currently support data reading in such
* a flexible way as the official CDF interface.
* You can read a record's worth of data at a time using either
* {@link #readRawRecord readRawRecord} (which should be fairly efficient) or
* {@link #readShapedRecord readShapedRecord} (which may have to copy and
* possibly re-order the array, and may not be so efficient).
*
* @author Mark Taylor
* @since 20 Jun 2013
*/
public class Variable {
private final VariableDescriptorRecord vdr_;
private final Buf buf_;
private final RecordFactory recFact_;
private final boolean isZVariable_;
private final boolean recordVariance_;
private final Shaper shaper_;
private final int rvaleng_;
private final DataType dataType_;
private final DataReader dataReader_;
private final Object padRawValueArray_;
private final Object shapedPadValueRowMajor_;
private final Object shapedPadValueColumnMajor_;
private final String summaryTxt_;
private RecordReader recordReader_;
/**
* Constructor.
*
* @param vdr variable descriptor record for the variable
* @param cdfInfo global CDF information
* @param recFact record factory
*/
public Variable( VariableDescriptorRecord vdr, CdfInfo cdfInfo,
RecordFactory recFact ) throws IOException {
// Prepare state for reading data.
vdr_ = vdr;
buf_ = vdr.getBuf();
recFact_ = recFact;
isZVariable_ = vdr.getRecordType() == 8;
dataType_ = DataType.getDataType( vdr.dataType, cdfInfo );
recordVariance_ = Record.hasBit( vdr_.flags, 0 );
int[] dimSizes = isZVariable_ ? vdr.zDimSizes : cdfInfo.getRDimSizes();
boolean[] dimVarys = vdr.dimVarys;
boolean rowMajor = cdfInfo.getRowMajor();
int numElems = vdr.numElems;
// As far as I understand the internal formats document, only
// character data types can have numElems>1 here.
assert dataType_.hasMultipleElementsPerItem() || numElems == 1;
shaper_ =
Shaper.createShaper( dataType_, dimSizes, dimVarys, rowMajor );
int nraw = shaper_.getRawItemCount();
dataReader_ = new DataReader( dataType_, numElems, nraw );
rvaleng_ = Array.getLength( dataReader_.createValueArray() );
// Read pad value if present.
long padOffset = vdr.getPadValueOffset();
if ( padOffset >= 0 ) {
DataReader padReader = new DataReader( dataType_, numElems, 1 );
assert vdr.getPadValueSize() == padReader.getRecordSize();
Object padValueArray = padReader.createValueArray();
padReader.readValue( buf_, padOffset, padValueArray );
Object rva = dataReader_.createValueArray();
int ngrp = dataType_.getGroupSize();
for ( int i = 0; i < nraw; i++ ) {
System.arraycopy( padValueArray, 0, rva, i * ngrp, ngrp );
}
padRawValueArray_ = rva;
shapedPadValueRowMajor_ = shaper_.shape( padRawValueArray_, true );
shapedPadValueColumnMajor_ =
shaper_.shape( padRawValueArray_, false );
}
else if ( vdr_.sRecords != 0 ) {
Object padValueArray = dataType_.getDefaultPadValueArray();
Object rva = dataReader_.createValueArray();
int ngrp = dataType_.getGroupSize();
for ( int i = 0; i < nraw; i++ ) {
System.arraycopy( padValueArray, 0, rva, i * ngrp, ngrp );
}
padRawValueArray_ = rva;
shapedPadValueRowMajor_ = shaper_.shape( padRawValueArray_, true );
shapedPadValueColumnMajor_ = shapedPadValueRowMajor_;
}
else {
padRawValueArray_ = null;
shapedPadValueRowMajor_ = null;
shapedPadValueColumnMajor_ = null;
}
// Assemble a short summary string.
String shapeTxt = "";
String varyTxt = "";
for ( int idim = 0; idim < dimSizes.length; idim++ ) {
if ( idim > 0 ) {
shapeTxt += ',';
}
shapeTxt += dimSizes[ idim ];
varyTxt += dimVarys[ idim ] ? 'T' : 'F';
}
summaryTxt_ = new StringBuffer()
.append( dataType_.getName() )
.append( ' ' )
.append( isZVariable_ ? "(z)" : "(r)" )
.append( ' ' )
.append( dimSizes.length )
.append( ':' )
.append( '[' )
.append( shapeTxt )
.append( ']' )
.append( ' ' )
.append( recordVariance_ ? 'T' : 'F' )
.append( '/' )
.append( varyTxt )
.toString();
}
/**
* Returns this variable's name.
*
* @return variable name
*/
public String getName() {
return vdr_.name;
}
/**
* Returns the index number within the CDF of this variable.
*
* @return variable num
*/
public int getNum() {
return vdr_.num;
}
/**
* Indicates whether this variable is a zVariable or rVariable.
*
* @return true for zVariable, false for rVariable
*/
public boolean isZVariable() {
return isZVariable_;
}
/**
* Returns the upper limit of records that may have values.
* The actual number of records may be lower than this in case of sparsity.
*
* @return maximum record count
*/
public int getRecordCount() {
return vdr_.maxRec + 1;
}
/**
* Returns the data type of this variable.
*
* @return data type
*/
public DataType getDataType() {
return dataType_;
}
/**
* Returns an object that knows about the array dimensions
* of the data values.
*
* @return shaper
*/
public Shaper getShaper() {
return shaper_;
}
/**
* Indicates whether this variable has a value which is fixed for all
* records or can vary per record.
*
* @return false for fixed, true for varying
*/
public boolean getRecordVariance() {
return recordVariance_;
}
/**
* Returns a short text string describing the type, shape and variance
* of this variable.
*
* @return text summary of variable characteristics
*/
public String getSummary() {
return summaryTxt_;
}
/**
* Returns the VariableDescriptorRecord on which this Variable instance
* is based.
*
* @return variable descriptor record (rVDR or zVDR)
*/
public VariableDescriptorRecord getDescriptor() {
return vdr_;
}
/**
* Creates a workspace array suitable for use with this variable's
* reading methods.
* The returned array is a 1-dimensional array of a primitive type
* or of String.
*
* @return workspace array for data reading
*/
public Object createRawValueArray() {
return dataReader_.createValueArray();
}
/**
* Indicates whether a real distinct file-based record exists for
* the given index.
* Reading a record will give you a result in any case, but if this
* returns false it will be some kind of fixed or default value.
*
* @param irec record index
* @return true iff a file-based record exists for irec
*/
public boolean hasRecord( int irec ) throws IOException {
return getRecordReader().hasRecord( irec );
}
/**
* Reads the data from a single record into a supplied raw value array.
* The values are read into the supplied array in the order in which
* they are stored in the data stream, that is depending on the row/column
* majority of the CDF.
* The raw value array is as obtained from {@link #createRawValueArray}.
*
* @param irec record index
* @param rawValueArray workspace array, as created by the
*                      {@link #createRawValueArray} method
*/

/**
* Reads the data from a single record into a supplied workspace array,
* and returns it as a shaped array object.
* The workspace is as obtained from {@link #createRawValueArray}.
*
* @param irec record index
* @param rowMajor required majority of output array; true for row major,
*                 false for column major; only has an effect for
*                 dimensionality >=2
* @param rawValueArrayWorkspace workspace array, as created by the
*                               {@link #createRawValueArray} method
*/
}
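
A minimal read-loop sketch using the methods above (the Variable instance
var is assumed to have been obtained elsewhere, e.g. from CdfContent):

    Object work = var.createRawValueArray();
    for ( int irec = 0; irec < var.getRecordCount(); irec++ ) {
        if ( var.hasRecord( irec ) ) {
            var.readRawRecord( irec, work );
            // one record's worth of raw values is now in work
        }
    }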

==== jcdf-1.2-3/index.html ====

JCDF is a pure java library capable of reading files in the
Common Data Format defined by NASA.
It runs within J2SE 1.5 or later, but other than that has no dependencies:
neither the official CDF C library nor any other java class libraries.
The classes are provided with comprehensive
javadocs.
Start reading at the CdfContent or CdfReader javadocs.
JCDF is a completely separate implementation from the Java interface to
the official CDF library, which uses native code via JNI.
It was written mainly with reference to the CDF Internal Format Description
document (v3.4). Minor updates at version JCDF 1.1 are believed to
bring it into line with CDF v3.6.
The main benefit of using JCDF, and the reason for developing it,
is that it's pure java, so it can be deployed using only the JCDF jar file.
There is no need to install the system-dependent official CDF library.
The API is very different from that of the official CDF library.
JCDF gives you a simple view of the CDF file, in terms of its
global attributes, variable attributes and variables.
This is fairly easy to use, but may or may not suit your purposes.
It's also possible to get a low-level view of the CDF file as a
sequence of CDF records.
JCDF offers no capabilities for writing or editing CDF files;
it only reads them.
Since JCDF is based on NIO mapped ByteBuffers, it's expected to be
reasonably fast, but I haven't done any benchmarking.
Use of mapped buffers leads to efficient serial and random I/O, but
does have some associated problems concerning release of allocated resources.
For reasons that are quite
complicated,
it is hard to release mapped buffers after use, which means that for instance
files you have read using JCDF may be hard to delete,
and that reading large files may leak significant amounts of virtual memory.
There are some more or less nasty ways to work round this in user code or
in the library. If these issues cause practical problems for library users,
you are encouraged to contact me by email or open a github issue.
Support for the CDF format is almost, but not totally, complete.
In particular, multi-file CDFs and the VAX-related D_FLOAT and G_FLOAT
numeric encodings are not supported.
The library comes with a couple of simple utilities for examining
CDF files: CdfList, which describes a file's attributes and variables,
and CdfDump, which dumps its low-level records.
The source code is hosted on github at
https://github.com/mbtaylor/jcdf.
It comes with a makefile that can be used to build the jar file,
javadocs, and this documentation, and to run some tests.
Pre-built copies of the jar file and documentation
for the current version (v1.2-3) are available for download.
Previous versions may be available at
ftp://andromeda.star.bris.ac.uk/pub/star/jcdf/.
This software was written by
Mark Taylor
at the University of Bristol at the request of, and funded by,
the science archive group at ESA's European Space Astronomy Centre (ESAC).
It is used within
TOPCAT/STILTS/STIL
to enable access to CDF tables alongside other tabular data formats
by that software.
It is believed to be in some use also at ESAC and elsewhere.
If anybody out there is using it and is willing to be listed here,
let me know.
It is licenced under the LGPL, though if you need a different licence
I can probably fix it.
My thanks to Michael Liu and Robert Candey at the NASA CDF office
for encouragement and help with some of the testing and implementation,
and to Jeremy Faden for discussions and help with testing.
Bugs, questions, feedback, enhancement requests welcome to
m.b.taylor@bristol.ac.uk.

==== jcdf-1.2-3/package-info.java ====

/**
* Pure java library for reading files in NASA's Common Data Format.
* For low-level access to the record data of a CDF file, use the
* {@link uk.ac.bristol.star.cdf.CdfReader} class.
* For high-level access to the variables and attributes that form
* the CDF data and metadata, use the
* {@link uk.ac.bristol.star.cdf.CdfContent} class.
*
* The package makes extensive use of NIO buffers for mapped read-on-demand
* data access, so should be fairly efficient for reading scalar records
* and whole raw array records. Convenience methods for reading shaped
* arrays may be less efficient.
*
* This package is less capable than the official JNI-based
* java interface to the CDF C library (read only, less flexible data read
* capabilities), but it is pure java (no native code required) and it's
* also quite a bit less complicated to use.
*/
package uk.ac.bristol.star.cdf;

==== jcdf-1.2-3/BitExpandInputStream.java ====

package uk.ac.bristol.star.cdf.record;

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

/**
* Abstract input stream implementation for decompressing bit streams.
*
* Attribution:
*
* The code for the Huffman and Adaptive Huffman decompressing stream
* implementations in this class is based on the C implementation in
* "The Data Compression Book" (Mark Nelson, 1992), via the code
* in cdfhuff.c from the CDF source distribution.
*
* And I even bought the book (MBT).
*
* @author Mark Taylor
* @author Mark Nelson
* @author J Love
* @since 19 Jun 2013
* @see "The Data Compression Book, Mark Nelson, 1992"
*/
abstract class BitExpandInputStream extends InputStream {
private final InputStream base_;
private int rack_;
private int mask_;
private boolean ended_;
/** End of stream marker. */
protected static final int END_OF_STREAM = 256;
/**
* Constructor.
*
* @param base compressed bit stream
*/
protected BitExpandInputStream( InputStream base ) {
base_ = base;
mask_ = 0x80;
}
@Override
public void close() throws IOException {
base_.close();
}
@Override
public boolean markSupported() {
return false;
}
@Override
public int read() throws IOException {
if ( ended_ ) {
return -1;
}
int token = readToken();
if ( token == END_OF_STREAM ) {
ended_ = true;
return -1;
}
else {
return token;
}
}
/**
* Reads a single uncompressed character.
* The result may be either a byte value
* in the range 0--255, or the terminator value END_OF_STREAM.
* The actual end of the input stream should not be encountered
* (it should be flagged by an END_OF_STREAM indicator token);
* if it is, an EOFException is thrown.
*
* @return next uncompressed character, or END_OF_STREAM
*/
protected abstract int readToken() throws IOException;
/**
* Reads the next bit from the compressed base stream.
*
* @return true/false for next input bit 1/0
*/
public boolean readBit() throws IOException {
if ( mask_ == 0x80 ) {
rack_ = read1( base_ );
}
int value = rack_ & mask_;
mask_ >>= 1;
if ( mask_ == 0 ) {
mask_ = 0x80;
}
return value != 0;
}
/**
* Reads up to 32 bits from the compressed input stream
* and returns them in the least-significant end of an int.
*
* @param bitCount number of bits to read
* @return int containing bits
*/
public int readBits( int bitCount ) throws IOException {
int mask = 1 << ( bitCount - 1 );
int value = 0;
while ( mask != 0 ) {
if ( readBit() ) {
value |= mask;
}
mask >>= 1;
}
return value;
}
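        // Example (illustrative): if the next three input bits are 1, 0, 1,
        // then readBits( 3 ) returns binary 101 = 5; bits fill the result
        // most significant first.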
/**
* Reads a single byte from an input stream.
* If the end of stream is encountered, an exception is thrown.
*
* @param in input stream
* @return byte value in the range 0--255
*/
private static int read1( InputStream in ) throws IOException {
int b = in.read();
if ( b < 0 ) {
throw new EOFException();
}
return b;
}
/**
* Decompresses an input stream compressed using the CDF (Nelson)
* version of Huffman coding.
*/
public static class HuffmanInputStream extends BitExpandInputStream {
private final Node[] nodes_;
private final int iRoot_;
private boolean ended_;
/**
* Constructor.
*
* @param base compressed bit stream
*/
public HuffmanInputStream( InputStream base ) throws IOException {
super( base );
nodes_ = inputCounts( base );
iRoot_ = buildTree( nodes_ );
}
@Override
protected int readToken() throws IOException {
int inode = iRoot_;
do {
Node node = nodes_[ inode ];
boolean bit = readBit();
inode = bit ? node.child1_ : node.child0_;
} while ( inode > END_OF_STREAM );
return inode;
}
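        /**
         * Reads the leaf count table from the head of the compressed
         * stream. Counts arrive in (first, last) index ranges; a first
         * value of zero terminates the table.
         *
         * @param in compressed input stream
         * @return node array with leaf counts initialised
         */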
private static Node[] inputCounts( InputStream in ) throws IOException {
Node[] nodes = new Node[ 514 ];
for ( int i = 0; i < 514; i++ ) {
nodes[ i ] = new Node();
}
int ifirst = read1( in );
int ilast = read1( in );
while ( true ) {
for ( int i = ifirst; i <= ilast; i++ ) {
nodes[ i ].count_ = read1( in );
}
ifirst = read1( in );
if ( ifirst == 0 ) {
break;
}
ilast = read1( in );
}
nodes[ END_OF_STREAM ].count_ = 1;
return nodes;
}
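        /**
         * Builds the Huffman tree by repeatedly joining the two
         * lowest-count live nodes until only one remains; index 513
         * serves as a sentinel with maximum count.
         *
         * @param nodes node array with leaf counts filled in
         * @return index of the root node
         */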
private static int buildTree( Node[] nodes ) {
int min1;
int min2;
nodes[ 513 ].count_ = Integer.MAX_VALUE;
int nextFree = END_OF_STREAM + 1;
while ( true ) {
min1 = 513;
min2 = 513;
for ( int i = 0; i < nextFree; i++ ) {
if ( nodes[ i ].count_ != 0 ) {
if ( nodes[ i ].count_ < nodes[ min1 ].count_ ) {
min2 = min1;
min1 = i;
}
else if ( nodes[ i ].count_ < nodes[ min2 ].count_ ) {
min2 = i;
}
}
}
if ( min2 == 513 ) {
break;
}
nodes[ nextFree ].count_ = nodes[ min1 ].count_
+ nodes[ min2 ].count_;
nodes[ min1 ].savedCount_ = nodes[ min1 ].count_;
nodes[ min1 ].count_ = 0;
nodes[ min2 ].savedCount_ = nodes[ min2 ].count_;
nodes[ min2 ].count_ = 0;
nodes[ nextFree ].child0_ = min1;
nodes[ nextFree ].child1_ = min2;
nextFree++;
}
nextFree--;
nodes[ nextFree ].savedCount_ = nodes[ nextFree ].count_;
return nextFree;
}
/**
* Data structure containing a Huffman tree node.
*/
private static class Node {
int count_;
int savedCount_;
int child0_;
int child1_;
}
}
/**
* Decompresses an input stream compressed using the CDF (Nelson)
* version of adaptive Huffman coding.
*/
public static class AdaptiveHuffmanInputStream
extends BitExpandInputStream {
// Tree members. This class acts as its own tree.
private final int[] leafs_;
private final Node[] nodes_;
private int nextFreeNode_;
private static final int ESCAPE = 257;
private static final int SYMBOL_COUNT = 258;
private static final int NODE_TABLE_COUNT = ( SYMBOL_COUNT * 2 ) - 1;
private static final int ROOT_NODE = 0;
private static final int MAX_WEIGHT = 0x8000;
/**
* Constructor.
*
* @param base compressed bit stream
*/
public AdaptiveHuffmanInputStream( InputStream base ) {
super( base );
// Initialise the tree.
leafs_ = new int[ SYMBOL_COUNT ];
nodes_ = new Node[ NODE_TABLE_COUNT ];
nodes_[ ROOT_NODE ] = new Node( ROOT_NODE + 1, false, 2, -1 );
nodes_[ ROOT_NODE + 1 ] = new Node( END_OF_STREAM, true, 1,
ROOT_NODE );
leafs_[ END_OF_STREAM ] = ROOT_NODE + 1;
nodes_[ ROOT_NODE + 2 ] = new Node( ESCAPE, true, 1, ROOT_NODE );
leafs_[ ESCAPE ] = ROOT_NODE + 2;
nextFreeNode_ = ROOT_NODE + 3;
for ( int i = 0; i < END_OF_STREAM; i++ ) {
leafs_[ i ] = -1;
}
}
@Override
protected int readToken() throws IOException {
int iCurrentNode = ROOT_NODE;
while ( ! nodes_[ iCurrentNode ].childIsLeaf_ ) {
iCurrentNode = nodes_[ iCurrentNode ].child_;
boolean bit = readBit();
iCurrentNode += bit ? 1 : 0;
}
int c = nodes_[ iCurrentNode ].child_;
if ( c == ESCAPE ) {
c = readBits( 8 );
addNewNode( c );
}
updateModel( c );
return c;
}
private void addNewNode( int c ) {
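// Split the lightest leaf: it becomes an internal node whose
// children are a copy of the old leaf and a new zero-weight leaf
// for symbol c.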
int iLightestNode = nextFreeNode_ - 1;
int iNewNode = nextFreeNode_;
int iZeroWeightNode = nextFreeNode_ + 1;
nextFreeNode_ += 2;
nodes_[ iNewNode ] = new Node( nodes_[ iLightestNode ] );
nodes_[ iNewNode ].parent_ = iLightestNode;
leafs_[ nodes_[ iNewNode ].child_ ] = iNewNode;
nodes_[ iLightestNode ] =
new Node( iNewNode, false, nodes_[ iLightestNode ].weight_,
nodes_[ iLightestNode ].parent_ );
nodes_[ iZeroWeightNode ] = new Node( c, true, 0, iLightestNode );
leafs_[ c ] = iZeroWeightNode;
}
private void updateModel( int c ) {
if ( nodes_[ ROOT_NODE ].weight_ == MAX_WEIGHT ) {
rebuildTree();
}
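// Increment the weight of the symbol's leaf and of each ancestor
// up to the root, swapping nodes where necessary so the node list
// stays ordered by weight (the sibling property).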
int iCurrentNode = leafs_[ c ];
while ( iCurrentNode != -1 ) {
nodes_[ iCurrentNode ].weight_++;
int iNewNode;
for ( iNewNode = iCurrentNode; iNewNode > ROOT_NODE;
iNewNode-- ) {
if ( nodes_[ iNewNode - 1 ].weight_ >=
nodes_[ iCurrentNode ].weight_ ) {
break;
}
}
if ( iCurrentNode != iNewNode ) {
swapNodes( iCurrentNode, iNewNode );
iCurrentNode = iNewNode;
}
iCurrentNode = nodes_[ iCurrentNode ].parent_;
}
}
private void swapNodes( int i, int j ) {
if ( nodes_[ i ].childIsLeaf_ ) {
leafs_[ nodes_[ i ].child_ ] = j;
}
else {
nodes_[ nodes_[ i ].child_ ].parent_ = j;
nodes_[ nodes_[ i ].child_ + 1 ].parent_ = j;
}
if ( nodes_[ j ].childIsLeaf_ ) {
leafs_[ nodes_[ j ].child_ ] = i;
}
else {
nodes_[ nodes_[ j ].child_ ].parent_ = i;
nodes_[ nodes_[ j ].child_ + 1 ].parent_ = i;
}
Node temp = new Node( nodes_[ i ] );
nodes_[ i ] = new Node( nodes_[ j ] );
nodes_[ i ].parent_ = temp.parent_;
temp.parent_ = nodes_[ j ].parent_;
nodes_[ j ] = temp;
}
private void rebuildTree() {
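// Halve every leaf weight, compacting the leaves to the end of the
// table, then rebuild the internal nodes so that weights and node
// ordering are consistent again.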
int j = nextFreeNode_ - 1;
for ( int i = j; i >= ROOT_NODE; i-- ) {
if ( nodes_[ i ].childIsLeaf_ ) {
nodes_[ j ] = new Node( nodes_[ i ] );
nodes_[ j ].weight_ = ( nodes_[ j ].weight_ + 1 ) / 2;
j--;
}
}
for ( int i = nextFreeNode_ - 2; j >= ROOT_NODE; i -= 2, j-- ) {
int k = i + 1;
nodes_[ j ].weight_ = nodes_[ i ].weight_ + nodes_[ k ].weight_;
int weight = nodes_[ j ].weight_;
nodes_[ j ].childIsLeaf_ = false;
for ( k = j + 1; weight < nodes_[ k ].weight_; k++ ) {
}
k--;
System.arraycopy( nodes_, j + 1, nodes_, j, k - j );
nodes_[ k ] = new Node( i, false, weight, nodes_[ k ].parent_ );
}
for ( int i = nextFreeNode_ - 1; i >= ROOT_NODE; i-- ) {
if ( nodes_[ i ].childIsLeaf_ ) {
int k = nodes_[ i ].child_;
leafs_[ k ] = i;
}
else {
int k = nodes_[ i ].child_;
nodes_[ k ].parent_ = nodes_[ k + 1 ].parent_ = i;
}
}
}
/**
* Data structure representing an Adaptive Huffman tree node.
*/
private static class Node {
int child_;
boolean childIsLeaf_;
int weight_;
int parent_;
Node( int child, boolean childIsLeaf, int weight, int parent ) {
child_ = child;
childIsLeaf_ = childIsLeaf;
weight_ = weight;
parent_ = parent;
}
Node( Node node ) {
this( node.child_, node.childIsLeaf_, node.weight_,
node.parent_ );
}
}
}
}
jcdf-1.2-3/Buf.java 0000664 0000000 0000000 00000015231 13203340177 0014003 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
import java.io.InputStream;
/**
* Represents a sequence of bytes along with operations to read
* primitive values from it.
* This interface abstracts away implementation details such as storage
* mechanism, data encoding and pointer length.
* It is capable of dealing with 64-bit lengths and offsets.
* All of the read* methods are safe for use from multiple
* threads concurrently.
*
* @author Mark Taylor
* @since 18 Jun 2013
*/
public interface Buf {
/**
* Returns the extent of this buf in bytes.
*
* @return buffer length
*/
long getLength();
/**
* Reads a single byte from the pointer position,
* returning a value in the range 0..255.
* Pointer position is moved on appropriately.
*
* @param ptr pointer
* @return byte value
*/
int readUnsignedByte( Pointer ptr ) throws IOException;
/**
* Reads a signed big-endian 4-byte integer from the pointer position.
* Pointer position is moved on appropriately.
*
* @param ptr pointer
* @return integer value
*/
int readInt( Pointer ptr ) throws IOException;
/**
* Reads a file offset or size from the pointer position.
* This is a signed big-endian integer,
* occupying either 4 or 8 bytes according
* to the return value of {@link #isBit64}.
* Pointer position is moved on appropriately.
*
* @return buffer size or offset value
*/
long readOffset( Pointer ptr ) throws IOException;
/**
* Reads a fixed number of bytes interpreting them as ASCII characters
* and returns the result as a string.
* If a character 0x00 appears before nbyte bytes have
* been read, it is taken as the end of the string.
* Pointer position is moved on appropriately.
*
* @param ptr pointer
* @param nbyte maximum number of bytes in string
* @return ASCII string
*/
String readAsciiString( Pointer ptr, int nbyte ) throws IOException;
/**
* Sets the 64bit-ness of this buf.
* This determines whether {@link #readOffset readOffset} reads
* 4- or 8-byte values.
*
* This method should be called before the readOffset
* method is invoked.
*
* @param isBit64 true for 8-byte offsets, false for 4-byte offsets
*/
void setBit64( boolean isBit64 );
/**
* Determines the 64bit-ness of this buf.
* This determines whether {@link #readOffset readOffset} reads
* 4- or 8-byte values.
*
* @return true for 8-byte offsets, false for 4-byte offsets
*/
boolean isBit64();
/**
* Sets the encoding for reading numeric values as performed by the
* readData* methods.
*
* This method should be called before any of the readData*
* methods are invoked.
*
* @param isBigendian true for big-endian, false for little-endian
*/
void setEncoding( boolean isBigendian );
/**
* Determines the data encoding of this buf.
*
* @return true for big-endian, false for little-endian
*/
boolean isBigendian();
/**
* Reads a sequence of byte values from this buf into an array.
*
* @param offset position sequence start in this buffer in bytes
* @param count number of byte values to read
* @param array array to receive values, starting at array element 0
*/
void readDataBytes( long offset, int count, byte[] array )
throws IOException;
/**
* Reads a sequence of short values from this buf into an array.
*
* @param offset position sequence start in this buffer in bytes
* @param count number of short values to read
* @param array array to receive values, starting at array element 0
*/
void readDataShorts( long offset, int count, short[] array )
throws IOException;
/**
* Reads a sequence of int values from this buf into an array.
*
* @param offset position sequence start in this buffer in bytes
* @param count number of int values to read
* @param array array to receive values, starting at array element 0
*/
void readDataInts( long offset, int count, int[] array )
throws IOException;
/**
* Reads a sequence of long integer values from this buf into an array.
*
* @param offset position sequence start in this buffer in bytes
* @param count number of long values to read
* @param array array to receive values, starting at array element 0
*/
void readDataLongs( long offset, int count, long[] array )
throws IOException;
/**
* Reads a sequence of float values from this buf into an array.
*
* @param offset position sequence start in this buffer in bytes
* @param count number of float values to read
* @param array array to receive values, starting at array element 0
*/
void readDataFloats( long offset, int count, float[] array )
throws IOException;
/**
* Reads a sequence of double values from this buf into an array.
*
* @param offset position sequence start in this buffer in bytes
* @param count number of double values to read
* @param array array to receive values, starting at array element 0
*/
void readDataDoubles( long offset, int count, double[] array )
throws IOException;
/**
* Returns an input stream consisting of all the bytes in this buf
* starting from the given offset.
*
* @param offset position of first byte in buf that will appear in
* the returned stream
* @return input stream
*/
InputStream createInputStream( long offset );
/**
* Creates a new Buf of a given length populated from a given input stream.
* The new buf object must have the same data encoding and 64bit-ness
* as this one.
*
* @param count size of new buffer in bytes
* @param in input stream capable of supplying
* (at least) count bytes
* @return new buffer of length count filled with bytes
* from in
*/
Buf fillNewBuf( long count, InputStream in ) throws IOException;
}
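// A minimal usage sketch (not part of the original source): given some
// Buf implementation buf, sequential reads advance a Pointer through
// the buffer.
//
//     Pointer ptr = new Pointer( 0 );
//     int recSize = buf.readInt( ptr );     // pointer advances by 4
//     long offset = buf.readOffset( ptr );  // 4 or 8 bytes per isBit64()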
jcdf-1.2-3/BufTest.java 0000664 0000000 0000000 00000013361 13203340177 0014645 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.test;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import uk.ac.bristol.star.cdf.record.BankBuf;
import uk.ac.bristol.star.cdf.record.Buf;
import uk.ac.bristol.star.cdf.record.Pointer;
import uk.ac.bristol.star.cdf.record.SimpleNioBuf;
public class BufTest {
private static boolean assertionsOn_;
private final int blk_ = 54;
private final int nn_ = 64;
// Puts the various Buf implementations through their paces.
public void testBufs() throws IOException {
byte[] data = new byte[ 8 * 100 ];
ByteArrayOutputStream bout = new ByteArrayOutputStream();
DataOutputStream dout = new DataOutputStream( bout );
for ( int i = 0; i < nn_; i++ ) {
dout.writeByte( -i );
dout.writeByte( i );
dout.writeShort( -i );
dout.writeShort( i );
dout.writeInt( -i );
dout.writeInt( i );
dout.writeLong( -i );
dout.writeLong( i );
dout.writeFloat( -i );
dout.writeFloat( i );
dout.writeDouble( -i );
dout.writeDouble( i );
}
dout.flush();
dout.close();
byte[] bytes = bout.toByteArray();
int nbyte = bytes.length;
assert nbyte == blk_ * nn_;
boolean isBit64 = false;
boolean isBigEndian = true;
ByteBuffer buf1 = ByteBuffer.wrap( bytes );
checkBuf( new SimpleNioBuf( buf1, isBit64, isBigEndian ) );
checkBuf( BankBuf.createSingleBankBuf( buf1, isBit64, isBigEndian ) );
checkBuf( BankBuf.createMultiBankBuf( new ByteBuffer[] { buf1 },
isBit64, isBigEndian ) );
int[] banksizes =
{ 23, blk_ - 1, blk_ + 1, 49, blk_ * 4, blk_ * 2 + 2 };
* @param inOffset offset into inBuf at which the
* compressed data starts
* @param outSize byte count of the uncompressed data
* @return new buffer of size outSize containing
* uncompressed data
*/
public static Buf uncompress( Compression compression, Buf inBuf,
long inOffset, long outSize )
throws IOException {
logger_.config( "Uncompressing CDF data to new " + outSize
+ "-byte buffer" );
InputStream uin =
compression
.uncompressStream( new BufferedInputStream(
inBuf.createInputStream( inOffset ) ) );
Buf ubuf = inBuf.fillNewBuf( outSize, uin );
uin.close();
return ubuf;
}
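// A usage sketch (assumed variable names, not from the original source):
// inflate a GZIP-compressed block at file offset off whose uncompressed
// size usize is known from the containing record.
//
//     Buf ubuf = uncompress( Compression.GZIP, buf, off, usize );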
/**
* Utility method to acquire the data from an NIO buffer in the form
* of an InputStream.
*
* @param bbuf NIO buffer
* @return stream
*/
public static InputStream createByteBufferInputStream( ByteBuffer bbuf ) {
return new ByteBufferInputStream( bbuf );
}
// Utility methods to read arrays of data from buffers.
// These essentially provide bulk absolute NIO buffer read operations;
// The NIO Buffer classes themselves only provide relative read operations
// for bulk reads.
//
// We work differently according to whether we are in fact reading
// single value or multiple values. This is because NIO Buffer
// classes have absolute read methods for scalar reads, but only
// relative read methods for array reads (i.e. you need to position
// a pointer and then do the read). For thread safety we need to
// synchronize in that case to make sure somebody else doesn't
// reposition before the read takes place.
//
// For the array reads, we also recast the ByteBuffer to a Buffer of
// the appropriate type for the data being read.
//
// Both these steps are taken on the assumption that the bulk reads
// are more efficient than multiple byte reads perhaps followed by
// bit manipulation where required. The NIO javadocs suggest that
// assumption is true, but I haven't tested it. Doing it the other
// way would avoid the need for synchronization.
/**
* Utility method to read a fixed length ASCII string from an NIO buffer.
* If a character 0x00 is encountered before the end of the byte sequence,
* it is considered to terminate the string.
*
* @param bbuf NIO buffer
* @param ioff offset into buffer of start of string
* @param nbyte number of bytes in string
*/
static String readAsciiString( ByteBuffer bbuf, int ioff, int nbyte ) {
byte[] abuf = new byte[ nbyte ];
synchronized ( bbuf ) {
bbuf.position( ioff );
bbuf.get( abuf, 0, nbyte );
}
StringBuffer sbuf = new StringBuffer( nbyte );
for ( int i = 0; i < nbyte; i++ ) {
byte b = abuf[ i ];
if ( b == 0 ) {
break;
}
else {
sbuf.append( (char) b );
}
}
return sbuf.toString();
}
/**
* Utility method to read an array of byte values from an NIO buffer
* into an array.
*
* @param bbuf buffer
* @param ioff offset into bbuf of data start
* @param count number of values to read
* @param a array into which values will be read, starting at element 0
*/
static void readBytes( ByteBuffer bbuf, int ioff, int count, byte[] a ) {
if ( count == 1 ) {
a[ 0 ] = bbuf.get( ioff );
}
else {
synchronized ( bbuf ) {
bbuf.position( ioff );
bbuf.get( a, 0, count );
}
}
}
/**
* Utility method to read an array of short values from an NIO buffer
* into an array.
*
* @param bbuf buffer
* @param ioff offset into bbuf of data start
* @param count number of values to read
* @param a array into which values will be read, starting at element 0
*/
static void readShorts( ByteBuffer bbuf, int ioff, int count, short[] a ) {
if ( count == 1 ) {
a[ 0 ] = bbuf.getShort( ioff );
}
else {
synchronized ( bbuf ) {
bbuf.position( ioff );
bbuf.asShortBuffer().get( a, 0, count );
}
}
}
/**
* Utility method to read an array of int values from an NIO buffer
* into an array.
*
* @param bbuf buffer
* @param ioff offset into bbuf of data start
* @param count number of values to read
* @param a array into which values will be read, starting at element 0
*/
static void readInts( ByteBuffer bbuf, int ioff, int count, int[] a ) {
if ( count == 1 ) {
a[ 0 ] = bbuf.getInt( ioff );
}
else {
synchronized ( bbuf ) {
bbuf.position( ioff );
bbuf.asIntBuffer().get( a, 0, count );
}
}
}
/**
* Utility method to read an array of long values from an NIO buffer
* into an array.
*
* @param bbuf buffer
* @param ioff offset into bbuf of data start
* @param count number of values to read
* @param a array into which values will be read, starting at element 0
*/
static void readLongs( ByteBuffer bbuf, int ioff, int count, long[] a ) {
if ( count == 1 ) {
a[ 0 ] = bbuf.getLong( ioff );
}
else {
synchronized ( bbuf ) {
bbuf.position( ioff );
bbuf.asLongBuffer().get( a, 0, count );
}
}
}
/**
* Utility method to read an array of float values from an NIO buffer
* into an array.
*
* @param bbuf buffer
* @param ioff offset into bbuf of data start
* @param count number of values to read
* @param a array into which values will be read, starting at element 0
*/
static void readFloats( ByteBuffer bbuf, int ioff, int count, float[] a ) {
if ( count == 1 ) {
a[ 0 ] = bbuf.getFloat( ioff );
}
else {
synchronized ( bbuf ) {
bbuf.position( ioff );
bbuf.asFloatBuffer().get( a, 0, count );
}
}
}
/**
* Utility method to read an array of double values from an NIO buffer
* into an array.
*
* @param bbuf buffer
* @param ioff offset into bbuf of data start
* @param count number of values to read
* @param a array into which values will be read, starting at element 0
*/
static void readDoubles( ByteBuffer bbuf, int ioff, int count,
double[] a ) {
if ( count == 1 ) {
a[ 0 ] = bbuf.getDouble( ioff );
}
else {
synchronized ( bbuf ) {
bbuf.position( ioff );
bbuf.asDoubleBuffer().get( a, 0, count );
}
}
}
/**
* Input stream that reads from an NIO buffer.
* You'd think there was an implementation of this in the J2SE somewhere,
* but I can't see one.
*/
private static class ByteBufferInputStream extends InputStream {
private final ByteBuffer bbuf_;
/**
* Constructor.
*
* @param bbuf NIO buffer supplying data
*/
ByteBufferInputStream( ByteBuffer bbuf ) {
bbuf_ = bbuf;
}
@Override
public int read() {
return bbuf_.remaining() > 0 ? bbuf_.get() : -1;
}
@Override
public int read( byte[] b ) {
return read( b, 0, b.length );
}
@Override
public int read( byte[] b, int off, int len ) {
if ( len == 0 ) {
return 0;
}
int remain = bbuf_.remaining();
if ( remain == 0 ) {
return -1;
}
else {
int nr = Math.min( remain, len );
bbuf_.get( b, off, nr );
return nr;
}
}
@Override
public boolean markSupported() {
return true;
}
@Override
public void mark( int readLimit ) {
bbuf_.mark();
}
@Override
public void reset() {
bbuf_.reset();
}
@Override
public long skip( long n ) {
int nsk = (int) Math.min( n, bbuf_.remaining() );
bbuf_.position( bbuf_.position() + nsk );
return nsk;
}
@Override
public int available() {
return bbuf_.remaining();
}
}
}
jcdf-1.2-3/CdfContent.java 0000664 0000000 0000000 00000024102 13203340177 0015313 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf;
import java.io.IOException;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import uk.ac.bristol.star.cdf.record.AttributeDescriptorRecord;
import uk.ac.bristol.star.cdf.record.AttributeEntryDescriptorRecord;
import uk.ac.bristol.star.cdf.record.Buf;
import uk.ac.bristol.star.cdf.record.CdfDescriptorRecord;
import uk.ac.bristol.star.cdf.record.DataReader;
import uk.ac.bristol.star.cdf.record.GlobalDescriptorRecord;
import uk.ac.bristol.star.cdf.record.Record;
import uk.ac.bristol.star.cdf.record.RecordFactory;
import uk.ac.bristol.star.cdf.record.VariableDescriptorRecord;
/**
* Provides all the data and metadata in a CDF file in a high-level
* read-only easy to use form.
*
* @author Mark Taylor
* @since 20 Jun 2013
*/
public class CdfContent {
private final CdfInfo cdfInfo_;
private final GlobalAttribute[] globalAtts_;
private final VariableAttribute[] variableAtts_;
private final Variable[] variables_;
/**
* Constructs a CdfContent from a CdfReader.
* This reads the attribute metadata and entries and variable metadata.
* Record data for variables is not read at construction time.
*
* @param crdr object which knows how to read CDF records
*/
public CdfContent( CdfReader crdr ) throws IOException {
// Get basic information from reader.
Buf buf = crdr.getBuf();
RecordFactory recordFact = crdr.getRecordFactory();
CdfDescriptorRecord cdr = crdr.getCdr();
// Get global descriptor record.
GlobalDescriptorRecord gdr =
recordFact.createRecord( buf, cdr.gdrOffset,
GlobalDescriptorRecord.class );
// Store global format information.
boolean rowMajor = Record.hasBit( cdr.flags, 0 );
int[] rDimSizes = gdr.rDimSizes;
int leapSecondLastUpdated = gdr.leapSecondLastUpdated;
cdfInfo_ = new CdfInfo( rowMajor, rDimSizes, leapSecondLastUpdated );
// Read the rVariable and zVariable records.
VariableDescriptorRecord[] rvdrs =
walkVariableList( buf, recordFact, gdr.nrVars, gdr.rVdrHead );
VariableDescriptorRecord[] zvdrs =
walkVariableList( buf, recordFact, gdr.nzVars, gdr.zVdrHead );
// Collect the rVariables and zVariables into a single list.
// Turn the rVariable and zVariable records into a single list of
// Variable objects.
VariableDescriptorRecord[] vdrs = arrayConcat( rvdrs, zvdrs );
variables_ = new Variable[ vdrs.length ];
for ( int iv = 0; iv < vdrs.length; iv++ ) {
variables_[ iv ] = new Variable( vdrs[ iv ], cdfInfo_, recordFact );
}
// Read the attributes records (global and variable attributes
// are found in the same list).
AttributeDescriptorRecord[] adrs =
walkAttributeList( buf, recordFact, gdr.numAttr, gdr.adrHead );
// Read the entries for all the attributes, and turn the records
// with their entries into two lists, one of global attributes and
// one of variable attributes.
* Intended to be used from the command line via the main
* method.
* The function is roughly comparable to the cdfirsdump
* command in the CDF distribution.
*
*/
if ( html_ ) {
out_.println( "<html><body><pre>" );
}
for ( int ix = 0; offset < eof; ix++ ) {
Record rec = recFact.createRecord( buf, offset );
dumpRecord( ix, rec, offset );
if ( cdr == null && rec instanceof CdfDescriptorRecord ) {
cdr = (CdfDescriptorRecord) rec;
gdroff = cdr.gdrOffset;
}
if ( offset == gdroff && rec instanceof GlobalDescriptorRecord ) {
gdr = (GlobalDescriptorRecord) rec;
eof = gdr.eof;
}
offset += rec.getRecordSize();
}
long extra = leng - eof;
if ( extra > 0 ) {
out_.println( " + " + extra + " bytes after final record" );
}
if ( html_ ) {
out_.println( "</pre></body></html>" );
}
}
/**
* Writes information about a single record to the output.
*
* @param index record index
* @param rec record object
* @param offset byte offset into the file of the record
*/
private void dumpRecord( int index, Record rec, long offset ) {
StringBuffer sbuf = new StringBuffer();
if ( html_ ) {
sbuf.append( "<strong>" );
}
sbuf.append( index )
.append( ":\t" )
.append( rec.getRecordTypeAbbreviation() )
.append( "\t" )
.append( rec.getRecordType() )
.append( "\t" )
.append( rec.getRecordSize() )
.append( "\t" )
.append( formatOffsetId( offset ) );
if ( html_ ) {
sbuf.append( "</strong>" );
}
out_.println( sbuf.toString() );
// If required write the field values. Rather than list them
// for each record type, just obtain them by introspection.
if ( writeFields_ ) {
Field[] fields = rec.getClass().getFields();
for ( int i = 0; i < fields.length; i++ ) {
Field field = fields[ i ];
if ( isCdfRecordField( field ) ) {
String name = field.getName();
Object value;
try {
value = field.get( rec );
}
catch ( IllegalAccessException e ) {
throw new RuntimeException( "Reflection error", e );
}
out_.println( formatFieldValue( name, value,
isOffsetField( field ) ) );
}
}
}
}
/**
* Determines whether a given object field is a field of the CDF record.
*
* @param field field of java Record subclass
* @return true iff field represents a field of the corresponding CDF
* record type
*/
private boolean isCdfRecordField( Field field ) {
if ( field.getAnnotation( CdfField.class ) != null ) {
int mods = field.getModifiers();
assert Modifier.isFinal( mods )
&& Modifier.isPublic( mods )
&& ! Modifier.isStatic( mods );
return true;
}
else {
return false;
}
}
/**
* Determines whether a given object field represents a file offset.
*
* @param field field of java Record subclass
* @return true iff field represents a scalar or array file offset value
*/
private boolean isOffsetField( Field field ) {
return field.getAnnotation( OffsetField.class ) != null;
}
/**
* Formats a field name/value pair for output.
*
* @param name field name
* @param value field value
* @param isOffset true iff the value represents a file offset
* @return formatted string
*/
private String formatFieldValue( String name, Object value,
boolean isOffset ) {
StringBuffer sbuf = new StringBuffer();
sbuf.append( spaces( 4 ) );
sbuf.append( name )
.append( ":" );
sbuf.append( spaces( 28 - sbuf.length() ) );
if ( value == null ) {
}
else if ( value.getClass().isArray() ) {
int len = Array.getLength( value );
if ( isOffset ) {
assert value instanceof long[];
long[] larray = (long[]) value;
for ( int i = 0; i < len; i++ ) {
if ( i > 0 ) {
sbuf.append( ", " );
}
sbuf.append( formatOffsetRef( larray[ i ] ) );
}
}
else {
for ( int i = 0; i < len; i++ ) {
if ( i > 0 ) {
sbuf.append( ", " );
}
sbuf.append( Array.get( value, i ) );
}
}
}
else if ( isOffset ) {
assert value instanceof Long;
sbuf.append( formatOffsetRef( ((Long) value).longValue() ) );
}
else {
sbuf.append( value.toString() );
}
return sbuf.toString();
}
/**
* Format a value for output if it represents a possible target of
* a pointer.
*
* @param offset pointer target value
* @return string for output
*/
private String formatOffsetId( long offset ) {
String txt = "0x" + Long.toHexString( offset );
return html_ ? "<a name='" + txt + "'>" + txt + "</a>"
: txt;
}
/**
* Format a value for output if it apparently represents a pointer
* to a particular file offset.
*
* @param offset target file offset
* @return string for output
*/
private String formatOffsetRef( long offset ) {
String txt = "0x" + Long.toHexString( offset );
// Only format strictly positive values. In some circumstances
// -1 and 0 are used as special values indicating no reference exists.
// The first record in any case starts at 0x8 (after the magic numbers)
// so any such values can't be genuine offsets.
return ( html_ && offset > 0L )
? "<a href='#" + txt + "'>" + txt + "</a>"
: txt;
}
/**
* Construct a padding string.
*
* @param count number of spaces
* @return string composed only of count
spaces
*/
static String spaces( int count ) {
StringBuffer sbuf = new StringBuffer( count );
for ( int i = 0; i < count; i++ ) {
sbuf.append( ' ' );
}
return sbuf.toString();
}
/**
* Does the work for the command line tool, handling arguments.
* Success is indicated by the return value.
*
* @param args command-line arguments
* @return 0 for success, non-zero for failure
*/
public static int runMain( String[] args ) throws IOException {
String usage = new StringBuffer()
.append( "\n Usage:" )
.append( CdfDump.class.getName() )
.append( " [-help]" )
.append( " [-verbose]" )
.append( " [-fields]" )
.append( " [-html]" )
* Intended to be used from the command line via the main
* method.
* The output format is somewhat reminiscent of the cdfdump
* command in the CDF distribution.
*
* @author Mark Taylor
* @since 21 Jun 2013
*/
public class CdfList {
private final CdfContent cdf_;
private final PrintStream out_;
private final boolean writeData_;
private static final String[] NOVARY_MARKS = { "{ ", " }" };
private static final String[] VIRTUAL_MARKS = { "[ ", " ]" };
private static final String[] REAL_MARKS = { " ", "" };
/**
* Constructor.
*
* @param cdf CDF content
* @param out output stream for listing
* @param writeData true if data values as well as metadata are to
* be written
*/
public CdfList( CdfContent cdf, PrintStream out, boolean writeData ) {
cdf_ = cdf;
out_ = out;
writeData_ = writeData;
}
/**
* Does the work, writing output.
*/
public void run() throws IOException {
// Read the CDF.
GlobalAttribute[] gAtts = cdf_.getGlobalAttributes();
VariableAttribute[] vAtts = cdf_.getVariableAttributes();
Variable[] vars = cdf_.getVariables();
// Write global attribute information.
header( "Global Attributes" );
for ( int iga = 0; iga < gAtts.length; iga++ ) {
GlobalAttribute gAtt = gAtts[ iga ];
out_.println( " " + gAtt.getName() );
AttributeEntry[] entries = gAtt.getEntries();
for ( int ie = 0; ie < entries.length; ie++ ) {
out_.println( " " + entries[ ie ] );
}
}
// Write variable information.
for ( int iv = 0; iv < vars.length; iv++ ) {
out_.println();
Variable var = vars[ iv ];
header( "Variable " + var.getNum() + ": " + var.getName()
+ " --- " + var.getSummary() );
for ( int ia = 0; ia < vAtts.length; ia++ ) {
VariableAttribute vAtt = vAtts[ ia ];
AttributeEntry entry = vAtt.getEntry( var );
if ( entry != null ) {
out_.println( " " + vAtt.getName() + ":\t" + entry );
}
}
// Optionally write variable data as well.
if ( writeData_ ) {
DataType dataType = var.getDataType();
Object abuf = var.createRawValueArray();
boolean isVar = var.getRecordVariance();
int nrec = var.getRecordCount();
int nrdigit = Integer.toString( nrec ).length();
for ( int ir = 0; ir < nrec; ir++ ) {
var.readRawRecord( ir, abuf );
final String[] marks;
if ( ! isVar ) {
marks = NOVARY_MARKS;
}
else if ( ! var.hasRecord( ir ) ) {
marks = VIRTUAL_MARKS;
}
else {
marks = REAL_MARKS;
}
String sir = Integer.toString( ir );
StringBuffer sbuf = new StringBuffer()
.append( marks[ 0 ] )
.append( CdfDump.spaces( nrdigit - sir.length() ) )
.append( sir )
.append( ':' )
.append( '\t' )
.append( formatValues( abuf, dataType ) )
.append( marks[ 1 ] );
out_.println( sbuf.toString() );
}
}
}
}
/**
* Applies string formatting to a value of a given data type.
*
* @param abuf array buffer containing data
* @param dataType data type for data
* @return string representation of value
*/
private String formatValues( Object abuf, DataType dataType ) {
StringBuffer sbuf = new StringBuffer();
if ( abuf == null ) {
}
else if ( abuf.getClass().isArray() ) {
int groupSize = dataType.getGroupSize();
int len = Array.getLength( abuf );
for ( int i = 0; i < len; i += groupSize ) {
if ( i > 0 ) {
sbuf.append( ", " );
}
sbuf.append( dataType.formatArrayValue( abuf, i ) );
}
}
else {
sbuf.append( dataType.formatScalarValue( abuf ) );
}
return sbuf.toString();
}
/**
* Writes a header to the output listing.
*
* @param txt header text
*/
private void header( String txt ) {
out_.println( txt );
StringBuffer sbuf = new StringBuffer( txt.length() );
for ( int i = 0; i < txt.length(); i++ ) {
sbuf.append( '-' );
}
out_.println( sbuf.toString() );
}
/**
* Does the work for the command line tool, handling arguments.
* Success is indicated by the return value.
*
* @param args command-line arguments
* @return 0 for success, non-zero for failure
*/
public static int runMain( String[] args ) throws IOException {
// Usage string.
String usage = new StringBuffer()
.append( "\n Usage: " )
.append( CdfList.class.getName() )
.append( " [-help]" )
.append( " [-verbose]" )
.append( " [-data]" )
* @return true iff the first 8 bytes of intro are
* a CDF magic number
*/
public static boolean isMagic( byte[] intro ) {
if ( intro.length < 8 ) {
return false;
}
return decodeMagic( readInt( intro, 0 ), readInt( intro, 4 ) ) != null;
}
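// A usage sketch (not from the original source): read the first 8 bytes
// of a file and sniff them.
//
//     byte[] intro = new byte[ 8 ];
//     new DataInputStream( new FileInputStream( file ) ).readFully( intro );
//     boolean looksLikeCdf = isMagic( intro );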
/**
* Reads a 4-byte big-endian integer from a byte array.
*
* @param b byte array
* @param ioff index into b of integer start
* @return int value
*/
private static int readInt( byte[] b, int ioff ) {
return ( b[ ioff++ ] & 0xff ) << 24
| ( b[ ioff++ ] & 0xff ) << 16
| ( b[ ioff++ ] & 0xff ) << 8
| ( b[ ioff++ ] & 0xff ) << 0;
}
/**
* Interprets two integer values as the magic number sequence at the
* start of a CDF file, and returns an object encoding the information
* about CDF encoding specifics.
*
* @param magic1 big-endian int at CDF file offset 0x00
* @param magic2 big-endian int at CDF file offset 0x04
* @return object describing CDF encoding specifics,
* or null if this is not a recognised CDF magic number
*/
private static CdfVariant decodeMagic( int magic1, int magic2 ) {
final String label;
final boolean bit64;
final int nameLeng;
final boolean compressed;
if ( magic1 == 0xcdf30001 ) { // version 3.0 - 3.4 (3.*?)
label = "V3";
bit64 = true;
nameLeng = 256;
if ( magic2 == 0x0000ffff ) {
compressed = false;
}
else if ( magic2 == 0xcccc0001 ) {
compressed = true;
}
else {
return null;
}
}
else if ( magic1 == 0xcdf26002 ) { // version 2.6/2.7
label = "V2.6/2.7";
bit64 = false;
nameLeng = 64;
if ( magic2 == 0x0000ffff ) {
compressed = false;
}
else if ( magic2 == 0xcccc0001 ) {
compressed = true;
}
else {
return null;
}
}
else if ( magic1 == 0x0000ffff ) { // pre-version 2.6
label = "pre-V2.6";
bit64 = false;
nameLeng = 64; // true as far as I know
if ( magic2 == 0x0000ffff ) {
compressed = false;
}
else {
return null;
}
}
else {
return null;
}
return new CdfVariant( label, bit64, nameLeng, compressed );
}
/**
* Encapsulates CDF encoding details as determined from the magic number.
*/
private static class CdfVariant {
final String label_;
final boolean bit64_;
final int nameLeng_;
final boolean compressed_;
/**
* Constructor.
*
* @param label short string indicating CDF format version number
* @param bit64 true for 8-byte pointers, false for 4-byte pointers
* @param nameLeng number of bytes used for attribute and variable
* names
* @param compressed true iff the CDF file uses whole-file compression
*/
CdfVariant( String label, boolean bit64, int nameLeng,
boolean compressed ) {
label_ = label;
bit64_ = bit64;
nameLeng_ = nameLeng;
compressed_ = compressed;
}
}
}
jcdf-1.2-3/CompressedCdfRecord.java 0000664 0000000 0000000 00000002133 13203340177 0017144 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
/**
* Field data for CDF record of type Compressed CDF Record.
*
* @author Mark Taylor
* @since 19 Jun 2013
*/
public class CompressedCdfRecord extends Record {
@CdfField @OffsetField public final long cprOffset;
@CdfField public final long uSize;
@CdfField public final int rfuA;
private final long dataOffset_;
/**
* Constructor.
*
* @param plan basic record information
*/
public CompressedCdfRecord( RecordPlan plan ) throws IOException {
super( plan, "CCR", 10 );
Buf buf = plan.getBuf();
Pointer ptr = plan.createContentPointer();
this.cprOffset = buf.readOffset( ptr );
this.uSize = buf.readOffset( ptr );
this.rfuA = checkIntValue( buf.readInt( ptr ), 0 );
dataOffset_ = ptr.get();
}
/**
* Returns the file offset at which the compressed data in
* this record starts.
*
* @return file offset for start of data field
*/
public long getDataOffset() {
return dataOffset_;
}
}
jcdf-1.2-3/CompressedParametersRecord.java 0000664 0000000 0000000 00000001654 13203340177 0020562 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
/**
* Field data for CDF record of type Compressed Parameters Record.
*
* @author Mark Taylor
* @since 19 Jun 2013
*/
public class CompressedParametersRecord extends Record {
@CdfField public final int cType;
@CdfField public final int rfuA;
@CdfField public final int pCount;
@CdfField public final int[] cParms;
/**
* Constructor.
*
* @param plan basic record information
*/
public CompressedParametersRecord( RecordPlan plan ) throws IOException {
super( plan, "CPR", 11 );
Buf buf = plan.getBuf();
Pointer ptr = plan.createContentPointer();
this.cType = buf.readInt( ptr );
this.rfuA = checkIntValue( buf.readInt( ptr ), 0 );
this.pCount = buf.readInt( ptr );
this.cParms = readIntArray( buf, ptr, this.pCount );
checkEndRecord( ptr );
}
}
jcdf-1.2-3/CompressedVariableValuesRecord.java 0000664 0000000 0000000 00000002043 13203340177 0021355 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
/**
* Field data for CDF record of type Compressed Variable Values Record.
*
* @author Mark Taylor
* @since 19 Jun 2013
*/
public class CompressedVariableValuesRecord extends Record {
@CdfField public final int rfuA;
@CdfField public final long cSize;
private final long dataOffset_;
/**
* Constructor.
*
* @param plan basic record information
*/
public CompressedVariableValuesRecord( RecordPlan plan )
throws IOException {
super( plan, "CVVR", 13 );
Buf buf = plan.getBuf();
Pointer ptr = plan.createContentPointer();
this.rfuA = checkIntValue( buf.readInt( ptr ), 0 );
this.cSize = buf.readOffset( ptr );
dataOffset_ = ptr.get();
}
/**
* Returns the file offset at which the compressed data in
* this record starts.
*
* @return file offset for start of data field
*/
public long getDataOffset() {
return dataOffset_;
}
}
jcdf-1.2-3/Compression.java 0000664 0000000 0000000 00000006647 13203340177 0015603 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;
import uk.ac.bristol.star.cdf.CdfFormatException;
/**
* Defines a data compression type supported for compressing CDF data.
*
* @author Mark Taylor
* @since 19 Jun 2013
*/
public abstract class Compression {
/** No compression. */
public static final Compression NONE = new Compression( "NONE" ) {
public InputStream uncompressStream( InputStream in ) {
return in;
}
};
/** Run length encoding. */
public static final Compression RLE = new Compression( "RLE" ) {
public InputStream uncompressStream( InputStream in )
throws IOException {
return new RunLengthInputStream( in, (byte) 0 );
}
};
/** Huffman encoding. */
public static final Compression HUFF = new Compression( "HUFF" ) {
public InputStream uncompressStream( InputStream in )
throws IOException {
return new BitExpandInputStream.HuffmanInputStream( in );
}
};
/** Adaptive Huffman encoding. */
public static final Compression AHUFF = new Compression( "AHUFF" ) {
public InputStream uncompressStream( InputStream in )
throws IOException {
return new BitExpandInputStream.AdaptiveHuffmanInputStream( in );
}
};
/** Gzip compression. */
public static final Compression GZIP = new Compression( "GZIP" ) {
public InputStream uncompressStream( InputStream in )
throws IOException {
return new GZIPInputStream( in );
}
};
private final String name_;
/**
* Constructor.
*
* @param name compression format name
*/
protected Compression( String name ) {
name_ = name;
}
/**
* Turns a stream containing compressed data into a stream containing
* uncompressed data.
*
* @param in compressed input stream
* @return uncompressed input stream
*/
public abstract InputStream uncompressStream( InputStream in )
throws IOException;
/**
* Returns this compression format's name.
*
* @return name
*/
public String getName() {
return name_;
}
/**
* Returns a Compression object corresponding to a given compression code.
*
* @param cType compression code, as taken from the CPR cType field
* @return compression object
* @throws CdfFormatException if the compression type is unknown
*/
public static Compression getCompression( int cType )
throws CdfFormatException {
// The mapping is missing from the CDF Internal Format Description
// document, but cdf.h says:
// #define NO_COMPRESSION 0L
// #define RLE_COMPRESSION 1L
// #define HUFF_COMPRESSION 2L
// #define AHUFF_COMPRESSION 3L
// #define GZIP_COMPRESSION 5L
switch ( cType ) {
case 0: return NONE;
case 1: return RLE;
case 2: return HUFF;
case 3: return AHUFF;
case 5: return GZIP;
default:
throw new CdfFormatException( "Unknown compression format "
+ "cType=" + cType );
}
}
}
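// A usage sketch (not from the original source): map a CPR cType code
// to a Compression and uncompress a stream with it.
//
//     Compression comp = Compression.getCompression( 5 );  // GZIP
//     InputStream uin = comp.uncompressStream( compressedIn );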
jcdf-1.2-3/DataReader.java 0000664 0000000 0000000 00000004063 13203340177 0015264 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
import java.lang.reflect.Array;
import uk.ac.bristol.star.cdf.CdfFormatException;
import uk.ac.bristol.star.cdf.DataType;
/**
* Reads items with a given data type from a buffer into an array.
*
* @author Mark Taylor
* @since 20 Jun 2013
*/
public class DataReader {
private final DataType dataType_;
private final int nelPerItem_;
private final int nItem_;
/**
* Constructor.
*
* @param dataType data type
* @param nelPerItem number of dataType elements per read item;
* usually 1 except for character data
* @param nItem number of items of given data type in the array,
* for scalar records it will be 1
*/
public DataReader( DataType dataType, int nelPerItem, int nItem ) {
dataType_ = dataType;
nelPerItem_ = nelPerItem;
nItem_ = nItem;
}
/**
* Creates a workspace array which can contain a value read for one record.
* The return value will be an array of a primitive type or String.
*
* @return workspace array for this reader
*/
public Object createValueArray() {
return Array.newInstance( dataType_.getArrayElementClass(),
nItem_ * dataType_.getGroupSize() );
}
/**
* Reads a value from a data buffer into a workspace array.
*
* @param buf data buffer
* @param offset byte offset into buf of data start
* @param valueArray object created by createValueArray
* into which results will be read
*/
public void readValue( Buf buf, long offset, Object valueArray )
throws IOException {
dataType_.readValues( buf, offset, nelPerItem_, valueArray, nItem_ );
}
/**
* Returns the size in bytes of one record as stored in the data buffer.
*
* @return record size in bytes
*/
public int getRecordSize() {
return dataType_.getByteCount() * nelPerItem_ * nItem_;
}
}
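// A usage sketch (assumed values, not from the original source): read
// three 4-byte floats from a record starting at offset.
//
//     DataReader rdr = new DataReader( DataType.REAL4, 1, 3 );
//     float[] work = (float[]) rdr.createValueArray();
//     rdr.readValue( buf, offset, work );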
jcdf-1.2-3/DataType.java 0000664 0000000 0000000 00000052705 13203340177 0015011 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf;
import java.io.IOException;
import java.lang.reflect.Array;
import uk.ac.bristol.star.cdf.record.Buf;
import uk.ac.bristol.star.cdf.record.Pointer;
/**
* Enumerates the data types supported by the CDF format.
*
* @author Mark Taylor
* @since 20 Jun 2013
*/
public abstract class DataType {
private final String name_;
private final int byteCount_;
private final int groupSize_;
private final Class> arrayElementClass_;
private final Class> scalarClass_;
private final Object dfltPadValueArray_;
private boolean hasMultipleElementsPerItem_;
public static final DataType INT1 = new Int1DataType( "INT1" );
public static final DataType INT2 = new Int2DataType( "INT2" );
public static final DataType INT4 = new Int4DataType( "INT4" );
public static final DataType INT8 = new Int8DataType( "INT8" );
public static final DataType UINT1 = new UInt1DataType( "UINT1" );
public static final DataType UINT2 = new UInt2DataType( "UINT2" );
public static final DataType UINT4 = new UInt4DataType( "UINT4" );
public static final DataType REAL4 = new Real4DataType( "REAL4" );
public static final DataType REAL8 = new Real8DataType( "REAL8" );
public static final DataType CHAR = new CharDataType( "CHAR" );
public static final DataType EPOCH16 = new Epoch16DataType( "EPOCH16" );
public static final DataType BYTE = new Int1DataType( "BYTE" );
public static final DataType FLOAT = new Real4DataType( "FLOAT" );
public static final DataType DOUBLE = new Real8DataType( "DOUBLE" );
public static final DataType EPOCH = new EpochDataType( "EPOCH" );
public static final DataType TIME_TT2000 =
new Tt2kDataType( "TIME_TT2000", -1 );
public static final DataType UCHAR = new CharDataType( "UCHAR" );
/**
* Constructor.
*
* @param name type name
* @param byteCount number of bytes to store one item
* @param groupSize number of elements of type
* arrayElementClass that are read
* into the value array for a single item read
* @param arrayElementClass component class of the value array
* @param scalarClass object type returned by getScalar
* @param dfltPadValueArray 1-item array of arrayElementClass values
* containing the default pad value for this type
* @param hasMultipleElementsPerItem true iff a variable number of
* array elements may correspond to a single item
*/
private DataType( String name, int byteCount, int groupSize,
Class> arrayElementClass, Class> scalarClass,
Object dfltPadValueArray,
boolean hasMultipleElementsPerItem ) {
name_ = name;
byteCount_ = byteCount;
groupSize_ = groupSize;
arrayElementClass_ = arrayElementClass;
scalarClass_ = scalarClass;
dfltPadValueArray_ = dfltPadValueArray;
hasMultipleElementsPerItem_ = hasMultipleElementsPerItem;
}
/**
* Constructor for a single-element-per-item type with a zero-like
* pad value.
*
* @param name type name
* @param byteCount number of bytes to store one item
* @param groupSize number of elements of type
* arrayElementClass that are read
* into the value array for a single item read
* @param arrayElementClass component class of the value array
* @param scalarClass object type returned by getScalar
*/
private DataType( String name, int byteCount, int groupSize,
Class> arrayElementClass, Class> scalarClass ) {
this( name, byteCount, groupSize, arrayElementClass, scalarClass,
Array.newInstance( arrayElementClass, groupSize ), false );
}
/**
* Returns the name for this data type.
*
* @return data type name
*/
public String getName() {
return name_;
}
/**
* Returns the number of bytes used in a CDF to store a single item
* of this type.
*
* @return size in bytes
*/
public int getByteCount() {
return byteCount_;
}
/**
* Returns the element class of an array that this data type can
* be read into.
* In most cases this is a primitive type or String.
*
* @return array raw value element class
*/
public Class> getArrayElementClass() {
return arrayElementClass_;
}
/**
* Returns the type of objects obtained by the getScalar
* method.
*
* @return scalar type associated with this data type
*/
public Class> getScalarClass() {
return scalarClass_;
}
/**
* Number of elements of type arrayElementClass that are read into
* valueArray for a single item read.
* This is usually 1, but not, for instance, for EPOCH16.
*
* @return number of array elements per item
*/
public int getGroupSize() {
return groupSize_;
}
/**
* Returns the index into a value array which corresponds to the
* item'th element.
*
* @param itemIndex index of item
* @return itemIndex * groupSize
*/
public int getArrayIndex( int itemIndex ) {
return groupSize_ * itemIndex;
}
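// For example, EPOCH16 has a group size of 2, so item i of a raw
// value array occupies array elements 2*i and 2*i+1, and
// getArrayIndex( i ) returns 2*i.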
/**
* True if this type may turn a variable number of elements from the
* value array into a single read item. This is usually false,
* but true for character types, which turn into strings.
*
* @return true iff type may have multiple elements per read item
*/
public boolean hasMultipleElementsPerItem() {
return hasMultipleElementsPerItem_;
}
/**
* Returns an array of array-class values containing a single item
* with the default pad value for this type.
*
* @return default raw pad value array
* @see "Section 2.3.20 of CDF User's Guide"
*/
public Object getDefaultPadValueArray() {
return dfltPadValueArray_;
}
/**
* Reads data of this data type from a buffer into an appropriately
* typed value array.
*
* @param buf data buffer
* @param offset byte offset into buffer at which data starts
* @param nelPerItem number of elements per item;
* usually 1, but may not be for strings
* @param valueArray array to receive result data
* @param count number of items to read
*/
public abstract void readValues( Buf buf, long offset, int nelPerItem,
Object valueArray, int count )
throws IOException;
/**
* Reads a single item from an array which has previously been
* populated by {@link #readValues readValues}.
* The class of the returned value is that returned by
* {@link #getScalarClass}.
*
* The arrayIndex argument is the index into the
* array object, not necessarily the item index -
* see the {@link #getArrayIndex getArrayIndex} method.
*
* @param valueArray array filled with data for this data type
* @param arrayIndex index into array at which the item to read is found
* @return scalar representation of object at position index
* in valueArray
*/
public abstract Object getScalar( Object valueArray, int arrayIndex );
/**
* Provides a string view of a scalar value obtained for this data type.
*
* @param value value returned by getScalar
* @return string representation
*/
public String formatScalarValue( Object value ) {
return value == null ? "" : value.toString();
}
/**
* Provides a string view of an item obtained from an array value
* of this data type.
* The arrayIndex argument is the index into the
* array object, not necessarily the item index -
* see the {@link #getArrayIndex getArrayIndex} method.
*
* @param array array value populated by readValues
* @param arrayIndex index into array
* @return string representation
*/
public String formatArrayValue( Object array, int arrayIndex ) {
Object value = Array.get( array, arrayIndex );
return value == null ? "" : value.toString();
}
@Override
public String toString() {
return name_;
}
/**
* Returns a DataType corresponding to a CDF data type code,
* possibly customised for a particular CDF file.
*
* The returned value is the same as for getDataType(int),
* except for TIME_TT2000 columns, in which case the last known leap
* second may be taken into account.
*
* @param dataType dataType field of AEDR or VDR
* @param cdfInfo specifics of CDF file
* @return data type object
*/
public static DataType getDataType( int dataType, CdfInfo cdfInfo )
throws CdfFormatException {
DataType type = getDataType( dataType );
return type == TIME_TT2000
? new Tt2kDataType( type.getName(),
cdfInfo.getLeapSecondLastUpdated() )
: type;
}
/**
* Returns the DataType object corresponding to a CDF data type code.
*
* @param dataType dataType field of AEDR or VDR
* @return data type object
*/
public static DataType getDataType( int dataType )
throws CdfFormatException {
switch ( dataType ) {
case 1: return INT1;
case 2: return INT2;
case 4: return INT4;
case 8: return INT8;
case 11: return UINT1;
case 12: return UINT2;
case 14: return UINT4;
case 41: return BYTE;
case 21: return REAL4;
case 22: return REAL8;
case 44: return FLOAT;
case 45: return DOUBLE;
case 31: return EPOCH;
case 32: return EPOCH16;
case 33: return TIME_TT2000;
case 51: return CHAR;
case 52: return UCHAR;
default:
throw new CdfFormatException( "Unknown data type " + dataType );
}
}
/**
* DataType for signed 1-byte integer.
*/
private static final class Int1DataType extends DataType {
Int1DataType( String name ) {
super( name, 1, 1, byte.class, Byte.class );
}
public void readValues( Buf buf, long offset, int nelPerItem,
Object array, int n ) throws IOException {
buf.readDataBytes( offset, n, (byte[]) array );
}
public Object getScalar( Object array, int index ) {
return new Byte( ((byte[]) array)[ index ] );
}
}
/**
* DataType for signed 2-byte integer.
*/
private static final class Int2DataType extends DataType {
Int2DataType( String name ) {
super( name, 2, 1, short.class, Short.class );
}
public void readValues( Buf buf, long offset, int nelPerItem,
Object array, int n ) throws IOException {
buf.readDataShorts( offset, n, (short[]) array );
}
public Object getScalar( Object array, int index ) {
return new Short( ((short[]) array)[ index ] );
}
}
/**
* DataType for signed 4-byte integer.
*/
private static final class Int4DataType extends DataType {
Int4DataType( String name ) {
super( name, 4, 1, int.class, Integer.class );
}
public void readValues( Buf buf, long offset, int nelPerItem,
Object array, int n ) throws IOException {
buf.readDataInts( offset, n, (int[]) array );
}
public Object getScalar( Object array, int index ) {
return new Integer( ((int[]) array)[ index ] );
}
}
/**
* DataType for signed 8-byte integer.
*/
private static class Int8DataType extends DataType {
Int8DataType( String name ) {
super( name, 8, 1, long.class, Long.class );
}
public void readValues( Buf buf, long offset, int nelPerItem,
Object array, int n ) throws IOException {
buf.readDataLongs( offset, n, (long[]) array );
}
public Object getScalar( Object array, int index ) {
return new Long( ((long[]) array)[ index ] );
}
}
/**
* DataType for unsigned 1-byte integer.
* Output values are 2-byte signed integers because of the difficulty
* of handling unsigned integers in java.
*/
private static class UInt1DataType extends DataType {
UInt1DataType( String name ) {
super( name, 1, 1, short.class, Short.class );
}
public void readValues( Buf buf, long offset, int nelPerItem,
Object array, int n ) throws IOException {
Pointer ptr = new Pointer( offset );
short[] sarray = (short[]) array;
for ( int i = 0; i < n; i++ ) {
sarray[ i ] = (short) buf.readUnsignedByte( ptr );
}
}
public Object getScalar( Object array, int index ) {
return new Short( ((short[]) array)[ index ] );
}
}
/**
* DataType for unsigned 2-byte integer.
* Output values are 4-byte signed integers because of the difficulty
* of handling unsigned integers in java.
*/
private static class UInt2DataType extends DataType {
UInt2DataType( String name ) {
super( name, 2, 1, int.class, Integer.class );
}
public void readValues( Buf buf, long offset, int nelPerItem,
Object array, int n ) throws IOException {
Pointer ptr = new Pointer( offset );
int[] iarray = (int[]) array;
boolean bigend = buf.isBigendian();
for ( int i = 0; i < n; i++ ) {
int b0 = buf.readUnsignedByte( ptr );
int b1 = buf.readUnsignedByte( ptr );
iarray[ i ] = bigend ? b1 | ( b0 << 8 )
: b0 | ( b1 << 8 );
}
}
public Object getScalar( Object array, int index ) {
return new Integer( ((int[]) array)[ index ] );
}
}
/**
* DataType for unsigned 4-byte integer.
* Output values are 8-byte signed integers because of the difficulty
* of handling unsigned integers in java.
*/
private static class UInt4DataType extends DataType {
UInt4DataType( String name ) {
super( name, 4, 1, long.class, Long.class );
}
public void readValues( Buf buf, long offset, int nelPerItem,
Object array, int n ) throws IOException {
Pointer ptr = new Pointer( offset );
long[] larray = (long[]) array;
boolean bigend = buf.isBigendian();
for ( int i = 0; i < n; i++ ) {
long b0 = buf.readUnsignedByte( ptr );
long b1 = buf.readUnsignedByte( ptr );
long b2 = buf.readUnsignedByte( ptr );
long b3 = buf.readUnsignedByte( ptr );
larray[ i ] = bigend
? b3 | ( b2 << 8 ) | ( b1 << 16 ) | ( b0 << 24 )
: b0 | ( b1 << 8 ) | ( b2 << 16 ) | ( b3 << 24 );
}
}
public Object getScalar( Object array, int index ) {
return new Long( ((long[]) array )[ index ] );
}
}
/**
* DataType for 4-byte floating point.
*/
private static class Real4DataType extends DataType {
Real4DataType( String name ) {
super( name, 4, 1, float.class, Float.class );
}
public void readValues( Buf buf, long offset, int nelPerItem,
Object array, int n ) throws IOException {
buf.readDataFloats( offset, n, (float[]) array );
}
public Object getScalar( Object array, int index ) {
return new Float( ((float[]) array)[ index ] );
}
}
/**
* DataType for 8-byte floating point.
*/
private static class Real8DataType extends DataType {
Real8DataType( String name ) {
super( name, 8, 1, double.class, Double.class );
}
public void readValues( Buf buf, long offset, int nelPerItem,
Object array, int n ) throws IOException {
buf.readDataDoubles( offset, n, (double[]) array );
}
public Object getScalar( Object array, int index ) {
return new Double( ((double[]) array)[ index ] );
}
}
/**
* DataType for TIME_TT2000. May be qualified by last known leap second.
*/
private static class Tt2kDataType extends Int8DataType {
final int leapSecondLastUpdated_;
final EpochFormatter formatter_;
final long[] dfltPad_ = new long[] { Long.MIN_VALUE + 1 };
Tt2kDataType( String name, int leapSecondLastUpdated ) {
super( name );
leapSecondLastUpdated_ = leapSecondLastUpdated;
formatter_ = new EpochFormatter( leapSecondLastUpdated );
}
@Override
public Object getDefaultPadValueArray() {
return dfltPad_;
}
@Override
public String formatScalarValue( Object value ) {
synchronized ( formatter_ ) {
return formatter_
.formatTimeTt2000( ((Long) value).longValue() );
}
}
@Override
public String formatArrayValue( Object array, int index ) {
synchronized ( formatter_ ) {
return formatter_
.formatTimeTt2000( ((long[]) array)[ index ] );
}
}
@Override
public int hashCode() {
int code = 392552;
code = 23 * code + leapSecondLastUpdated_;
return code;
}
@Override
public boolean equals( Object o ) {
if ( o instanceof Tt2kDataType ) {
Tt2kDataType other = (Tt2kDataType) o;
return this.leapSecondLastUpdated_ ==
other.leapSecondLastUpdated_;
}
else {
return false;
}
}
}
/**
* DataType for 1-byte character.
* Output is as a numElem-character String.
*/
private static class CharDataType extends DataType {
CharDataType( String name ) {
super( name, 1, 1, String.class, String.class,
new String[] { null }, true );
}
public void readValues( Buf buf, long offset, int nelPerItem,
Object array, int n ) throws IOException {
String[] sarray = (String[]) array;
byte[] cbuf = new byte[ nelPerItem * n ];
buf.readDataBytes( offset, nelPerItem * n, cbuf );
for ( int i = 0; i < n; i++ ) {
@SuppressWarnings("deprecation")
String s = new String( cbuf, i * nelPerItem, nelPerItem );
sarray[ i ] = s;
}
}
public Object getScalar( Object array, int index ) {
return ((String[]) array)[ index ];
}
}
/**
* DataType for 8-byte floating point epoch.
*/
private static class EpochDataType extends Real8DataType {
private final EpochFormatter formatter_ = new EpochFormatter();
EpochDataType( String name ) {
super( name );
}
@Override
public String formatScalarValue( Object value ) {
synchronized ( formatter_ ) {
return formatter_.formatEpoch( ((Double) value).doubleValue() );
}
}
@Override
public String formatArrayValue( Object array, int index ) {
synchronized ( formatter_ ) {
return formatter_.formatEpoch( ((double[]) array)[ index ] );
}
}
}
/**
* DataType for 16-byte (2*double) epoch.
* Output is as a 2-element array of doubles.
*/
private static class Epoch16DataType extends DataType {
private final EpochFormatter formatter_ = new EpochFormatter();
Epoch16DataType( String name ) {
super( name, 16, 2, double.class, double[].class );
}
public void readValues( Buf buf, long offset, int nelPerItem,
Object array, int n ) throws IOException {
buf.readDataDoubles( offset, n * 2, (double[]) array );
}
public Object getScalar( Object array, int index ) {
double[] darray = (double[]) array;
return new double[] { darray[ index ], darray[ index + 1 ] };
}
@Override
public String formatScalarValue( Object value ) {
double[] v2 = (double[]) value;
synchronized ( formatter_ ) {
return formatter_.formatEpoch16( v2[ 0 ], v2[ 1 ] );
}
}
@Override
public String formatArrayValue( Object array, int index ) {
double[] darray = (double[]) array;
synchronized ( formatter_ ) {
return formatter_.formatEpoch16( darray[ index ],
darray[ index + 1 ] );
}
}
}
}
jcdf-1.2-3/EpochFormatter.java 0000664 0000000 0000000 00000030257 13203340177 0016216 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;
import java.util.TimeZone;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Does string formatting of epoch values in various representations.
* The methods of this object are not in general thread-safe.
*
* @author Mark Taylor
* @since 21 Jun 2013
*/
public class EpochFormatter {
private final DateFormat epochMilliFormat_ =
createDateFormat( "yyyy-MM-dd'T'HH:mm:ss.SSS" );
private final DateFormat epochSecFormat_ =
createDateFormat( "yyyy-MM-dd'T'HH:mm:ss" );
private final int iMaxValidTtScaler_;
private int iLastTtScaler_ = -1;
private static final TimeZone UTC = TimeZone.getTimeZone( "UTC" );
private static final long HALF_DAY = 1000 * 60 * 60 * 12;
private static final TtScaler[] TT_SCALERS = TtScaler.getTtScalers();
private static final long LAST_KNOWN_LEAP_UNIX_MILLIS =
getLastKnownLeapUnixMillis( TT_SCALERS );
private static final Logger logger_ =
Logger.getLogger( EpochFormatter.class.getName() );
/**
* Configures behaviour when a date is encountered which is known to
* have incorrectly applied leap seconds.
 * If true, a RuntimeException is thrown; if false, a log message is written.
*/
public static boolean FAIL_ON_LEAP_ERROR = true;
/** 0 A.D. in Unix milliseconds as used by EPOCH/EPOCH16 data types. */
public static final long AD0_UNIX_MILLIS = getAd0UnixMillis();
/**
* Constructs a formatter without leap second awareness.
*/
public EpochFormatter() {
this( 0 );
}
/**
* Constructs a formatter aware of the latest known leap second.
*
* @param leapSecondLastUpdated value of GDR LeapSecondLastUpdated
* field (YYYYMMDD, or -1 for unused, or 0 for no leap seconds)
*/
public EpochFormatter( int leapSecondLastUpdated ) {
long lastDataLeapUnixMillis =
getLastDataLeapUnixMillis( leapSecondLastUpdated );
        /* If the data file knows about leap seconds later than the last
         * one known by this library (the file was presumably written with
         * a more recent leap second table), issue a warning that a library
         * update might be a good idea. */
if ( lastDataLeapUnixMillis > LAST_KNOWN_LEAP_UNIX_MILLIS &&
lastDataLeapUnixMillis - LAST_KNOWN_LEAP_UNIX_MILLIS > HALF_DAY ) {
DateFormat fmt = createDateFormat( "yyyy-MM-dd" );
String msg = new StringBuffer()
.append( "Data knows more leap seconds than library" )
.append( " (" )
.append( fmt.format( new Date( lastDataLeapUnixMillis
+ HALF_DAY ) ) )
.append( " > " )
.append( fmt.format( new Date( LAST_KNOWN_LEAP_UNIX_MILLIS
+ HALF_DAY ) ) )
.append( ")" )
.toString();
logger_.warning( msg );
}
/* If the supplied last known leap second is known to be out of date
* (because we know of a later one), then prepare to complain if
* this formatter is called upon to perform a conversion of
* a date that would be affected by leap seconds we know about,
* but the data file didn't. */
if ( lastDataLeapUnixMillis > 0 ) {
long lastDataLeapTt2kMillis =
lastDataLeapUnixMillis - (long) TtScaler.J2000_UNIXMILLIS;
iMaxValidTtScaler_ = getScalerIndex( lastDataLeapTt2kMillis );
}
else {
iMaxValidTtScaler_ = TT_SCALERS.length - 1;
}
}
/**
* Formats a CDF EPOCH value as an ISO-8601 date.
*
* @param epoch EPOCH value
* @return date string
*/
public String formatEpoch( double epoch ) {
long unixMillis = (long) ( epoch + AD0_UNIX_MILLIS );
Date date = new Date( unixMillis );
return epochMilliFormat_.format( date );
}
/**
* Formats a CDF EPOCH16 value as an ISO-8601 date.
*
* @param epoch1 first element of EPOCH16 pair (seconds since 0AD)
* @param epoch2 second element of EPOCH16 pair (additional picoseconds)
* @return date string
*/
public String formatEpoch16( double epoch1, double epoch2 ) {
long unixMillis = (long) ( epoch1 * 1000 ) + AD0_UNIX_MILLIS;
Date date = new Date( unixMillis );
long plusPicos = (long) epoch2;
if ( plusPicos < 0 || plusPicos >= 1e12 ) {
return "??";
}
String result = new StringBuffer( 32 )
.append( epochSecFormat_.format( date ) )
.append( '.' )
.append( prePadWithZeros( plusPicos, 12 ) )
.toString();
assert result.length() == 32;
return result;
}
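    // Note the fixed layout: a 19-character seconds-resolution ISO-8601
    // date, a '.', and 12 digits of picoseconds, 32 characters in all,
    // for example "2004-05-13T15:08:11.022033044055" (a value exercised
    // by ExampleTest in this distribution).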
/**
* Formats a CDF TIME_TT2000 value as an ISO-8601 date.
*
* @param timeTt2k TIME_TT2000 value
* @return date string
*/
public String formatTimeTt2000( long timeTt2k ) {
// Special case - see "Variable Pad Values" section
// (sec 2.3.20 at v3.4, and footnote) of CDF Users Guide.
if ( timeTt2k == Long.MIN_VALUE ) {
return "9999-12-31T23:59:59.999999999";
}
// Second special case - not sure if this is documented, but
// advised by Michael Liu in email to MBT 12 Aug 2013.
else if ( timeTt2k == Long.MIN_VALUE + 1 ) {
return "0000-01-01T00:00:00.000000000";
}
// Split the raw long value into a millisecond base and
// nanosecond adjustment.
long tt2kMillis = timeTt2k / 1000000;
int plusNanos = (int) ( timeTt2k % 1000000 );
if ( plusNanos < 0 ) {
tt2kMillis--;
plusNanos += 1000000;
}
// Get the appropriate TT scaler object for this epoch.
int scalerIndex = getScalerIndex( tt2kMillis );
if ( scalerIndex > iMaxValidTtScaler_ ) {
String msg = new StringBuffer()
.append( "CDF TIME_TT2000 date formatting failed" )
.append( " - library leap second table known to be out of date" )
.append( " with respect to data." )
.append( " Update " )
.append( TtScaler.LEAP_FILE_ENV )
.append( " environment variable to point at file" )
.append( " http://cdf.gsfc.nasa.gov/html/CDFLeapSeconds.txt" )
.toString();
if ( FAIL_ON_LEAP_ERROR ) {
throw new RuntimeException( msg );
}
else {
logger_.log( Level.SEVERE, msg );
}
}
TtScaler scaler = TT_SCALERS[ scalerIndex ];
// Use it to convert to Unix time, which is UTC.
long unixMillis = (long) scaler.tt2kToUnixMillis( tt2kMillis );
int leapMillis = scaler.millisIntoLeapSecond( tt2kMillis );
// Format the unix time as an ISO-8601 date.
// In most (99.999998%) cases this is straightforward.
final String txt;
if ( leapMillis < 0 ) {
Date date = new Date( unixMillis );
txt = epochMilliFormat_.format( date );
}
// However if we happen to fall during a leap second, we have to
// do some special (and not particularly elegant) handling to
// produce the right string, since the java DateFormat
// implementation can't(?) be persuaded to cope with 61 seconds
// in a minute.
else {
Date date = new Date( unixMillis - 1000 );
txt = epochMilliFormat_.format( date )
.replaceFirst( ":59\\.", ":60." );
}
// Append the nanoseconds part and return.
return txt + prePadWithZeros( plusNanos, 6 );
}
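    // Illustrative usage, with a value taken from OtherTest in this
    // distribution; note that it falls within a leap second:
    //
    //    EpochFormatter epf = new EpochFormatter();
    //    String iso = epf.formatTimeTt2000( 284040065307456789L );
    //    // iso is "2008-12-31T23:59:60.123456789"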
/**
* Returns the index into the TT_SCALERS array of the TtScaler
* instance that is valid for a given time.
*
* @param tt2kMillis TT time since J2000 in milliseconds
* @return index into TT_SCALERS
*/
private int getScalerIndex( long tt2kMillis ) {
// Use the most recently used value as the best guess.
// There's a good chance it's the right one.
int index = TtScaler
.getScalerIndex( tt2kMillis, TT_SCALERS, iLastTtScaler_ );
iLastTtScaler_ = index;
return index;
}
/**
* Constructs a DateFormat object for a given pattern for UTC.
*
* @param pattern formatting pattern
* @return format
* @see java.text.SimpleDateFormat
*/
private static DateFormat createDateFormat( String pattern ) {
DateFormat fmt = new SimpleDateFormat( pattern );
fmt.setTimeZone( UTC );
fmt.setCalendar( new GregorianCalendar( UTC, Locale.UK ) );
return fmt;
}
/**
* Returns the CDF epoch (0000-01-01T00:00:00)
* in milliseconds since the Unix epoch (1970-01-01T00:00:00).
*
* @return -62,167,219,200,000
*/
private static long getAd0UnixMillis() {
GregorianCalendar cal = new GregorianCalendar( UTC, Locale.UK );
cal.setLenient( true );
cal.clear();
cal.set( 0, 0, 1, 0, 0, 0 );
long ad0 = cal.getTimeInMillis();
// Fudge factor to make this calculation match the apparent result
// from the CDF library. Not quite sure why it's required, but
// I think something to do with the fact that the first day is day 1
// and signs around AD0/BC0.
long fudge = 1000 * 60 * 60 * 24 * 2; // 2 days
return ad0 + fudge;
}
/**
* Pads a numeric value with zeros to return a fixed length string
* representing a given numeric value.
*
* @param value number
* @param leng number of characters in result
* @return leng-character string containing value
* padded at start with zeros
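     *         (for example, prePadWithZeros( 42, 6 ) returns "000042")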
*/
private static String prePadWithZeros( long value, int leng ) {
String txt = Long.toString( value );
int nz = leng - txt.length();
if ( nz == 0 ) {
return txt;
}
else if ( nz < 0 ) {
throw new IllegalArgumentException();
}
else {
StringBuffer sbuf = new StringBuffer( leng );
for ( int i = 0; i < nz; i++ ) {
sbuf.append( '0' );
}
sbuf.append( txt );
return sbuf.toString();
}
}
/**
* Returns the date, in milliseconds since the Unix epoch,
* of the last leap second known by the library.
*
* @param scalers ordered array of all scalers
* @return last leap second epoch in unix milliseconds
*/
private static long getLastKnownLeapUnixMillis( TtScaler[] scalers ) {
TtScaler lastScaler = scalers[ scalers.length - 1 ];
return (long)
lastScaler.tt2kToUnixMillis( lastScaler.getFromTt2kMillis() );
}
/**
* Returns the date, in milliseconds since the Unix epoch,
* of the last leap second indicated by an integer in the form
* used by the GDR LeapSecondLastUpdated field.
* If no definite value is indicated, Long.MIN_VALUE is returned.
*
* @param leapSecondLastUpdated value of GDR LeapSecondLastUpdated
* field (YYYYMMDD, or -1 for unused, or 0 for no leap seconds)
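     *         (for example, 20150701 denotes the leap second introduced
     *         at the end of 2015-06-30)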
* @return last leap second epoch in unix milliseconds,
* or very negative value
*/
private static long getLastDataLeapUnixMillis( int leapSecondLastUpdated ) {
if ( leapSecondLastUpdated == 0 ) {
return Long.MIN_VALUE;
}
else if ( leapSecondLastUpdated == -1 ) {
return Long.MIN_VALUE;
}
else {
DateFormat fmt = createDateFormat( "yyyyMMdd" );
try {
return fmt.parse( Integer.toString( leapSecondLastUpdated ) )
.getTime();
}
catch ( ParseException e ) {
logger_.warning( "leapSecondLastUpdated="
+ leapSecondLastUpdated
+ "; not YYYYMMDD" );
return Long.MIN_VALUE;
}
}
}
}
jcdf-1.2-3/ExampleTest.java 0000664 0000000 0000000 00000040311 13203340177 0015517 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.test;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.logging.Level;
import java.util.logging.Logger;
import uk.ac.bristol.star.cdf.AttributeEntry;
import uk.ac.bristol.star.cdf.CdfContent;
import uk.ac.bristol.star.cdf.CdfReader;
import uk.ac.bristol.star.cdf.GlobalAttribute;
import uk.ac.bristol.star.cdf.Variable;
import uk.ac.bristol.star.cdf.VariableAttribute;
import uk.ac.bristol.star.cdf.EpochFormatter;
/**
* Tests the contents of three of the example files
* (samples/example1.cdf, samples/example2.cdf, cdfjava/examples/test.cdf)
* from the NASA CDF software distribution.
* The assertions in this file were written by examining the output
* of cdfdump by eye.
*/
public class ExampleTest {
private static boolean assertionsOn_;
public void testExample1( File ex1file ) throws IOException {
CdfContent content = new CdfContent( new CdfReader( ex1file ) );
GlobalAttribute[] gatts = content.getGlobalAttributes();
assert gatts.length == 1;
GlobalAttribute gatt0 = gatts[ 0 ];
assert "TITLE".equals( gatt0.getName() );
assert Arrays.equals( new String[] { "CDF title", "Author: CDF" },
getEntryShapedValues( gatt0.getEntries() ) );
VariableAttribute[] vatts = content.getVariableAttributes();
assert vatts.length == 2;
assert "FIELDNAM".equals( vatts[ 0 ].getName() );
assert "UNITS".equals( vatts[ 1 ].getName() );
Variable[] vars = content.getVariables();
assert vars.length == 3;
assert "Time".equals( vars[ 0 ].getName() );
assert "Latitude".equals( vars[ 1 ].getName() );
assert "Image".equals( vars[ 2 ].getName() );
assert vars[ 0 ].getSummary().matches( "INT4 .* 0:\\[\\] T/" );
assert vars[ 1 ].getSummary().matches( "INT2 .* 1:\\[181\\] T/T" );
assert vars[ 2 ].getSummary().matches( "INT4 .* 2:\\[10,20\\] T/TT" );
assert vatts[ 1 ].getEntry( vars[ 0 ] ).getShapedValue()
.equals( "Hour/Minute" );
assert vatts[ 1 ].getEntry( vars[ 1 ] ) == null;
assert readShapedRecord( vars[ 0 ], 0, true )
.equals( new Integer( 23 ) );
assert readShapedRecord( vars[ 0 ], 1, true )
.equals( new Integer( 24 ) );
assert readShapedRecord( vars[ 0 ], 2, true ) == null;
assert Arrays.equals( (short[]) readShapedRecord( vars[ 1 ], 0, true ),
shortSequence( -90, 1, 181 ) );
assert Arrays.equals( (short[]) readShapedRecord( vars[ 1 ], 0, false ),
shortSequence( -90, 1, 181 ) );
assert readShapedRecord( vars[ 1 ], 1, true ) == null;
assert readShapedRecord( vars[ 1 ], 2, false ) == null;
assert Arrays.equals( (int[]) readShapedRecord( vars[ 2 ], 0, true ),
intSequence( 0, 1, 200 ) );
assert Arrays.equals( (int[]) readShapedRecord( vars[ 2 ], 1, true ),
intSequence( 200, 1, 200 ) );
assert Arrays.equals( (int[]) readShapedRecord( vars[ 2 ], 2, true ),
intSequence( 400, 1, 200 ) );
int[] sideways = (int[]) readShapedRecord( vars[ 2 ], 0, false );
assert sideways[ 0 ] == 0;
assert sideways[ 1 ] == 20;
assert sideways[ 2 ] == 40;
assert sideways[ 10 ] == 1;
assert sideways[ 199 ] == 199;
}
public void testExample2( File ex2file ) throws IOException {
CdfContent content = new CdfContent( new CdfReader( ex2file ) );
GlobalAttribute[] gatts = content.getGlobalAttributes();
assert gatts.length == 1;
GlobalAttribute gatt0 = gatts[ 0 ];
assert "TITLE".equals( gatt0.getName() );
assert "An example CDF (2)."
.equals( ((String) gatt0.getEntries()[ 0 ].getShapedValue())
.trim() );
VariableAttribute[] vatts = content.getVariableAttributes();
assert vatts.length == 9;
VariableAttribute fnVatt = vatts[ 0 ];
VariableAttribute vminVatt = vatts[ 1 ];
VariableAttribute vmaxVatt = vatts[ 2 ];
assert fnVatt.getName().equals( "FIELDNAM" );
assert vminVatt.getName().equals( "VALIDMIN" );
assert vmaxVatt.getName().equals( "VALIDMAX" );
Variable[] vars = content.getVariables();
assert vars.length == 4;
Variable timeVar = vars[ 0 ];
Variable lonVar = vars[ 1 ];
Variable latVar = vars[ 2 ];
Variable tempVar = vars[ 3 ];
assert timeVar.getName().equals( "Time" );
assert lonVar.getName().equals( "Longitude" );
assert latVar.getName().equals( "Latitude" );
assert tempVar.getName().equals( "Temperature" );
assert timeVar.getSummary().matches( "INT4 .* 0:\\[\\] T/" );
assert lonVar.getSummary().matches( "REAL4 .* 1:\\[2\\] F/T" );
assert latVar.getSummary().matches( "REAL4 .* 1:\\[2\\] F/T" );
assert tempVar.getSummary().matches( "REAL4 .* 2:\\[2,2\\] T/TT" );
assert timeVar.getRecordCount() == 24;
assert tempVar.getRecordCount() == 24;
assert lonVar.getRecordCount() == 1;
assert latVar.getRecordCount() == 1;
assert ((String) fnVatt.getEntry( timeVar ).getShapedValue()).trim()
.equals( "Time of observation" );
assert vminVatt.getEntry( timeVar ).getShapedValue()
.equals( new Integer( 0 ) );
assert vmaxVatt.getEntry( timeVar ).getShapedValue()
.equals( new Integer( 2359 ) );
assert vminVatt.getEntry( lonVar ).getShapedValue()
.equals( new Float( -180f ) );
assert vmaxVatt.getEntry( lonVar ).getShapedValue()
.equals( new Float( 180f ) );
assert readShapedRecord( timeVar, 0, true )
.equals( new Integer( 0 ) );
assert readShapedRecord( timeVar, 23, false )
.equals( new Integer( 2300 ) );
float[] lonVal = new float[] { -165f, -150f };
float[] latVal = new float[] { 40f, 30f };
for ( int irec = 0; irec < 24; irec++ ) {
assert Arrays.equals( (float[]) readShapedRecord( lonVar, irec,
true ),
lonVal );
assert Arrays.equals( (float[]) readShapedRecord( latVar, irec,
false ),
latVal );
}
assert Arrays.equals( (float[]) readShapedRecord( tempVar, 0, true ),
new float[] { 20f, 21.7f, 19.2f, 20.7f } );
assert Arrays.equals( (float[]) readShapedRecord( tempVar, 23, true ),
new float[] { 21f, 19.5f, 18.4f, 22f } );
assert Arrays.equals( (float[]) readShapedRecord( tempVar, 23, false ),
new float[] { 21f, 18.4f, 19.5f, 22f } );
}
public void testTest( File testFile ) throws IOException {
CdfContent content = new CdfContent( new CdfReader( testFile ) );
GlobalAttribute[] gatts = content.getGlobalAttributes();
assert gatts.length == 5;
assert "Project".equals( gatts[ 0 ].getName() );
GlobalAttribute gatt1 = gatts[ 1 ];
assert "PI".equals( gatt1.getName() );
assert Arrays.equals( new String[] { null, null, null, "Ernie Els" },
getEntryShapedValues( gatt1.getEntries() ) );
GlobalAttribute gatt2 = gatts[ 2 ];
assert "Test".equals( gatt2.getName() );
AttributeEntry[] tents = gatt2.getEntries();
assert tents[ 0 ].getShapedValue().equals( new Double( 5.3432 ) );
assert tents[ 1 ] == null;
assert tents[ 2 ].getShapedValue().equals( new Float( 5.5f ) );
assert Arrays.equals( (float[]) tents[ 3 ].getShapedValue(),
new float[] { 5.5f, 10.2f } );
assert Arrays.equals( (float[]) tents[ 3 ].getRawValue(),
new float[] { 5.5f, 10.2f } );
assert ((Byte) tents[ 4 ].getShapedValue()).byteValue() == 1;
assert Arrays.equals( (byte[]) tents[ 5 ].getShapedValue(),
new byte[] { (byte) 1, (byte) 2, (byte) 3 } );
assert ((Short) tents[ 6 ].getShapedValue()).shortValue() == -32768;
assert Arrays.equals( (short[]) tents[ 7 ].getShapedValue(),
new short[] { (short) 1, (short) 2 } );
assert ((Integer) tents[ 8 ].getShapedValue()).intValue() == 3;
assert Arrays.equals( (int[]) tents[ 9 ].getShapedValue(),
new int[] { 4, 5 } );
assert "This is a string".equals( tents[ 10 ].getShapedValue() );
assert ((Long) tents[ 11 ].getShapedValue()).longValue() == 4294967295L;
assert Arrays.equals( (long[]) tents[ 12 ].getShapedValue(),
new long[] { 4294967295L, 2147483648L } );
assert ((Integer) tents[ 13 ].getShapedValue()).intValue() == 65535;
assert Arrays.equals( (int[]) tents[ 14 ].getShapedValue(),
new int[] { 65535, 65534 } );
assert ((Short) tents[ 15 ].getShapedValue()).shortValue() == 255;
assert Arrays.equals( (short[]) tents[ 16 ].getShapedValue(),
new short[] { 255, 254 } );
EpochFormatter epf = new EpochFormatter();
GlobalAttribute gatt3 = gatts[ 3 ];
assert "TestDate".equals( gatt3.getName() );
assert "2002-04-25T00:00:00.000"
.equals( epf
.formatEpoch( ((Double)
gatt3.getEntries()[ 1 ].getShapedValue())
.doubleValue() ) );
assert "2008-02-04T06:08:10.012014016"
.equals( epf
.formatTimeTt2000( ((Long) gatt3.getEntries()[ 2 ]
.getShapedValue())
.longValue() ) );
double[] epDate = (double[])
gatts[ 4 ].getEntries()[ 0 ].getShapedValue();
assert "2004-05-13T15:08:11.022033044055"
.equals( epf.formatEpoch16( epDate[ 0 ], epDate[ 1 ] ) );
Variable[] vars = content.getVariables();
Variable latVar = vars[ 0 ];
assert "Latitude".equals( latVar.getName() );
assert Arrays.equals( new byte[] { (byte) 1, (byte) 2, (byte) 3 },
(byte[]) readShapedRecord( latVar, 0, true ) );
assert Arrays.equals( new byte[] { (byte) 1, (byte) 2, (byte) 3 },
(byte[]) readShapedRecord( latVar, 100, true ) );
Variable lat1Var = vars[ 1 ];
assert "Latitude1".equals( lat1Var.getName() );
assert Arrays.equals( new short[] { (short) 100, (short) 128,
(short) 255 },
(short[]) readShapedRecord( lat1Var, 2, true ) );
Variable longVar = vars[ 2 ];
assert "Longitude".equals( longVar.getName() );
assert Arrays.equals( new short[] { (short) 100, (short) 200,
(short) 300 },
(short[]) readShapedRecord( longVar, 0, true ) );
assert Arrays.equals( new short[] { (short) -32767, (short) -32767,
(short) -32767 },
(short[]) readShapedRecord( longVar, 1, true ) );
Variable nameVar = vars[ 8 ];
assert "Name".equals( nameVar.getName() );
assert Arrays.equals( new String[] { "123456789 ", "13579 " },
(String[]) readShapedRecord( nameVar, 0, true ) );
Variable tempVar = vars[ 9 ];
assert "Temp".equals( tempVar.getName() );
assert Arrays.equals( new float[] { 55.5f, -1e30f, 66.6f },
(float[]) readShapedRecord( tempVar, 0, true ) );
assert Arrays.equals( new float[] { -1e30f, -1e30f, -1e30f },
(float[]) readShapedRecord( tempVar, 1, true ) );
Variable epVar = vars[ 15 ];
assert "ep".equals( epVar.getName() );
assert "1999-03-05T05:06:07.100"
.equals( epf
.formatEpoch( (Double) readShapedRecord( epVar, 0 ) ) );
Variable ep16Var = vars[ 16 ];
assert "ep16".equals( ep16Var.getName() );
double[] ep2 = (double[]) readShapedRecord( ep16Var, 1, true );
assert "2004-12-29T16:56:24.031411522634"
.equals( epf.formatEpoch16( ep2[ 0 ], ep2[ 1 ] ) );
Variable ttVar = vars[ 18 ];
assert "tt2000".equals( ttVar.getName() );
assert "2015-06-30T23:59:58.123456789"
.equals( epf.formatTimeTt2000( (Long)
readShapedRecord( ttVar, 0 ) ) );
assert "2015-06-30T23:59:60.123456789"
.equals( epf.formatTimeTt2000( (Long)
readShapedRecord( ttVar, 2 ) ) );
assert "2015-07-01T00:00:00.123456789"
.equals( epf.formatTimeTt2000( (Long)
readShapedRecord( ttVar, 3 ) ) );
}
private Object readShapedRecord( Variable var, int irec, boolean rowMajor )
throws IOException {
return var.readShapedRecord( irec, rowMajor,
var.createRawValueArray() );
}
private Object readShapedRecord( Variable var, int irec )
throws IOException {
return readShapedRecord( var, irec, true );
}
private short[] shortSequence( int start, int step, int count ) {
short[] array = new short[ count ];
for ( int i = 0; i < count; i++ ) {
array[ i ] = (short) ( start + i * step );
}
return array;
}
private int[] intSequence( int start, int step, int count ) {
int[] array = new int[ count ];
for ( int i = 0; i < count; i++ ) {
array[ i ] = start + i * step;
}
return array;
}
private static Object[] getEntryShapedValues( AttributeEntry[] entries ) {
int nent = entries.length;
Object[] vals = new Object[ nent ];
for ( int ie = 0; ie < nent; ie++ ) {
AttributeEntry entry = entries[ ie ];
vals[ ie ] = entry == null ? null : entry.getShapedValue();
}
return vals;
}
private static boolean checkAssertions() {
assertionsOn_ = true;
return true;
}
/**
* Main method. Run with locations of the following files from the
* NASA CDF software distribution as arguments:
* samples/example1.cdf
* samples/example2.cdf
* cdfjava/examples/test.cdf
* The versions of these files assumed here probably correspond to
* CDF V3.6.5 (hence the above files are in subdirs of cdf36_5-dist/).
* However these files were supplied by the CDF office prior to
* V3.6.5 release, so changes are possible.
*
 * Returns the encoding corresponding to the value of the encoding
 * field of the CDF Descriptor Record.
*
* @param code encoding code
* @return encoding object
* @throws CdfFormatException if code is unknown
*/
public static NumericEncoding getEncoding( int code )
throws CdfFormatException {
switch ( code ) {
case 1: return NETWORK;
case 2: return SUN;
case 3: return VAX;
case 4: return DECSTATION;
case 5: return SGi;
case 6: return IBMPC;
case 7: return IBMRS;
case 9: return MAC;
case 11: return HP;
case 12: return NeXT;
case 13: return ALPHAOSF1;
case 14: return ALPHAVMSd;
case 15: return ALPHAVMSg;
case 16: return ALPHAVMSi;
default:
throw new CdfFormatException( "Unknown numeric encoding "
+ code );
}
}
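    // For example, getEncoding( 6 ) returns IBMPC, while an unlisted
    // code such as 8 results in a CdfFormatException.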
}
jcdf-1.2-3/OffsetField.java 0000664 0000000 0000000 00000001215 13203340177 0015456 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Marks field members of {@link Record} subclasses which represent
* absolute file offsets. Fields marked with this annotation must also
 * be marked with {@link CdfField}, and must be of type
 * Long or long[].
*
* @author Mark Taylor
* @since 26 Jun 2013
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface OffsetField {
}
jcdf-1.2-3/OtherTest.java 0000664 0000000 0000000 00000017415 13203340177 0015216 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.test;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import uk.ac.bristol.star.cdf.TtScaler;
import uk.ac.bristol.star.cdf.EpochFormatter;
import uk.ac.bristol.star.cdf.util.LogUtil;
public class OtherTest {
private static boolean assertionsOn_;
private static boolean triedNasa_;
private static Method nasaConvMethod_;
private EpochFormatter epf_ = new EpochFormatter();
public void testTtScaler() {
TtScaler[] scalers = TtScaler.getTtScalers();
int ns = scalers.length;
// Check scaler list is properly ordered and contigously covers the
// whole range of times.
for ( int i = 0; i < ns - 1; i++ ) {
long from = scalers[ i ].getFromTt2kMillis();
long to = scalers[ i ].getToTt2kMillis();
assert from < to;
assert to == scalers[ i + 1 ].getFromTt2kMillis();
}
assert scalers[ 0 ].getFromTt2kMillis() == Long.MIN_VALUE;
assert scalers[ ns - 1 ].getToTt2kMillis() == Long.MAX_VALUE;
// Exhaustive test of binary search.
for ( int i = 0; i < ns; i++ ) {
TtScaler scaler = scalers[ i ];
long from = scalers[ i ].getFromTt2kMillis();
long to = scalers[ i ].getToTt2kMillis();
long mid = (long) ( 0.5 * from + 0.5 * to ); // careful of overflow
checkScalerSearch( from, scalers, i );
checkScalerSearch( to - 1, scalers, i );
checkScalerSearch( mid, scalers, i );
}
}
private void checkScalerSearch( long tt2kMillis, TtScaler[] scalers,
int iResult ) {
for ( int i = 0; i < scalers.length; i++ ) {
assert TtScaler.getScalerIndex( tt2kMillis, scalers, i ) == iResult;
}
}
public void testTtFormatter() {
// Spot tests.
assertTt( 284040064183000000L, "2008-12-31T23:59:58.999000000" );
assertTt( 284040065184000000L, "2008-12-31T23:59:60.000000000" );
assertTt( 284040066183000000L, "2008-12-31T23:59:60.999000000" );
assertTt( 284040066183000023L, "2008-12-31T23:59:60.999000023" );
assertTt( 284040066184000000L, "2009-01-01T00:00:00.000000000" );
assertTt( 284040066185000000L, "2009-01-01T00:00:00.001000000" );
assertTt( 284040065307456789L, "2008-12-31T23:59:60.123456789" );
// Special values.
assertTt( Long.MIN_VALUE, "9999-12-31T23:59:59.999999999" );
assertTt( Long.MIN_VALUE + 1, "0000-01-01T00:00:00.000000000" );
// Systematic tests for all scaler ranges except the last.
TtScaler[] scalers = TtScaler.getTtScalers();
int ns = scalers.length;
for ( int i = 0; i < ns - 1; i++ ) {
TtScaler scaler = scalers[ i ];
long from = scalers[ i ].getFromTt2kMillis();
long to = scalers[ i ].getToTt2kMillis();
long mid = (long) ( 0.5 * from + 0.5 * to ); // careful of overflow
checkWithNasa( from );
checkWithNasa( from + 50 );
checkWithNasa( from + 333333333 );
checkWithNasa( to - 1 );
checkWithNasa( to + 1 );
checkWithNasa( to - 55555555 );
checkWithNasa( to + 99999999 );
checkWithNasa( mid );
}
checkWithNasa( Long.MIN_VALUE / 2 );
checkWithNasa( Long.MAX_VALUE / 2 );
checkWithNasa( 284040065307456789L );
// The NASA library v3.4 appeared to be wrong here: it reported
// a date of 1707-09-22T11:37:39.106448384 for values larger
// than about 9223370000000000000L.
// It was fixed at (or maybe before) v3.6.0.4, so we can run
// this test now.
checkWithNasa( 9223370000000000000L );
}
private void checkWithNasa( long tt2kNanos ) {
assert epf_.formatTimeTt2000( tt2kNanos )
.equals( nasaFormatTimeTt2000( tt2kNanos ) )
: reportFormats( tt2kNanos );
}
private void assertTt( long tt2kNanos, String text ) {
assert text.equals( epf_.formatTimeTt2000( tt2kNanos ) );
}
private static String nasaFormatTimeTt2000( long tt2knanos ) {
if ( ! triedNasa_ ) {
try {
Class> ttClazz =
Class.forName( "gsfc.nssdc.cdf.util.CDFTT2000" );
nasaConvMethod_ =
ttClazz.getMethod( "toUTCstring", long.class );
}
catch ( Throwable e ) {
System.err.println( "No NASA implementation available:" );
e.printStackTrace( System.err );
nasaConvMethod_ = null;
}
// Call this method once. If the native library is not present
// it fails in the static initialisation, then subsequent calls
// seem to be OK, but give the wrong result. So make sure
// it doesn't run at all in case of initialisation failure.
try {
nasaConvMethod_.invoke( null, 0L );
}
catch ( Throwable e ) {
System.err.println( "No NASA implementation available:" );
e.printStackTrace( System.err );
nasaConvMethod_ = null;
}
triedNasa_ = true;
}
if ( nasaConvMethod_ == null ) {
return "[No NASA CDF library]";
}
else {
try {
return (String) nasaConvMethod_.invoke( null, tt2knanos );
}
catch ( Throwable e ) {
return "[toUTCstring error: " + e + "]";
}
}
}
private static boolean checkAssertions() {
assertionsOn_ = true;
return true;
}
private static void runTests() {
assert checkAssertions();
if ( ! assertionsOn_ ) {
throw new RuntimeException( "Assertions disabled - bit pointless" );
}
OtherTest test = new OtherTest();
test.testTtScaler();
test.testTtFormatter();
}
private static String reportFormats( long tt2kNanos ) {
return new StringBuffer()
.append( "nanos: " )
.append( tt2kNanos )
.append( "\n\t" )
.append( "NASA: " )
.append( nasaFormatTimeTt2000( tt2kNanos ) )
.append( "\n\t" )
.append( "JCDF: " )
.append( new EpochFormatter().formatTimeTt2000( tt2kNanos ) )
.toString();
}
/**
* Main method. If run with no arguments runs test.
* Tests are made using java assertions, so this test must be
* run with java assertions enabled. If it's not, it will fail anyway.
*
 * @return actualValue
*/
protected int checkIntValue( int actualValue, int fixedValue ) {
if ( actualValue != fixedValue ) {
warnFormat( "Unexpected fixed value " + actualValue + " != "
+ fixedValue );
}
return actualValue;
}
/**
* Checks that a pointer is positioned at the end of this record.
* If not, a warning may be emitted.
* This performs an assertion-like function.
* This can be called by code which thinks it has read a whole record's
* content to check that it's got the counting right.
*
* @param ptr pointer notionally positioned at end of record
*/
protected void checkEndRecord( Pointer ptr ) {
long readCount = plan_.getReadCount( ptr );
long recSize = getRecordSize();
if ( readCount != recSize ) {
warnFormat( "Bytes read in record not equal to record size ("
+ readCount + " != " + recSize + ")" );
}
}
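    // Illustrative pattern (a sketch of how the concrete Record subclasses
    // in this package use these checks; compare SparsenessParametersRecord;
    // someField is a hypothetical field name):
    //
    //    Buf buf = plan.getBuf();
    //    Pointer ptr = plan.createContentPointer();
    //    this.someField = buf.readInt( ptr );
    //    // ... read remaining fields ...
    //    checkEndRecord( ptr );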
/**
 * Called by check* methods to issue a warning if the
* check has failed.
*
* @param msg message to output
*/
protected void warnFormat( String msg ) {
assert false : msg;
logger_.warning( msg );
}
/**
* Reads a moderately-sized array of 4-byte big-endian integers.
* Pointer position is moved on appropriately.
* Not intended for potentially very large arrays.
*
* @param buf buffer
* @param ptr pointer
* @param count number of values to read
 * @return count-element array of values
*/
public static int[] readIntArray( Buf buf, Pointer ptr, int count )
throws IOException {
int[] array = new int[ count ];
for ( int i = 0; i < count; i++ ) {
array[ i ] = buf.readInt( ptr );
}
return array;
}
/**
 * Reads a moderately-sized array of file offsets.
* Pointer position is moved on appropriately.
* Not intended for potentially very large arrays.
*
* @param buf buffer
* @param ptr pointer
* @param count number of values to read
 * @return count-element array of values
*/
public static long[] readOffsetArray( Buf buf, Pointer ptr, int count )
throws IOException {
long[] array = new long[ count ];
for ( int i = 0; i < count; i++ ) {
array[ i ] = buf.readOffset( ptr );
}
return array;
}
/**
* Splits an ASCII string into 0x0A-terminated lines.
*
* @param text string containing ASCII characters
* @return array of lines split on linefeeds
*/
public static String[] toLines( String text ) {
ListgetRecord
* method, and attempts to cast the result, throwing a
* CdfFormatException if it has the wrong type.
*
* @param buf byte buffer
* @param offset start of record in buf
* @param clazz record class asserted for the result
* @return record
* @throws CdfFormatException if the record found there turns out
* not to be of type clazz
*/
public
* int ient = recMap.getEntryIndex(irec);
* Object value =
* ient >= 0
* ? readBuffer(recMap.getBuf(ient), recMap.getOffset(ient,irec))
* : NO_STORED_VALUE;
*
*
*
* @author Mark Taylor
* @since 21 Jun 2013
*/
public class RecordMap {
private final int nent_;
private final int[] firsts_;
private final int[] lasts_;
private final Buf[] bufs_;
private final long[] offsets_;
private final int recSize_;
private Block lastBlock_;
/**
* Constructor.
*
 * @param entries array of entries containing stored variable record blocks,
* need not be sorted
* @param recSize size of each variable record in bytes
*/
private RecordMap( Entry[] entries, int recSize ) {
recSize_ = recSize;
// Sort entries into order of record data.
Arrays.sort( entries );
// Store the entry information in a convenient form.
nent_ = entries.length;
firsts_ = new int[ nent_ ];
lasts_ = new int[ nent_ ];
bufs_ = new Buf[ nent_ ];
offsets_ = new long[ nent_ ];
for ( int ie = 0; ie < nent_; ie++ ) {
Entry entry = entries[ ie ];
firsts_[ ie ] = entry.first_;
lasts_[ ie ] = entry.last_;
bufs_[ ie ] = entry.buf_;
offsets_[ ie ] = entry.offset_;
}
// Initialise the most recently used block value
lastBlock_ = nent_ > 0 ? calculateBlock( 0 )
: new Block( -1, -1, -1 );
}
/**
* Returns the number of entries managed by this map.
*
* @return entry count
*/
public int getEntryCount() {
return nent_;
}
/**
* Returns the index of the entry containing a given record.
* If one of the entries contains the given record, return its index.
* If no entry contains it (the record is in a sparse region),
 * return (-fr-2), where fr
 * is the index of the previous entry.
* A value of -1 indicates that the requested record is
* in a sparse region before the first stored record.
*
 * The returned index can be used with the getBuf
 * and getOffset methods.
*
* @param irec record index
 * @return index of entry covering irec, or a negative
 * value if no entry covers it
*/
public synchronized int getEntryIndex( int irec ) {
// There's a good chance that the answer is the same as the last
// time somebody asked, so first of all do the cheap test to find
// out if that's the case. If so, return the cached one.
// Otherwise, do the work to find out the right answer.
if ( ! lastBlock_.contains( irec ) ) {
lastBlock_ = calculateBlock( irec );
}
assert lastBlock_.contains( irec );
return lastBlock_.ient_;
}
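    // Illustrative sketch of decoding the return value; recMap is assumed
    // to be a RecordMap instance (the same convention is used by the
    // sparse record readers in Variable):
    //
    //    int ient = recMap.getEntryIndex( irec );
    //    if ( ient >= 0 ) {
    //        // record irec is stored in entry ient
    //    }
    //    else if ( ient == -1 ) {
    //        // irec precedes the first stored record
    //    }
    //    else {
    //        int iPrevEnt = -ient - 2;  // entry preceding the gap
    //    }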
/**
* Returns the data buffer for a given entry.
* The entry index must correspond to an actual entry,
* that is it must not be negative.
*
* @param ient entry index
* @return buf
* @see #getEntryIndex
*/
public Buf getBuf( int ient ) {
return bufs_[ ient ];
}
/**
* Returns the byte offset for a record in a given entry.
 * The ient parameter must reference an actual entry
 * (it must be non-negative), and that entry must contain
 * the given record irec.
*
* @param ient entry index for entry containing irec
* @param irec record index
* @return offset into the entry's buffer at which irec
* can be found
* @see #getEntryIndex
*/
public long getOffset( int ient, int irec ) {
assert irec >= firsts_[ ient ] && irec <= lasts_[ ient ];
return offsets_[ ient ] + ( irec - firsts_[ ient ] ) * recSize_;
}
/**
* Returns the offset of the last record in a given entry.
*
* @param ient non-negative entry index
* @return offset into ient's buffer of ient's final record
*/
public long getFinalOffsetInEntry( int ient ) {
return offsets_[ ient ]
+ ( lasts_[ ient ] - firsts_[ ient ] + 1 ) * recSize_;
}
/**
* Examines this map's lookup tables to determine the block covering
* a given record.
*
* @param irec record index
* @return block containing irec
*/
private Block calculateBlock( int irec ) {
// Look for the record in the first-record-of-entry list.
int firstIndex = binarySearch( firsts_, irec );
// If found, irec is in the corresponding block.
if ( firstIndex >= 0 ) {
return new Block( firstIndex,
firsts_[ firstIndex ], lasts_[ firstIndex ] );
}
// If it's located before the start, it's in a sparse block
// before the first actual record.
else if ( firstIndex == -1 ) {
return new Block( -firstIndex - 2, 0, firsts_[ 0 ] - 1 );
}
// Otherwise, record the first entry it's after the start of.
else {
firstIndex = -2 - firstIndex;
}
// Look for the record in the last-record-of-entry list.
int lastIndex = binarySearch( lasts_, irec );
// If found, irec is in the corresponding block.
if ( lastIndex >= 0 ) {
return new Block( lastIndex,
firsts_[ lastIndex ], lasts_[ lastIndex ] );
}
// If it's located after the end, it's in a sparse block
// after the last actual record.
else if ( lastIndex == - nent_ - 1 ) {
            // The sparse block starts just after the last stored record;
            // including the last stored record itself would cause the
            // cached block to misreport it as sparse.
            return new Block( lastIndex,
                              lasts_[ nent_ - 1 ] + 1, Integer.MAX_VALUE );
}
// Otherwise, record the last entry it's before the end of.
else {
lastIndex = -1 - lastIndex;
}
// If it's after the first record and before the last record
// of a single block, that's the one.
if ( firstIndex == lastIndex ) {
return new Block( firstIndex,
firsts_[ firstIndex ], lasts_[ firstIndex ] );
}
// Otherwise, it's in a sparse block between
// the end of the entry it's after the first record of, and
// the start of the entry it's before the last record of.
else {
return new Block( -firstIndex - 2,
lasts_[ firstIndex ] + 1,
firsts_[ lastIndex ] - 1 );
}
}
/**
* Returns a record map for a given variable.
*
* @param vdr variable descriptor record
* @param recFact record factory
* @param recSize size in bytes of each variable value record
* @return record map
*/
public static RecordMap createRecordMap( VariableDescriptorRecord vdr,
RecordFactory recFact,
int recSize )
throws IOException {
Compression compress = getCompression( vdr, recFact );
Buf buf = vdr.getBuf();
// Walk the entry linked list to assemble a list of entries.
Listlast-first+1
.
*/
private static class Entry implements Comparableient
member gives the index of the corresponding
* Entry.
* If there is no corresponding entry (the record is in a sparse
 * region), the value is (-fr-2), where fr
 * is the index of the previous entry.
* A value of -1 indicates that the requested record is
* in a sparse region before the first stored record.
*
 * The number of records covered is high-low+1.
*
*/
private static class Block {
final int ient_;
final int low_;
final int high_;
/**
* Constructor.
*
* @param ient index of Entry containing this block's data;
* negative value means sparse
* @param low lowest record index contained in this block
* @param high highest record index contained in this block
*/
Block( int ient, int low, int high ) {
ient_ = ient;
low_ = low;
high_ = high;
}
/**
* Indicates whether a given record falls within this block.
*
* @param irec record index
* @return true iff irec is covered by this block
*/
boolean contains( int irec ) {
return irec >= low_ && irec <= high_;
}
}
/**
* Performs a binary search on an array.
* Calls Arrays.binarySearch to do the work.
*
* @param array array in ascending sorted order
* @param key value to search for
* @return index of the search key, if it is contained in the list;
* otherwise, (-(insertion point) - 1).
* @see java.util.Arrays#binarySearch(int[],int)
*/
private static int binarySearch( int[] array, int key ) {
assert isSorted( array );
return Arrays.binarySearch( array, key );
}
/**
* Determines whether an integer array is currently sorted in
* ascending (well, non-descending) order.
*
* @param values array
* @return true iff sorted
*/
private static boolean isSorted( int[] values ) {
int nval = values.length;
for ( int i = 1; i < nval; i++ ) {
if ( values[ i ] < values[ i - 1 ] ) {
return false;
}
}
return true;
}
}
jcdf-1.2-3/RecordPlan.java 0000664 0000000 0000000 00000004322 13203340177 0015317 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
/**
* Records basic information about the position, extent and type of
* a CDF record.
*
* @author Mark Taylor
* @since 18 Jun 2013
*/
public class RecordPlan {
private final long start_;
private final long recSize_;
private final int recType_;
private final Buf buf_;
/**
* Constructor.
*
* @param start offset into buffer of record start
* @param recSize number of bytes comprising record
* @param recType integer record type field
* @param buf buffer containing record bytes
*/
public RecordPlan( long start, long recSize, int recType, Buf buf ) {
start_ = start;
recSize_ = recSize;
recType_ = recType;
buf_ = buf;
}
/**
* Returns the size of the record in bytes.
*
* @return record size
*/
public long getRecordSize() {
return recSize_;
}
/**
* Returns the type code identifying what kind of CDF record it is.
*
* @return record type
*/
public int getRecordType() {
return recType_;
}
/**
* Returns the buffer containing the record data.
*
* @return buffer
*/
public Buf getBuf() {
return buf_;
}
/**
* Returns a pointer initially pointing at the first content byte of
* the record represented by this plan.
* This is the first item after the RecordSize and RecordType items
* that always appear first in a CDF record, and whose values are
* known by this object.
*
* @return pointer pointing at the start of the record-type-specific
* content
*/
public Pointer createContentPointer() {
long pos = start_;
pos += buf_.isBit64() ? 8 : 4; // record size
pos += 4; // record type
return new Pointer( pos );
}
/**
* Returns the number of bytes in this record read (or skipped) by the
* current state of a given pointer.
*
* @param ptr pointer
* @return number of bytes between record start and pointer value
*/
public long getReadCount( Pointer ptr ) {
return ptr.get() - start_;
}
}
jcdf-1.2-3/RunLengthInputStream.java 0000664 0000000 0000000 00000004215 13203340177 0017371 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
import java.io.InputStream;
import uk.ac.bristol.star.cdf.CdfFormatException;
/**
* Decompression stream for CDF's version of Run Length Encoding.
*
*
* 1 2 3 0 0 4 5 6 0 2
*
* is decompressed as
*
* 1 2 3 0 4 5 6 0 0 0
*
* (assuming a special value V=0).
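 * Reading off the example: each occurrence of the special value V in the
 * compressed stream is followed by a count byte n, and expands to n+1
 * copies of V, so "0 0" above yields a single zero and "0 2" yields
 * three zeros.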
*
 * @param lvalue value to convert, must be representable as an int
*/
private static int toInt( long lvalue ) {
int ivalue = (int) lvalue;
if ( ivalue != lvalue ) {
throw new IllegalArgumentException( "Pointer out of range: "
+ lvalue + " >32 bits" );
}
return ivalue;
}
}
jcdf-1.2-3/SparsenessParametersRecord.java 0000664 0000000 0000000 00000001704 13203340177 0020600 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
/**
* Field data for CDF record of type Sparseness Parameters Record.
*
* @author Mark Taylor
* @since 19 Jun 2013
*/
public class SparsenessParametersRecord extends Record {
@CdfField public final int sArraysType;
@CdfField public final int rfuA;
@CdfField public final int pCount;
@CdfField public final int[] sArraysParms;
/**
* Constructor.
*
* @param plan basic record information
*/
public SparsenessParametersRecord( RecordPlan plan ) throws IOException {
super( plan, "SPR", 12 );
Buf buf = plan.getBuf();
Pointer ptr = plan.createContentPointer();
this.sArraysType = buf.readInt( ptr );
this.rfuA = checkIntValue( buf.readInt( ptr ), 0 );
this.pCount = buf.readInt( ptr );
this.sArraysParms = readIntArray( buf, ptr, this.pCount );
checkEndRecord( ptr );
}
}
jcdf-1.2-3/TtScaler.java 0000664 0000000 0000000 00000057761 13203340177 0015026 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Handles conversions between TIME_TT2000 (TT since J2000.0)
* and Unix (UTC since 1970-01-01) times.
* An instance of this class is valid for a certain range of TT2000 dates
* (one that does not straddle a leap second).
 * To convert between TIME_TT2000 and Unix time, first acquire the
* right instance of this class for the given time, and then use it
* for the conversion.
*
 * Any given tt2kMillis value will be valid for exactly one of
 * the list.
*
* @return ordered list of time scalers
*/
public static synchronized TtScaler[] getTtScalers() {
if ( ORDERED_INSTANCES == null ) {
ORDERED_INSTANCES = createTtScalers();
}
return ORDERED_INSTANCES.clone();
}
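    // Illustrative usage (a sketch of the pattern used by EpochFormatter
    // in this package):
    //
    //    TtScaler[] scalers = TtScaler.getTtScalers();
    //    int index = TtScaler.getScalerIndex( tt2kMillis, scalers, 0 );
    //    long unixMillis =
    //        (long) scalers[ index ].tt2kToUnixMillis( tt2kMillis );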
/**
* Creates an ordered list of instances covering the whole range of times.
*
* @return ordered list of time scaler instances
*/
private static TtScaler[] createTtScalers() {
// Acquire leap seconds table.
LtEntry[] ents = readLtEntries();
int nent = ents.length;
logger_.config( "CDF Leap second table: " + ents.length + " entries, "
+ "last is " + ents[ nent - 1 ] );
Listread
methods is called.
*
     * @param irec record index
     * @param rawValueArray workspace array, as obtained from the
     *        createRawValueArray method
*/
public void readRawRecord( int irec, Object rawValueArray )
throws IOException {
getRecordReader().readRawRecord( irec, rawValueArray );
}
/**
* Reads the data from a single record and returns it as an object
* of a suitable type for this variable.
     * If the variable type is a scalar, then the return value will be
* one of the primitive wrapper types (Integer etc),
* otherwise it will be an array of primitive or String values.
* If the majority of the stored data does not match the
     * rowMajor argument, the array elements will be
     * reordered appropriately.
* If some of the dimension variances are false, the values will
* be duplicated accordingly.
* The Shaper returned from the {@link #getShaper} method
* can provide more information on the return value from this method.
*
     * @param irec record index
     * @param rowMajor required majority of output array
     * @param rawValueArrayWorkspace workspace array, as obtained from the
     *        createRawValueArray method
     * @return a new object containing the shaped result
     *         (not the same object as rawValueArrayWorkspace)
*/
public Object readShapedRecord( int irec, boolean rowMajor,
Object rawValueArrayWorkspace )
throws IOException {
return getRecordReader()
.readShapedRecord( irec, rowMajor, rawValueArrayWorkspace );
}
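    // Illustrative usage (the pattern used by ExampleTest in this
    // distribution):
    //
    //    Object shaped =
    //        var.readShapedRecord( irec, true, var.createRawValueArray() );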
/**
* Returns an object that can read records for this variable.
* Constructing it requires reading maps of where the record values
* are stored, which might in principle involve a bit of work,
* so do it lazily.
*
* @return record reader
*/
private synchronized RecordReader getRecordReader() throws IOException {
if ( recordReader_ == null ) {
recordReader_ = createRecordReader();
}
return recordReader_;
}
/**
* Constructs a record reader.
*
* @return new record reader
*/
private RecordReader createRecordReader() throws IOException {
RecordMap recMap =
RecordMap.createRecordMap( vdr_, recFact_,
dataReader_.getRecordSize() );
if ( ! recordVariance_ ) {
return new NoVaryRecordReader( recMap );
}
else {
// Get sparse records type. This is missing from the CDF Internal
// Format Description document, but cdf.h says:
// #define NO_SPARSERECORDS 0L
// #define PAD_SPARSERECORDS 1L
// #define PREV_SPARSERECORDS 2L
int sRecords = vdr_.sRecords;
if ( sRecords == 0 ) {
return new UnsparseRecordReader( recMap );
}
else if ( sRecords == 1 ) {
assert padRawValueArray_ != null;
return new PadRecordReader( recMap );
}
else if ( sRecords == 2 ) {
assert padRawValueArray_ != null;
return new PreviousRecordReader( recMap );
}
else {
throw new CdfFormatException( "Unknown sparse record type "
+ sRecords );
}
}
}
/**
* Object which can read record values for this variable.
* This provides the implementations of several of the Variable methods.
*/
private interface RecordReader {
/**
* Indicates whether a real file-based record exists for the given
* record index.
*
* @param irec record index
* @return true iff a file-based record exists for irec
*/
boolean hasRecord( int irec );
/**
* Reads the data from a single record into a supplied raw value array.
*
* @param irec record index
* @param rawValueArray workspace array
*/
void readRawRecord( int irec, Object rawValueArray )
throws IOException;
/**
* Reads the data from a single record and returns it as an object
* of a suitable type for this variable.
*
* @param irec record index
* @param rowMajor required majority of output array
* @param rawValueArrayWorkspace workspace array
* @return a new object containing shaped result
*/
Object readShapedRecord( int irec, boolean rowMajor,
Object rawValueArrayWorkspace )
throws IOException;
}
/**
* RecordReader implementation for non-record-varying variables.
*/
private class NoVaryRecordReader implements RecordReader {
private final Object rawValue_;
private final Object rowMajorValue_;
private final Object colMajorValue_;
/**
* Constructor.
*
* @param recMap record map
*/
NoVaryRecordReader( RecordMap recMap ) throws IOException {
// When record variance is false, the fixed value appears
// to be located where you would otherwise expect to find record #0.
// Read it once and store it in raw, row-major and column-major
// versions for later use.
RecordReader rt = new UnsparseRecordReader( recMap );
rawValue_ = createRawValueArray();
rt.readRawRecord( 0, rawValue_ );
rowMajorValue_ = shaper_.shape( rawValue_, true );
colMajorValue_ = shaper_.shape( rawValue_, false );
}
public boolean hasRecord( int irec ) {
return false;
}
public void readRawRecord( int irec, Object rawValueArray ) {
System.arraycopy( rawValue_, 0, rawValueArray, 0, rvaleng_ );
}
public Object readShapedRecord( int irec, boolean rowMajor,
Object work ) {
return rowMajor ? rowMajorValue_ : colMajorValue_;
}
}
/**
* RecordReader implementation for non-sparse variables.
*/
private class UnsparseRecordReader implements RecordReader {
private final RecordMap recMap_;
private final int nrec_;
private final Object zeros_;
/**
* Constructor.
*
* @param recMap record map
*/
UnsparseRecordReader( RecordMap recMap ) {
recMap_ = recMap;
nrec_ = vdr_.maxRec + 1;
zeros_ = createRawValueArray();
}
public boolean hasRecord( int irec ) {
return irec < nrec_;
}
public void readRawRecord( int irec, Object rawValueArray )
throws IOException {
if ( hasRecord( irec ) ) {
int ient = recMap_.getEntryIndex( irec );
dataReader_.readValue( recMap_.getBuf( ient ),
recMap_.getOffset( ient, irec ),
rawValueArray );
}
else {
System.arraycopy( zeros_, 0, rawValueArray, 0, rvaleng_ );
}
}
public Object readShapedRecord( int irec, boolean rowMajor,
Object work )
throws IOException {
if ( hasRecord( irec ) ) {
int ient = recMap_.getEntryIndex( irec );
dataReader_.readValue( recMap_.getBuf( ient ),
recMap_.getOffset( ient, irec ),
work );
return shaper_.shape( work, rowMajor );
}
else {
return null;
}
}
}
/**
* RecordReader implementation for record-varying variables
* with sparse padding or no padding.
*/
private class PadRecordReader implements RecordReader {
private final RecordMap recMap_;
/**
* Constructor.
*
* @param recMap record map
*/
PadRecordReader( RecordMap recMap ) {
recMap_ = recMap;
}
public boolean hasRecord( int irec ) {
return hasRecord( irec, recMap_.getEntryIndex( irec ) );
}
public void readRawRecord( int irec, Object rawValueArray )
throws IOException {
int ient = recMap_.getEntryIndex( irec );
if ( hasRecord( irec, ient ) ) {
dataReader_.readValue( recMap_.getBuf( ient ),
recMap_.getOffset( ient, irec ),
rawValueArray );
}
else {
System.arraycopy( padRawValueArray_, 0, rawValueArray, 0,
rvaleng_ );
}
}
public Object readShapedRecord( int irec, boolean rowMajor,
Object work )
throws IOException {
int ient = recMap_.getEntryIndex( irec );
if ( hasRecord( irec, ient ) ) {
dataReader_.readValue( recMap_.getBuf( ient ),
recMap_.getOffset( ient, irec ),
work );
return shaper_.shape( work, rowMajor );
}
else {
return rowMajor ? shapedPadValueRowMajor_
: shapedPadValueColumnMajor_;
}
}
private boolean hasRecord( int irec, int ient ) {
return ient >= 0 && ient < recMap_.getEntryCount()
&& irec < getRecordCount();
}
}
/**
* RecordReader implementation for record-varying variables
* with previous padding.
*/
private class PreviousRecordReader implements RecordReader {
private final RecordMap recMap_;
/**
* Constructor.
*
* @param recMap record map
*/
PreviousRecordReader( RecordMap recMap ) {
recMap_ = recMap;
}
public boolean hasRecord( int irec ) {
// I'm not sure whether the constraint on getRecordCount ought
// to be applied here - maybe for previous padding, non-existent
// records are OK??
return recMap_.getEntryIndex( irec ) >= 0
&& irec < getRecordCount();
}
public void readRawRecord( int irec, Object rawValueArray )
throws IOException {
int ient = recMap_.getEntryIndex( irec );
if ( ient >= 0 ) {
dataReader_.readValue( recMap_.getBuf( ient ),
recMap_.getOffset( ient, irec ),
rawValueArray );
}
else if ( ient == -1 ) {
System.arraycopy( padRawValueArray_, 0, rawValueArray, 0,
rvaleng_ );
}
else {
int iPrevEnt = -ient - 2;
long offset = recMap_.getFinalOffsetInEntry( iPrevEnt );
dataReader_.readValue( recMap_.getBuf( iPrevEnt ), offset,
rawValueArray );
}
}
public Object readShapedRecord( int irec, boolean rowMajor,
Object work )
throws IOException {
int ient = recMap_.getEntryIndex( irec );
if ( ient >= 0 ) {
dataReader_.readValue( recMap_.getBuf( ient ),
recMap_.getOffset( ient, irec ),
work );
return shaper_.shape( work, rowMajor );
}
else if ( ient == -1 ) {
return rowMajor ? shapedPadValueRowMajor_
: shapedPadValueColumnMajor_;
}
else {
int iPrevEnt = -ient - 2;
long offset = recMap_.getFinalOffsetInEntry( iPrevEnt );
                // Pad with the last value from the previous entry,
                // as in readRawRecord.
                dataReader_.readValue( recMap_.getBuf( iPrevEnt ), offset,
                                       work );
return shaper_.shape( work, rowMajor );
}
}
}
}
jcdf-1.2-3/VariableAttribute.java 0000664 0000000 0000000 00000003000 13203340177 0016667 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf;
/**
* Provides the description and per-variable entry values
* for a CDF attribute with variable scope.
*
* @author Mark Taylor
* @since 20 Jun 2013
*/
public class VariableAttribute {
private final String name_;
private final AttributeEntry[] rEntries_;
private final AttributeEntry[] zEntries_;
/**
* Constructor.
*
* @param name attribute name
* @param rEntries rEntry values for this attribute
* @param zEntries zEntry values for this attribute
*/
public VariableAttribute( String name, AttributeEntry[] rEntries,
AttributeEntry[] zEntries ) {
name_ = name;
rEntries_ = rEntries;
zEntries_ = zEntries;
}
/**
* Returns this attribute's name.
*
* @return attribute name
*/
public String getName() {
return name_;
}
/**
* Returns the entry value that a given variable has for this attribute.
* If the variable has no entry for this attribute, null is returned.
*
* @param variable CDF variable from the same CDF as this attribute
* @return this attribute's value for variable
*/
public AttributeEntry getEntry( Variable variable ) {
AttributeEntry[] entries = variable.isZVariable() ? zEntries_
: rEntries_;
int ix = variable.getNum();
return ix < entries.length ? entries[ ix ] : null;
}
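    // Illustrative usage (the null-checking pattern used by ExampleTest
    // in this distribution):
    //
    //    AttributeEntry entry = vatt.getEntry( var );
    //    Object value = entry == null ? null : entry.getShapedValue();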
}
jcdf-1.2-3/VariableDescriptorRecord.java 0000664 0000000 0000000 00000013070 13203340177 0020211 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
import uk.ac.bristol.star.cdf.DataType;
/**
* Abstract superclass for CDF Variable Descriptor Records.
* Two concrete subclasses exist for rVDRs and zVDRs.
*
* @author Mark Taylor
* @since 19 Jun 2013
*/
public abstract class VariableDescriptorRecord extends Record {
@CdfField @OffsetField public final long vdrNext;
@CdfField public final int dataType;
@CdfField public final int maxRec;
@CdfField @OffsetField public final long vxrHead;
@CdfField @OffsetField public final long vxrTail;
@CdfField public final int flags;
@CdfField public final int sRecords;
@CdfField public final int rfuB;
@CdfField public final int rfuC;
@CdfField public final int rfuF;
@CdfField public final int numElems;
@CdfField public final int num;
@CdfField @OffsetField public final long cprOrSprOffset;
@CdfField public final int blockingFactor;
@CdfField public final String name;
@CdfField public final int zNumDims;
@CdfField public final int[] zDimSizes;
@CdfField public final boolean[] dimVarys;
private final long padOffset_;
private final int padBytes_;
/**
* Constructor.
*
* @param plan basic record info
* @param abbrev abbreviated name for record type
* @param recordType record type code
* @param hasDims true iff the zNumDims and zDimSizes fields
* will be present
* @param nameLeng number of characters used for attribute names
*/
private VariableDescriptorRecord( RecordPlan plan, String abbrev,
int recordType, boolean hasDims,
int nameLeng )
throws IOException {
super( plan, abbrev, recordType );
Buf buf = plan.getBuf();
Pointer ptr = plan.createContentPointer();
this.vdrNext = buf.readOffset( ptr );
this.dataType = buf.readInt( ptr );
this.maxRec = buf.readInt( ptr );
this.vxrHead = buf.readOffset( ptr );
this.vxrTail = buf.readOffset( ptr );
this.flags = buf.readInt( ptr );
this.sRecords = buf.readInt( ptr );
this.rfuB = checkIntValue( buf.readInt( ptr ), 0 );
this.rfuC = checkIntValue( buf.readInt( ptr ), -1 );
this.rfuF = checkIntValue( buf.readInt( ptr ), -1 );
this.numElems = buf.readInt( ptr );
this.num = buf.readInt( ptr );
this.cprOrSprOffset = buf.readOffset( ptr );
this.blockingFactor = buf.readInt( ptr );
this.name = buf.readAsciiString( ptr, nameLeng );
if ( hasDims ) {
this.zNumDims = buf.readInt( ptr );
this.zDimSizes = readIntArray( buf, ptr, this.zNumDims );
}
else {
this.zNumDims = 0;
this.zDimSizes = null;
}
boolean hasPad = hasBit( this.flags, 1 );
padBytes_ = hasPad ? DataType.getDataType( this.dataType )
.getByteCount() * this.numElems
: 0;
final int ndim;
if ( hasDims ) {
ndim = this.zNumDims;
}
else {
// Work out the number of dimensions of an rVariable by subtracting
// the values of all the other fields from the record size.
// The more direct way would be by using the rNumDims field of
// the GDR, but we don't have access to that here.
long runningCount = plan.getReadCount( ptr );
long spareBytes = getRecordSize() - runningCount - padBytes_;
assert spareBytes == (int) spareBytes;
if ( spareBytes % 4 != 0 ) {
warnFormat( "rVDR DimVarys field non-integer size??" );
}
ndim = ( (int) spareBytes ) / 4;
}
int[] iDimVarys = readIntArray( buf, ptr, ndim );
this.dimVarys = new boolean[ ndim ];
for ( int i = 0; i < ndim; i++ ) {
this.dimVarys[ i ] = iDimVarys[ i ] != 0;
}
long padpos = ptr.getAndIncrement( padBytes_ );
padOffset_ = hasPad ? padpos : -1L;
checkEndRecord( ptr );
}
/**
* Returns the file offset at which this record's PadValue can be found.
* If there is no pad value, -1 is returned.
*
* @return pad file offset, or -1
*/
public long getPadValueOffset() {
return padOffset_;
}
/**
* Returns the number of bytes in the pad value.
* If there is no pad value, 0 is returned.
*
* @return pad value size in bytes
*/
public int getPadValueSize() {
return padBytes_;
}
/**
* Field data for CDF record of type rVariable Descriptor Record.
*/
public static class RVariant extends VariableDescriptorRecord {
/**
* Constructor.
*
* @param plan basic record info
* @param nameLeng number of characters used for variable names
*/
public RVariant( RecordPlan plan, int nameLeng ) throws IOException {
super( plan, "rVDR", 3, false, nameLeng );
}
}
/**
* Field data for CDF record of type zVariable Descriptor Record.
*/
public static class ZVariant extends VariableDescriptorRecord {
/**
* Constructor.
*
* @param plan basic record info
* @param nameLeng number of characters used for variable names
*/
public ZVariant( RecordPlan plan, int nameLeng ) throws IOException {
super( plan, "zVDR", 8, true, nameLeng );
}
}
}
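// Usage sketch (hypothetical, not part of the library): reading the raw
// pad value bytes located by the accessors above, given the Buf that the
// record was read from. How the bytes are decoded depends on this
// record's dataType and numElems fields.
class PadValueSketch {
    static byte[] readPadBytes( VariableDescriptorRecord vdr, Buf buf )
            throws IOException {
        long off = vdr.getPadValueOffset();
        if ( off < 0 ) {
            return new byte[ 0 ];   // no pad value in this record
        }
        byte[] padBytes = new byte[ vdr.getPadValueSize() ];
        buf.readDataBytes( off, padBytes.length, padBytes );
        return padBytes;
    }
}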
jcdf-1.2-3/VariableIndexRecord.java 0000664 0000000 0000000 00000002211 13203340177 0017135 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
/**
* Field data for CDF record of type Variable Index Record.
*
* @author Mark Taylor
* @since 19 Jun 2013
*/
public class VariableIndexRecord extends Record {
@CdfField @OffsetField public final long vxrNext;
@CdfField public final int nEntries;
@CdfField public final int nUsedEntries;
@CdfField public final int[] first;
@CdfField public final int[] last;
@CdfField @OffsetField public final long[] offset;
/**
* Constructor.
*
* @param plan basic record information
*/
public VariableIndexRecord( RecordPlan plan ) throws IOException {
super( plan, "VXR", 6 );
Buf buf = plan.getBuf();
Pointer ptr = plan.createContentPointer();
this.vxrNext = buf.readOffset( ptr );
this.nEntries = buf.readInt( ptr );
this.nUsedEntries = buf.readInt( ptr );
this.first = readIntArray( buf, ptr, this.nEntries );
this.last = readIntArray( buf, ptr, this.nEntries );
this.offset = readOffsetArray( buf, ptr, this.nEntries );
checkEndRecord( ptr );
}
}
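// Lookup sketch (hypothetical, not part of the library): entry i of a VXR
// covers record numbers first[i]..last[i] inclusive, and offset[i] points
// at the record (a VVR, CVVR or a further VXR) that holds them.
class VxrLookupSketch {
    static long findEntryOffset( VariableIndexRecord vxr, int irec ) {
        for ( int i = 0; i < vxr.nUsedEntries; i++ ) {
            if ( irec >= vxr.first[ i ] && irec <= vxr.last[ i ] ) {
                return vxr.offset[ i ];
            }
        }
        return -1L;   // not indexed here; the chain continues at vxrNext
    }
}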
jcdf-1.2-3/VariableValuesRecord.java 0000664 0000000 0000000 00000001375 13203340177 0017337 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
/**
* Field data for CDF record of type Variable Values Record.
*
* @author Mark Taylor
* @since 19 Jun 2013
*/
public class VariableValuesRecord extends Record {
private final long recordsOffset_;
/**
* Constructor.
*
* @param plan basic record information
*/
public VariableValuesRecord( RecordPlan plan ) {
super( plan, "VVR", 7 );
Pointer ptr = plan.createContentPointer();
recordsOffset_ = ptr.get();
}
/**
* Returns the file offset at which this record's Records field
* (the stored variable data) starts.
*
* @return file offset for start of Records field
*/
public long getRecordsOffset() {
return recordsOffset_;
}
}
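// Addressing sketch (hypothetical, not part of the library): within an
// uncompressed VVR the records indexed by one VXR entry are stored
// contiguously, so for fixed-length records the start of record irec
// can be computed from the Records field base returned above.
class VvrAddressSketch {
    static long recordOffset( VariableValuesRecord vvr, int firstRec,
                              int irec, int recBytes ) {
        return vvr.getRecordsOffset() + ( irec - firstRec ) * (long) recBytes;
    }
}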
jcdf-1.2-3/WrapperBuf.java 0000664 0000000 0000000 00000005027 13203340177 0015346 0 ustar 00root root 0000000 0000000 package uk.ac.bristol.star.cdf.record;
import java.io.IOException;
import java.io.InputStream;
/**
* Buf implementation based on an existing Buf instance.
* All methods are delegated to the base buf.
*
* @author Mark Taylor
* @since 18 Jun 2013
*/
public class WrapperBuf implements Buf {
private final Buf base_;
/**
* Constructor.
*
* @param base base buf
*/
public WrapperBuf( Buf base ) {
base_ = base;
}
public long getLength() {
return base_.getLength();
}
public int readUnsignedByte( Pointer ptr ) throws IOException {
return base_.readUnsignedByte( ptr );
}
public int readInt( Pointer ptr ) throws IOException {
return base_.readInt( ptr );
}
public long readOffset( Pointer ptr ) throws IOException {
return base_.readOffset( ptr );
}
public String readAsciiString( Pointer ptr, int nbyte ) throws IOException {
return base_.readAsciiString( ptr, nbyte );
}
public void setBit64( boolean bit64 ) {
base_.setBit64( bit64 );
}
public boolean isBit64() {
return base_.isBit64();
}
public void setEncoding( boolean isBigendian ) {
base_.setEncoding( isBigendian );
}
public boolean isBigendian() {
return base_.isBigendian();
}
public void readDataBytes( long offset, int count, byte[] array )
throws IOException {
base_.readDataBytes( offset, count, array );
}
public void readDataShorts( long offset, int count, short[] array )
throws IOException {
base_.readDataShorts( offset, count, array );
}
public void readDataInts( long offset, int count, int[] array )
throws IOException {
base_.readDataInts( offset, count, array );
}
public void readDataLongs( long offset, int count, long[] array )
throws IOException {
base_.readDataLongs( offset, count, array );
}
public void readDataFloats( long offset, int count, float[] array )
throws IOException {
base_.readDataFloats( offset, count, array );
}
public void readDataDoubles( long offset, int count, double[] array )
throws IOException {
base_.readDataDoubles( offset, count, array );
}
public InputStream createInputStream( long offset ) {
return base_.createInputStream( offset );
}
public Buf fillNewBuf( long count, InputStream in ) throws IOException {
return base_.fillNewBuf( count, in );
}
}
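// Decorator sketch (hypothetical, not part of the library): WrapperBuf
// exists so that behaviour can be layered onto an existing Buf by
// overriding just the methods of interest. This example counts the bytes
// delivered by readDataBytes calls.
class CountingBufSketch extends WrapperBuf {
    private long nRead_;
    CountingBufSketch( Buf base ) {
        super( base );
    }
    @Override
    public void readDataBytes( long offset, int count, byte[] array )
            throws IOException {
        super.readDataBytes( offset, count, array );
        nRead_ += count;
    }
    long getReadDataByteCount() {
        return nRead_;
    }
}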
jcdf-1.2-3/cdfvar.sh 0000775 0000000 0000000 00000005621 13203340177 0014232 0 ustar 00root root 0000000 0000000 #!/bin/sh
# Command-line utility to generate various versions of a CDF file.
# An input file is specified on the command line, and this script
# rewrites different versions of it with varying endianness,
# compression style, CDF format version etc. These changes affect
# the format, but not the content, of the file.
# The resulting files can then be compared with the original to see
# if the library finds the same content in them all, which is a good
# test of the handling of different endiannesses, compression styles,
# CDF format versions etc.
#
# Flags:
# -create - actually writes the files
# -report - just output the filenames that would be written on stdout
# -verbose - be verbose
# -outdir <dir> - directory for output files
basecmd="$1"
shift
while [ $# -gt 0 ]
do
    args="$1"
    shift
    echo ""
    echo "% $basecmd $args"
    $basecmd $args
done
jcdf-1.2-3/jcdf.xhtml 0000664 0000000 0000000 00000026556 13203340177 0014424 0 ustar 00root root 0000000 0000000
"
echo "% $basecmd $args"
echo "JCDF
Overview
Documentation
CdfContent
class
for high-level access to CDF data and metadata, or
CdfReader
for low-level access to the CDF internal records.
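For orientation, here is a minimal sketch of high-level read access
(the constructor and method signatures are assumptions based on the
description above, not a statement of the actual API):

    import java.io.File;
    import uk.ac.bristol.star.cdf.CdfContent;
    import uk.ac.bristol.star.cdf.CdfReader;
    import uk.ac.bristol.star.cdf.Variable;

    // Hypothetical usage sketch: list the names of the variables in a CDF.
    public class ListVariables {
        public static void main( String[] args ) throws Exception {
            CdfReader rdr = new CdfReader( new File( args[ 0 ] ) );
            CdfContent content = new CdfContent( rdr );
            for ( Variable var : content.getVariables() ) {
                System.out.println( var.getName() );
            }
        }
    }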
Comparison with the official CDF library

Implementation Notes

Implementation Status

Unsigned integer types are supported, but each is transformed to the
next larger signed type, so that for instance a CDF_UINT1 value is
read as a short (16-bit) integer.

Utilities

CdfList: like the cdfdump command in the official CDF distribution.
If the -data flag is supplied, record data as well as metadata is shown.
See CdfList examples.

CdfDump: like the cdfirsdump command in the official CDF distribution.
If the -fields flag is supplied, field information from each record is
shown. If the -html flag is supplied, the output is in HTML with file
offsets displayed as hyperlinks, which is nice for chasing pointers.
See CdfDump examples.

Downloads

History

The leap seconds table can be overridden using the CDF_LEAPSECONDSTABLE
environment variable as for the NASA library. The internal leap seconds
table is updated until 2012-07-01.

The rfuD field is now renamed as leapSecondLastUpdated.
It is also used when formatting TIME_TT2000 data values for output;
if the library leap second table is out of date with respect to the data,
a warning is issued for information, and if the time values are known to
have leap seconds applied invalidly, an error is thrown or a severe log
message is issued. This behaviour follows that of the official CDF library.

Added Variable.getDescriptor method.

Fixed bugs in the multi-bank buffer implementation (BankBuf):
unsigned bytes could be read wrong in some cases, and data could be
read wrong near the boundaries of multi-buffer files (only likely to
show up for files >2Gbyte).
Thanks to Lukas Kvasnica (Brno) for identifying and fixing these.
Unit tests added for the Buf implementations.

Context

Mark Taylor --
Astrophysics Group,
School of Physics,
Bristol University
jcdf-1.2-3/makefile 0000664 0000000 0000000 00000012471 13203340177 0014127 0 ustar 00root root 0000000 0000000
VERSION = 1.2-3
JAVAC = javac
JAVA = java
JAR = jar
JAVADOC = javadoc
# If you're building with java8, you can uncomment this to reduce warnings
# JAVADOC_FLAGS = -Xdoclint:all,-missing
JARFILE = jcdf.jar
WWW_FILES = $(JARFILE) javadocs index.html cdflist.html cdfdump.html
WWW_DIR = /homeb/mbt/public_html/jcdf
TEST_JARFILE = jcdf_test.jar
TEST_CDFS = data/example1.cdf data/example2.cdf data/test.cdf data/local/*.cdf
TEST_BADLEAP = data/test_badleap.cdf
NASACDFJAR = nasa/cdfjava_3.6.0.4.jar
NASALEAPSECFILE = nasa/CDFLeapSeconds.txt
JSRC = \
BankBuf.java \
Buf.java \
Bufs.java \
Pointer.java \
SimpleNioBuf.java \
WrapperBuf.java \
\
AttributeDescriptorRecord.java \
AttributeEntryDescriptorRecord.java \
CdfDescriptorRecord.java \
CompressedCdfRecord.java \
CompressedParametersRecord.java \
CompressedVariableValuesRecord.java \
GlobalDescriptorRecord.java \
Record.java \
RecordFactory.java \
RecordPlan.java \
SparsenessParametersRecord.java \
UnusedInternalRecord.java \
VariableDescriptorRecord.java \
VariableIndexRecord.java \
VariableValuesRecord.java \
CdfField.java \
OffsetField.java \
\
BitExpandInputStream.java \
Compression.java \
DataReader.java \
NumericEncoding.java \
RunLengthInputStream.java \
RecordMap.java \
\
AttributeEntry.java \
CdfContent.java \
GlobalAttribute.java \
VariableAttribute.java \
Variable.java \
CdfInfo.java \
CdfReader.java \
DataType.java \
Shaper.java \
CdfFormatException.java \
EpochFormatter.java \
TtScaler.java \
\
CdfDump.java \
CdfList.java \
LogUtil.java

TEST_JSRC = \
ExampleTest.java \
SameTest.java \
OtherTest.java \
BufTest.java

build: jar docs
jar: $(JARFILE)
docs: $(WWW_FILES)
javadocs: $(JSRC) package-info.java
rm -rf javadocs
mkdir javadocs
$(JAVADOC) $(JAVADOC_FLAGS) -quiet \
-d javadocs $(JSRC) package-info.java
index.html: jcdf.xhtml
xmllint -noout jcdf.xhtml && \
xmllint -html jcdf.xhtml >index.html
cdflist.html: $(JARFILE)
./examples.sh \
"java -classpath $(JARFILE) uk.ac.bristol.star.cdf.util.CdfList" \
"-help" \
"data/example1.cdf" \
"-data data/example1.cdf" \
>$@
cdfdump.html: $(JARFILE)
./examples.sh \
"java -classpath $(JARFILE) uk.ac.bristol.star.cdf.util.CdfDump" \
"-help" \
"data/example1.cdf" \
"-fields -html data/example1.cdf" \
>$@
installwww: $(WWW_DIR) $(WWW_FILES)
rm -rf $(WWW_DIR)/* && \
cp -r $(WWW_FILES) $(WWW_DIR)/
updatewww: $(WWW_DIR)/index.html
$(WWW_DIR)/index.html: index.html
cp index.html $@
$(NASALEAPSECFILE):
curl 'https://cdf.gsfc.nasa.gov/html/CDFLeapSeconds.txt' >$@
test: build buftest extest othertest badleaptest convtest
convtest: $(JARFILE) $(TEST_JARFILE)
rm -rf tmp; \
mkdir tmp; \
for f in $(TEST_CDFS); \
do \
files=`./cdfvar.sh -outdir tmp -report $$f`; \
cmd="java -ea -classpath $(JARFILE):$(TEST_JARFILE) \
uk.ac.bristol.star.cdf.test.SameTest $$files"; \
./cdfvar.sh -outdir tmp -create $$f && \
echo $$cmd && \
$$cmd || \
break; \
done
extest: $(JARFILE) $(TEST_JARFILE)
jargs="-ea \
-classpath $(JARFILE):$(TEST_JARFILE) \
uk.ac.bristol.star.cdf.test.ExampleTest \
data/example1.cdf data/example2.cdf data/test.cdf" && \
java -Duser.timezone=GMT $$jargs && \
java -Duser.timezone=PST $$jargs && \
java -Duser.timezone=EET $$jargs && \
java $$jargs
othertest: $(JARFILE) $(TEST_JARFILE) $(NASACDFJAR) $(NASALEAPSECFILE)
jargs="-ea \
-classpath $(JARFILE):$(TEST_JARFILE):$(NASACDFJAR) \
uk.ac.bristol.star.cdf.test.OtherTest" && \
export CDF_LEAPSECONDSTABLE=$(NASALEAPSECFILE) && \
java -Duser.timezone=GMT $$jargs && \
java -Duser.timezone=PST $$jargs && \
java -Duser.timezone=EET $$jargs && \
java $$jargs
buftest: $(JARFILE) $(TEST_JARFILE)
java -ea \
-classpath $(JARFILE):$(TEST_JARFILE) \
uk.ac.bristol.star.cdf.test.BufTest
badleaptest: $(JARFILE) $(TEST_BADLEAP)
# This one should run OK
java -classpath $(JARFILE) uk.ac.bristol.star.cdf.util.CdfDump \
$(TEST_BADLEAP) >/dev/null
# but this one should report that the file's leap seconds table
# is out of date and exit with a RuntimeException
if java -classpath $(JARFILE) \
uk.ac.bristol.star.cdf.util.CdfList -data \
$(TEST_BADLEAP) >/dev/null 2>&1; then \
should_have_failed; \
fi
clean:
rm -rf $(JARFILE) $(TEST_JARFILE) tmp \
index.html javadocs cdflist.html cdfdump.html
$(JARFILE): $(JSRC)
rm -rf tmp
mkdir -p tmp
$(JAVAC) -Xlint:unchecked -d tmp $(JSRC) \
&& echo "$(VERSION)" >tmp/uk/ac/bristol/star/cdf/jcdf.version \
&& $(JAR) cf $@ -C tmp .
rm -rf tmp
$(TEST_JARFILE): $(JARFILE) $(TEST_JSRC)
rm -rf tmp
mkdir -p tmp
$(JAVAC) -Xlint:unchecked -d tmp -classpath $(JARFILE) $(TEST_JSRC) \
&& $(JAR) cf $@ -C tmp .
rm -rf tmp
jcdf-1.2-3/notes.txt 0000664 0000000 0000000 00000012060 13203340177 0014312 0 ustar 00root root 0000000 0000000 Implementation notes for CDF
----------------------------
File formats:
Single file only supported (not multiple file)
Numeric encodings:
Unsupported: VMS D_FLOAT, G_FLOAT
Compression formats:
All supported.
Data types:
Unsigned integer types supported, but transformed to larger signed types
(CDF_UINT1 -> short, CDF_UINT2 -> int, CDF_UINT4 -> long).
CDF_UCHAR treated like CDF_CHAR (-> char, which is 16 bit anyway).
CDF_EPOCH, CDF_EPOCH16, CDF_TIME_TT2000 treated as double, double[2],
long respectively, not obviously times.
Options: transform them to ISO8601 strings on input, or make sure
TOPCAT can transform them to epochs (in that case you could plot them,
but they wouldn't look OK in the table view, stats window etc).
Probably best to transform these to iso8601 strings in the STIL layer.
Think about how that affects precision for CDF_EPOCH16 and leap seconds.
Is precision at that level important?
Time string formatting to ISO-8601 is performed for the time formats,
but currently done wrong for TT2000 since it does not cope with
leap seconds.
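For example, the unsigned-type widening noted above amounts to the
usual mask-and-widen conversions (illustrative snippet, not library code):

    class WidenSketch {
        static short uint1( byte b )  { return (short) ( b & 0xff ); }
        static int   uint2( short s ) { return s & 0xffff; }
        static long  uint4( int i )   { return i & 0xffffffffL; }
    }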
CDF Data Format version:
Version 3.4 supported (v3.4 document used for implementation).
Following notes in that document, probably 2.6, 2.7, 3.* are also
supported, maybe others, but I haven't checked them all.
Large files:
No file size restriction. Files >2Gb are allowed, but require use
of a 64-bit system (OS+JVM).
Fill values:
Implemented for CDF and CEF for scalars and floating point arrays.
However, I can't make integer array elements behave like nulls
without significant changes to the framework. Most of these CDFs
seem to have a lot of array-valued columns. Are fill values in
integer array values much used, and likely to cause trouble?
I/O:
Read access only, output not supported at all.
Array access:
Read raw array or shaped array - less flexibility than HyperRead.
Implementation notes for CEF
----------------------------
Version:
Working from CEF version 2.0. Document suggests that 1.0 is
not likely to work, though I haven't seen 1.0 specification.
I don't know if any other versions exist.
Data types:
There are places where the CEF standard is not very explicit.
For instance it doesn't say how many bits INTs/FLOAT/DOUBLEs have,
whether a BYTE is signed, or whether the fill value is to be
matched as a string or as a typed value.
I've looked at some examples and made my best guess.
Syntax:
INCLUDE = "filename" not supported.
Array data:
CEF specifies C-type array ordering, and STIL uses FORTRAN/FITS-type
array ordering. Not quite sure what to do about this. Could transform
on read, but it would be inefficient, and if the data is addressed
as a vector (which is natural) anyone expecting CEF-ordered arrays
would get it wrong.
Metadata:
Lots of per-column metadata (TENSOR_FRAME, DEPEND_i, etc etc) read
in and available in GUI but otherwise ignored.
CEF/CDF release:
----------------
Code structure:
Libraries for basic CDF and CEF access, with optional STIL layers
on top to provide the TOPCAT/STILTS integration.
Code status:
Code is public on github (https://github.com/mbtaylor/cdf)
but not formally released.
Javadocs mostly done.
Some tests involving comparison of simple data files with results
from CDF library tools, and matching CDF files that have been
modified using CDF format conversion tools. This is not a bad
suite of tests, though more could be added. Some CDF data types are
not tested, since I can't find any test data
(e.g. EPOCH16, TIME_TT2000 variables). More tests on time formatting
would be good too.
Release questions:
Any opinions on how release should be done?
What priority is (quality of) independent CDF/CEF release?
Java namespace (int.esa.cdf, uk.ac.starlink.cdf, uk.ac.bristol.star.cdf)?
Starjava integration:
---------------------
Auto format detection:
CDF yes, CEF no. CEF could do, but if the FILE_FORMAT_VERSION
is more than 512 bytes into the file it could cause trouble.
Treeview:
Not implemented. Could do.
SAMP:
You can send a CDF or CEF to TOPCAT using SAMP with the non-standard
MType table.load.stil:
{ "samp.mtype": "table.load.stil",
"samp.params": {
"url": "file://localhost/mbt/data/cdf/C4_V120822.cdf",
"format": "cdf" } }
As well as being non-standard, this MType was not documented in the
TOPCAT user documentation at v4.0-b, though it will be in later
releases.
Discussion of table.load.cdf (and maybe .cef) under way on apps-samp.
Plans:
------
CDF/CEF I/O:
Public independent CDF library release (when?)
CEF within STILTS? doesn't really deserve its own library.
TOPCAT visualisation:
Implement time series layer plot (like stacked line plot?)
Implement better time axis labelling
Implement time series/array plots (what are these called?)
Implement external plot control
Requirements:
Talk to Chris Perry at RAL when I have reasonably working line and
vector plots to get feedback about functionality etc.
jcdf-1.2-3/package-info.java 0000664 0000000 0000000 00000001634 13203340177 0015615 0 ustar 00root root 0000000 0000000 /**
* Pure java library for read-only access to CDF (NASA Common Data Format)
* files.
*
*