/*
* CdfUtil.java
*
* Created on July 24, 2007, 12:56 PM
*/
package org.autoplot.cdf;
import gov.nasa.gsfc.spdf.cdfj.AttributeEntry;
import gov.nasa.gsfc.spdf.cdfj.CDFException;
import gov.nasa.gsfc.spdf.cdfj.CDFReader;
import java.util.logging.Level;
import org.das2.datum.DatumRange;
import org.das2.datum.EnumerationUnits;
import org.das2.datum.Units;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.logging.Logger;
import org.das2.datum.InconvertibleUnitsException;
import org.das2.datum.UnitsConverter;
import org.das2.datum.UnitsUtil;
import org.das2.util.LoggerManager;
import org.das2.util.monitor.ProgressMonitor;
import org.das2.qds.buffer.BufferDataSet;
import org.das2.qds.ArrayDataSet;
import org.das2.qds.DDataSet;
import org.das2.qds.DataSetUtil;
import org.das2.qds.QDataSet;
import org.das2.qds.MutablePropertyDataSet;
import org.das2.qds.Slice0DataSet;
import org.autoplot.datasource.DataSourceUtil;
import org.autoplot.datasource.MetadataModel;
import org.autoplot.metatree.IstpMetadataModel;
import org.das2.qds.SemanticOps;
import org.das2.qds.ops.Ops;
import org.das2.util.monitor.NullProgressMonitor;
/**
* static methods supporting CdfFileDataSource
*
* @author jbf
*/
public class CdfUtil {
private final static Logger logger= LoggerManager.getLogger("apdss.cdf");
/**
* return the Java type used to store the CDF data type.
 * @param type the CDF data type code, e.g. 45 (CDF_DOUBLE), 44 (CDF_FLOAT), or 51 (CDF_CHAR).
 * @return a Java type name like "double", "float", or "string".
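 * <p>For example, getTargetType( (int)CDFConstants.CDF_TT2000 ) returns "long".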
*/
private static String getTargetType(int type) {
switch (type) {
case (int)CDFConstants.CDF_DOUBLE:
case (int)CDFConstants.CDF_REAL8:
case (int)CDFConstants.CDF_EPOCH:
return "double";
case (int)CDFConstants.CDF_EPOCH16:
return "double";
case (int)CDFConstants.CDF_FLOAT:
case (int)CDFConstants.CDF_REAL4:
return "float";
case (int)CDFConstants.CDF_UINT4:
return "double";
case (int)CDFConstants.CDF_INT8:
case (int)CDFConstants.CDF_TT2000:
return "long";
case (int)CDFConstants.CDF_INT4:
case (int)CDFConstants.CDF_UINT2:
return "int";
case (int)CDFConstants.CDF_INT2:
case (int)CDFConstants.CDF_UINT1:
return "short";
case (int)CDFConstants.CDF_INT1:
case (int)CDFConstants.CDF_BYTE:
return "byte";
case (int)CDFConstants.CDF_CHAR:
case (int)CDFConstants.CDF_UCHAR:
return "string";
default:
throw new IllegalArgumentException("unsupported type: "+type);
}
}
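    /**
     * return the BufferDataSet component type used to store the CDF data type,
     * e.g. BufferDataSet.DOUBLE for CDF_DOUBLE. Note CDF_CHAR and CDF_UCHAR
     * are stored as bytes.
     * @param type the CDF data type code, such as CDFConstants.CDF_FLOAT.
     * @return the BufferDataSet type, such as BufferDataSet.FLOAT.
     */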
private static Object byteBufferType( int type ) {
switch (type) {
case (int)CDFConstants.CDF_DOUBLE:
case (int)CDFConstants.CDF_REAL8:
case (int)CDFConstants.CDF_EPOCH:
return BufferDataSet.DOUBLE;
case (int)CDFConstants.CDF_FLOAT:
case (int)CDFConstants.CDF_REAL4:
return BufferDataSet.FLOAT;
case (int)CDFConstants.CDF_UINT4:
return BufferDataSet.DOUBLE;
case (int)CDFConstants.CDF_INT8:
case (int)CDFConstants.CDF_TT2000:
return BufferDataSet.LONG;
case (int)CDFConstants.CDF_INT4:
case (int)CDFConstants.CDF_UINT2:
return BufferDataSet.INT;
case (int)CDFConstants.CDF_INT2:
case (int)CDFConstants.CDF_UINT1:
return BufferDataSet.SHORT;
case (int)CDFConstants.CDF_INT1:
case (int)CDFConstants.CDF_BYTE:
return BufferDataSet.BYTE;
case (int)CDFConstants.CDF_CHAR:
return BufferDataSet.BYTE; // determined experimentally: vap+cdfj:file:///home/jbf/ct/hudson/data.backup/cdf/ac_k0_mfi_20080602_v01.cdf?BGSEc
case (int)CDFConstants.CDF_UCHAR:
return BufferDataSet.BYTE; // TODO: I think...
case (int)CDFConstants.CDF_EPOCH16:
return BufferDataSet.DOUBLE;
default:
throw new IllegalArgumentException("unsupported type: "+type);
}
}
    /**
     * return a human-readable name for the CDF data type, like "CDF_DOUBLE",
     * used when describing variables.
     * @param type the CDF data type code.
     * @return the name of the type, or "type"+type when the code is not recognized.
     */
    protected static String getStringDataType( int type ) {
        switch (type) {
            case (int)CDFConstants.CDF_DOUBLE: return "CDF_DOUBLE";
            case (int)CDFConstants.CDF_REAL8: return "CDF_REAL8";
            case (int)CDFConstants.CDF_EPOCH: return "CDF_EPOCH";
            case (int)CDFConstants.CDF_EPOCH16: return "CDF_EPOCH16";
            case (int)CDFConstants.CDF_TT2000: return "CDF_TT2000";
            case (int)CDFConstants.CDF_FLOAT: return "CDF_FLOAT";
            case (int)CDFConstants.CDF_REAL4: return "CDF_REAL4";
            case (int)CDFConstants.CDF_INT8: return "CDF_INT8";
            case (int)CDFConstants.CDF_INT4: return "CDF_INT4";
            case (int)CDFConstants.CDF_INT2: return "CDF_INT2";
            case (int)CDFConstants.CDF_INT1: return "CDF_INT1";
            case (int)CDFConstants.CDF_UINT4: return "CDF_UINT4";
            case (int)CDFConstants.CDF_UINT2: return "CDF_UINT2";
            case (int)CDFConstants.CDF_UINT1: return "CDF_UINT1";
            case (int)CDFConstants.CDF_BYTE: return "CDF_BYTE";
            case (int)CDFConstants.CDF_CHAR: return "CDF_CHAR";
            case (int)CDFConstants.CDF_UCHAR: return "CDF_UCHAR";
            default: return "type"+type;
        }
    }
/**
* column major files require a transpose of each record. This makes a copy of the input, because I'm nervous
* that this might be backed by a writable cdf file.
 * @param recLenBytes length of each record in bytes. (E.g. qube=[n,2] and bbType=FLOAT, then this is 2*4=8.)
* @param qube dimensions, a 0,1,..,4 element array.
* @param byteBuffer
* @param bbType
* @return the byte buffer.
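 * <p>For example (a sketch), with qube=[n,2,3] the element (i1,i2) of each record, stored
 * column-major at byte offset (i2*2+i1)*fieldBytes, moves to row-major offset (i1*3+i2)*fieldBytes.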
*/
private static ByteBuffer transpose( int recLenBytes, int[] qube, ByteBuffer byteBuffer, Object bbType ) {
if ( qube.length<3 ) {
return byteBuffer;
}
ByteBuffer temp= ByteBuffer.allocate(recLenBytes);
ByteBuffer result= ByteBuffer.allocate(recLenBytes * qube[0]);
result.order(byteBuffer.order());
int fieldBytes= BufferDataSet.byteCount(bbType);
switch (qube.length) {
case 3:
{
int len1= qube[1];
int len2= qube[2];
                for ( int i0=0; i0<qube[0]; i0++ ) {
                    temp.clear();
                    for ( int i1=0; i1<len1; i1++ ) {
                        for ( int i2=0; i2<len2; i2++ ) {
                            // element (i1,i2) is at column-major offset i2*len1+i1 within the record.
                            int isrc= i0*recLenBytes + ( i2*len1 + i1 )*fieldBytes;
                            for ( int b=0; b<fieldBytes; b++ ) {
                                temp.put( byteBuffer.get( isrc+b ) );
                            }
                        }
                    }
                    temp.flip();
                    result.put( temp );
                }
                break;
            }
            case 4: {
                int len1= qube[1];
                int len2= qube[2];
                int len3= qube[3];
                for ( int i0=0; i0<qube[0]; i0++ ) {
                    temp.clear();
                    for ( int i1=0; i1<len1; i1++ ) {
                        for ( int i2=0; i2<len2; i2++ ) {
                            for ( int i3=0; i3<len3; i3++ ) {
                                int isrc= i0*recLenBytes + ( ( i3*len2 + i2 )*len1 + i1 )*fieldBytes;
                                for ( int b=0; b<fieldBytes; b++ ) {
                                    temp.put( byteBuffer.get( isrc+b ) );
                                }
                            }
                        }
                    }
                    temp.flip();
                    result.put( temp );
                }
                break;
            }
            default:
                throw new IllegalArgumentException("unsupported qube length: "+qube.length);
        }
        result.flip();
        return result;
    }

    /**
     * add the valid range to the dataset properties, converting the limits to the
     * dataset's units when this is possible, and copy over the SCALE_TYPE property.
     * @param props the ISTP metadata, which may contain VALID_MIN, VALID_MAX, and SCALE_TYPE.
     * @param ds the dataset to receive the properties.
     */
    public static void maybeAddValidRange( Map<String,Object> props, MutablePropertyDataSet ds ) {
Units pu= (Units) props.get(QDataSet.UNITS);
Units u= (Units) ds.property( QDataSet.UNITS );
UnitsConverter uc;
if ( pu==null || u==null ) {
uc= UnitsConverter.IDENTITY;
} else if ( u==Units.cdfEpoch ) {
uc= UnitsConverter.IDENTITY;
} else if ( pu==Units.microseconds && u==Units.us2000 ) { // epoch16
uc= UnitsConverter.IDENTITY;
} else {
if ( pu==u ) {
uc= UnitsConverter.IDENTITY;
} else if ( UnitsUtil.isOrdinalMeasurement(u) || UnitsUtil.isOrdinalMeasurement(pu) ) {
return;
} else {
try {
uc= UnitsConverter.getConverter( pu, u );
} catch ( InconvertibleUnitsException ex ) { // PlasmaWave group Polar H7 files
uc= UnitsConverter.IDENTITY;
}
}
}
double dmin=Double.NEGATIVE_INFINITY;
double dmax=Double.POSITIVE_INFINITY;
if ( ds.rank()==1 && ds.length()>0 ) {
QDataSet range= Ops.extent(ds,null,null);
dmin= uc.convert(range.value(0));
dmax= uc.convert(range.value(1));
}
Number nmin= (Number)props.get(QDataSet.VALID_MIN);
        double vmin= nmin==null ? Double.NEGATIVE_INFINITY : nmin.doubleValue();
Number nmax= (Number)props.get(QDataSet.VALID_MAX);
double vmax= nmax==null ? Double.POSITIVE_INFINITY : nmax.doubleValue();
boolean intersects= false;
        if ( dmax>vmin && dmin<vmax ) {
            intersects= true;
        }
        if ( intersects || dmin>1e30 ) { //bugfix 3235447: all data invalid
if ( nmax!=null ) ds.putProperty(QDataSet.VALID_MAX, uc.convert(nmax) );
if ( nmin!=null ) ds.putProperty(QDataSet.VALID_MIN, uc.convert(nmin) );
}
String t= (String) props.get(QDataSet.SCALE_TYPE);
if ( t!=null ) ds.putProperty( QDataSet.SCALE_TYPE, t );
}
/**
* returns the size of the data type in bytes.
* @param itype type of data, such as CDFConstants.CDF_FLOAT
 * @return the size of the data atom in bytes
* TODO: this needs to be verified. Unsigned numbers may come back as next larger size.
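 * <p>For example, sizeOf( CDFConstants.CDF_EPOCH16 ) returns 16, and sizeOf( CDFConstants.CDF_FLOAT ) returns 4.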
*/
protected static int sizeOf( long itype ) {
int sizeBytes;
if ( itype==CDFConstants.CDF_EPOCH16 ) {
sizeBytes= 16;
} else if(itype == CDFConstants.CDF_DOUBLE || itype == CDFConstants.CDF_REAL8 || itype == CDFConstants.CDF_EPOCH || itype==CDFConstants.CDF_TT2000 || itype==CDFConstants.CDF_INT8 || itype==CDFConstants.CDF_UINT4 ) {
sizeBytes= 8;
} else if( itype == CDFConstants.CDF_FLOAT || itype == CDFConstants.CDF_REAL4 || itype==CDFConstants.CDF_INT4 || itype == CDFConstants.CDF_UINT2 ) {
            sizeBytes= 4;
        } else if( itype == CDFConstants.CDF_INT2 || itype == CDFConstants.CDF_UINT1 || itype==CDFConstants.CDF_UCHAR ) {
            sizeBytes= 2;
        } else if( itype == CDFConstants.CDF_INT1 || itype==CDFConstants.CDF_BYTE || itype==CDFConstants.CDF_CHAR ) {
            sizeBytes= 1;
} else {
throw new IllegalArgumentException("didn't code for type");
}
return sizeBytes;
}
/**
* returns the size of the variable in bytes.
* @param dims number of dimensions in each record
* @param dimSizes dimensions of each record
* @param itype type of data, such as CDFConstants.CDF_FLOAT
* @param rc number of records (rec count)
* @return the size the variable in bytes
*/
private static long sizeOf( int dims, int[] dimSizes, long itype, long rc ) {
long size= dims==0 ? rc : rc * DataSetUtil.product( dimSizes );
size= size*sizeOf(itype);
return size;
}
/**
 * returns the effective rank. Nand's code looks for 1-element dimensions, which messes up Seth's file rbspb_pre_ect-mageisHIGH.
 * See files:
 * - vap+cdfj:ftp://cdaweb.gsfc.nasa.gov/pub/data/geotail/lep/2011/ge_k0_lep_20111016_v01.cdf?V0
 * - vap+cdfj:file:///home/jbf/ct/autoplot/data.backup/examples/cdf/seth/rbspb_pre_ect-mageisHIGH-sp-L1_20130709_v1.0.0.cdf?Histogram_prot
 * @param varies array of booleans indicating whether each dimension varies.
 * @return the effective rank, the number of varying dimensions.
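 * <p>For example, getEffectiveRank( new boolean[] { true, false, true } ) returns 2.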
*/
protected static int getEffectiveRank( boolean[] varies ) {
int rank = 0;
for (int i = 0; i < varies.length; i++) {
if (!varies[i]) continue;
rank++;
}
return rank;
}
/**
* implements slice1 by packing all the remaining elements towards the front and trimming.
* @param buf the byte buffer, which can be read-only.
* @param varType the variable type, see sizeOf(varType)
* @param qube the dimensions of the unsliced dataset
* @param slice1 the index to slice
* @param rowMajority true if the buffer is row majority.
* @return a copy containing just the slice1 of the input buffer.
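 * <p>For example (a sketch), with qube=[n,3,27] and slice1=1, the result contains the n*27
 * elements [i,1,j] of the input, packed as records of 27 elements.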
*/
private static ByteBuffer doSlice1( ByteBuffer buf, long varType, int[] qube, int slice1, boolean rowMajority ) {
int recSizeBytes= DataSetUtil.product(qube) / qube[0] * sizeOf(varType);
ByteBuffer result= ByteBuffer.allocate( recSizeBytes / qube[1] * qube[0] );
result.order(buf.order());
if ( rowMajority ) { // one of these two is wrong.
int p1= slice1 * recSizeBytes / qube[1];
int p2= ( slice1 * recSizeBytes / qube[1] + recSizeBytes / qube[1] );
            for ( int irec=0; irec<qube[0]; irec++ ) {
                for ( int i=p1; i<p2; i++ ) {
                    result.put( buf.get( irec*recSizeBytes + i ) );
                }
            }
        } else {
            int fieldBytes= sizeOf(varType);
            int nfield= recSizeBytes / qube[1] / fieldBytes;
            for ( int irec=0; irec<qube[0]; irec++ ) {
                for ( int ifield=0; ifield<nfield; ifield++ ) {
                    // in column-major order, the slice1 elements are every qube[1] elements.
                    int p= irec*recSizeBytes + ( ifield*qube[1] + slice1 )*fieldBytes;
                    for ( int b=0; b<fieldBytes; b++ ) {
                        result.put( buf.get( p+b ) );
                    }
                }
            }
        }
        result.flip();
        return result;
    }

    /**
     * read the records one record at a time, used when CDFReader.getBuffer cannot
     * be used, for example when recInterval is greater than one.
     * @param cdf the CDF reader reference.
     * @param svariable the variable name.
     * @param recStart the first record to read.
     * @param recStop the last record, exclusive.
     * @param recInterval the interval between records.
     * @return buffer containing the concatenated records.
     * @throws CDFException
     */
    private static ByteBuffer myGetBuffer( CDFReader cdf, String svariable, int recStart, int recStop, int recInterval ) throws CDFException {
        String stype= getTargetType( cdf.getType(svariable) );
        int nrec= ( recStop - recStart ) / recInterval;
        ByteBuffer result= null;
        for ( int i=0; i<nrec; i++ ) {
            int irec= recStart + i*recInterval;
            ByteBuffer rec1= cdf.getBuffer( svariable, stype, new int[] { irec, irec }, true );
            if ( result==null ) {
                result= ByteBuffer.allocate( rec1.remaining() * nrec );
                result.order( rec1.order() );
            }
            result.put( rec1 );
        }
        if ( result!=null ) result.flip();
        return result;
    }

    /**
     * read the variable and wrap the data in a QDataSet.
     * @param cdf the CDF reader reference.
     * @param svariable the name of the variable to read.
     * @param recStart the first record to read.
     * @param recCount the number of records to read, or -1 to indicate a slice of one record.
     * @param recInterval the interval between records, typically 1.
     * @param slice1 if non-negative, the index of the second dimension to slice, or -1.
     * @return the dataset wrapping the data.
     * @throws Exception
     */
    public static MutablePropertyDataSet loadVariable( CDFReader cdf, String svariable,
            long recStart, long recCount, long recInterval, int slice1 ) throws Exception {
        int varType= cdf.getType(svariable);
        int[] dimSizes= getDimensions( cdf, svariable );
        if ( dimSizes==null ) dimSizes= new int[0];
        int dims= dimSizes.length;
        if ( dims > 3 ) {
if (recCount != -1) {
throw new IllegalArgumentException("rank 5 not implemented");
}
}
int varRecCount= cdf.getNumberOfValues(svariable);
if ( recCount==-1 && recStart>0 && varRecCount==1 ) { // another kludge for Rockets, where depend was assigned variance
recStart= 0;
}
if ( recCount>1 ) { // check for length limit
int bytesPerRecord= DataSetUtil.product(dimSizes) * sizeOf(varType);
int limit= (int)(Integer.MAX_VALUE)/1000; // KB
if ( limit<(recCount/1000/recInterval*bytesPerRecord) ) {
int newRecCount= (int)( limit * recInterval * 1000 / bytesPerRecord );
String suggest;
if ( recInterval>1 ) {
suggest= "[0:"+newRecCount+":"+recInterval+"]";
} else {
suggest= "[0:"+newRecCount+"]";
}
throw new IllegalArgumentException("data read would result in more than 2GB read, which is not yet supported. Use "+svariable+suggest+" to read first records.");
}
}
long rc= recCount;
if ( rc==-1 ) rc= 1; // -1 is used as a flag for a slice, we still really read one record.
logger.log( Level.FINEST, "size of {0}: {1}MB type: {2}", new Object[]{svariable, sizeOf(dims, dimSizes, varType, rc) / 1024. / 1024., varType});
String stype = getTargetType( cdf.getType(svariable) );
ByteBuffer buff;
long t0= System.currentTimeMillis();
logger.entering("gov.nasa.gsfc.spdf.cdfj.CDFReader", "getBuffer" );
if ( recInterval==1 ) {
try {
boolean preserve= true;
if ( stype.equals("string") ) {
buff= null;
} else {
buff= cdf.getBuffer(svariable, stype, new int[] { (int)recStart,(int)(recStart+recInterval*(rc-1)) }, preserve );
}
} catch ( CDFException ex ) {
buff= myGetBuffer(cdf, svariable, (int)recStart, (int)(recStart+rc*recInterval), (int)recInterval );
}
} else {
buff= myGetBuffer(cdf, svariable, (int)recStart, (int)(recStart+rc*recInterval), (int)recInterval );
}
logger.exiting("gov.nasa.gsfc.spdf.cdfj.CDFReader", "getBuffer" );
logger.log(Level.FINE, "read variable {0} in (ms): {1}", new Object[]{svariable, System.currentTimeMillis()-t0});
Object bbType= byteBufferType( cdf.getType(svariable) );
int recLenBytes= BufferDataSet.byteCount(bbType);
if ( dimSizes.length>0 ) recLenBytes= recLenBytes * DataSetUtil.product( dimSizes );
MutablePropertyDataSet result;
int[] qube;
qube= new int[ 1+dimSizes.length ];
for ( int i=0; i-1 && qube.length>1 ) {
buff= doSlice1( buff, varType, qube, slice1, cdf.rowMajority() );
if ( recCount==-1 ) {
// throw new IllegalArgumentException("recCount==-1 and slice1>-1 when loading "+svariable);
logger.log(Level.FINE, "recCount==-1 and slice1>-1 when loading {0}", svariable);
}
int[] nqube= new int[qube.length-1];
nqube[0]= qube[0];
            for ( int i=2; i<qube.length; i++ ) {
                nqube[i-1]= qube[i];
            }
            recLenBytes= recLenBytes / qube[1];
            qube= nqube;
        }
        if ( !cdf.rowMajority() ) {
            buff= transpose( recLenBytes, qube, buff, bbType );
        }
        result= BufferDataSet.makeDataSet( qube.length, recLenBytes, 0, qube, buff, bbType );
        if ( recCount==-1 && dimSizes.length>0 ) { // vap+cdfj:file:///home/jbf/ct/hudson/data.backup/cdf/c4_cp_fgm_spin_20030102_v01.cdf?B_vec_xyz_gse__C4_CP_FGM_SPIN
boolean reform= true;
            for ( int i=1; i<qube.length; i++ ) {
                if ( qube[i]==0 ) reform= false;
            }
            if ( reform ) {
                result= ArrayDataSet.copy( new Slice0DataSet( result, 0 ) ); // drop the one-record dimension.
            }
        }
        // code which repeated values along non-varying dimensions was removed:
        // if ( repeatDimensions!=null ) {
        //     for ( int i=0; i<repeatDimensions.length; i++ ) {
        //         if ( repeatDimensions[i]>1 ) {
// result= new RepeatIndexDataSet( result, i+1, repeatDimensions[i] );
// }
// //}
// }
if ( varType == CDFConstants.CDF_CHAR || varType==CDFConstants.CDF_UCHAR ) {
throw new IllegalArgumentException("We shouldn't get here because stype=string");
} else if ( varType == CDFConstants.CDF_EPOCH ) {
result.putProperty(QDataSet.UNITS, Units.cdfEpoch);
result.putProperty(QDataSet.VALID_MIN, 1.); // kludge for Timas, which has zeros.
} else if ( varType==CDFConstants.CDF_EPOCH16 ) {
result.putProperty(QDataSet.UNITS, Units.cdfEpoch);
result.putProperty(QDataSet.VALID_MIN, 1.); // kludge for Timas, which has zeros.
DDataSet result1= DDataSet.createRank1(result.length());
            for ( int i=0; i<result1.length(); i++ ) {
                // EPOCH16 stores each time as two doubles, seconds since 0000-01-01 plus
                // picoseconds; combine them into milliseconds for Units.cdfEpoch.
                result1.putValue( i, result.value(i,0)*1000 + result.value(i,1)/1e9 );
            }
            result1.putProperty( QDataSet.UNITS, Units.cdfEpoch );
            result1.putProperty( QDataSet.VALID_MIN, 1. );
            result= result1;
        } else if ( varType==CDFConstants.CDF_TT2000 ) {
            result.putProperty( QDataSet.UNITS, Units.cdfTT2000 );
        }
        if ( UnitsUtil.isTimeLocation( SemanticOps.getUnits(result) ) ) { // cache time tags, which are typically requested for several variables.
            if ( result.rank()==1 ) {
                String uri= svariable; // key for the dataset cache
                if ( recCount>1 ) {
uri= uri + "["+recStart+":"+(recStart+recCount)+":"+recInterval+"]";
}
CdfDataSource.dsCachePut( uri, result );
}
}
return result;
}
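    /**
     * read CDF_CHAR/CDF_UCHAR data, returning a rank 1 dataset of ordinal data
     * with EnumerationUnits created for the variable.
     * @param svariable the variable name.
     * @param recInterval the record interval, which must be 1.
     * @param cdf the CDF reader reference.
     * @param recCount the number of records, or -1 for a slice of the first record.
     * @param qube the dimensions of the dataset.
     * @return a rank 1 dataset of ordinal data.
     * @throws CDFException.ReaderError
     */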
private static MutablePropertyDataSet readStringData(String svariable, long recInterval, CDFReader cdf, long recCount, int[] qube ) throws ArrayIndexOutOfBoundsException, IllegalArgumentException, CDFException.ReaderError {
EnumerationUnits units = EnumerationUnits.create(svariable);
Object o;
if ( recInterval>1 ) throw new IllegalArgumentException("recInterval>1 not supported here");
o = cdf.get(svariable);
Object o0= Array.get(o,0);
String[] sdata;
if ( o0.getClass().isArray() ) {
sdata= new String[ Array.getLength(o0) ];
        for ( int j=0; j<sdata.length; j++ ) {
            sdata[j]= String.valueOf( Array.get( o0, j ) );
        }
    } else {
        int n= Array.getLength(o);
        sdata= new String[n];
        for ( int j=0; j<n; j++ ) {
            sdata[j]= String.valueOf( Array.get( o, j ) );
        }
    }
    DDataSet result= DDataSet.createRank1( sdata.length );
    for ( int j=0; j<sdata.length; j++ ) {
        result.putValue( j, units.createDatum( sdata[j].trim() ).doubleValue( units ) );
    }
    result.putProperty( QDataSet.UNITS, units );
    return result;
}

/**
 * return the dimensions of each record of the variable, reducing the dimensions
 * when non-varying dimensions should be dropped (see getEffectiveRank).
 * @param cdf the CDF reader reference.
 * @param svar the variable name.
 * @return the dimensions of each record of the variable.
 * @throws CDFException
 */
private static int[] getDimensions( CDFReader cdf, String svar ) throws CDFException {
    int[] dims= cdf.getDimensions( svar );
    boolean[] dimVary= cdf.getVarys( svar );
    if ( dims!=null && getEffectiveRank( dimVary )<dims.length ) {
        int shift= dimVary.length>0 ? ( dimVary[0]==true ? 0 : 1 ) : 0 ;
int lastVary=-1;
for ( int iv=dimVary.length-1; iv>=shift; iv-- ) {
if ( dimVary[iv] ) {
lastVary= iv;
break;
}
}
if ( lastVary>-1 ) {
if ( shift==0 ) {
int[] newDims= Arrays.copyOfRange( dims, 0, lastVary+1 );
return newDims;
} else {
int[] newDims= Arrays.copyOfRange( dims, 1, lastVary+1 );
return newDims;
}
} else {
return new int[0];
}
} else {
return dims;
}
}
    /**
     * description of a DEPEND_i dimension of a variable, used by getDepDesc.
     */
    private static class DepDesc {
        /**
         * the name of the DEPEND_i variable, or null.
         */
        String dep;
        /**
         * the number of records in the DEPEND_i variable.
         */
        int nrec;
        /**
         * true if the DEPEND_i variable is record varying (rank 2).
         */
        boolean rank2;
        /**
         * the name of the LABL_PTR_i variable, or null.
         */
        String labl;
    }
/**
 * factor out common code that gets the properties for each DEPEND_i dimension.
 * @param cdf the CDF reader reference.
 * @param svar the variable name.
 * @param rank the rank of the variable.
 * @param dims the dimensions of each record of the variable.
 * @param dim the dimension to describe (1, 2, or 3).
 * @param warn list of warnings, to which any new warnings are added.
 * @param isMaster true if the CDF is an empty master CDF, so record counts are not checked.
 * @return a description of the dimension's DEPEND and LABL_PTR variables.
*/
    private static DepDesc getDepDesc( CDFReader cdf, String svar, int rank, int[] dims, int dim, List<String> warn, boolean isMaster ) {
DepDesc result= new DepDesc();
result.nrec=-1;
try {
if ( hasAttribute( cdf, svar, "DEPEND_"+dim ) ) { // check for metadata for DEPEND_
Object att= getAttribute( cdf, svar, "DEPEND_"+dim );
if ( att!=null && rank>1 ) {
logger.log(Level.FINER, "get attribute DEPEND_"+dim+" entry for {0}", svar );
result.dep = String.valueOf(att);
if ( cdf.getDimensions( result.dep ).length>0 && ( isMaster || cdf.getNumberOfValues( result.dep )>1 ) && cdf.recordVariance( result.dep ) ) {
result.rank2= true;
result.nrec = cdf.getDimensions( result.dep )[0];
warn.add( "NOTE: " + result.dep + " is record varying" );
} else {
result.nrec = cdf.getNumberOfValues( result.dep );
if (result.nrec == 1) {
result.nrec = getDimensions( cdf, result.dep )[0];
}
}
if ( dims.length>(dim-1) && (result.nrec)!=dims[dim-1] ) {
warn.add("DEPEND_"+dim+" length ("+result.nrec+") is inconsistent with length ("+dims[dim-1]+")" );
}
}
}
} catch ( CDFException e) {
warn.add( "problem with DEPEND_"+dim+": " + e.getMessage() );//e.printStackTrace();
}
try {
if (result.nrec==-1 && hasAttribute( cdf, svar, "LABL_PTR_"+dim ) ) { // check for metadata for LABL_PTR_1
Object att= getAttribute( cdf, svar, "LABL_PTR_"+dim );
if ( att!=null && rank>1 ) {
logger.log(Level.FINER, "get attribute LABL_PTR_"+dim+" entry for {0}", svar );
result.labl = String.valueOf(att);
if ( !cdf.existsVariable(result.labl) ) throw new Exception("No such variable: "+String.valueOf(att));
result.nrec = cdf.getNumberOfValues( result.labl );
if (result.nrec == 1) {
result.nrec = cdf.getDimensions(svar)[0];
}
if ( dims.length>(dim-1) && (result.nrec)!=dims[dim-1] ) {
warn.add("LABL_PTR_"+dim+" length is inconsistent with length ("+dims[dim-1]+")" );
}
}
} else if ( hasAttribute( cdf, svar, "LABL_PTR_"+dim ) ) { // check that the LABL_PTR_i is the right length as well.
Object att= getAttribute( cdf, svar, "LABL_PTR_"+dim );
if ( att!=null && rank>1 ) {
logger.log(Level.FINER, "get attribute LABL_PTR_"+dim+" entry for {0}", svar );
result.labl= String.valueOf(att);
int nrec = cdf.getNumberOfValues(result.labl);
if ( nrec == 1 ) {
nrec = cdf.getDimensions(result.labl)[0];
}
if ( dims.length>(dim-1) && (nrec)!=dims[dim-1] ) {
warn.add("LABL_PTR_"+dim+" length is inconsistent with length ("+dims[dim-1]+")" );
}
}
}
} catch (CDFException e) {
warn.add( "problem with LABL_PTR_"+dim+": " + e.getMessage() );//e.printStackTrace();
} catch (Exception e) {
warn.add( "problem with LABL_PTR_"+dim+": " + e.getMessage() );//e.printStackTrace();
}
return result;
}
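    /**
     * return true if the CDF contains the named variable.
     * @param cdf the CDF reader reference.
     * @param var the variable name.
     * @return true if the CDF contains the variable.
     */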
private static boolean hasVariable( CDFReader cdf, String var ) {
        List<String> names= Arrays.asList( cdf.getVariableNames() );
return names.contains(var);
}
    /**
     * return true if the variable has an entry for the attribute.
     * @param cdf the CDF reader reference.
     * @param var the variable name.
     * @param name the attribute name, such as "DEPEND_0".
     * @return true if the variable has an entry for the attribute.
     * @throws CDFException
     */
    private static boolean hasAttribute( CDFReader cdf, String var, String name ) throws CDFException {
        for ( String s: cdf.variableAttributeNames(var) ) {
            if ( s.equals(name) ) return true;
        }
        return false;
    }

    /**
     * return the first entry for the attribute of the variable, or null if there is none.
     * @param cdf the CDF reader reference.
     * @param var the variable name.
     * @param name the attribute name, such as "DEPEND_0".
     * @return the value of the first entry, or null.
     * @throws CDFException
     */
    private static Object getAttribute( CDFReader cdf, String var, String name ) throws CDFException {
        if ( !hasAttribute( cdf, var, name ) ) return null;
        Vector attrs= cdf.getAttributeEntries( var, name );
        if ( attrs==null || attrs.isEmpty() ) return null;
        return ((AttributeEntry)attrs.get(0)).getValue();
    }
/**
* Return a map where keys are the names of the variables, and values are descriptions.
* @param cdf the cdf reader reference.
* @param dataOnly show only the DATA and not SUPPORT_DATA. Note I reclaimed this parameter because I wasn't using it.
* @param rankLimit show only variables with no more than this rank.
* @return map of parameter name to short description
* @throws Exception
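 * <p>A minimal usage sketch, assuming a local CDF file:
 * <pre>{@code
 * CDFReader cdf= new CDFReader( "/tmp/ac_k0_mfi_20080602_v01.cdf" );
 * for ( Map.Entry<String,String> e: CdfUtil.getPlottable( cdf, true, 2 ).entrySet() ) {
 *     System.out.println( e.getKey() + ": " + e.getValue() );
 * }
 * }</pre>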
*/
    public static Map<String,String> getPlottable(CDFReader cdf, boolean dataOnly, int rankLimit) throws Exception {
return getPlottable(cdf, dataOnly, rankLimit, false, false);
}
/**
* abbreviate names, motivated by Cluster CDF files which have
* Data__C1_CP_PEA_3DRH_cnts with DEPEND_0 of
* time_tags__C1_CP_PEA_3DRH_cnts.
* @param context
* @param name
* @return
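 * <p>For example, maybeShorten( "Data__C1_CP_PEA_3DRH_cnts", "time_tags__C1_CP_PEA_3DRH_cnts" )
 * returns "time_tags...".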
*/
public static String maybeShorten( String context, String name ) {
int i1= context.length()-1;
int i2= name.length()-1;
while( i1>0 && i2>0 && context.charAt(i1)==name.charAt(i2) ) {
i1=i1-1;
i2=i2-1;
}
i2++;
if ( i2<(name.length()-3) ) {
return name.substring(0,i2)+"...";
} else {
return name;
}
}
/**
* Return a map where keys are the names of the variables, and values are descriptions. This
* allows for a deeper query, getting detailed descriptions within the values, and also supports the
* mode where the master CDFs (used by the CDAWeb plugin) don't contain data and record counts should
* not be supported.
* @param cdf
* @param dataOnly show only the DATA and not SUPPORT_DATA. Note I reclaimed this parameter because I wasn't using it.
* @param rankLimit show only variables with no more than this rank.
* @param deep return more detailed descriptions in HTML
* @param master cdf is a master cdf, so don't indicate record counts.
* @return map of parameter name to short description
* @throws Exception
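 * <p>For example (a sketch), a deep query of a CDAWeb master file, where the
 * resulting descriptions are HTML:
 * <pre>{@code
 * Map<String,String> descs= CdfUtil.getPlottable( cdf, true, 4, true, true );
 * }</pre>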
*/
    public static Map<String,String> getPlottable(CDFReader cdf, boolean dataOnly, int rankLimit, boolean deep, boolean master) throws Exception {
        Map<String,String> result = new LinkedHashMap<>();
        Map<String,String> dependent= new LinkedHashMap<>();
        boolean isMaster= master; //cdf.getName().contains("MASTERS"); // don't show "Epoch=0", just "Epoch"
logger.fine("getting CDF variables");
String[] v = cdf.getVariableNames();
logger.log(Level.FINE, "got {0} variables", v.length);
logger.fine("getting CDF attributes");
boolean[] isData= new boolean[v.length];
int i=-1;
int skipCount=0;
for (String svar : v) {
i=i+1;
if ( dataOnly ) {
Object attr= getAttribute(cdf, svar, "VAR_TYPE" );
if ( attr==null ) {
for ( String s: cdf.variableAttributeNames(svar) ) {
if ( s.equalsIgnoreCase("VAR_TYPE") ) {
attr= getAttribute(cdf,svar,s);
}
}
if ( attr!=null ) {
logger.log(Level.INFO, "Wrong-case VAR_TYPE attribute found, should be \"VAR_TYPE\"");
}
}
if ( attr!=null && "data".equalsIgnoreCase(attr.toString()) ) {
if ( !attr.equals("data") ) {
logger.log(Level.INFO, "var_type is case-sensitive, should be \"data\", not {0}", attr);
attr= "data";
}
}
if ( attr==null || !attr.equals("data") ) {
skipCount++;
isData[i]= false;
} else {
isData[i]= true;
}
}
}
//if ( skipCount==v.length ) {
// logger.fine( "turning off dataOnly because it rejects everything");
// dataOnly= false;
//}
i=-1;
for (String v1 : v) {
i=i+1;
String svar=null;
            List<String> warn= new ArrayList<>();
String xDependVariable=null;
boolean isVirtual= false;
long xMaxRec = -1;
long maxRec= -1;
long recCount= -1;
String scatDesc = null;
String svarNotes = null;
StringBuilder vdescr=null;
int rank=-1;
int[] dims=new int[0];
int varType=0;
try {
svar = v1;
try {
varType= cdf.getType(svar);
} catch ( CDFException ex ) {
throw new RuntimeException(ex);
}
// reject variables that are ordinal data that do not have DEPEND_0.
boolean hasDep0= hasAttribute( cdf, svar, "DEPEND_0" );
if ( ( varType==CDFConstants.CDF_CHAR || varType==CDFConstants.CDF_UCHAR ) && ( !hasDep0 ) ) {
logger.log(Level.FINER, "skipping because ordinal and no depend_0: {0}", svar );
continue;
}
maxRec = cdf.getNumberOfValues(svar);
recCount= maxRec;
dims = getDimensions(cdf, svar);
if (dims == null) {
rank = 1;
} else {
rank = dims.length + 1;
}
if (rank > rankLimit) {
continue;
}
if ( svar.equals("Time_PB5") ) {
logger.log(Level.FINER, "skipping {0} because we always skip Time_PB5", svar );
continue;
}
if ( dataOnly ) {
if ( !isData[i] ) continue;
}
Object att= getAttribute( cdf, svar, "VIRTUAL" );
if ( att!=null ) {
logger.log(Level.FINER, "get attribute VIRTUAL entry for {0}", svar );
if ( String.valueOf(att).toUpperCase().equals("TRUE") ) {
String funct= (String)getAttribute( cdf, svar, "FUNCTION" );
                        if ( funct==null ) funct= (String) getAttribute( cdf, svar, "FUNCT" ); // in alternate_view in IDL: 11/5/04 - TJK - had to change FUNCTION to FUNCT for IDL6.* compatibility
                        if ( funct==null || !CdfVirtualVars.isSupported(funct) ) {
                            if ( funct!=null && !funct.startsWith("comp_themis") ) {
logger.log(Level.FINER, "virtual function not supported: {0}", funct);
}
continue;
} else {
vdescr= new StringBuilder(funct);
vdescr.append( "( " );
int icomp=0;
String comp= (String)getAttribute( cdf, svar, "COMPONENT_"+icomp );
if ( comp!=null ) {
vdescr.append( comp );
icomp++;
}
for ( ; icomp<5; icomp++ ) {
comp= (String)getAttribute( cdf, svar, "COMPONENT_"+icomp );
if ( comp!=null ) {
vdescr.append(", ").append(comp);
} else {
break;
}
}
vdescr.append(" )");
}
isVirtual= true;
}
}
}catch (CDFException | RuntimeException e) {
logger.fine(e.getMessage());
}
try {
if ( hasAttribute( cdf, svar, "DEPEND_0" )) { // check for metadata for DEPEND_0
Object att= getAttribute( cdf, svar, "DEPEND_0" );
if ( att!=null ) {
logger.log(Level.FINER, "get attribute DEPEND_0 entry for {0}", svar);
xDependVariable = String.valueOf(att);
if ( !hasVariable(cdf,xDependVariable ) ) throw new Exception("No such variable: "+String.valueOf(att));
xMaxRec = cdf.getNumberOfValues( xDependVariable );
if ( xMaxRec!=maxRec && vdescr==null && cdf.recordVariance(svar) ) {
if ( maxRec==-1 ) maxRec+=1; //why?
if ( maxRec==0 ) {
warn.add("data contains no records" );
} else {
warn.add("depend0 length ("+xDependVariable+"["+xMaxRec+"]) is inconsistent with length ("+(maxRec)+")" );
}
//TODO: warnings are incorrect for Themis data.
}
} else {
if ( dataOnly ) {
continue; // vap+cdaweb:ds=GE_K0_PWI&id=freq_b&timerange=2012-06-12
}
}
}
} catch (CDFException e) {
warn.add( "problem with DEPEND_0: " + e.getMessage() );
} catch (Exception e) {
warn.add( "problem with DEPEND_0: " + e.getMessage() );
}
DepDesc dep1desc= getDepDesc( cdf, svar, rank, dims, 1, warn, isMaster );
DepDesc dep2desc= getDepDesc( cdf, svar, rank, dims, 2, warn, isMaster );
DepDesc dep3desc= getDepDesc( cdf, svar, rank, dims, 3, warn, isMaster );
if (deep) {
                Object o= getAttribute( cdf, svar, "CATDESC" );
                if ( o instanceof String ) {
logger.log(Level.FINER, "get attribute CATDESC entry for {0}", svar );
scatDesc = (String)o ;
}
o= getAttribute( cdf, svar, "VAR_NOTES" );
if ( o!=null && o instanceof String ) {
logger.log(Level.FINER, "get attribute VAR_NOTES entry for {0}", svar );
svarNotes = (String)o ;
}
}
String desc = svar;
if (xDependVariable != null) {
desc += "[" + maybeShorten( svar, xDependVariable );
if ( ( xMaxRec>0 || !isMaster ) && xMaxRec==maxRec ) { // small kludge for CDAWeb, where we expect masters to be empty.
desc+= "=" + (xMaxRec);
}
if ( dep1desc.dep != null) {
desc += "," + maybeShorten( svar, dep1desc.dep ) + "=" + dep1desc.nrec + ( dep1desc.rank2 ? "*": "" );
if ( dep2desc.dep != null) {
desc += "," + maybeShorten( svar, dep2desc.dep ) + "=" + dep2desc.nrec + ( dep2desc.rank2 ? "*": "" );
if (dep3desc.dep != null) {
desc += "," + maybeShorten( svar, dep3desc.dep ) + "=" + dep3desc.nrec + ( dep3desc.rank2 ? "*": "" );
}
}
} else if ( rank>1 ) {
desc += ","+DataSourceUtil.strjoin( dims, ",");
}
desc += "]";
}
if (deep) {
                StringBuilder descbuf = new StringBuilder("<html>" + desc + "<br>");
int itype= -1;
try {
//assert svar is valid.
itype= cdf.getType(svar);
} catch ( CDFException ex ) {}
String recDesc= ""+ CdfUtil.getStringDataType( itype );
if ( dims!=null && dims.length>0 ) {
recDesc= recDesc+"["+ DataSourceUtil.strjoin( dims, ",") + "]";
}
if (scatDesc != null) {
descbuf.append(scatDesc).append("
");
}
if (svarNotes !=null ) {
descbuf.append("").append(svarNotes).append("
");
}
Vector variablePurpose= cdf.getAttributeEntries(svar,"VARIABLE_PURPOSE");
if ( variablePurpose.size()>0 ) {
AttributeEntry e= (AttributeEntry)variablePurpose.get(0);
StringBuilder s= new StringBuilder( String.valueOf(e.getValue()) );
                    for ( int i1=1; i1<variablePurpose.size(); i1++ ) {
                        e= (AttributeEntry)variablePurpose.get(i1);
                        s.append(",").append(e.getValue());
                    }
                    descbuf.append("<b>VARIABLE_PURPOSE: ").append(s).append("</b><br>");
}
if (maxRec != xMaxRec) {
if ( isVirtual ) {
descbuf.append("(virtual function ").append(vdescr).append( ")
");
} else {
if ( isMaster ) {
descbuf.append("records of ").append(recDesc).append("
");
} else {
                            descbuf.append( recCount ).append(" records of ").append(recDesc).append("<br>");
}
}
} else {
if ( isMaster ) {
descbuf.append("records of ").append(recDesc).append("
");
} else {
                        descbuf.append( recCount ).append(" records of ").append(recDesc).append("<br>");
}
}
for ( String s: warn ) {
descbuf.append("
");
if ( s.startsWith("NOTE") ) {
descbuf.append(s);
} else {
descbuf.append("WARNING: ").append(s);
}
}
descbuf.append("");
if ( xDependVariable!=null ) {
dependent.put(svar, descbuf.toString());
} else {
result.put(svar, descbuf.toString());
}
} else {
if ( xDependVariable!=null ) {
dependent.put(svar, desc);
} else {
result.put(svar, desc);
}
}
} // for
logger.fine("done, get plottable ");
dependent.putAll(result);
return dependent;
}
/**
* apply the ISTP metadata to the dataset. This is used to implement master files, where metadata from one file
* can override the data within another. Do not use this, as its location will probably change.
*
* @param attr1 the ISTP metadata
* @param result the data
* @param os1 if non-null, then modify the metadata for slice1
* @param constraint if non-null, then drop the render type.
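 * <p>A minimal sketch of a call, where the metadata map comes from the variable's
 * attributes (readVariableAttributes is a hypothetical helper):
 * <pre>{@code
 * Map<String,Object> attr1= readVariableAttributes( cdf, svar ); // hypothetical
 * CdfUtil.doApplyAttributes( attr1, result, null, null );
 * }</pre>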
*/
    public static void doApplyAttributes(Map<String,Object> attr1, MutablePropertyDataSet result, String os1, String constraint) {
        Map<String,Object> istpProps;
MetadataModel model = new IstpMetadataModel();
istpProps= model.properties(attr1);
CdfUtil.maybeAddValidRange(istpProps, result);
Number n= (Number)istpProps.get(QDataSet.FILL_VALUE);
if ( result instanceof BufferDataSet ) {
            Class<?> c= ((BufferDataSet)result).getCompatibleComponentType();
if ( n instanceof Double ) {
if ( c==float.class ) {
istpProps.put( QDataSet.FILL_VALUE, (float)n.doubleValue() );
}
}
}
result.putProperty(QDataSet.FILL_VALUE, istpProps.get(QDataSet.FILL_VALUE));
result.putProperty(QDataSet.LABEL, istpProps.get(QDataSet.LABEL) );
result.putProperty(QDataSet.TITLE, istpProps.get(QDataSet.TITLE) );
result.putProperty(QDataSet.DESCRIPTION, istpProps.get(QDataSet.DESCRIPTION) );
String renderType= (String)istpProps.get(QDataSet.RENDER_TYPE);
if ( renderType!=null && renderType.equals( "time_series" ) ) {
// kludge for rbsp-a_WFR-waveform_emfisis-L2_20120831_v1.2.1.cdf. This is actually a waveform.
// Note Seth (RBSP/ECT Team) has a file with 64 channels. Dan's file rbsp-a_HFR-spectra_emfisis-L2_20120831_v1.2.3.cdf has 82 channels.
if ( result.rank()>1 && result.length(0)>QDataSet.MAX_UNIT_BUNDLE_COUNT ) {
logger.log(Level.FINE, "result.length(0)>QDataSet.MAX_UNIT_BUNDLE_COUNT={0}, this cannot be treated as a time_series", QDataSet.MAX_UNIT_BUNDLE_COUNT);
renderType=null;
}
}
if ( renderType !=null && renderType.startsWith("image") ) {
logger.fine("renderType=image not supported in CDF files");
renderType= null;
}
if ( UnitsUtil.isNominalMeasurement(SemanticOps.getUnits(result)) ) {
renderType= "eventsbar";
}
if ( constraint!=null ) {
logger.finer("dropping render type because of constraint");
} else if ( os1!=null && os1.length()>0 ) {
logger.finer("dropping render type because of slice1");
for ( int i1=1; i10 && result.length(0) depProps= (Map) istpProps.get("DEPEND_"+j);
if ( depds!=null && depProps!=null ) {
CdfUtil.maybeAddValidRange( depProps, depds );
                Map<String,Object> istpProps2 = model.properties(depProps);
depds.putProperty(QDataSet.FILL_VALUE, istpProps2.get(QDataSet.FILL_VALUE));
if ( !UnitsUtil.isTimeLocation( SemanticOps.getUnits(depds) ) ) {
depds.putProperty(QDataSet.LABEL, istpProps2.get(QDataSet.LABEL) );
depds.putProperty(QDataSet.TITLE, istpProps2.get(QDataSet.TITLE) );
}
}
}
result.putProperty( QDataSet.METADATA, attr1 );
result.putProperty( QDataSet.METADATA_MODEL, QDataSet.VALUE_METADATA_MODEL_ISTP );
}
}