package org.autoplot.tca;

import java.util.logging.Level;
import java.util.logging.Logger;
import org.das2.datum.Datum;
import org.das2.datum.DatumRange;
import org.das2.datum.DatumRangeUtil;
import org.das2.datum.EnumerationUnits;
import org.das2.datum.Units;
import org.das2.util.monitor.NullProgressMonitor;
import org.das2.util.monitor.ProgressMonitor;
import org.das2.qds.AbstractQFunction;
import org.das2.qds.BundleDataSet;
import org.das2.qds.DDataSet;
import org.das2.qds.DataSetUtil;
import org.das2.qds.MutablePropertyDataSet;
import org.das2.qds.QDataSet;
import org.das2.qds.SemanticOps;
import org.autoplot.datasource.DataSource;
import org.autoplot.datasource.capability.TimeSeriesBrowse;
import org.autoplot.dom.DataSourceFilter;
import org.das2.qds.DataSetOps;
import org.das2.qds.ops.Ops;

/**
 * Allow Autoplot DataSources to lookup datasets. The filter within the
 * DataSourceFilter is then applied to the data.
 * @author jbf
 */
public class DataSourceTcaSource extends AbstractQFunction {

    TimeSeriesBrowse tsb;
    boolean needToRead;
    QDataSet ds;
    QDataSet tlim;
    QDataSet bundleDs;
    DataSource dss;
    QDataSet error;
    QDataSet errorNoDs;
    QDataSet nonValueDs;
    //QDataSet nonMonoDs;
    QDataSet initialError;
    DataSourceFilter dsf;

    static final Logger logger= org.das2.util.LoggerManager.getLogger( "autoplot.tca.uritcasource" );

    // cache the example input so we only attempt read once.
    private MutablePropertyDataSet exampleInput=null;

    public DataSourceTcaSource( DataSourceFilter node ) throws Exception {
        EnumerationUnits eu= new EnumerationUnits("UriTcaSource");
        error= DataSetUtil.asDataSet( eu.createDatum("Error") );
        errorNoDs= DataSetUtil.asDataSet( eu.createDatum("No Data") );
        nonValueDs= DataSetUtil.asDataSet( eu.createDatum(" ") );
        //nonMonoDs= DataSetUtil.asDataSet( eu.createDatum("Non Mono") );
        DataSource dss1;
        try {
            dsf= node;
            dss1= node.getController().getDataSource();
            initialError= null;
            this.tsb= dss1.getCapability( TimeSeriesBrowse.class );
            this.dss= dss1;
            this.needToRead= true;
        } catch ( Exception lex ) {
            logger.log( Level.WARNING, lex.getMessage(), lex );
            initialError= DataSetUtil.asDataSet( eu.createDatum(lex.toString()) );
        }
    }
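    /* Usage sketch (illustrative, not part of the original class): since this is a QFunction,
     * a caller holding a plot's DataSourceFilter could evaluate tick annotations directly.
     * The variable names dsf and ticks are hypothetical.
     *
     *   DataSourceTcaSource tcas= new DataSourceTcaSource( dsf ); // dsf: the DataSourceFilter supplying the TCA data
     *   QDataSet labels= tcas.values( ticks );                    // ticks: rank-2 bundle, one time per record
     *   // labels: rank-2 bundle of annotation values, one record per tick
     */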
    private void doRead( ) throws Exception {
        ProgressMonitor mon= new NullProgressMonitor(); // DasProgressPanel.createFramed("loading data");
        if ( this.tsb!=null ) {
            logger.log(Level.FINE, "reading TCAs from TSB {0}", this.tsb.getURI());
        } else {
            logger.log(Level.FINE, "reading TCAs from {0}", dss);
        }
        needToRead= false; // clear the flag in case there is an exception.
        ds= dss.getDataSet( mon.getSubtaskMonitor("read data") );
        ds= DataSetOps.sprocess( this.dsf.getFilters(), ds, mon.getSubtaskMonitor("sprocess") );
        if ( ds==null ) {
            logger.log(Level.FINE, "doRead getDataSet got null");
        } else {
            logger.log(Level.FINE, "doRead got: {0}", ds);
            QDataSet dep0= SemanticOps.xtagsDataSet(ds);
            if ( !DataSetUtil.isMonotonicAndIncreasing(dep0) ) {
                logger.warning("TCA contains data which is not monotonically increasing");
                if ( dep0.value(0)>dep0.value(dep0.length()-1) ) {
                    ds= Ops.copy( Ops.reverse(ds) );
                    dep0= SemanticOps.xtagsDataSet(ds);
                    if ( !DataSetUtil.isMonotonicAndIncreasing(dep0) ) {
                        logger.warning("reversed TCA dataset still contains non-monotonic tags");
                        ds= Ops.ensureMonotonicAndIncreasingWithFill(ds);
                    } else {
                        logger.info("reversing TCA dataset makes tags monotonically increasing.");
                    }
                } else {
                    logger.warning("removing non-monotonically increasing tags of TCA dataset.");
                    ds= Ops.ensureMonotonicAndIncreasingWithFill(ds);
                }
            }
            tlim= DataSetUtil.guessCadenceNew( SemanticOps.xtagsDataSet(ds), ds );
            if ( this.tsb!=null ) {
                DatumRange dr= this.tsb.getTimeRange();
                QDataSet ext= Ops.extent( SemanticOps.xtagsDataSet(ds), null );
                double d0= DatumRangeUtil.normalize( dr, DataSetUtil.asDatum( ext.slice(0) ) );
                double d1= DatumRangeUtil.normalize( dr, DataSetUtil.asDatum( ext.slice(1) ) );
                logger.log(Level.FINE, "normalized after load: {0}-{1}", new Object[]{d0, d1});
            }
            bundleDs= (QDataSet)ds.property(QDataSet.BUNDLE_1);
            if ( bundleDs==null ) {
                if ( ds.rank()==1 ) { // just a single param, go ahead and support this.
                    DDataSet bds1= DDataSet.createRank2(1,0);
                    String name= (String) ds.property(QDataSet.NAME);
                    String label= (String) ds.property(QDataSet.LABEL);
                    bds1.putProperty( QDataSet.NAME, 0, name==null ? "ds0" : name );
                    bds1.putProperty( QDataSet.LABEL, 0, label==null ? ( name==null ? "" : name ) : label );
                    if ( ds.property(QDataSet.VALID_MIN)!=null ) bds1.putProperty( QDataSet.VALID_MIN, 0, ds.property(QDataSet.VALID_MIN) );
                    if ( ds.property(QDataSet.VALID_MAX)!=null ) bds1.putProperty( QDataSet.VALID_MAX, 0, ds.property(QDataSet.VALID_MAX) );
                    if ( ds.property(QDataSet.FILL_VALUE)!=null ) bds1.putProperty( QDataSet.FILL_VALUE, 0, ds.property(QDataSet.FILL_VALUE) );
                    bundleDs= bds1;
                } else {
                    DDataSet bds1= DDataSet.createRank2(ds.length(0),0);
                    QDataSet dep1= (QDataSet) ds.property(QDataSet.DEPEND_1);
                    Units u= dep1==null ? Units.dimensionless : SemanticOps.getUnits(dep1);
                    for ( int i=0; i<ds.length(0); i++ ) { // name and label each column, using DEPEND_1 when available.
                        bds1.putProperty( QDataSet.NAME, i, "ds"+i );
                        bds1.putProperty( QDataSet.LABEL, i, dep1==null ? "ds"+i : u.createDatum(dep1.value(i)).toString() );
                    }
                    bundleDs= bds1;
                }
            }
        }
    }

    private boolean isValid( QDataSet result ) {
        if ( result.rank()==0 ) {
            return Ops.valid(result).value()>0;
        } else {
            boolean valid= true;
            for ( int i=0; i<result.length(); i++ ) {
                valid= valid && Ops.valid(result.slice(i)).value()>0;
            }
            return valid;
        }
    }

    /**
     * This will set the focus range for the TimeSeriesBrowse, if available,
     * and then call each tick individually.
     * @param parms rank 2 bundle, one record per tick.
     * @return rank 2 bundle of annotation values, one record per tick.
     */
    @Override
    public synchronized QDataSet values( QDataSet parms ) {
        if ( initialError!=null ) {
            if ( ds==null ) {
                return new BundleDataSet( error );
            }
        }
        QDataSet tt= Ops.copy( Ops.unbundle(parms, 0 ) );
        QDataSet dtt= Ops.diff( tt );
        QDataSet gcd; // approximate tick cadence, the gcd of the tick spacings.
        try {
            gcd= DataSetUtil.gcd( dtt, Ops.divide( dtt.slice(0),100 ) );
        } catch ( IllegalArgumentException ex ) {
            ex.printStackTrace();
            gcd= Ops.reduceMin( dtt, 0 );
        }
        Datum d;
        DatumRange dr= null; // calculate the bounding DatumRange for all params.
        for ( int i=0; i<tt.length(); i++ ) {
            d= DataSetUtil.asDatum( tt.slice(i) );
            dr= dr==null ? new DatumRange( d, d ) : dr.include( d );
        }
        boolean read= needToRead;
        if ( tsb!=null ) {
            if ( dr!=null ) {
                if ( !tsb.getTimeRange().contains(dr) ) {
                    read= true;
                    // the ticks may carry a CONTEXT_0 range describing the visible axis range.
                    QDataSet context= (QDataSet) parms.property( QDataSet.CONTEXT_0 );
                    if ( context!=null ) {
                        DatumRange cdr= DataSetUtil.asDatumRange( context, true );
                        // a context vastly wider than the ticks indicates a stale range, so ignore it.
                        if ( DatumRangeUtil.normalize( dr, cdr.max() ) - DatumRangeUtil.normalize( dr, cdr.min() ) > 200 ) {
                            System.err.println("check suppressed bad read...");
                            context=null;
                        }
                    }
                    if ( context!=null ) dr= DatumRangeUtil.union( dr, DataSetUtil.asDatumRange(context,true) );
                    tsb.setTimeRange(dr);
                }
            }
        }
        try {
            if ( read ) {
                doRead();
                logger.log( Level.FINER, "loaded dataset: {0} {1} ", new Object[]{ tsb!=null ? tsb.getTimeRange() : "", ds } );
            }
        } catch ( Exception ex ) {
            logger.log( Level.WARNING, ex.getMessage(), ex );
        }
        return super.values(parms); // AbstractQFunction evaluates value() for each tick.
    }

    @Override
    public QDataSet value( QDataSet parm ) {
        if ( ds==null ) {
            BundleDataSet result= new BundleDataSet( errorNoDs );
            ((MutablePropertyDataSet)result).putProperty( QDataSet.UNITS, errorNoDs.property(QDataSet.UNITS) );
            return result;
        }
        QDataSet dep0= SemanticOps.xtagsDataSet(ds);
        QDataSet d0= parm.slice(0);
        // allowed slop when the tick falls just outside the loaded tags; use the cadence when available.
        QDataSet deltaPlus= tlim;
        QDataSet deltaMinus= tlim;
        QDataSet findex;
        if ( dep0.length()==1 ) {
            findex= Ops.dataset(0);
        } else {
            findex= Ops.findex( dep0, d0 );
            if ( Math.abs( findex.value() % 1.0 ) > 0.1 ) {
                logger.log(Level.FINE, "interpolating to calculate tick for {0}", d0);
            }
        }
        QDataSet result;
        if ( findex.value()>=-0.5 && findex.value()<dep0.length()-0.5 ) {
            int ii= (int)Math.round( findex.value() );
            result= ds.slice(ii);
            if ( !isValid(result) ) { // search neighboring records for a valid value.
                int imin= ii-2;
                if ( imin<0 ) imin= 0;
                int imax= ii+2;
                if ( imax>=dep0.length() ) imax= dep0.length()-1;
                int irad= Math.max( ii-imin, imax-ii );
                for ( int iiii= 1; iiii<=irad; iiii++ ) {
                    if ( ii-iiii >= imin ) {
                        result= ds.slice(ii-iiii);
                        if ( isValid(result) ) {
                            break;
                        }
                    }
                    if ( ii+iiii <= imax ) {
                        result= ds.slice(ii+iiii);
                        if ( isValid(result) ) {
                            break;
                        }
                    }
                }
            } else {
                logger.log( Level.FINER, "findex={0} for {1} {2}", new Object[]{findex, d0, result});
                if ( deltaPlus!=null ) {
                    QDataSet delta= Ops.magnitude( Ops.subtract( d0, dep0.slice(ii) ) );
                    if ( Ops.gt( delta, tlim ).value()==1 ) { // nearest record is more than a cadence away.
                        BundleDataSet result1= new BundleDataSet( nonValueDs );
                        for ( int i=1; i<bundleDs.length(); i++ ) {
                            result1.bundle( nonValueDs );
                        }
                        return result1;
                    }
                }
            }
        } else if ( findex.value()>dep0.length()-1 && ( Ops.ge( Ops.add( dep0.slice(dep0.length()-1), deltaMinus ), d0 ).value()==1 ) ) {
            result= ds.slice(dep0.length()-1);
        } else if ( findex.value()<0 && ( Ops.le( Ops.subtract( dep0.slice(0), deltaPlus ), d0 ).value()==1 ) ) {
            result= ds.slice(0);
        } else {
            if ( tsb==null ) {
                BundleDataSet result1= new BundleDataSet( nonValueDs );
                for ( int i=1; i<bundleDs.length(); i++ ) {
                    result1.bundle( nonValueDs );
                }
                return result1;
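                /* The lookup above relies on Ops.findex: given the monotonic time tags dep0 and a
                 * tick time d0, it returns the fractional index of d0 within the tags, so rounding
                 * picks the nearest record and the fractional part shows how far the tick falls
                 * between records.  A minimal sketch of the idiom, with hypothetical tag values:
                 *
                 *   QDataSet tags= Ops.dataset( new double[] { 0., 10., 20., 30. } );
                 *   QDataSet f= Ops.findex( tags, Ops.dataset(12.) );  // roughly 1.2
                 *   int nearest= (int)Math.round( f.value() );         // record 1, the tag at 10.
                 */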