package org.das2.qds.util;

import java.util.Iterator;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.das2.datum.Datum;
import org.das2.datum.DatumUtil;
import org.das2.datum.Units;
import org.das2.datum.UnitsConverter;
import org.das2.datum.UnitsUtil;
import org.das2.qds.ArrayDataSet;
import org.das2.qds.ConstantDataSet;
import org.das2.util.LoggerManager;
import org.das2.qds.DDataSet;
import org.das2.qds.DataSetOps;
import org.das2.qds.DataSetUtil;
import org.das2.qds.IDataSet;
import org.das2.qds.JoinDataSet;
import org.das2.qds.MutablePropertyDataSet;
import org.das2.qds.QDataSet;
import org.das2.qds.SemanticOps;
import org.das2.qds.WritableDataSet;
import org.das2.qds.ops.Ops;

/**
 * Reduction is a set of static methods for reducing data, or
 * averaging data to make smaller datasets.
 * @author jbf
 */
public class Reduction {

    private static final Logger logger= LoggerManager.getLogger("qdataset.ops.reduction");

    /**
     * return a converter for differences.  If dstUnits are specified,
     * then explicitly this is the target.
     * @param src source dataset
     * @param dst target dataset
     * @param dstUnits if not null, then explicitly use these units.
     * @return a converter for differences.
     */
    private static UnitsConverter getDifferencesConverter( QDataSet src, QDataSet dst, Units dstUnits ) {

        Units unitsIn, unitsOut;
        unitsIn= (Units) dst.property( QDataSet.UNITS );
        if ( unitsIn==null ) unitsIn= Units.dimensionless;
        unitsOut= (Units)src.property( QDataSet.UNITS );
        if ( unitsOut==null ) unitsOut= Units.dimensionless;

        UnitsConverter xuc;
        if ( dstUnits!=null ) {
            xuc= unitsOut.getConverter( dstUnits );
        } else {
            xuc= unitsOut.getConverter( unitsIn.getOffsetUnits() );
        }
        return xuc;
    }

    /**
     * reduce the waveform dataset by averaging blocks of samples together.
     * @param ds a rank 1 or rank 2 waveform dataset.
     * @param xLimit the target resolution; the result will be finer than this, if possible.
     * @return either the original dataset when there is no reduction to be done, or a series dataset with bins (and deltas to support legacy scripts).
     * @see org.das2.qstream.filter.MinMaxReduceFilter, of which this is basically a copy.
     */
    private static QDataSet reducexWaveform( QDataSet ds, QDataSet xLimit ) {

        DataSetBuilder xbuilder;
        DataSetBuilder ybuilder;
        DataSetBuilder yminbuilder;
        DataSetBuilder ymaxbuilder;

        xbuilder= new DataSetBuilder( 1, 1000 );
        ybuilder= new DataSetBuilder( 1, 1000 );
        yminbuilder= new DataSetBuilder( 1, 1000 );
        ymaxbuilder= new DataSetBuilder( 1, 1000 );
        //wbuilder= new DataSetBuilder( 2, 1000, ds.length(0) );

        Datum cadence= DataSetUtil.asDatum(xLimit);

        QDataSet _offsets= (QDataSet) ds.property(QDataSet.DEPEND_1);
        MutablePropertyDataSet offsets= DataSetOps.makePropertiesMutable(_offsets);
        offsets.putProperty( QDataSet.VALID_MIN, null ); //TODO: EMFISIS HFR has incorrect VALID_MAX.
        offsets.putProperty( QDataSet.VALID_MAX, null );

        if ( offsets.rank()==2 ) {
            offsets= (MutablePropertyDataSet)offsets.slice(0);
            logger.fine("slice(0) on rank 2 dataset because code doesn't support time-varying DEPEND_1");
        }

        int icadence;
        Datum packetLen= DataSetUtil.asDatum(offsets.slice(offsets.length()-1)).subtract( DataSetUtil.asDatum( offsets.slice(0) ) );
        if ( packetLen.lt(cadence) ) {
            icadence= offsets.length();
        } else {
            // grow the block size by factors of two until one block of samples spans the
            // requested cadence, then back off by one factor so the result is finer than the limit.
            icadence= 4;
            while ( icadence<offsets.length() && DataSetUtil.asDatum( offsets.slice(icadence) ).subtract( DataSetUtil.asDatum( offsets.slice(0) ) ).lt( cadence ) ) {
                icadence= icadence*2;
            }
            icadence= icadence/2;
        }

        if ( icadence<2 ) {
            return ds; // there is no reduction to be done.
        }

        QDataSet dep0= (QDataSet) ds.property(QDataSet.DEPEND_0);
        Units xunits= SemanticOps.getUnits(dep0);
        Units offsetUnits= SemanticOps.getUnits(offsets);
        UnitsConverter uc= offsetUnits.getConverter( xunits.getOffsetUnits() );
        QDataSet wds= DataSetUtil.weightsDataSet(ds);

        // average contiguous blocks of icadence samples, tracking the min and max of each block.
        int points= 0;
        for ( int i=0; i<ds.length(); i++ ) {
            double basex= dep0.value(i);
            int nj= ( offsets.length() + icadence - 1 ) / icadence;
            for ( int j=0; j<nj; j++ ) {
                int k0= j*icadence;
                int k1= Math.min( offsets.length(), k0+icadence );
                double sy= 0., nn= 0., so= 0.;
                double mn= Double.POSITIVE_INFINITY;
                double mx= Double.NEGATIVE_INFINITY;
                for ( int k=k0; k<k1; k++ ) {
                    so+= offsets.value(k);
                    double w= wds.value(i,k);
                    if ( w>0 ) {
                        double v= ds.value(i,k);
                        sy+= v*w;
                        nn+= w;
                        if ( v<mn ) mn= v;
                        if ( v>mx ) mx= v;
                    }
                }
                if ( nn>0 ) {
                    xbuilder.putValue( points, basex + uc.convert( so/(k1-k0) ) );
                    ybuilder.putValue( points, sy/nn );
                    yminbuilder.putValue( points, mn );
                    ymaxbuilder.putValue( points, mx );
                    points++;
                }
            }
        }

        MutablePropertyDataSet result= ybuilder.getDataSet();
        MutablePropertyDataSet xds= xbuilder.getDataSet();
        xds.putProperty( QDataSet.UNITS, xunits );

        DataSetUtil.putProperties( DataSetUtil.getDimensionProperties(ds,null), result );
        yminbuilder.putProperty( QDataSet.UNITS, SemanticOps.getUnits(ds) );
        ymaxbuilder.putProperty( QDataSet.UNITS, SemanticOps.getUnits(ds) );
        result.putProperty( QDataSet.DEPEND_0, xds );
        QDataSet waveMin= yminbuilder.getDataSet();
        QDataSet waveMax= ymaxbuilder.getDataSet();
        result.putProperty( QDataSet.DELTA_MINUS, Ops.subtract( result, waveMin ) );
        result.putProperty( QDataSet.DELTA_PLUS, Ops.subtract( waveMax, result ) );
        result.putProperty( QDataSet.BIN_MIN, waveMin );
        result.putProperty( QDataSet.BIN_MAX, waveMax );

        return result;
    }

    /**
     * reduce the rank 1 dataset by averaging adjacent points in X.
     * @param ds a rank 1 dataset with DEPEND_0.
     * @param xLimit rank 0 dataset indicating the size of the bins.
     * @param xregular if true, then return xtags with a uniform cadence.
     * @return the reduced dataset, rank 1 with DEPEND_0.
     */
    private static QDataSet reducexRank1( QDataSet ds, QDataSet xLimit, boolean xregular ) {
        long t0= System.currentTimeMillis();

        if ( ds.length()==0 ) return ds;

        DataSetBuilder xbuilder= new DataSetBuilder( 1, 1000 );
        DataSetBuilder ybuilder= new DataSetBuilder( 1, 1000 );
        DataSetBuilder yminbuilder= new DataSetBuilder( 1, 1000 );
        DataSetBuilder ymaxbuilder= new DataSetBuilder( 1, 1000 );
        DataSetBuilder wbuilder= new DataSetBuilder( 1, 1000 );

        QDataSet x= SemanticOps.xtagsDataSet(ds);
        QDataSet wds= DataSetUtil.weightsDataSet(ds);
        Units tu= SemanticOps.getUnits(x);

        UnitsConverter uc= getDifferencesConverter( xLimit, x, null );
        double dxLimit= uc.convert( xLimit.value() );

        double fill= -1e31;
        double wx= 1.0;                   // weight applied to each xtag in the average.
        double basex= x.value(0);         // xtags are accumulated as offsets from here, to preserve precision.
        double x0;                        // lower bound of the current bin.
        if ( xregular ) {
            x0= Math.floor( ( x.value(0) - basex ) / dxLimit ) * dxLimit + basex;
        } else {
            x0= x.value(0);
        }
        double sx0= 0.0;                  // accumulated xtag offsets.
        int nx= 0;                        // number of points in the current bin.
        double sy0= 0.0;                  // accumulated weighted y values.
        double nn0= 0.0;                  // accumulated weights.
        double miny0= Double.POSITIVE_INFINITY;
        double maxy0= Double.NEGATIVE_INFINITY;
        double ax0;                       // the average x location of the bin.
        double ay0;                       // the average y value of the bin.

        int points= 0;
        int i= 0;
        while ( i<ds.length() ) {
            QDataSet yy= ds.slice(i);
            QDataSet ww= wds.slice(i);
            double pxx= x.value(i);

            if ( pxx - x0 >= dxLimit ) { // clear the accumulators
                if ( nx>0 ) {
                    if ( xregular ) {
                        ax0 = x0 + dxLimit/2;
                        x0 = Math.floor( ( pxx - basex ) /dxLimit) * dxLimit + basex;
                    } else {
                        ax0 = basex + sx0/nx;
                        x0 = pxx;
                    }
                    if ( logger.isLoggable(Level.FINEST) ) {
                        logger.log(Level.FINEST, "out: {0} {1} ({2})", new Object[]{ax0, nx, tu.createDatum(ax0)});
                    }
                    xbuilder.putValue( points, ax0 );
                    sx0 = 0.0;
                    nx= 0;
                    boolean nv= nn0==0;
                    ay0 = nv ? fill : sy0 / nn0;
                    ybuilder.putValue( points, ay0 );
                    yminbuilder.putValue( points, nv ? fill : miny0 );
                    ymaxbuilder.putValue( points, nv ? fill : maxy0 );
                    wbuilder.putValue( points, nn0 );
                    double pyy = yy.value();
                    double wwj= ww.value();
                    sy0 = 0.;
                    nn0 = 0.;
                    if ( wwj>0 ) {
                        miny0 = pyy;
                        maxy0 = pyy;
                    } else {
                        miny0 = Double.POSITIVE_INFINITY;
                        maxy0 = Double.NEGATIVE_INFINITY;
                    }
                }
                points++;
            }

            if ( logger.isLoggable(Level.FINEST) ) {
                logger.log(Level.FINEST, " in: {0} ({1})", new Object[]{pxx, tu.createDatum(pxx)});
            }

            { // Here is the accumulation.
                sx0 += (pxx-basex)*wx;
                nx+= 1;
                double ww1= ww.value();
                if ( ww1>0 ) {
                    double pyy = yy.value();
                    sy0 += pyy*ww1;
                    nn0 += ww1;
                    if ( ww1>0 ) {
                        miny0 = Math.min( miny0, pyy );
                        maxy0 = Math.max( maxy0, pyy );
                    }
                }
            }
            i++;
        } // end loop over all records

        if ( nx>0 ) { // clean up any remaining data.
            if ( xregular ) {
                ax0 = x0 + dxLimit/2;
            } else {
                ax0 = basex + sx0/nx;
            }
            if ( logger.isLoggable(Level.FINEST) ) {
                logger.log(Level.FINEST, "out: {0} {1} ({2})", new Object[]{ax0, nx, tu.createDatum(ax0)});
            }
            xbuilder.putValue( points, ax0 );
            boolean nv= nn0==0;
            ay0 = nv ? fill : sy0 / nn0;
            ybuilder.putValue( points, ay0 );
            yminbuilder.putValue( points, nv ? fill : miny0 );
            ymaxbuilder.putValue( points, nv ? fill : maxy0 );
            wbuilder.putValue( points, nn0 );
            points++;
        }

        MutablePropertyDataSet result= ybuilder.getDataSet();
        MutablePropertyDataSet xds= xbuilder.getDataSet();
        Map<String,Object> xprops= DataSetUtil.getDimensionProperties(x,null);
        if ( xprops.containsKey( QDataSet.CADENCE ) ) xprops.put( QDataSet.CADENCE, xLimit );
        if ( xprops.containsKey( QDataSet.CACHE_TAG ) ) xprops.put( QDataSet.CACHE_TAG, null );
        if ( xprops.containsKey( QDataSet.DEPEND_0 ) ) xprops.put( QDataSet.DEPEND_0, null );
        if ( xprops.containsKey( QDataSet.BIN_MINUS ) ) xprops.put( QDataSet.BIN_MINUS, null );
        if ( xprops.containsKey( QDataSet.BIN_PLUS ) ) xprops.put( QDataSet.BIN_PLUS, null );
        if ( xprops.containsKey( QDataSet.BIN_MIN ) ) xprops.put( QDataSet.BIN_MIN, null );
        if ( xprops.containsKey( QDataSet.BIN_MAX ) ) xprops.put( QDataSet.BIN_MAX, null );
        DataSetUtil.putProperties( xprops, xds );

        Map<String,Object> yprops= DataSetUtil.getProperties(ds);
        yprops.put( QDataSet.DEPEND_0, xds );
        DataSetUtil.putProperties( yprops, result );

        yminbuilder.putProperty( QDataSet.UNITS, SemanticOps.getUnits(result) );
        ymaxbuilder.putProperty( QDataSet.UNITS, SemanticOps.getUnits(result) );

        result.putProperty( QDataSet.DEPEND_0, xds );
        result.putProperty( QDataSet.WEIGHTS, wbuilder.getDataSet() );
        QDataSet yminDs= yminbuilder.getDataSet();
        QDataSet ymaxDs= ymaxbuilder.getDataSet();
        result.putProperty( QDataSet.DELTA_MINUS, Ops.subtract( result, yminDs ) ); // TODO: This bad behavior should be deprecated.
        result.putProperty( QDataSet.DELTA_PLUS, Ops.subtract( ymaxDs, result ) );
        result.putProperty( QDataSet.BIN_MIN, yminDs );
        result.putProperty( QDataSet.BIN_MAX, ymaxDs );

        logger.log( Level.FINE, "time to reducex({0} records -> {1} records) (ms): {2}", new Object[] { ds.length(), result.length(), System.currentTimeMillis()-t0 } );
        logger.exiting("Reduction", "reducex" );
        //System.err.println( String.format( "time to reducex(%d records -> %d records) (ms): %d", ds.length(), result.length(), System.currentTimeMillis()-t0) );
        return result;
    }

    /**
     * produce a simpler version of the dataset by averaging data adjacent in X.
     * Code taken from org.das2.graph.GraphUtil.reducePath.  Adjacent points are
     * averaged together until a point is found that is not in the bin, and then
     * a new bin is started.  The bin's lower bounds are integer multiples
     * of xLimit.
     *
     * xLimit is a rank 0 dataset.
     *
     * 2015-06-18: xcadence and bins are now regular.
     *
     * Because of high-resolution magnetometer data, this is extended to support
     * rank 2 waveform datasets as well.
     *
     * This will set the DELTA_PLUS and DELTA_MINUS variables to the extremes of
     * each bin.  To remove these, use putProperty( QDataSet.DELTA_MINUS, None )
     * (None in Jython, null for Java) and putProperty( QDataSet.DELTA_PLUS, None ).
     *
     * @param ds rank 1 or rank 2 dataset.  Must have DEPEND_0 (presently) and be a qube.  If this is null, then the result is null.
     * @param xLimit the size of the bins or null to indicate no limit.
     * @param xregular if true, then return xtags with a uniform cadence; if false, return averages of x as well, and don't grid x.
     * @return the reduced dataset, or null if the input dataset was null.
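     * <p>A sketch of typical use from Java (the one-minute cadence and the
     * variable {@code ds} are hypothetical):
     * <pre>{@code
     * QDataSet cadence= DataSetUtil.asDataSet( Units.seconds.createDatum(60) );
     * QDataSet reduced= Reduction.reducex( ds, cadence, true ); // one-minute bin averages
     * }</pre>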
     */
    public static QDataSet reducex( QDataSet ds, QDataSet xLimit, boolean xregular ) {
        long t0= System.currentTimeMillis();
        logger.entering( "Reduction", "reducex" );

        if ( ds==null ) return ds; // Craig 2038937185

        if ( !DataSetUtil.isQube(ds) ) {
            throw new IllegalArgumentException("rank 2 dataset must be a qube");
        }

        if ( ds.rank()==0 ) {
            return ds;
        }

        DataSetBuilder xbuilder= new DataSetBuilder( 1, 1000 );
        DataSetBuilder ybuilder;
        DataSetBuilder yminbuilder;
        DataSetBuilder ymaxbuilder;
        DataSetBuilder wbuilder;

        if ( ds.rank()==1 ) {
            if ( xregular==false ) {
                return reduce2D( ds, xLimit, null );
            } else {
                return reducexRank1( ds, xLimit, xregular );
            }
        } else if ( ds.rank()==2 ) {
            if ( SemanticOps.isRank2Waveform(ds) ) {
                return reducexWaveform( ds, xLimit );
            } else {
                ybuilder= new DataSetBuilder( 2, 1000, ds.length(0) );
                yminbuilder= new DataSetBuilder( 2, 1000, ds.length(0) );
                ymaxbuilder= new DataSetBuilder( 2, 1000, ds.length(0) );
                wbuilder= new DataSetBuilder( 2, 1000, ds.length(0) );
            }
        } else if ( ds.rank()==3 && DataSetUtil.isQube(ds) ) {
            return reduceRankN(ds, DataSetUtil.asDatum(xLimit));
        } else if ( ds.rank()==3 && SemanticOps.isJoin(ds) ) {
            JoinDataSet result= new JoinDataSet(3);
            for ( int i=0; i<ds.length(); i++ ) {
                result.join( reducex( ds.slice(i), xLimit, xregular ) ); // reduce each dataset of the join.
            }
            return result;
        } else {
            throw new IllegalArgumentException("unsupported rank: "+ds.rank() );
        }

        // rank 2: average blocks of records together, channel by channel.
        QDataSet x= SemanticOps.xtagsDataSet(ds);
        QDataSet wds= DataSetUtil.weightsDataSet(ds);
        Units tu= SemanticOps.getUnits(x);

        UnitsConverter uc= getDifferencesConverter( xLimit, x, null );
        double dxLimit= uc.convert( xLimit.value() );

        int ny= ds.length(0);
        double fill= -1e31;
        double wx= 1.0;                   // weight applied to each xtag in the average.
        double basex= ds.length()>0 ? x.value(0) : 0.;
        double x0;                        // lower bound of the current bin.
        if ( ds.length()==0 ) {
            x0= 0.;
        } else if ( xregular ) {
            x0= Math.floor( x.value(0)/dxLimit ) * dxLimit;
        } else {
            x0= x.value(0);
        }
        double sx0= 0.0;
        int nx= 0;
        double[] sy0= new double[ny];
        double[] nn0= new double[ny];
        double[] miny0= new double[ny];
        double[] maxy0= new double[ny];
        for ( int j=0; j<ny; j++ ) {
            miny0[j]= Double.POSITIVE_INFINITY;
            maxy0[j]= Double.NEGATIVE_INFINITY;
        }
        double ax0;
        double ay0;

        int points= 0;
        int i= 0;
        while ( i<ds.length() ) {
            double pxx= x.value(i);

            if ( pxx - x0 >= dxLimit ) { // clear the accumulators
                if ( nx>0 ) {
                    if ( xregular ) {
                        ax0 = x0 + dxLimit/2;
                        x0 = Math.floor(pxx/dxLimit) * dxLimit;
                    } else {
                        ax0 = basex + sx0/nx;
                        x0 = pxx;
                    }
                    if ( logger.isLoggable(Level.FINEST) ) {
                        logger.log(Level.FINEST, "out: {0} {1} ({2})", new Object[]{ax0, nx, tu.createDatum(ax0)});
                    }
                    xbuilder.putValue( points, ax0 );
                    sx0 = 0.0;
                    nx= 0;
                }
                for ( int j=0; j<ny; j++ ) {
                    boolean nv= nn0[j]==0;
                    ay0= nv ? fill : sy0[j] / nn0[j];
                    ybuilder.putValue( points, j, ay0 );
                    yminbuilder.putValue( points, j, nv ? fill : miny0[j] );
                    ymaxbuilder.putValue( points, j, nv ? fill : maxy0[j] );
                    wbuilder.putValue( points, j, nn0[j] );
                    double pyy= ds.value(i,j);
                    double wwj= wds.value(i,j);
                    sy0[j]= 0.;
                    nn0[j]= 0.;
                    if ( wwj>0 ) {
                        miny0[j] = pyy;
                        maxy0[j] = pyy;
                    } else {
                        miny0[j] = Double.POSITIVE_INFINITY;
                        maxy0[j] = Double.NEGATIVE_INFINITY;
                    }
                }
                points++;
            }

            if ( logger.isLoggable(Level.FINEST) ) {
                logger.log(Level.FINEST, " in: {0} ({1})", new Object[]{pxx, tu.createDatum(pxx)});
            }

            { // Here is the accumulation.
                sx0 += (pxx-basex)*wx;
                nx+= 1;
                for ( int j=0; j<ny; j++ ) {
                    double ww1= wds.value(i,j);
                    if ( ww1>0 ) {
                        double pyy= ds.value(i,j);
                        sy0[j] += pyy*ww1;
                        nn0[j] += ww1;
                        if ( ww1>0 ) {
                            miny0[j] = Math.min( miny0[j], pyy );
                            maxy0[j] = Math.max( maxy0[j], pyy );
                        }
                    }
                }
            }
            i++;
        } // end loop over all records

        if ( nx>0 ) { // clean up any remaining data.
            if ( nx>0 ) {
                if ( xregular ) {
                    ax0 = x0 + dxLimit/2;
                } else {
                    ax0 = basex + sx0/nx;
                }
                if ( logger.isLoggable(Level.FINEST) ) {
                    logger.log(Level.FINEST, "out: {0} {1} ({2})", new Object[]{ax0, nx, tu.createDatum(ax0)});
                }
                xbuilder.putValue( points, ax0 );
            }
            for ( int j=0; j<ny; j++ ) {
                boolean nv= nn0[j]==0;
                ay0= nv ? fill : sy0[j] / nn0[j];
                ybuilder.putValue( points, j, ay0 );
                yminbuilder.putValue( points, j, nv ? fill : miny0[j] );
                ymaxbuilder.putValue( points, j, nv ? fill : maxy0[j] );
                wbuilder.putValue( points, j, nn0[j] );
            }
            points++;
        }

        MutablePropertyDataSet result= ybuilder.getDataSet();
        MutablePropertyDataSet xds= xbuilder.getDataSet();
        Map<String,Object> xprops= DataSetUtil.getDimensionProperties(x,null);
        if ( xprops.containsKey( QDataSet.CADENCE ) ) xprops.put( QDataSet.CADENCE, xLimit );
        if ( xprops.containsKey( QDataSet.CACHE_TAG ) ) xprops.put( QDataSet.CACHE_TAG, null );
        if ( xprops.containsKey( QDataSet.DEPEND_0 ) ) xprops.put( QDataSet.DEPEND_0, null );
        if ( xprops.containsKey( QDataSet.BIN_MINUS ) ) xprops.put( QDataSet.BIN_MINUS, null );
        if ( xprops.containsKey( QDataSet.BIN_PLUS ) ) xprops.put( QDataSet.BIN_PLUS, null );
        if ( xprops.containsKey( QDataSet.BIN_MIN ) ) xprops.put( QDataSet.BIN_MIN, null );
        if ( xprops.containsKey( QDataSet.BIN_MAX ) ) xprops.put( QDataSet.BIN_MAX, null );
        DataSetUtil.putProperties( xprops, xds );

        Map<String,Object> yprops= DataSetUtil.getProperties(ds);
        yprops.put( QDataSet.DEPEND_0, xds );
        for ( int j=1; j<ds.rank(); j++ ) {
            String dep= "DEPEND_"+j;
            yprops.put( dep, ds.property(dep) ); // carry along DEPEND_1, etc.
        }
        DataSetUtil.putProperties( yprops, result );

        yminbuilder.putProperty( QDataSet.UNITS, SemanticOps.getUnits(result) );
        ymaxbuilder.putProperty( QDataSet.UNITS, SemanticOps.getUnits(result) );

        result.putProperty( QDataSet.DEPEND_0, xds );
        result.putProperty( QDataSet.WEIGHTS, wbuilder.getDataSet() );
        QDataSet yminDs= yminbuilder.getDataSet();
        QDataSet ymaxDs= ymaxbuilder.getDataSet();
        result.putProperty( QDataSet.DELTA_MINUS, Ops.subtract( result, yminDs ) );
        result.putProperty( QDataSet.DELTA_PLUS, Ops.subtract( ymaxDs, result ) );
        result.putProperty( QDataSet.BIN_MIN, yminDs );
        result.putProperty( QDataSet.BIN_MAX, ymaxDs );

        logger.log( Level.FINE, "time to reducex({0} records -> {1} records) (ms): {2}", new Object[] { ds.length(), result.length(), System.currentTimeMillis()-t0 } );
        logger.exiting("Reduction", "reducex" );
        //System.err.println( String.format( "time to reducex(%d records -> %d records) (ms): %d", ds.length(), result.length(), System.currentTimeMillis()-t0) );
        return result;
    }

    /**
     * produce a simpler version of the dataset by averaging adjacent data.
     * Code taken from org.das2.graph.GraphUtil.reducePath.  Adjacent points are
     * averaged together until a point is found that is not in the bin, and then
     * a new bin is started.  The bin's lower bounds are integer multiples
     * of xLimit and yLimit.
     *
     * If yLimit is null, then averaging is done for all points in the x bin,
     * regardless of how close they are in Y.  This is similarly true when
     * xLimit is null.
     *
     * xLimit and yLimit are rank 0 datasets, so that they can indicate that binning
     * should be done in log space rather than linear.  In this case, the SCALE_TYPE
     * of the dataset should be "log" and its unit should be convertible to
     * Units.logERatio (for example, Units.log10Ratio or Units.percentIncrease).
     * Note when either is log, then averaging is done in the log space.
     *
     * @param ds rank 1 dataset.  Must have DEPEND_0 (presently).
     * @param xLimit the size of the bins or null to indicate no limit.
     * @param yLimit the size of the bins or null to indicate no limit.
     * @return the reduced dataset, rank 1 with DEPEND_0.
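     * <p>A sketch of intended use with a log-space yLimit (the one-second and
     * ten-percent limits are hypothetical):
     * <pre>{@code
     * QDataSet xLimit= DataSetUtil.asDataSet( Units.seconds.createDatum(1) );
     * MutablePropertyDataSet yLimit= DataSetOps.makePropertiesMutable(
     *         DataSetUtil.asDataSet( Units.percentIncrease.createDatum(10) ) );
     * yLimit.putProperty( QDataSet.SCALE_TYPE, "log" );
     * QDataSet reduced= Reduction.reduce2D( ds, xLimit, yLimit );
     * }</pre>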
     */
    public static QDataSet reduce2D( QDataSet ds, QDataSet xLimit, QDataSet yLimit ) {
        logger.entering("Reduction", "reduce2D");
        long t0= System.currentTimeMillis();

        DataSetBuilder xbuilder= new DataSetBuilder( 1, 1000 );
        DataSetBuilder ybuilder= new DataSetBuilder( 1, 1000 );
        DataSetBuilder yminbuilder= new DataSetBuilder( 1, 1000 );
        DataSetBuilder ymaxbuilder= new DataSetBuilder( 1, 1000 );
        DataSetBuilder wbuilder= new DataSetBuilder( 1, 1000 ); // weights to go here

        QDataSet x= (QDataSet) ds.property( QDataSet.DEPEND_0 );
        if ( x==null ) {
            if ( SemanticOps.getUnits(xLimit)!=Units.dimensionless ) {
                throw new IllegalArgumentException("xLimit is not dimensionless, yet there are no timetags in the data set: "+ds );
            } else {
                x= new org.das2.qds.IndexGenDataSet(ds.length());
            }
        }
        QDataSet y= ds;

        double x0 = Float.MAX_VALUE;      // bin centers, initialized off scale so the first point starts a new bin.
        double y0 = Float.MAX_VALUE;
        double sx0 = 0;
        double sy0 = 0;
        double nn0 = 0;
        double miny0 = Double.POSITIVE_INFINITY;
        double maxy0 = Double.NEGATIVE_INFINITY;
        double ax0;
        double ay0; // last averaged location

        boolean xlog= xLimit!=null && "log".equals( xLimit.property( QDataSet.SCALE_TYPE ) );
        boolean ylog= yLimit!=null && "log".equals( yLimit.property( QDataSet.SCALE_TYPE ) );

        UnitsConverter uc;
        double dxLimit, dyLimit;
        if ( xLimit!=null ) {
            uc= getDifferencesConverter( xLimit, x, xlog ? Units.logERatio : null );
            dxLimit = uc.convert( xLimit.value() );
        } else {
            dxLimit= Double.MAX_VALUE;
        }
        if ( yLimit!=null ) {
            uc= getDifferencesConverter( yLimit, y, ylog ? Units.logERatio : null );
            dyLimit = uc.convert( yLimit.value() );
        } else {
            dyLimit= Double.MAX_VALUE;
        }

        int points = 0;
        //int inCount = 0;

        QDataSet wds= DataSetUtil.weightsDataSet(y);

        int i=0;
        while ( i<x.length() ) {
            double xx= x.value(i);
            double yy= y.value(i);
            double ww= wds.value(i);

            double pxx= xlog ? Math.log(xx) : xx;
            double pyy= ylog ? Math.log(yy) : yy;

            if ( Math.abs( pxx - x0 ) < dxLimit/2 && Math.abs( pyy - y0 ) < dyLimit/2 ) {
                // the point is within the current bin, accumulate it.
                sx0 += pxx*ww;
                sy0 += pyy*ww;
                nn0 += ww;
                if ( ww>0 ) {
                    miny0 = Math.min( miny0, yy);
                    maxy0 = Math.max( maxy0, yy);
                }
                i++;
                continue;
            }

            if ( nn0>0 ) {
                // the point is not within the bin, so write out the accumulated average.
                ax0 = sx0 / nn0;
                ay0 = sy0 / nn0;
                xbuilder.putValue( points, xlog ? Math.exp(ax0) : ax0 );
                ybuilder.putValue( points, ylog ? Math.exp(ay0) : ay0 );
                yminbuilder.putValue( points, miny0 );
                ymaxbuilder.putValue( points, maxy0 );
                wbuilder.putValue( points, nn0 );
                points++;
            }
            i++;

            x0 = dxLimit * ( 0.5 + (int) Math.floor(pxx/dxLimit) );
            y0 = dyLimit * ( 0.5 + (int) Math.floor(pyy/dyLimit) );
            sx0 = pxx*ww;
            sy0 = pyy*ww;
            nn0 = ww;
            if ( ww>0 ) {
                miny0 = yy;
                maxy0 = yy;
            } else {
                miny0 = Double.POSITIVE_INFINITY;
                maxy0 = Double.NEGATIVE_INFINITY;
            }
        } //loop over all records

        if ( nn0>0 ) {
            ax0 = sx0 / nn0;
            ay0 = sy0 / nn0;
            xbuilder.putValue( points, xlog ? Math.exp(ax0) : ax0 );
            ybuilder.putValue( points, ylog ? Math.exp(ay0) : ay0 );
            yminbuilder.putValue( points, miny0 );
            ymaxbuilder.putValue( points, maxy0 );
            wbuilder.putValue( points, nn0 );
            points++;
        }

        MutablePropertyDataSet yds= ybuilder.getDataSet();
        MutablePropertyDataSet xds= xbuilder.getDataSet();
        Map<String,Object> xprops= DataSetUtil.getProperties(x);
        if ( xprops.containsKey( QDataSet.CADENCE ) ) xprops.put( QDataSet.CADENCE, xLimit );
        if ( xprops.containsKey( QDataSet.CACHE_TAG ) ) xprops.put( QDataSet.CACHE_TAG, null );
        if ( xprops.containsKey( QDataSet.DEPEND_0 ) ) xprops.put( QDataSet.DEPEND_0, null );
        if ( xprops.containsKey( QDataSet.BIN_MINUS ) ) xprops.put( QDataSet.BIN_MINUS, null );
        if ( xprops.containsKey( QDataSet.BIN_PLUS ) ) xprops.put( QDataSet.BIN_PLUS, null );
        if ( xprops.containsKey( QDataSet.BIN_MIN ) ) xprops.put( QDataSet.BIN_MIN, null );
        if ( xprops.containsKey( QDataSet.BIN_MAX ) ) xprops.put( QDataSet.BIN_MAX, null );
        DataSetUtil.putProperties( xprops, xds );

        Map<String,Object> yprops= DataSetUtil.getProperties(y);
        yprops.put( QDataSet.DEPEND_0, xds );
        DataSetUtil.putProperties( yprops, yds );

        yminbuilder.putProperty( QDataSet.UNITS, SemanticOps.getUnits(y) );
        ymaxbuilder.putProperty( QDataSet.UNITS, SemanticOps.getUnits(y) );

        yds.putProperty( QDataSet.DEPEND_0, xds );
        yds.putProperty( QDataSet.WEIGHTS, wbuilder.getDataSet() );
        //TODO: this should probably be BIN_PLUS, BIN_MINUS
        yds.putProperty( QDataSet.DELTA_MINUS, Ops.subtract( yds, yminbuilder.getDataSet() ) );
        yds.putProperty( QDataSet.DELTA_PLUS, Ops.subtract( ymaxbuilder.getDataSet(), yds ) );

        logger.log( Level.FINE, "time to reduce2D({0} records -> {1} records) (ms): {2}", new Object[] { ds.length(), yds.length(), System.currentTimeMillis()-t0 } );
        logger.exiting("Reduction", "reduce2D");
        return yds;
    }

    /**
     * reduce the buckshot scatter data by laying it out on a 2-D hexgrid and
     * accumulating the hits to each cell.  This has not been thoroughly verified.
     * @param ds rank 1 Y(X)
     * @param z null or data to average
     * @return rank 2 ds containing frequency of occurrence for each bin, with DEPEND_0=xxx and DEPEND_1=yyy.
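     * <p>A sketch of typical use ({@code xx} and {@code yy} are hypothetical
     * scatter data):
     * <pre>{@code
     * QDataSet counts= Reduction.hexbin( Ops.link( xx, yy ), null );
     * }</pre>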
     * @see org.das2.qds.ops.Ops#histogram2d(org.das2.qds.QDataSet, org.das2.qds.QDataSet, int[], org.das2.qds.QDataSet, org.das2.qds.QDataSet)
     * @throws IllegalArgumentException when the units cannot be converted
     * @see <a href="https://cran.r-project.org/web/packages/hexbin/vignettes/hexagon_binning.pdf">hexagon binning</a>
     */
    public static QDataSet hexbin( QDataSet ds, QDataSet z ) {
        logger.entering("Reduction", "hexbin");

        if ( ds.rank()!=1 && !Ops.isBundle(ds) ) {
            throw new IllegalArgumentException("ds.rank() must be 1");
        }

        QDataSet xx= SemanticOps.xtagsDataSet(ds);
        QDataSet yy= SemanticOps.ytagsDataSet(ds);

        QDataSet xr= Ops.extent(xx);
        QDataSet yr= Ops.multiply( Ops.extent(yy), (3/Math.sqrt(3)) );

        QDataSet xxx= Ops.linspace( xr.value(0), xr.value(1), 100 );
        QDataSet yyy1= Ops.linspace( yr.value(0), yr.value(1), 100 );
        double dy= yyy1.value(1)-yyy1.value(0);
        // two column grids, offset by a half cell, form the hexagonal pattern.
        yyy1= Ops.linspace( yr.value(0)-dy/4, yr.value(1)-dy/4, 100 );
        QDataSet yyy2= Ops.linspace( yr.value(0)+dy/4, yr.value(1)+dy/4, 100 );

        double ymin1= yyy1.value(0);
        double ymin2= yyy2.value(0);
        double xmin= xxx.value(0);
        double xspace= xxx.value(1) - xxx.value(0);
        double yspace= yyy1.value(1) - yyy1.value(0);

        int nx= xxx.length();
        int ny= yyy1.length();

        IDataSet result= IDataSet.createRank2(nx*2,ny);
        QDataSet ww= SemanticOps.weightsDataSet(yy);

        UnitsConverter ucx= SemanticOps.getUnitsConverter( xx,xxx );
        UnitsConverter ucy= SemanticOps.getUnitsConverter( yy,yyy1 );

        boolean xlog= false;
        boolean ylog= false;

        DDataSet S;
        if ( z==null ) {
            z= Ops.ones(xx.length());
            S= null;
        } else {
            S= DDataSet.createRank2(nx*2,ny);
        }

        for ( int i=0; i<xx.length(); i++ ) {
            if ( ww.value(i)>0 ) {
                double x= ucx.convert( xx.value(i) );
                double y= ucy.convert( yy.value(i) );
                int ix= (int)( xlog ? (Math.log10(x)-xmin)/xspace : (x-xmin)/xspace );
                int iy1= (int)( ylog ? (Math.log10(y)-ymin1)/yspace : (y-ymin1)/yspace );
                int iy2= (int)( ylog ? (Math.log10(y)-ymin2)/yspace : (y-ymin2)/yspace );
                if ( ix>=0 && ix<nx && iy1>=0 && iy1<ny && iy2>=0 && iy2<ny ) {
                    // assign the point to the closer of the two staggered grids.
                    double c1= ymin1 + ( iy1 + 0.5 )*yspace;
                    double c2= ymin2 + ( iy2 + 0.5 )*yspace;
                    if ( Math.abs( y-c1 ) <= Math.abs( y-c2 ) ) {
                        result.putValue( ix*2, iy1, result.value(ix*2,iy1)+1 );
                        if ( S!=null ) S.putValue( ix*2, iy1, S.value(ix*2,iy1)+z.value(i) );
                    } else {
                        result.putValue( ix*2+1, iy2, result.value(ix*2+1,iy2)+1 );
                        if ( S!=null ) S.putValue( ix*2+1, iy2, S.value(ix*2+1,iy2)+z.value(i) );
                    }
                }
            }
        }

        // interleave the x tags of the two staggered column grids.
        DDataSet xxx2= DDataSet.createRank1(nx*2);
        for ( int i=0; i<nx; i++ ) {
            xxx2.putValue( i*2, xxx.value(i) );
            xxx2.putValue( i*2+1, xxx.value(i)+xspace/2 );
        }
        xxx2.putProperty( QDataSet.UNITS, SemanticOps.getUnits(xxx) );

        MutablePropertyDataSet res;
        if ( S!=null ) {
            for ( int i=0; i<nx*2; i++ ) { // convert the sums to averages.
                for ( int j=0; j<ny; j++ ) {
                    double n= result.value(i,j);
                    S.putValue( i, j, n>0 ? S.value(i,j)/n : Double.NaN );
                }
            }
            S.putProperty( QDataSet.WEIGHTS, result );
            res= S;
        } else {
            res= result;
        }
        res.putProperty( QDataSet.DEPEND_0, xxx2 );
        res.putProperty( QDataSet.DEPEND_1, yyy1 );

        logger.exiting("Reduction", "hexbin");
        return res;
    }

    /**
     * reduce the buckshot scatter data by laying it out on a 2-D rectangular
     * grid and accumulating the hits to each cell.
     * @param ds rank 1 Y(X)
     * @param z null or data to average
     * @return rank 2 ds containing the frequency of occurrence (or the average of z) for each bin, with DEPEND_0=xxx and DEPEND_1=yyy.
     * @see #hexbin(org.das2.qds.QDataSet, org.das2.qds.QDataSet)
     */
    private static QDataSet rectbin( QDataSet ds, QDataSet z ) {

        QDataSet xx= SemanticOps.xtagsDataSet(ds);
        QDataSet yy= SemanticOps.ytagsDataSet(ds);

        QDataSet xr= Ops.extent(xx);
        QDataSet yr= Ops.extent(yy);
        QDataSet xxx= Ops.linspace( xr.value(0), xr.value(1), 100 );
        QDataSet yyy= Ops.linspace( yr.value(0), yr.value(1), 100 );

        double xmin= xxx.value(0);
        double ymin= yyy.value(0);
        double xspace= xxx.value(1) - xxx.value(0);
        double yspace= yyy.value(1) - yyy.value(0);

        int nx= xxx.length();
        int ny= yyy.length();

        IDataSet result= IDataSet.createRank2(nx,ny);
        QDataSet ww= SemanticOps.weightsDataSet(yy);

        UnitsConverter ucx= SemanticOps.getUnitsConverter( xx,xxx );
        UnitsConverter ucy= SemanticOps.getUnitsConverter( yy,yyy );

        boolean xlog= false;
        boolean ylog= false;

        if ( z==null ) {
            for ( int i=0; i<xx.length(); i++ ) {
                if ( ww.value(i)>0 ) {
                    double x= ucx.convert( xx.value(i) );
                    double y= ucy.convert( yy.value(i) );
                    int ix= (int)( xlog ? (Math.log10(x)-xmin)/xspace : (x-xmin)/xspace );
                    int iy= (int)( ylog ? (Math.log10(y)-ymin)/yspace : (y-ymin)/yspace );
                    if ( ix>=0 && ix<nx && iy>=0 && iy<ny ) {
                        result.putValue( ix, iy, result.value(ix,iy)+1 );
                    }
                }
            }
            result.putProperty( QDataSet.DEPEND_0, xxx );
            result.putProperty( QDataSet.DEPEND_1, yyy );
            return result;
        } else {
            DDataSet S= DDataSet.createRank2(nx,ny);
            for ( int i=0; i<xx.length(); i++ ) {
                if ( ww.value(i)>0 ) {
                    double x= ucx.convert( xx.value(i) );
                    double y= ucy.convert( yy.value(i) );
                    int ix= (int)( xlog ? (Math.log10(x)-xmin)/xspace : (x-xmin)/xspace );
                    int iy= (int)( ylog ? (Math.log10(y)-ymin)/yspace : (y-ymin)/yspace );
                    if ( ix>=0 && ix<nx && iy>=0 && iy<ny ) {
                        result.putValue( ix, iy, result.value(ix,iy)+1 );
                        S.putValue( ix, iy, S.value(ix,iy)+z.value(i) );
                    }
                }
            }
            for ( int i=0; i<nx; i++ ) { // convert the sums to averages.
                for ( int j=0; j<ny; j++ ) {
                    double n= result.value(i,j);
                    S.putValue( i, j, n>0 ? S.value(i,j)/n : Double.NaN );
                }
            }
            S.putProperty( QDataSet.WEIGHTS, result );
            S.putProperty( QDataSet.DEPEND_0, xxx );
            S.putProperty( QDataSet.DEPEND_1, yyy );
            return S;
        }
    }

    /**
     * reduce the rank 2 or rank 3 qube dataset by averaging blocks of records together.
     * @param ds rank 2 or rank 3 qube dataset with DEPEND_0.
     * @param xLimit the size of the bins.
     * @return the reduced dataset.
     * @see #reducex(org.das2.qds.QDataSet, org.das2.qds.QDataSet, boolean)
     */
    public static QDataSet reduceRankN( QDataSet ds, Datum xLimit ) {
        logger.entering("Reduction", "reduceRankN");

        if ( ds.rank()!=2 && ds.rank()!=3 ) {
            throw new IllegalArgumentException("ds must be rank 2 or rank 3: "+ds);
        }

        QDataSet xds= (QDataSet) ds.property( QDataSet.DEPEND_0 );
        if ( xds==null ) {
            throw new IllegalArgumentException("dataset must have DEPEND_0: "+ds);
        }
        Units xunits= SemanticOps.getUnits(xds);
        double dxLimit= xLimit.doubleValue( xunits.getOffsetUnits() );

        QDataSet wds= DataSetUtil.weightsDataSet(ds);

        int[] qube= DataSetUtil.qubeDims(ds);
        int n1= qube[1];
        int n2= qube.length>2 ? qube[2] : 1;

        DataSetBuilder resultSBuilder= ds.rank()==3 ? new DataSetBuilder( 3, 1000, n1, n2 ) : new DataSetBuilder( 2, 1000, n1 );
        DataSetBuilder resultNBuilder= ds.rank()==3 ? new DataSetBuilder( 3, 1000, n1, n2 ) : new DataSetBuilder( 2, 1000, n1 );
        DataSetBuilder resultxBuilder= new DataSetBuilder( 1, 1000 );

        double[][] ss= new double[n1][n2]; // weighted sums for the current bin.
        double[][] nn= new double[n1][n2]; // accumulated weights for the current bin.
        double sx= 0.;                     // sum of the xtags for the current bin.
        int nx= 0;                         // number of records in the current bin.
        double x0= ds.length()>0 ? Math.floor( xds.value(0)/dxLimit )*dxLimit : 0.; // lower bound of the current bin.
        int points= 0;

        for ( int i=0; i<ds.length(); i++ ) {
            double xx= xds.value(i);
            if ( xx-x0 >= dxLimit && nx>0 ) { // write out the accumulated averages.
                resultxBuilder.putValue( points, sx/nx );
                for ( int j=0; j<n1; j++ ) {
                    for ( int k=0; k<n2; k++ ) {
                        double av= nn[j][k]>0 ? ss[j][k]/nn[j][k] : Double.NaN;
                        if ( ds.rank()==3 ) {
                            resultSBuilder.putValue( points, j, k, av );
                            resultNBuilder.putValue( points, j, k, nn[j][k] );
                        } else {
                            resultSBuilder.putValue( points, j, av );
                            resultNBuilder.putValue( points, j, nn[j][k] );
                        }
                        ss[j][k]= 0.;
                        nn[j][k]= 0.;
                    }
                }
                points++;
                sx= 0.;
                nx= 0;
                x0= Math.floor( xx/dxLimit )*dxLimit;
            }
            sx+= xx;
            nx+= 1;
            for ( int j=0; j<n1; j++ ) {
                for ( int k=0; k<n2; k++ ) {
                    double w= ds.rank()==3 ? wds.value(i,j,k) : wds.value(i,j);
                    if ( w>0 ) {
                        double v= ds.rank()==3 ? ds.value(i,j,k) : ds.value(i,j);
                        ss[j][k]+= v*w;
                        nn[j][k]+= w;
                    }
                }
            }
        }

        if ( nx>0 ) { // clean up any remaining data.
            resultxBuilder.putValue( points, sx/nx );
            for ( int j=0; j<n1; j++ ) {
                for ( int k=0; k<n2; k++ ) {
                    double av= nn[j][k]>0 ? ss[j][k]/nn[j][k] : Double.NaN;
                    if ( ds.rank()==3 ) {
                        resultSBuilder.putValue( points, j, k, av );
                        resultNBuilder.putValue( points, j, k, nn[j][k] );
                    } else {
                        resultSBuilder.putValue( points, j, av );
                        resultNBuilder.putValue( points, j, nn[j][k] );
                    }
                }
            }
        }

        resultxBuilder.putProperty( QDataSet.UNITS, xunits );

        Map<String,Object> props= DataSetUtil.getDimensionProperties( ds, null );
        for ( Map.Entry<String,Object> en: props.entrySet() ) {
            resultSBuilder.putProperty( en.getKey(), en.getValue() );
        }
        Map<String,Object> xprops= DataSetUtil.getDimensionProperties( xds, null );
        for ( Map.Entry<String,Object> en: xprops.entrySet() ) {
            if ( !en.getKey().equals(QDataSet.UNITS) ) {
                resultxBuilder.putProperty( en.getKey(), en.getValue() );
            }
        }
        resultxBuilder.putProperty( QDataSet.CADENCE, DataSetUtil.asDataSet(xLimit) );
        resultSBuilder.putProperty( QDataSet.DEPEND_0, resultxBuilder.getDataSet() );
        resultSBuilder.putProperty( QDataSet.WEIGHTS, resultNBuilder.getDataSet() );

        DDataSet resultDs= resultSBuilder.getDataSet();
        return resultDs;
    }
}
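/* A sketch of typical use (the ten-second cadence and the variable
 * "spectrogram" are hypothetical; reducex dispatches by rank to the
 * routines above):
 *
 *   QDataSet cadence= DataSetUtil.asDataSet( Units.seconds.createDatum(10) );
 *   QDataSet reduced= Reduction.reducex( spectrogram, cadence, true );
 */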