package org.autoplot.hapi;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.URLEncoder;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JOptionPane;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.multipart.ByteArrayPartSource;
import org.apache.commons.httpclient.methods.multipart.FilePart;
import org.apache.commons.httpclient.methods.multipart.MultipartRequestEntity;
import org.apache.commons.httpclient.methods.multipart.Part;
import org.apache.commons.httpclient.methods.multipart.StringPart;
import org.das2.datum.DatumRange;
import org.das2.datum.TimeUtil;
import org.das2.datum.Units;
import org.das2.datum.UnitsUtil;
import org.das2.datum.format.DatumFormatter;
import org.das2.datum.format.DefaultDatumFormatterFactory;
import org.das2.util.monitor.ProgressMonitor;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.das2.qds.DataSetUtil;
import org.das2.qds.QDataSet;
import org.das2.qds.QubeDataSetIterator;
import org.das2.qds.SemanticOps;
import org.autoplot.datasource.DataSourceFormat;
import org.autoplot.datasource.URISplit;
import org.das2.datum.LoggerManager;
import org.das2.datum.format.TimeDatumFormatter;
import org.das2.qds.FloatReadAccess;
import org.das2.qds.ops.Ops;
import org.das2.qstream.AsciiTimeTransferType;
import org.das2.qstream.DoubleTransferType;
import org.das2.qstream.IntegerTransferType;
import org.das2.qstream.TransferType;

/**
 * Format the QDataSet into HAPI server info and data responses.
 * @author jbf
 */
public class HapiDataSourceFormat implements DataSourceFormat {

    private static final Logger logger= LoggerManager.getLogger("apdss.hapi");

    private void upload( String uri, QDataSet data, ProgressMonitor mon ) throws Exception {
        URISplit split= URISplit.parse(uri);
        Map<String,String> params= URISplit.parseParams(split.params);

        String key= params.get("key");
        if ( key==null ) {
            throw new IllegalArgumentException("missing key");
        }

        if ( data.rank()!=2 ) {
            throw new IllegalArgumentException("data must be rank 2 bundle");
        }

        StringBuilder dataBuilder= new StringBuilder();
        for ( int i=0; i<data.length(); i++ ) {           // one CSV line per record
            QDataSet slice= data.slice(i);
            for ( int j=0; j<slice.length(); j++ ) {      // one field per bundled column
                if ( j>0 ) dataBuilder.append(',');
                dataBuilder.append( slice.slice(j).svalue() );
            }
            dataBuilder.append("\n"); //TODO: what should this be?
        }

        HttpClient client = new HttpClient();
        client.getHttpConnectionManager().getParams().setConnectionTimeout(3000);

        PostMethod postMethod = new PostMethod( split.file + "?"
                + URISplit.formatParams(params) );

        Charset ch= Charset.forName("UTF-8");
        byte[] dataBytes= dataBuilder.toString().getBytes(ch);

        Part[] parts= {
            new StringPart( "key", key ),
            new FilePart( "data", new ByteArrayPartSource( "data", dataBytes ), "text/csv", ch.name() ),
        };
        postMethod.setRequestEntity( new MultipartRequestEntity( parts, postMethod.getParams() ) );

        try {
            int statusCode1 = client.executeMethod(postMethod);
            if ( statusCode1==200 ) {
                postMethod.releaseConnection();
            } else {
                postMethod.releaseConnection();
                throw new IllegalAccessException( postMethod.getStatusLine().toString() );
            }
        } catch ( IOException | IllegalAccessException ex ) {
            throw ex;
        }
    }
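    /**
     * Format the data into a file-based HAPI server: the info, catalog, and capabilities
     * responses are written under the "hapi" directory implied by the .hapi file in the URI,
     * along with the data itself in csv or binary form.  When the URI is not a file URI,
     * the data is instead sent to the remote server with a multipart POST (see upload above).
     * A hypothetical call, where the URI, the id, and the dataset are examples only:
     * <pre>{@code
     * // ds is a rank 1 time series with a DEPEND_0 of timetags
     * new HapiDataSourceFormat().formatData(
     *     "file:///tmp/mysite/mydata.hapi?id=temperature&format=csv", ds, new NullProgressMonitor() );
     * }</pre>
     * @param uri the target URI, for example file:///tmp/mysite/mydata.hapi?id=temperature
     * @param data the data, which must have a DEPEND_0 of timetags.
     * @param mon a progress monitor for the operation.
     * @throws Exception
     */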
    @Override
    public void formatData(String uri, QDataSet data, ProgressMonitor mon) throws Exception {
        // file:///home/jbf/hapi?id=mydata
        logger.log(Level.FINE, "formatData {0} {1}", new Object[]{uri, data});

        URISplit split= URISplit.parse(uri);
        Map<String,String> params= URISplit.parseParams(split.params);

        String s= split.file;
        if ( s.startsWith("file://") ) {
            s= s.substring(7);
        } else {
            upload( uri, data, mon );
            return;
        }

        int ix= s.lastIndexOf(".hapi");
        if ( ix==-1 ) {
            throw new IllegalArgumentException("uri must end in .hapi");
        }
        File hapiDir= new File( s.substring(0,ix) );
        hapiDir= new File( hapiDir, "hapi" );
        if ( !hapiDir.exists() ) {
            logger.log(Level.FINE, "mkdir {0}", hapiDir);
            if ( !hapiDir.mkdirs() ) {
                throw new IOException("failed to mkdirs: "+hapiDir);
            }
        }

        String id= params.get("id");
        if ( id==null || id.length()==0 ) id="data";
        String format= params.get("format");
        if ( format==null || format.length()==0 ) format="csv";

        File infoFile= new File( new File( hapiDir, "info" ), id+".json" );

        JSONObject jo= new JSONObject();
        jo.put("HAPI","2.0");
        //jo.put("createdAt",TimeUtil.now().toString());
        jo.put("modificationDate", TimeUtil.now().toString());
        jo.put( "status", getHapiStatusObject() );

        JSONArray parameters= new JSONArray();

        List<QDataSet> dss= new ArrayList<>();
        List<FloatReadAccess> ffds= new ArrayList<>();

        String groupTitle;

        QDataSet dep0= (QDataSet) data.property( QDataSet.DEPEND_0 );
        if ( dep0!=null ) {
            dss.add(dep0);
            ffds.add(null);
        } else {
            throw new IllegalArgumentException("data must have a DEPEND_0");
        }

        boolean dep1IsOrdinal= false;  // true when DEPEND_1 carries labels rather than physical values
        QDataSet dep1= (QDataSet)data.property(QDataSet.DEPEND_1);
        if ( dep1!=null && dep1.rank()==1 ) {
            if ( UnitsUtil.isOrdinalMeasurement( SemanticOps.getUnits(dep1) ) ) {
                dep1IsOrdinal= true;
            } else {
                dep1IsOrdinal= true;
                for ( int i=0; dep1IsOrdinal && i<dep1.length(); i++ ) {
                    if ( dep1.value(i)!=i ) dep1IsOrdinal= false;  // a DEPEND_1 that just counts off indices is treated as ordinal
                }
            }
        }

        groupTitle= (String)data.property(QDataSet.TITLE);
        if ( groupTitle==null ) groupTitle= id;

        // the timetags and the data itself become the HAPI parameters.
        dss.add( data );
        ffds.add( data.capability( FloatReadAccess.class ) );

        int i=0;
        for ( QDataSet ds : dss ) {
            JSONObject j1= new JSONObject();
            if ( i==0 ) {                                     // the timetags parameter
                j1.put( "name", "Time" );
                j1.put( "type", "isotime" );
                j1.put( "length", 24 );
                j1.put( "units", "UTC" );
                j1.put( "fill", JSONObject.NULL );
                parameters.put( i, j1 );
            } else {
                String name= (String)ds.property(QDataSet.NAME);
                if ( name==null ) name= "data_"+i;
                j1.put( "name", name );
                j1.put( "type", "double" );
                Units u= SemanticOps.getUnits(ds);
                j1.put( "units", u==Units.dimensionless ? JSONObject.NULL : u.toString() );
                if ( ds.rank()>1 ) {
                    j1.put("size", DataSetUtil.qubeDims(ds.slice(0)) );
                }
                Number f= (Number)ds.property(QDataSet.FILL_VALUE);
                if ( f!=null ) {
                    j1.put("fill",f.toString()); //TODO: check that this is properly handled as Object.
                } else {
                    j1.put("fill",JSONObject.NULL );
                }
                if ( ds.rank()>=2 ) {
                    j1.put("bins", getBinsFor(ds) );
                }
                parameters.put(i,j1);
            }
            i++;
        }
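        /* At this point "parameters" holds one JSONObject per column.  A sketch of the info
         * response assembled below and written to info/<id>.json; the property names follow
         * the HAPI 2.0 specification, and the values here are hypothetical, for a scalar
         * "temperature" parameter:
         *
         *   {
         *     "HAPI": "2.0",
         *     "modificationDate": "2017-06-01T00:00:00.000Z",
         *     "startDate": "2016-01-01T00:00:00.000Z",
         *     "stopDate":  "2016-01-02T00:00:00.000Z",
         *     "parameters": [
         *       { "name": "Time", "type": "isotime", "length": 24, "units": "UTC", "fill": null },
         *       { "name": "temperature", "type": "double", "units": "K", "fill": "-1e31" }
         *     ],
         *     "status": { "code": 1200, "message": "OK request successful" }
         *   }
         */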
        DatumRange dr= DataSetUtil.asDatumRange( Ops.extent(dep0) );
        if ( dep0.property(QDataSet.VALID_MIN)!=null && dep0.property(QDataSet.VALID_MAX)!=null ) {
            Units tu= SemanticOps.getUnits(dep0);
            double vmin= (Double)dep0.property(QDataSet.VALID_MIN);
            double vmax= (Double)dep0.property(QDataSet.VALID_MAX);
            DatumRange drvalid= DatumRange.newRange( vmin, vmax, tu );
            if ( drvalid.min().gt( tu.parse("1900-01-01") ) && drvalid.max().lt( tu.parse("2200-01-01") ) ) { // sanity check
                dr= drvalid;
            }
        }
        jo.put( "startDate", dr.min().toString() );
        jo.put( "stopDate", dr.max().toString() );
        jo.put( "sampleStartDate", dr.min().toString() );
        jo.put( "sampleStopDate", dr.max().toString() );

        jo.put( "parameters", parameters );

        File parentFile= infoFile.getParentFile();
        if ( parentFile==null ) throw new IllegalArgumentException("info has no parent");
        if ( !parentFile.exists() ) {
            if ( !parentFile.mkdirs() ) {
                throw new IllegalArgumentException("unable to make folder for info file.");
            }
        }
        try ( FileWriter fw = new FileWriter(infoFile) ) {
            fw.write( jo.toString(4) );
        }

        updateCatalog(hapiDir, id, groupTitle);

        File capabilitiesFile= new File( hapiDir, "capabilities.json" );
        JSONObject c= new JSONObject();
        c.put("HAPI","2.0");
        JSONArray f= new JSONArray();
        f.put( 0, "csv" );
        f.put( 1, "binary" );
        c.put( "outputFormats", f );
        c.put( "status", getHapiStatusObject() );
        try ( FileWriter fw = new FileWriter(capabilitiesFile) ) {
            fw.write( c.toString(4) );
        }

        String ext= format.equals("binary") ? ".binary" : ".csv";

        File dataFile= new File( new File( hapiDir, "data" ), id+ ext );
        if ( !dataFile.getParentFile().exists() ) {
            if ( !dataFile.getParentFile().mkdirs() ) {
                throw new IOException("unable to mkdir: "+dataFile.getParentFile() );
            }
        }

        if ( format.equals("binary") ) {
            TransferType[] tts= new TransferType[dss.size()];
            int nbytes= 0;
            for ( int ids=0; ids<dss.size(); ids++ ) {
                QDataSet ds= dss.get(ids);
                Units u= SemanticOps.getUnits(ds);
                TransferType tt;
                if ( UnitsUtil.isTimeLocation(u) ) {
                    tt= new AsciiTimeTransferType( 24, u );    // 24-character ISO8601 timetags
                } else {
                    tt= new DoubleTransferType();
                }
                tts[ids]= tt;
                int nelements= 1;
                if ( ds.rank()>1 ) {
                    int[] qube= DataSetUtil.qubeDims( ds.slice(0) );
                    for ( int q: qube ) nelements*= q;
                }
                nbytes+= nelements * tt.sizeBytes();
            }
            try ( FileChannel channel= new FileOutputStream(dataFile).getChannel() ) {
                ByteBuffer buf= ByteBuffer.allocate( nbytes );
                buf.order( ByteOrder.LITTLE_ENDIAN );
                for ( int irec=0; irec<dep0.length(); irec++ ) {
                    for ( int ids=0; ids<dss.size(); ids++ ) {
                        QDataSet ds= dss.get(ids);
                        TransferType tt= tts[ids];
                        if ( ds.rank()==1 ) {
                            tt.write( ds.value(irec), buf );
                        } else if ( ds.rank()==2 ) {
                            for ( int j=0; j<ds.length(irec); j++ ) {
                                tt.write( ds.value(irec,j), buf );
                            }
                        } else if ( ds.rank()>2 ) {
                            QDataSet ds1= ds.slice(irec);
                            QubeDataSetIterator iter= new QubeDataSetIterator(ds1);
                            while ( iter.hasNext() ) {
                                iter.next();
                                double d= iter.getValue(ds1);
                                tt.write( d, buf );
                            }
                        }
                    }
                    buf.flip();
                    channel.write(buf);
                    buf.flip();
                }
            }
        } else {
            DatumFormatter[] dfs= new DatumFormatter[dss.size()];
            for ( int ids=0; ids<dss.size(); ids++ ) {
                QDataSet ds= dss.get(ids);
                Units u= SemanticOps.getUnits(ds);
                if ( UnitsUtil.isTimeLocation(u) ) {
                    dfs[ids]= new TimeDatumFormatter( "%Y-%m-%dT%H:%M:%S.%{milli}Z" );  // ISO8601 timetags (assumed format string)
                } else {
                    dfs[ids]= DefaultDatumFormatterFactory.getInstance().defaultFormatter();
                }
            }
            try ( FileWriter fw= new FileWriter(dataFile) ) {
                for ( int irec=0; irec<dep0.length(); irec++ ) {
                    String delim= "";
                    for ( int ids=0; ids<dss.size(); ids++ ) {
                        QDataSet ds= dss.get(ids);
                        DatumFormatter df= dfs[ids];
                        Units u= SemanticOps.getUnits(ds);
                        FloatReadAccess fra;
                        if ( ids>0 ) delim=",";
                        boolean uIsOrdinal= UnitsUtil.isOrdinalMeasurement(u);
                        fra= ffds.get(ids);
                        if ( ds.rank()==1 ) {
                            if ( ids>0 ) fw.write( delim );
                            if ( fra!=null ) {
                                fw.write( String.valueOf( fra.fvalue(irec) ) );
                            } else {
                                fw.write( df.format( u.createDatum(ds.value(irec)), u ) );
                            }
                        } else if ( ds.rank()==2 ) {
                            if ( fra!=null ) {
                                for ( int j=0; j<ds.length(irec); j++ ) {
                                    if ( ids>0 ) fw.write( delim );
                                    fw.write( String.valueOf( fra.fvalue(irec,j) ) );
                                }
                            } else {
                                for ( int j=0; j<ds.length(irec); j++ ) {
                                    if ( ids>0 ) fw.write( delim );
                                    fw.write( df.format( u.createDatum(ds.value(irec,j)), u ) );
                                }
                            }
                        } else if ( ds.rank()>2 ) {
                            QDataSet ds1= ds.slice(irec);
                            QubeDataSetIterator iter= new QubeDataSetIterator(ds1);
                            while ( iter.hasNext() ) {
                                iter.next();
                                double d= iter.getValue(ds1);
                                if ( ids>0 ) fw.write( delim );
                                if ( uIsOrdinal ) {
                                    fw.write("\"");
                                    fw.write( df.format( u.createDatum(d), u ) );
                                    fw.write("\"");
                                } else {
                                    fw.write( df.format( u.createDatum(d), u ) );
                                }
                            }
                        }
                    }
                    fw.write( "\n" );
                }
            }
        }
    }

    private JSONObject getHapiStatusObject() throws JSONException {
        JSONObject jo1= new JSONObject();
        jo1.put("code", 1200 );
        jo1.put("message", "OK request successful");
        return jo1;
    }
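    /**
     * Add or update the entry for this id in the server's catalog.json response, creating
     * the file if it does not exist yet.  A sketch of the resulting file, where the ids and
     * title are hypothetical values:
     * <pre>{@code
     * {
     *    "HAPI": "2.0",
     *    "catalog": [
     *       { "id": "temperature", "title": "Thermal plasma temperature" },
     *       { "id": "density" }
     *    ]
     * }
     * }</pre>
     * @param hapiDir the root of the HAPI server tree, containing catalog.json.
     * @param id the identifier for the dataset.
     * @param groupTitle a human-readable title for the dataset, or null.
     * @throws JSONException
     * @throws IOException
     */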
    private void updateCatalog(File hapiDir, String id, String groupTitle) throws JSONException, IOException {
        File catalogFile= new File( hapiDir, "catalog.json" );
        JSONObject catalog;
        JSONArray catalogArray;
        if ( catalogFile.exists() ) {
            StringBuilder builder= new StringBuilder();
            try ( BufferedReader in= new BufferedReader( new InputStreamReader( new FileInputStream(catalogFile), HapiServer.UTF8 ) ) ) {
                String line= in.readLine();
                while ( line!=null ) {
                    builder.append(line);
                    line= in.readLine();
                }
            }
            catalog= new JSONObject(builder.toString());
            catalogArray= catalog.getJSONArray("catalog");
        } else {
            catalog= new JSONObject();
            catalog.put( "HAPI", "2.0" );
            catalogArray= new JSONArray();
            catalog.put( "catalog", catalogArray );
        }
        JSONObject item;
        int itemIndex=-1;
        for ( int j=0; j<catalogArray.length(); j++ ) {       // is this id already in the catalog?
            if ( catalogArray.getJSONObject(j).getString("id").equals(id) ) {
                itemIndex= j;
            }
        }
        if ( itemIndex==-1 ) {
            item= new JSONObject();
            catalogArray.put( item );
        } else {
            item= catalogArray.getJSONObject(itemIndex);
        }
        item.put( "id", id );
        if ( groupTitle!=null ) item.put( "title", groupTitle );
        try ( FileWriter fw= new FileWriter(catalogFile) ) {
            fw.write( catalog.toString(4) );
        }
    }

    /**
     * describe the extra dimensions of a rank 2 or higher parameter as HAPI bins objects,
     * taking the bin centers from the DEPEND_1, DEPEND_2, ... datasets when they are present.
     */
    private JSONArray getBinsFor( QDataSet ds ) throws JSONException {
        JSONArray bins= new JSONArray();
        for ( int idim=1; idim<ds.rank(); idim++ ) {
            JSONObject bin= new JSONObject();
            QDataSet dep= (QDataSet)ds.property( "DEPEND_"+idim );
            if ( dep!=null && dep.rank()==1 ) {
                String name= (String)dep.property(QDataSet.NAME);
                bin.put( "name", name==null ? "dim"+idim : name );
                Units u= SemanticOps.getUnits(dep);
                bin.put( "units", u==Units.dimensionless ? JSONObject.NULL : u.toString() );
                JSONArray centers= new JSONArray();
                for ( int j=0; j<dep.length(); j++ ) {
                    centers.put( j, dep.value(j) );
                }
                bin.put( "centers", centers );
            } else {
                bin.put( "name", "dim"+idim );
                bin.put( "centers", JSONObject.NULL );
            }
            bins.put( idim-1, bin );
        }
        return bins;
    }

    /**
     * stream the records to the output stream, one record per QDataSet from the iterator,
     * in the csv or binary format requested in params.
     * @param params the request parameters, including "format".
     * @param dataIt iterator returning one record at a time, each with a CONTEXT_0 timetag.
     * @param out the stream to which the data response is written.
     * @return true
     * @throws Exception
     */
    public boolean streamData( Map<String,String> params, Iterator<QDataSet> dataIt, OutputStream out) throws Exception {
        String format= params.get("format");
        if ( format==null || format.length()==0 ) format="csv";

        WritableByteChannel channel= null;
        OutputStreamWriter fw=null;
        if ( format.equals("binary") ) {
            channel= Channels.newChannel(out);
        } else if ( format.equals("csv") ) {
            fw= new OutputStreamWriter(out);
        }

        while ( dataIt.hasNext() ) {
            QDataSet data= dataIt.next();

            List<QDataSet> dss= new ArrayList<>();
            List<FloatReadAccess> ffds= new ArrayList<>();

            QDataSet dep0= (QDataSet) data.property( QDataSet.CONTEXT_0 );
            if ( dep0!=null ) {
                dss.add(dep0);
                ffds.add(null);
            } else {
                throw new IllegalArgumentException("data must have a DEPEND_0");
            }

            boolean dep1IsOrdinal= false;  // true when DEPEND_1 carries labels rather than physical values
            QDataSet dep1= (QDataSet)data.property(QDataSet.DEPEND_1);
            if ( dep1!=null && dep1.rank()==1 ) {
                if ( UnitsUtil.isOrdinalMeasurement( SemanticOps.getUnits(dep1) ) ) {
                    dep1IsOrdinal= true;
                } else {
                    dep1IsOrdinal= true;
                    for ( int i=0; dep1IsOrdinal && i<dep1.length(); i++ ) {
                        if ( dep1.value(i)!=i ) dep1IsOrdinal= false;  // a DEPEND_1 that just counts off indices is treated as ordinal
                    }
                }
            }

            // the timetag and the record itself become the columns of the output record.
            dss.add( data );
            ffds.add( data.capability( FloatReadAccess.class ) );

            if ( format.equals("binary") ) {
                TransferType[] tts= new TransferType[dss.size()];
                int nbytes= 0;
                for ( int ids=0; ids<dss.size(); ids++ ) {
                    QDataSet ds= dss.get(ids);
                    Units u= SemanticOps.getUnits(ds);
                    TransferType tt;
                    if ( UnitsUtil.isTimeLocation(u) ) {
                        tt= new AsciiTimeTransferType( 24, u );    // 24-character ISO8601 timetags
                    } else {
                        tt= new DoubleTransferType();
                    }
                    tts[ids]= tt;
                    int nelements= 1;
                    if ( ds.rank()>0 ) {
                        int[] qube= DataSetUtil.qubeDims(ds);
                        for ( int q: qube ) nelements*= q;
                    }
                    nbytes+= nelements * tt.sizeBytes();
                }
                ByteBuffer buf= ByteBuffer.allocate( nbytes );
                buf.order( ByteOrder.LITTLE_ENDIAN );
                for ( int ids=0; ids<dss.size(); ids++ ) {
                    QDataSet ds= dss.get(ids);
                    TransferType tt= tts[ids];
                    if ( ds.rank()==0 ) {
                        tt.write( ds.value(), buf );
                    } else if ( ds.rank()==1 ) {
                        for ( int j=0; j<ds.length(); j++ ) {
                            tt.write( ds.value(j), buf );
                        }
                    } else if ( ds.rank()>1 ) {
                        QDataSet ds1= ds;
                        QubeDataSetIterator iter= new QubeDataSetIterator(ds1);
                        while ( iter.hasNext() ) {
                            iter.next();
                            double d= iter.getValue(ds1);
                            tt.write( d, buf );
                        }
                    }
                }
                buf.flip();
                assert channel!=null;
                channel.write(buf);
                buf.flip();
            } else {
                DatumFormatter[] dfs= new DatumFormatter[dss.size()];
                for ( int ids=0; ids<dss.size(); ids++ ) {
                    QDataSet ds= dss.get(ids);
                    Units u= SemanticOps.getUnits(ds);
                    if ( UnitsUtil.isTimeLocation(u) ) {
                        dfs[ids]= new TimeDatumFormatter( "%Y-%m-%dT%H:%M:%S.%{milli}Z" );  // ISO8601 timetags (assumed format string)
                    } else {
                        dfs[ids]= DefaultDatumFormatterFactory.getInstance().defaultFormatter();
                    }
                }
                assert fw!=null;
                String delim= "";
                for ( int ids=0; ids<dss.size(); ids++ ) {
                    QDataSet ds= dss.get(ids);
                    DatumFormatter df= dfs[ids];
                    Units u= SemanticOps.getUnits(ds);
                    FloatReadAccess fra;
                    if ( ids>0 ) delim=",";
                    boolean uIsOrdinal= UnitsUtil.isOrdinalMeasurement(u);
                    fra= ffds.get(ids);
                    if ( ds.rank()==0 ) {
                        if ( ids>0 ) fw.write( delim );
                        if ( fra!=null ) {
                            fw.write( String.valueOf( fra.fvalue() ) );
                        } else {
                            fw.write( df.format( u.createDatum(ds.value()), u ) );
                        }
                    } else if ( ds.rank()==1 ) {
                        if ( fra!=null ) {
                            for ( int j=0; j<ds.length(); j++ ) {
                                if ( ids>0 ) fw.write( delim );
                                fw.write( String.valueOf( fra.fvalue(j) ) );
                            }
                        } else {
                            for ( int j=0; j<ds.length(); j++ ) {
                                if ( ids>0 ) fw.write( delim );
                                fw.write( df.format( u.createDatum(ds.value(j)), u ) );
                            }
                        }
                    } else if ( ds.rank()>1 ) {
                        QDataSet ds1= ds;
                        QubeDataSetIterator iter= new QubeDataSetIterator(ds1);
                        while ( iter.hasNext() ) {
                            iter.next();
                            double d= iter.getValue(ds1);
                            if ( ids>0 ) fw.write( delim );
                            if ( uIsOrdinal ) {
                                fw.write("\"");
                                fw.write( df.format( u.createDatum(d), u ) );
                                fw.write("\"");
                            } else {
                                fw.write( df.format( u.createDatum(d), u ) );
                            }
                        }
                    }
                }
                fw.write( "\n" );
            }
        }

        if ( fw!=null ) fw.close();
        if ( channel!=null ) channel.close();

        return true;
    }

}