diff --git a/.github/workflows/first_workflow.yml b/.github/workflows/first_workflow.yml index 37f1b1e0..57910cd7 100644 --- a/.github/workflows/first_workflow.yml +++ b/.github/workflows/first_workflow.yml @@ -23,14 +23,14 @@ jobs: git status echo "===" cd /var/www/cottagesystems.com/autoplot/git/server-java/HapiServerBase - ant clean jar + ant -Dplatforms.JDK_8__System_.home=/usr/local/jdk1.8 clean jar echo "see https://cottagesystems.com/autoplot/git/server-java/HapiServerBase/dist/HapiServerBase.jar" echo "================" cd /var/www/cottagesystems.com/autoplot/git/server-java/HapiServer - ant -Dj2ee.server.home=/usr/local/apache-tomcat-8/ clean dist + ant -Dplatforms.JDK_8__System_.home=/usr/local/jdk1.8 -Dj2ee.server.home=/usr/local/apache-tomcat-9/ clean dist echo "see https://cottagesystems.com/autoplot/git/server-java/HapiServer/dist/HapiServer.war" - echo "================" - cd /var/www/cottagesystems.com/autoplot/git/server-java/SimpleClasspathExtension - ant clean jar - echo "see https://cottagesystems.com/autoplot/git/server-java/SimpleClasspathExtension/dist/SimpleClasspathExtension.jar" + #echo "================" + #cd /var/www/cottagesystems.com/autoplot/git/server-java/SimpleClasspathExtension + #ant -Dplatforms.JDK_8__System_.home=/usr/local/jdk1.8 clean jar + #echo "see https://cottagesystems.com/autoplot/git/server-java/SimpleClasspathExtension/dist/SimpleClasspathExtension.jar" echo "===" diff --git a/.github/workflows/ssh_test.yml b/.github/workflows/ssh_test.yml new file mode 100644 index 00000000..cdc9de67 --- /dev/null +++ b/.github/workflows/ssh_test.yml @@ -0,0 +1,24 @@ +name: SSH test + +on: + workflow_dispatch: # lets you click "Run workflow" + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - name: Setup SSH + run: | + mkdir -p ~/.ssh + echo "here line14" + echo "${{ secrets.SSH_KEY }}" > ~/.ssh/id_ed25519 + chmod 600 ~/.ssh/id_ed25519 + echo "here line17" + ssh-keyscan -H "${{ secrets.SSH_HOST }}" >> ~/.ssh/known_hosts + + - 
name: SSH and echo hello + run: | + ssh -i ~/.ssh/id_ed25519 \ + "${{ secrets.SSH_USER }}@${{ secrets.SSH_HOST }}" \ + 'echo hello' diff --git a/CDAWebServer/build.xml b/CDAWebServer/build.xml new file mode 100644 index 00000000..fea6ccec --- /dev/null +++ b/CDAWebServer/build.xml @@ -0,0 +1,73 @@ + + + + + + + + + + + Builds, tests, and runs the project CDAWebServer. + + + diff --git a/CDAWebServer/lib/CopyLibs/org-netbeans-modules-java-j2seproject-copylibstask.jar b/CDAWebServer/lib/CopyLibs/org-netbeans-modules-java-j2seproject-copylibstask.jar new file mode 100644 index 00000000..91e787a6 Binary files /dev/null and b/CDAWebServer/lib/CopyLibs/org-netbeans-modules-java-j2seproject-copylibstask.jar differ diff --git a/CDAWebServer/lib/nblibraries.properties b/CDAWebServer/lib/nblibraries.properties new file mode 100644 index 00000000..b5bd56c4 --- /dev/null +++ b/CDAWebServer/lib/nblibraries.properties @@ -0,0 +1,4 @@ +libs.CopyLibs.classpath=\ + ${base}/CopyLibs/org-netbeans-modules-java-j2seproject-copylibstask.jar +libs.CopyLibs.displayName=CopyLibs Task +libs.CopyLibs.prop-version=3.0 diff --git a/CDAWebServer/manifest.mf b/CDAWebServer/manifest.mf new file mode 100644 index 00000000..328e8e5b --- /dev/null +++ b/CDAWebServer/manifest.mf @@ -0,0 +1,3 @@ +Manifest-Version: 1.0 +X-COMMENT: Main-Class will be added automatically by build + diff --git a/CDAWebServer/nbproject/build-impl.xml b/CDAWebServer/nbproject/build-impl.xml new file mode 100644 index 00000000..2c57bdd9 --- /dev/null +++ b/CDAWebServer/nbproject/build-impl.xml @@ -0,0 +1,1842 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must set platform.home + Must set platform.bootcp + Must set platform.java + Must set platform.javac + + The J2SE Platform 
is not correctly set up. + Your active platform is: ${platform.active}, but the corresponding property "platforms.${platform.active}.home" is not found in the project's properties files. + Either open the project in the IDE and setup the Platform with the same name or add it manually. + For example like this: + ant -Duser.properties.file=<path_to_property_file> jar (where you put the property "platforms.${platform.active}.home" in a .properties file) + or ant -Dplatforms.${platform.active}.home=<path_to_JDK_home> jar (where no properties file is used) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must set src.dir + Must set test.src.dir + Must set build.dir + Must set dist.dir + Must set build.classes.dir + Must set dist.javadoc.dir + Must set build.test.classes.dir + Must set build.test.results.dir + Must set build.classes.excludes + Must set dist.jar + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must set javac.includes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + No tests executed. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must set JVM to use for profiling in profiler.info.jvm + Must set profiler agent JVM arguments in profiler.info.jvmargs.agent + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must select some files in the IDE or set javac.includes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + To run this application from the command line without Ant, try: + + ${platform.java} -jar "${dist.jar.resolved}" + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must select one file in the IDE or set run.class + + + + Must select one file in the IDE or set run.class + + + + + + + + + + + + + + + + + + + + + + + Must select one file in the IDE or set debug.class + + + + + Must select one file in the IDE or set debug.class + + + + + Must set fix.includes + + + + + + + + + + This target only 
works when run from inside the NetBeans IDE. + + + + + + + + + Must select one file in the IDE or set profile.class + This target only works when run from inside the NetBeans IDE. + + + + + + + + + This target only works when run from inside the NetBeans IDE. + + + + + + + + + + + + + This target only works when run from inside the NetBeans IDE. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must select one file in the IDE or set run.class + + + + + + Must select some files in the IDE or set test.includes + + + + + Must select one file in the IDE or set run.class + + + + + Must select one file in the IDE or set applet.url + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must select some files in the IDE or set javac.includes + + + + + + + + + + + + + + + + + + + + + + + + Some tests failed; see details above. + + + + + + + + + Must select some files in the IDE or set test.includes + + + + Some tests failed; see details above. + + + + Must select some files in the IDE or set test.class + Must select some method in the IDE or set test.method + + + + Some tests failed; see details above. 
+ + + + + Must select one file in the IDE or set test.class + + + + Must select one file in the IDE or set test.class + Must select some method in the IDE or set test.method + + + + + + + + + + + + + + + Must select one file in the IDE or set applet.url + + + + + + + + + Must select one file in the IDE or set applet.url + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/CDAWebServer/nbproject/genfiles.properties b/CDAWebServer/nbproject/genfiles.properties new file mode 100644 index 00000000..976625cc --- /dev/null +++ b/CDAWebServer/nbproject/genfiles.properties @@ -0,0 +1,8 @@ +build.xml.data.CRC32=61aa2b4e +build.xml.script.CRC32=f20c6f70 +build.xml.stylesheet.CRC32=f85dc8f2@1.107.0.48 +# This file is used by a NetBeans-based IDE to track changes in generated files such as build-impl.xml. +# Do not edit this file. You may delete it but then the IDE will never regenerate such files for you. +nbproject/build-impl.xml.data.CRC32=61aa2b4e +nbproject/build-impl.xml.script.CRC32=afbeeb91 +nbproject/build-impl.xml.stylesheet.CRC32=12e0a6c2@1.107.0.48 diff --git a/CDAWebServer/nbproject/project.properties b/CDAWebServer/nbproject/project.properties new file mode 100644 index 00000000..78ec735d --- /dev/null +++ b/CDAWebServer/nbproject/project.properties @@ -0,0 +1,106 @@ +annotation.processing.enabled=true +annotation.processing.enabled.in.editor=false +annotation.processing.processors.list= +annotation.processing.run.all.processors=true +annotation.processing.source.output=${build.generated.sources.dir}/ap-source-output +application.title=CDAWebServer +application.vendor=jbf +build.classes.dir=${build.dir}/classes +build.classes.excludes=**/*.java,**/*.form +# This directory is removed when the project is cleaned: +build.dir=build +build.generated.dir=${build.dir}/generated +build.generated.sources.dir=${build.dir}/generated-sources +# Only compile against the classpath explicitly 
listed here: +build.sysclasspath=ignore +build.test.classes.dir=${build.dir}/test/classes +build.test.results.dir=${build.dir}/test/results +# Uncomment to specify the preferred debugger connection transport: +#debug.transport=dt_socket +debug.classpath=\ + ${run.classpath} +debug.modulepath=\ + ${run.modulepath} +debug.test.classpath=\ + ${run.test.classpath} +debug.test.modulepath=\ + ${run.test.modulepath} +# Files in build.classes.dir which should be excluded from distribution jar +dist.archive.excludes= +# This directory is removed when the project is cleaned: +dist.dir=dist +dist.jar=${dist.dir}/CDAWebServer.jar +dist.javadoc.dir=${dist.dir}/javadoc +dist.jlink.dir=${dist.dir}/jlink +dist.jlink.output=${dist.jlink.dir}/CDAWebServer +endorsed.classpath= +excludes= +file.reference.cdfj.jar=lib/cdfj.jar +file.reference.jettison-1.4.1.jar=lib/jettison-1.4.1.jar +file.reference.UriTemplatesJava.jar=lib/UriTemplatesJava.jar +includes=** +jar.compress=false +javac.classpath=\ + ${reference.HapiServerBase.jar}:\ + ${file.reference.UriTemplatesJava.jar}:\ + ${file.reference.cdfj.jar}:\ + ${file.reference.jettison-1.4.1.jar} +# Space-separated list of extra javac options +javac.compilerargs= +javac.deprecation=false +javac.external.vm=true +javac.modulepath= +javac.processormodulepath= +javac.processorpath=\ + ${javac.classpath} +javac.source=1.8 +javac.target=1.8 +javac.test.classpath=\ + ${javac.classpath}:\ + ${build.classes.dir} +javac.test.modulepath=\ + ${javac.modulepath} +javac.test.processorpath=\ + ${javac.test.classpath} +javadoc.additionalparam= +javadoc.author=false +javadoc.encoding=${source.encoding} +javadoc.html5=false +javadoc.noindex=false +javadoc.nonavbar=false +javadoc.notree=false +javadoc.private=false +javadoc.splitindex=true +javadoc.use=true +javadoc.version=false +javadoc.windowtitle= +# The jlink additional root modules to resolve +jlink.additionalmodules= +# The jlink additional command line parameters +jlink.additionalparam= 
+jlink.launcher=true +jlink.launcher.name=CDAWebServer +main.class=cdawebserver.CDAWebServer +manifest.file=manifest.mf +meta.inf.dir=${src.dir}/META-INF +mkdist.disabled=false +platform.active=JDK_8__System_ +project.HapiServerBase=../HapiServerBase +reference.HapiServerBase.jar=${project.HapiServerBase}/dist/HapiServerBase.jar +run.classpath=\ + ${javac.classpath}:\ + ${build.classes.dir} +# Space-separated list of JVM arguments used when running the project. +# You may also define separate properties like run-sys-prop.name=value instead of -Dname=value. +# To set system properties for unit tests define test-sys-prop.name=value: +run.jvmargs= +run.modulepath=\ + ${javac.modulepath} +run.test.classpath=\ + ${javac.test.classpath}:\ + ${build.test.classes.dir} +run.test.modulepath=\ + ${javac.test.modulepath} +source.encoding=UTF-8 +src.dir=src +test.src.dir=test diff --git a/CDAWebServer/nbproject/project.xml b/CDAWebServer/nbproject/project.xml new file mode 100644 index 00000000..7f767aa8 --- /dev/null +++ b/CDAWebServer/nbproject/project.xml @@ -0,0 +1,29 @@ + + + org.netbeans.modules.java.j2seproject + + + CDAWebServer + + + + + + + + + + ./lib/nblibraries.properties + + + + HapiServerBase + jar + + jar + clean + jar + + + + diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/Adapter.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/Adapter.java new file mode 100644 index 00000000..023fdc8b --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/Adapter.java @@ -0,0 +1,40 @@ +package org.hapiserver.source.cdaweb; + +/** + * Generic way to access heterogeneous data as HAPI data, and also + * to provide some virtual variables. + * @author jbf + */ +public abstract class Adapter { + + /** + * one of these methods will be implemented by the adapter. 
+ */ + + public String adaptString(int index) { + throw new IllegalArgumentException("incorrect adapter used for string"); + } + + public double adaptDouble(int index) { + throw new IllegalArgumentException("incorrect adapter used for double"); + } + + public int adaptInteger(int index) { + throw new IllegalArgumentException("incorrect adapter used for integer"); + } + + public double[] adaptDoubleArray(int index) { + throw new IllegalArgumentException("incorrect adapter used for double array"); + } + + public int[] adaptIntegerArray(int index) { + throw new IllegalArgumentException("incorrect adapter used for integer array"); + } + + public String[] adaptStringArray(int index) { + throw new IllegalArgumentException("incorrect adapter used for string array"); + } + + public abstract String getString(int index); + +} diff --git a/HapiServerBase/src/org/hapiserver/source/cdaweb/AvailabilityIterator.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/AvailabilityIterator.java similarity index 52% rename from HapiServerBase/src/org/hapiserver/source/cdaweb/AvailabilityIterator.java rename to CDAWebServer/src/org/hapiserver/source/cdaweb/AvailabilityIterator.java index f246a79a..205b9b8f 100644 --- a/HapiServerBase/src/org/hapiserver/source/cdaweb/AvailabilityIterator.java +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/AvailabilityIterator.java @@ -3,12 +3,13 @@ import java.util.Iterator; import org.hapiserver.HapiRecord; +import org.hapiserver.TimeString; /** * convert AvailabilitySource iterator into a granule iterator * @author jbf */ -public class AvailabilityIterator implements Iterator { +public class AvailabilityIterator implements Iterator { private final Iterator it; private String file; // the current file @@ -23,28 +24,13 @@ public boolean hasNext() { } @Override - public int[] next() { + public TimeString[] next() { HapiRecord hr= it.next(); String start= hr.getIsoTime(0); String stop= hr.getIsoTime(1); this.file= hr.getString(2); - return new int[] { 
- Integer.parseInt(start.substring(0,4)), - Integer.parseInt(start.substring(5,7)), - Integer.parseInt(start.substring(8,10)), - Integer.parseInt(start.substring(11,13)), - Integer.parseInt(start.substring(14,16)), - Integer.parseInt(start.substring(17,19)), - 0, - Integer.parseInt(stop.substring(0,4)), - Integer.parseInt(stop.substring(5,7)), - Integer.parseInt(stop.substring(8,10)), - Integer.parseInt(stop.substring(11,13)), - Integer.parseInt(stop.substring(14,16)), - Integer.parseInt(stop.substring(17,19)), - 0 - }; + return new TimeString[] { new TimeString(start), new TimeString(stop) }; } /** diff --git a/HapiServerBase/src/org/hapiserver/source/cdaweb/CDFLeapSeconds.txt b/CDAWebServer/src/org/hapiserver/source/cdaweb/CDFLeapSeconds.txt similarity index 100% rename from HapiServerBase/src/org/hapiserver/source/cdaweb/CDFLeapSeconds.txt rename to CDAWebServer/src/org/hapiserver/source/cdaweb/CDFLeapSeconds.txt diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/CacheManager.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/CacheManager.java new file mode 100644 index 00000000..02d31f13 --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/CacheManager.java @@ -0,0 +1,89 @@ + +package org.hapiserver.source.cdaweb; + +import java.io.File; +import java.io.IOException; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; +import java.util.Timer; +import java.util.TimerTask; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * Process which will remove old products from the cache. This checks every + * 60 seconds for files more than 5 minutes old. 
+ * @author jbf + */ +public class CacheManager { + + private static final Logger logger= Logger.getLogger("hapi.cdaweb"); + + private CacheManager(File dir) { + cacheDir= dir.toPath(); + timer= new Timer("CacheManager",true); + timer.scheduleAtFixedRate( getTimerTask(), new Date(), 60000 ); + } + + private static Map instances= new HashMap<>(); + + public static CacheManager getInstance(File dir) { + CacheManager instance= instances.get(dir); + if ( instance!=null ) { + return instance; + } else { + synchronized (CacheManager.class) { + instance= instances.get(dir); + if ( instance==null ) { + instance= new CacheManager(dir); + instances.put( dir, instance ); + return instance; + } + return instance; + + } + } + } + + private final int maxAgeMilliseconds = 300000; // 5 minutes + + private final Path cacheDir; + + private final Timer timer; + + private TimerTask getTimerTask() { + return new TimerTask() { + @Override + public void run() { + try { + cleanup(); + } catch (IOException ex) { + logger.log(Level.SEVERE, null, ex); + } + } + }; + } + + + public void requestCleanup() { + + } + + private void cleanup() throws IOException { + try (DirectoryStream stream = Files.newDirectoryStream(cacheDir)) { + long now = System.currentTimeMillis(); + for (Path path : stream) { + long ageMillis = now - Files.getLastModifiedTime(path).toMillis(); + //System.err.println(path.toString()+"..."+(ageMillis/1000.)+" seconds old"); + if ( ageMillis > maxAgeMilliseconds) { + Files.deleteIfExists(path); + } + } + } + } + +} diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebAvailabilityHapiRecordSource.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebAvailabilityHapiRecordSource.java new file mode 100644 index 00000000..ef53fa4f --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebAvailabilityHapiRecordSource.java @@ -0,0 +1,487 @@ + +package org.hapiserver.source.cdaweb; + +import java.io.File; +import java.io.IOException; +import 
java.net.MalformedURLException; +import java.net.URL; +import java.text.ParseException; +import java.util.Iterator; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.codehaus.jettison.json.JSONArray; +import org.codehaus.jettison.json.JSONException; +import org.codehaus.jettison.json.JSONObject; +import org.hapiserver.AbstractHapiRecord; +import org.hapiserver.AbstractHapiRecordSource; +import org.hapiserver.CsvDataFormatter; +import org.hapiserver.HapiRecord; +import org.hapiserver.TimeString; +import org.hapiserver.TimeUtil; +import org.hapiserver.source.SourceUtil; +import org.w3c.dom.NodeList; + +/** + * return availability, showing when file granules are found. + * @author jbf + */ +public class CdawebAvailabilityHapiRecordSource extends AbstractHapiRecordSource { + + private static final Logger logger= Logger.getLogger("hapi.cdaweb"); + + /** + * the field containing the partial filename. + */ + public static int FIELD_FILENAME= 2; + + String spid; + int rootlen; + String root; + String bobwurl; + + /** + * @param availRoot folder containing orig_data responses, with a file "info/AC_AT_DEF.pkl" + * @param idavail the id for the availability set, like "AC_OR_SSC/source" + * @param info the info for this availability set. + */ + public CdawebAvailabilityHapiRecordSource( String availRoot, String idavail, JSONObject info ) { + String roots= availRoot + "/" + "info/"; + spid= spidFor(idavail); + bobwurl= roots + spid + ".json"; + try { + JSONArray array= info.getJSONArray("parameters"); + JSONObject p= array.getJSONObject(2); // the filename parameter + JSONObject stringType= p.getJSONObject("stringType"); + JSONObject urin= stringType.getJSONObject("uri"); + rootlen= urin.getString("base").length(); + if ( !urin.getString("base").contains("sp_phys/") ) { + if ( idavail.endsWith("OMNI2_H0_MRG1HR") ) { + rootlen= rootlen - 8; // problem for another day... 
+ } else { + rootlen= rootlen + 4; //TODO: Bernie's server says "sp_phys" while all.xml says "pub". + } + } + root= urin.getString("base"); + } catch (JSONException ex) { + throw new RuntimeException(ex); + } + + } + + + /** + * return the root for references in availability response + * @return + */ + public String getRoot() { + return this.root; + } + + /** + * get the catalog of the source files. + * @param url the catalog of the datasets. Note foo@1 will be just "foo" in this result + * @return + * @throws IOException + */ + public static String getAvailabilityCatalog(String url) throws IOException { + try { + String catalogString= CdawebInfoCatalogSource.getCatalog(url); + JSONObject catalogContainer= new JSONObject(catalogString); + JSONArray catalog= catalogContainer.getJSONArray("catalog"); + int n= catalog.length(); + JSONArray newArray= new JSONArray(); + String last=null; + for ( int i=0; i-1 ) { + id= id.substring(0,ia); + } + last= id; + jo.put( "id", id + "/source" ); + if ( jo.has("title") ) { + jo.put("title","Files for "+id ); + } + newArray.put( newArray.length(), jo ); + } + catalogContainer.put("catalog", newArray); + catalogContainer.setEscapeForwardSlashAlways(false); + return catalogContainer.toString(4); + + } catch (JSONException ex) { + throw new RuntimeException(ex); + } + } + + /** + * get the info for the id. + * @param roots root orig_data folder (website or file://...) 
containing the file "info/AC_H0_MFI.pkl" file + * @param availId the dataset id, like "AC_H0_SWE/availability" + * @return + */ + public static String getInfoAvail( String roots, String availId ) { + + try { + + if ( roots.endsWith(".json") ) { + throw new IllegalArgumentException("should end with / not json"); + } + + URL sourceURL; + String lastModified= null; + + String availString; + try { + int i= availId.indexOf("/"); + String id= availId.substring(0,i); + String surl= roots + "info/" + id + ".json"; + sourceURL= new URL(surl); + //File jsonfile= SourceUtil.downloadFile( url, File.createTempFile(id, ".json") ); + availString= SourceUtil.getAllFileLines( sourceURL ); + if ( roots.startsWith("file:") ) { + File file= new File( surl.substring(5) ); + lastModified= TimeUtil.reformatIsoTime("2000-01-01T00:00:00Z", + TimeUtil.fromMillisecondsSince1970( file.lastModified() ) ); + } else { + lastModified= TimeUtil.previousDay( TimeUtil.isoTimeFromArray( TimeUtil.now() ) ); + } + } catch (MalformedURLException ex) { + throw new RuntimeException(ex); //TODO + } catch (IOException ex) { + throw new RuntimeException(ex); //TODO + } + + + String root; + + JSONObject filesJson= new JSONObject(availString); + + JSONObject data= filesJson.getJSONObject("data"); + + JSONArray array= data.getJSONArray("FileDescription"); + + String start= array.getJSONObject(0).getString("StartTime"); + + String stop= array.getJSONObject(array.length()-1).getString("EndTime"); + + String startFile= array.getJSONObject(0).getString("Name"); + + String stopFile= array.getJSONObject(array.length()-1).getString("Name"); + + // the last four files + int n2= array.length()-1; + int n1= Math.max( 0, n2-4 ); + + String sampleStart= array.getJSONObject(n1).getString("StartTime"); + String sampleStop= array.getJSONObject(n2).getString("EndTime"); + + int i; + for ( i=0; i getGranuleIterator(TimeString start, TimeString stop) { + return null; //not used + } + + @Override + public boolean 
hasParamSubsetIterator() { + return false; + } + + @Override + public Iterator getIterator(TimeString start, TimeString stop) { + + try { + + URL url = new URL( bobwurl ); + + logger.log(Level.INFO, "readData URL: {0}", url); + + String availString; + + //File jsonfile= SourceUtil.downloadFile( url, File.createTempFile(id, ".json") ); + availString= SourceUtil.getAllFileLines( url ); + + JSONObject filesJson= new JSONObject(availString); + + JSONObject data= filesJson.getJSONObject("data"); + + JSONArray array= data.getJSONArray("FileDescription"); + + return fromJSONArray( array, root, rootlen ); + + } catch ( IOException | JSONException ex) { + throw new RuntimeException(ex); //TODO + } + + } + + private static Iterator fromJSONArray( JSONArray array, final String root, final int rootlen ) { + final int len= array.length(); + + try { + for ( int i=0; i0 ) { + System.err.println("Found overlapping files"); + n1.put("EndTime",s2); + } + } + } catch (JSONException ex) { + Logger.getLogger(CdawebAvailabilityHapiRecordSource.class.getName()).log(Level.SEVERE, null, ex); + } + + logger.log(Level.FINE, "creating {0} record iterator", len); + + return new Iterator() { + int irec=0; + + JSONObject nextObject; + + @Override + public boolean hasNext() { + boolean result= irec fromNodes( final NodeList starts, final NodeList stops, final String root, final int rootlen, final NodeList files ) { + final int len= starts.getLength(); + String[] fields= new String[3]; + + return new Iterator() { + int irec=0; + + @Override + public boolean hasNext() { + boolean result= irec AC_K1_SWE + * @param idavail AC_K1_SWE/source + * @return AC_K1_SWE + */ + private static String spidFor( String idavail ) { + int i= idavail.indexOf("/"); + String spid= idavail.substring(0,i); + return spid; + } + + public static void main( String[] args ) throws IOException, ParseException { + + args= new String[] { }; + //args= new String[] { "availability/AC_K1_SWE" }; + //args= new String[] { 
"availability/BAR_1A_L2_SSPC" }; + //args= new String[] { "availability/AC_K1_SWE", "2022-01-01T00:00Z", "2023-05-01T00:00Z" }; + //args= new String[] { "availability/RBSP-A-RBSPICE_LEV-2_ESRHELT", "2014-01-01T00:00Z", "2014-02-01T00:00Z" }; + args= new String[] { "TSS-1R_M1_CSAA/source", "1996-02-28T02:00:00.000Z", "1996-02-28T05:59:46.000Z" }; + //args= new String[] { "availability/FORMOSAT5_AIP_IDN" }; + //args= new String[] { "http://mag.gmu.edu/git-data/cdawmeta/data/orig_data/", "RBSP-A-RBSPICE_LEV-2_ESRHELT" }; + switch (args.length) { + case 0: + System.out.println(getAvailabilityCatalog("http://mag.gmu.edu/git-data/cdawmeta/data/hapi/catalog.json") ); + break; + case 2: + System.out.println(getInfoAvail(args[0],args[1]) ); + break; + case 3: + JSONObject info; + String orig_data= "http://mag.gmu.edu/git-data/cdawmeta/data/orig_data/"; + try { + info= new JSONObject( getInfoAvail(orig_data,args[0]) ); + } catch (JSONException ex) { + throw new RuntimeException(ex); + } + Iterator iter = + new CdawebAvailabilityHapiRecordSource( orig_data,args[0],info).getIterator( + new TimeString( TimeUtil.parseISO8601Time(args[1]) ), + new TimeString( TimeUtil.parseISO8601Time(args[2]) ) ); + if ( iter.hasNext() ) { + CsvDataFormatter format= new CsvDataFormatter(); + format.initialize( info,System.out,iter.next() ); + do { + HapiRecord r= iter.next(); + format.sendRecord( System.out, r ); + } while ( iter.hasNext() ); + } + break; + default: + printHelp(); + } + + } + +} diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebHapiRecordSource.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebHapiRecordSource.java new file mode 100644 index 00000000..132ad460 --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebHapiRecordSource.java @@ -0,0 +1,36 @@ + +package org.hapiserver.source.cdaweb; + +import java.io.File; +import java.util.logging.Logger; +import org.codehaus.jettison.json.JSONObject; +import org.hapiserver.HapiRecordSource; + 
+/** + * Delegates to the correct implementation + * @author jbf + */ +public class CdawebHapiRecordSource { + private static final Logger logger= Logger.getLogger("hapi.cdaweb"); + + public static HapiRecordSource create( String availRoot, String id, JSONObject info, JSONObject data, String cacheDir ) { + + if ( cacheDir.startsWith("file://") ) { + cacheDir= cacheDir.substring(7); + } else if ( cacheDir.startsWith("file:") ) { + cacheDir= cacheDir.substring(5); + } + File cache= new File(cacheDir); + if ( !cache.exists() ) { + if (!cache.mkdirs()) { + logger.warning("fail to make download area"); + throw new IllegalArgumentException("unable to continue"); + } + } + + CacheManager.getInstance(cache).requestCleanup(); + + return new CdawebServicesHapiRecordSource(availRoot,id,info,data,cache); + } + +} diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebInfoCatalogSource.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebInfoCatalogSource.java new file mode 100644 index 00000000..bbde4e72 --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebInfoCatalogSource.java @@ -0,0 +1,317 @@ + +package org.hapiserver.source.cdaweb; + +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.regex.Pattern; +import org.codehaus.jettison.json.JSONArray; +import org.codehaus.jettison.json.JSONException; +import org.codehaus.jettison.json.JSONObject; +import org.hapiserver.TimeUtil; +import org.hapiserver.source.SourceUtil; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + +/** + * Returns catalog response based on all.xml, and info responses from + * either Bob's process, Nand's existing server, or a future implementation (and this + * documentation needs 
to be updated). + * @author jbf + * @see https://cdaweb.gsfc.nasa.gov/pub/catalogs/all.xml + */ +public class CdawebInfoCatalogSource { + + private static final Logger logger= Logger.getLogger("hapi.cdaweb"); + + public static final String CDAWeb = "https://cdaweb.gsfc.nasa.gov/"; + + private static final String CDAWEB_HAPI_VERSION = "20260130.0606"; + + protected static Map coverage= new HashMap<>(); + protected static Map filenaming= new HashMap<>(); + + /** + * read the node and note the filenaming and form a template + * @param id + * @param dataset + * @param jo the JSON object for the catalog + * @return the URL found. + */ + private static String getURL( String id, Node dataset, JSONObject jo ) throws JSONException { + + Node childNode= getNodeByName( dataset, "access" ); + + String lookfor= "ftp://cdaweb.gsfc.nasa.gov/pub/istp/"; + String lookfor2= "ftp://cdaweb.gsfc.nasa.gov/pub/cdaweb_data"; + + Node urlNode= getNodeByName( childNode, "URL" ); + + if ( urlNode.getFirstChild()==null ) { + logger.log(Level.FINE, "URL is missing for {0}, data cannot be accessed.", id); + return null; + } + + String url= urlNode.getFirstChild().getTextContent().trim(); + if ( url.startsWith( lookfor ) ) { + // "ftp://cdaweb.gsfc.nasa.gov/pub/istp/ace/mfi_h2" + // http://cdaweb.gsfc.nasa.gov/istp_public/data/ + url= CDAWeb + "sp_phys/data/" + url.substring(lookfor.length()); + } + if ( url.startsWith(lookfor2) ) { + url= CDAWeb + "sp_phys/data/" + url.substring(lookfor2.length()); + } + String templ= url + "/"; + String subdividedby= childNode.getAttributes().getNamedItem("subdividedby").getTextContent(); + String filenaming= childNode.getAttributes().getNamedItem("filenaming").getTextContent(); + + if ( !subdividedby.equals("None") ) { + templ= templ + subdividedby + "/"; + } + templ= templ + filenaming; + + CdawebInfoCatalogSource.filenaming.put(id,templ); + + jo.put( "x_sourceUrl", url ); + if ( !subdividedby.equals("None") ) { + jo.put( "x_subdividedby", subdividedby ); + 
} + jo.put( "x_filenaming", filenaming ); + + return url; + + } + + private static HashSet skips; + private static HashSet skipsPatterns; + + private static void readSkips() throws IOException { + logger.info("reading skips"); + skips= new HashSet<>(); + skipsPatterns= new HashSet<>(); + URL skipsFile= CdawebAvailabilityHapiRecordSource.class.getResource("skips.txt"); + + try (BufferedReader r = new BufferedReader(new InputStreamReader( skipsFile.openStream() ))) { + String line = r.readLine(); + while ( line!=null ) { + int i=line.indexOf("#"); + if ( i>-1 ) line= line.substring(0,i).trim(); + String[] ss= line.split(",",-2); + if ( line.length()>0 && ss.length==1 ) { + if ( ss[0].contains(".") || ss[0].contains("[") ) { + skipsPatterns.add( Pattern.compile(ss[0]) ); + } else { + skips.add(ss[0].trim()); + } + } + line = r.readLine(); + } + } + } + + private static Node getNodeByName( Node parent, String childName ) { + NodeList nn = parent.getChildNodes(); + for ( int i=0; i0 ) lastModified=lm; +// } + + URL url= new URL(surl); + String src= SourceUtil.getAllFileLines( url ); + try { + jo= new JSONObject(src); + } catch ( JSONException ex ) { + throw new IllegalArgumentException("input file has JSON syntax issue: " +surl ); + } + + String id= jo.optString("id"); + + if ( jo.has("info") ) { + jo= jo.getJSONObject("info"); + } + jo.put("x_info_author", "bw"); + jo.put("x_cdaweb_hapi_version", CDAWEB_HAPI_VERSION); + JSONArray ja= jo.getJSONArray("parameters"); + JSONArray janew= new JSONArray(); + for ( int ip= 0; ip30 ) { + p.put("length","30"); // match the old server for Epoch16 + } + // https://github.com/rweigel/cdawmeta/issues/42 +// if ( "AWE_L3A_TMP".equals(id) && ip>0 ) {// .../hapi/info?id=AWE_L3A_TMP +// String n= p.getString("name"); +// if ( n.equals("Temperature") +// || n.equals("Latitude") +// || n.equals("Longitude") +// || n.equals("MLCloud") ) { +// logger.info("reform of parameter is necessary here"); +// JSONArray size= p.optJSONArray("size"); 
+// JSONArray reform= new JSONArray(); +// for ( int i=1; i0 && sampleStartDate.equals(sampleStopDate) ) { //C3_PP_CIS has start and stop times equal for each granule. + logger.info("kludge for sampleStartDate.equals(sampleStopDate)."); + jo.put("sampleStopDate",jo.getString("stopDate") ); + } + + if ( url.getProtocol().equals("file") ) { + long lastModified= new File( url.getFile() ).lastModified(); + jo.put( "x_info_modificationDate", TimeUtil.fromMillisecondsSince1970(lastModified) ); + } + + // communicate to the server that it should not use cached info responses. + jo.put( "x_info_caching", false ); + + //if ( !lastModified.startsWith("00") ) { + // jo.put("lastModified",lastModified); + //} + return jo.toString(4); + } catch ( JSONException ex ) { + throw new IllegalArgumentException("input file has JSON schema issue (something required was missing, etc): " +surl ); + } + + } + + public static void main( String[] args ) throws IOException { + args= new String[] { "AC_AT_DEF" }; + //args= new String[0]; + + if ( args.length==0 ) { + System.out.println( CdawebInfoCatalogSource.getCatalog("http://mag.gmu.edu/git-data/cdawmeta/data/hapi/catalog.json") ); + } else if ( args.length==1 ) { + System.out.println( + CdawebInfoCatalogSource.getInfo("https://cottagesystems.com/~jbf/hapi/p/cdaweb/data/orig_data/info/A1_K0_MPA.json", + "https://cottagesystems.com/~jbf/hapi/p/cdaweb/data/hapi/info/A1_K0_MPA.json" ) ); + } + } + +} diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebServicesHapiRecordIterator.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebServicesHapiRecordIterator.java new file mode 100644 index 00000000..6477e577 --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebServicesHapiRecordIterator.java @@ -0,0 +1,1917 @@ +package org.hapiserver.source.cdaweb; + +import gov.nasa.gsfc.spdf.cdfj.CDFException; +import gov.nasa.gsfc.spdf.cdfj.CDFReader; +import java.io.BufferedReader; +import java.io.File; +import 
java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.UnsupportedEncodingException; +import java.lang.management.ManagementFactory; +import java.lang.reflect.Array; +import java.net.InetAddress; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLEncoder; +import java.net.UnknownHostException; +import java.nio.file.Files; +import java.text.MessageFormat; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Vector; +import java.util.logging.ConsoleHandler; +import java.util.logging.Formatter; +import java.util.logging.Level; +import java.util.logging.LogRecord; +import java.util.logging.Logger; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; +import org.codehaus.jettison.json.JSONArray; +import org.codehaus.jettison.json.JSONException; +import org.codehaus.jettison.json.JSONObject; +import org.hapiserver.HapiRecord; +import org.hapiserver.TimeString; +import org.hapiserver.TimeUtil; +import org.hapiserver.source.SourceUtil; +import org.hapiserver.source.cdaweb.adapters.Add1800; +import org.hapiserver.source.cdaweb.adapters.ApplyEsaQflag; +import org.hapiserver.source.cdaweb.adapters.ApplyFilterFlag; +import org.hapiserver.source.cdaweb.adapters.ApplyRtnQflag; +import org.hapiserver.source.cdaweb.adapters.ArrSlice; +import org.hapiserver.source.cdaweb.adapters.ClampToZero; +import org.hapiserver.source.cdaweb.adapters.CompThemisEpoch; +import org.hapiserver.source.cdaweb.adapters.CompThemisEpoch16; +import org.hapiserver.source.cdaweb.adapters.ConstantAdapter; +import org.hapiserver.source.cdaweb.adapters.ConvertLog10; +import org.hapiserver.source.cdaweb.adapters.VirtualFunctions; 
+import org.w3c.dom.Document; +import org.xml.sax.SAXException; + +/** + * Implement the CDAWeb HAPI server by calculating a set of adapters which go from CDF variables + * in a file to the Strings, ints, and doubles which implement the HapiRecord. This will download + * and cache CDF files when the server is running remotely, and will call web services and + * cache the response when virtual variables must be calculated. One relatively simple virtual + * variable is resolved, alternate_view, so that the web services are not needed. + * + * This uses CDAWeb Web Services described at https://cdaweb.gsfc.nasa.gov/WebServices/REST/. + * + * @author jbf + */ +public class CdawebServicesHapiRecordIterator implements Iterator { + + private static final Logger logger = Logger.getLogger("hapi.cdaweb"); + + /** + * the variable was not found in the CDF, so just return fill values. + * @param param + * @param nrec + * @return + */ + private static Object makeFillValues( JSONObject param, int nrec) throws JSONException { + JSONArray sizej= param.optJSONArray("size"); + int[] size; + int ndims; + if ( sizej==null ) { + size= new int[1]; + ndims=0; + } else { + size= new int[1+sizej.length()]; + ndims= sizej.length(); + } + size[0]= nrec; + for ( int i=0; i0.999999 && check<1.000001 ) { + dd[i]= fill; + } + } + } + } + + private static double performFuzzyFill(double d, double fill ) { + if ( fill!=0 ) { + double check= d/fill; + if ( check>0.999999 && check<1.000001 ) { + return fill; + } + } + return d; + } + + static class TimerFormatter extends Formatter { + + long t0 = System.currentTimeMillis(); + String resetMessage = "ENTRY"; + + @Override + public String format(LogRecord record) { + if (record.getMessage().equals(resetMessage)) { + t0 = record.getMillis(); + } + String message = MessageFormat.format(record.getMessage(), record.getParameters()); + if (message.equals("ENTRY") || message.equals("RETURN")) { + message = message + " " + record.getSourceClassName() + " " + 
record.getSourceMethodName(); + } + return String.format("%06d: %s\n", record.getMillis() - t0, message); + } + + } + + static { + logger.setLevel(Level.FINER); + ConsoleHandler h = new ConsoleHandler(); + h.setFormatter(new TimerFormatter()); + h.setLevel(Level.ALL); + logger.addHandler(h); + } + + HapiRecord nextRecord; + Adapter[] adapters; + + int index; + int nindex; + + private static class StringAdapter extends Adapter { + + String[] array; + + protected StringAdapter( String[] array ) { + this.array= array; + } + + @Override + public String adaptString(int index) { + return this.array[index]; + } + + @Override + public String getString(int index) { + return this.array[index]; + } + + } + + private static String addTime( String baseYYYYmmddTHH, double hours ) { + int[] dc; + try { + dc = TimeUtil.parseISO8601Time(baseYYYYmmddTHH); + dc= TimeUtil.add( dc, new int[] { 0, 0, 0, (int)hours, 0, 0, 0, 0 } ); + return String.format("%d-%02d-%02dT%02d", dc[0], dc[1], dc[2], dc[3] ); + } catch ( ParseException ex ) { + throw new RuntimeException(ex); + + } + } + + public static class IsotimeEpochAdapter extends Adapter { + + /** + * the time in milliseconds since year 1 for cdfEpoch, and this + * marks the epoch value of the previous hour boundary. + */ + double baseTime; + + String baseYYYYmmddTHH; + + double[] array; + + String format = ":%02d:%02d.%09d"; + int formatFactor = 1; // number by which to round + + public IsotimeEpochAdapter(double[] array, int length) { + this.array = array; + double d = array[0]; + double us2000 = (d - 6.3113904E13) * 1000; // ms -> microseconds + double day2000 = Math.floor(us2000 / 86400000000.); // days since 2000-01-01. + double usDay = us2000 - day2000 * 86400000000.; // microseconds within this day. + double ms1970 = day2000 * 86400000. 
+ 946684800000.; + String baseDay = TimeUtil.fromMillisecondsSince1970((long) ms1970); + baseYYYYmmddTHH = baseDay.substring(0, 10) + "T00"; + baseTime = (long) (d - usDay / 1000); + switch (length) { // YYYY4hh7mm0HH3MM6SS9NNNNNNNNNZ + case 24: + format = ":%02d:%02d.%03dZ"; + formatFactor = 1000000; + break; + case 27: + format = ":%02d:%02d.%06dZ"; + formatFactor = 1000000; + break; + case 30: + format = ":%02d:%02d.%09dZ"; + break; + default: + throw new IllegalArgumentException("not supported"); + } + } + + private String formatTime(double t) { + double offset = t - baseTime; // milliseconds + while (offset < 0.) { // I don't think it should go here, because it means timetags are not in order. + double us2000 = (t - 6.3113904E13) * 1000; // ms -> microseconds + double day2000 = Math.floor(us2000 / 86400000000.); // days since 2000-01-01. + double usDay = us2000 - day2000 * 86400000000.; // microseconds within this day. + double ms1970 = day2000 * 86400000. + 946684800000.; + String baseDay = TimeUtil.fromMillisecondsSince1970((long) ms1970); + baseYYYYmmddTHH = baseDay.substring(0, 10) + "T00"; + baseTime = (long) (t - usDay / 1000); + try { + baseYYYYmmddTHH = TimeUtil.normalizeTimeString(baseYYYYmmddTHH).substring(0, 13); + } catch ( IllegalArgumentException ex ) { + System.err.println("Here stop"); + } + offset = t - baseTime; + } + while (offset >= 3600000.) { + double hours = Math.floor( offset / 3600000. ); + baseTime = baseTime + hours * 3600000.; + int hour = Integer.parseInt(baseYYYYmmddTHH.substring(11, 13)); + baseYYYYmmddTHH = baseYYYYmmddTHH.substring(0, 11) + String.format("%02d", (int) (hour + hours)); + baseYYYYmmddTHH = TimeUtil.normalizeTimeString(baseYYYYmmddTHH).substring(0, 13); + offset = t - baseTime; + } + int nanos = (int) ((offset * 1000000) % 1000000000.); + offset = (int) (offset / 1000); // now it's in seconds. Note offset must be positive for this to work. 
+ int seconds = (int) (offset % 60); + int minutes = (int) (offset / 60); // now it's in minutes + return baseYYYYmmddTHH + String.format(format, minutes, seconds, nanos / formatFactor); + } + + @Override + public String adaptString(int index) { + return formatTime(array[index]); + } + + @Override + public String getString(int index) { + return adaptString(index); + } + + } + + private static class IsotimeEpoch16Adapter extends Adapter { + + /** + * the time in milliseconds since year 1 for cdfEpoch. + */ + double baseTime; + + String baseYYYYmmddTHH; + + double[][] array; + + String format = ":%02d:%02d.%09d"; + int formatFactor = 1; // number by which to round + private IsotimeEpoch16Adapter(double[][] array, int length) { + this.array = array; + double d = array[0][0]; + double us2000 = (d - 6.3113904e+10 ) * 1e6; // ms -> microseconds + double day2000 = Math.floor(us2000 / 86400000000.); // days since 2000-01-01. + double usDay = us2000 - day2000 * 86400000000.; // microseconds within this day. + double ms1970 = day2000 * 86400000. + 946684800000.; + String baseDay = TimeUtil.fromMillisecondsSince1970((long) ms1970); + baseYYYYmmddTHH = baseDay.substring(0, 10) + "T00"; + baseTime = (long) (d - usDay / 1000); + switch (length) { // YYYY4hh7mm0HH3MM6SS9NNNNNNNNNZ + case 24: + format = ":%02d:%02d.%03dZ"; + formatFactor = 1000000; + break; + case 27: + format = ":%02d:%02d.%06dZ"; + formatFactor = 1000000; + break; + case 30: + format = ":%02d:%02d.%09dZ"; + break; + case 33: + format = ":%02d:%02d.%09dZ"; + break; + default: + throw new IllegalArgumentException("not supported"); + } + } + + private String formatTime(double t,double t1) { + double offset = t - baseTime + t1 / 1e6; // milliseconds + while (offset < 0.) { + // Not sure why we need this, some sort of miscalculation of baseTime + double hours = Math.floor( offset / 3600000. 
); + baseTime = baseTime + hours * 3600000.; + baseYYYYmmddTHH= addTime( baseYYYYmmddTHH, hours ); + try { + baseYYYYmmddTHH = TimeUtil.normalizeTimeString(baseYYYYmmddTHH).substring(0, 13); + } catch ( IllegalArgumentException ex ) { + System.err.println("Here stop"); + } + offset = t - baseTime; + } + while (offset >= 3600000.) { + double hours = Math.floor( offset / 3600000. ); + baseTime = baseTime + hours * 3600000.; + int hour = Integer.parseInt(baseYYYYmmddTHH.substring(11, 13)); + baseYYYYmmddTHH = baseYYYYmmddTHH.substring(0, 11) + String.format("%02d", (int) (hour + hours)); + baseYYYYmmddTHH = TimeUtil.normalizeTimeString(baseYYYYmmddTHH).substring(0, 13); + offset = t - baseTime; + } + int nanos = (int) ((offset * 1000000) % 1000000000.); + offset = (int) (offset / 1000); // now it's in seconds. Note offset must be positive for this to work. + int seconds = (int) (offset % 60); + int minutes = (int) (offset / 60); // now it's in minutes + return baseYYYYmmddTHH + String.format(format, minutes, seconds, nanos / formatFactor); + } + + @Override + public String adaptString(int index) { + return formatTime(array[index][0],array[index][1]); + } + + @Override + public String getString(int index) { + return adaptString(index); + } + + } + + private static class IsotimeTT2000Adapter extends Adapter { + + /** + * the time in milliseconds since year 1 for cdfEpoch, or nanoseconds for tt2000. + */ + long baseTime; + + String baseYYYYmmddTHH; + + long[] array; + + private IsotimeTT2000Adapter(long[] array, int width) { + this.array = array; + double d = Array.getDouble(array, 0); + double us2000 = new LeapSecondsConverter(false).convert(d); + double day2000 = Math.floor(us2000 / 86400000000.); // days since 2000-01-01. + double usDay = us2000 - day2000 * 86400000000.; // seconds within this day. + double ms1970 = day2000 * 86400000. 
+ 946684800000.; + String baseDay = TimeUtil.fromMillisecondsSince1970((long) ms1970); + baseYYYYmmddTHH = baseDay.substring(0, 10) + "T00"; + baseTime = (long) (d - usDay * 1000); + } + + private String formatTime(double t) { + long offset = (long) ((t - baseTime)); // This must not cross a leap second, will always be in nanos + while (offset < 0.) { + // Not sure why we need this, some sort of miscalculation of baseTime + long hours = Math.floorDiv( offset, 3600000000000L ); + baseTime = baseTime + hours * 3600000000000L; + baseYYYYmmddTHH= addTime( baseYYYYmmddTHH, hours ); + try { + baseYYYYmmddTHH = TimeUtil.normalizeTimeString(baseYYYYmmddTHH).substring(0, 13); + } catch ( IllegalArgumentException ex ) { + System.err.println("Here stop"); + } + offset = (long)(t - baseTime); + } + while (offset >= 3600000000000L) { + long hours = offset / 3600000000000L; + baseTime = baseTime + hours * 3600000000000L; + int hour = Integer.parseInt(baseYYYYmmddTHH.substring(11, 13)); + baseYYYYmmddTHH = baseYYYYmmddTHH.substring(0, 11) + String.format("%02d", (int) (hour + hours)); + baseYYYYmmddTHH = TimeUtil.normalizeTimeString(baseYYYYmmddTHH).substring(0, 13); + offset = (long) ((t - baseTime)); + } + int nanos = (int) ((offset) % 1000000000.); + offset = offset / 1000000000; // now it's in seconds + int seconds = (int) (offset % 60); + int minutes = (int) (offset / 60); // now it's in minutes + return baseYYYYmmddTHH + String.format(":%02d:%02d.%09dZ", minutes, seconds, nanos); + } + + @Override + public String adaptString(int index) { + return formatTime(array[index]); + } + + @Override + public String getString(int index) { + return adaptString(index); + } + } + + /** + * Integers come out of the library as doubles. 
+ * wget -O - 'http://localhost:8080/HapiServer/hapi/data?id=WI_OR_DEF&start=1997-07-01T23:00:00.000Z&stop=1997-07-01T23:50:00.000Z¶meters=Time,CRN_EARTH' + */ + private static class IntDoubleAdapter extends Adapter { + + double[] array; + double fill; // numerical errors mean we need to make fill data canonical + + private IntDoubleAdapter(double[] array,double fill) { + this.fill= fill; + this.array = array; + } + + @Override + public double adaptDouble(int index) { + if (index >= this.array.length) { + throw new ArrayIndexOutOfBoundsException("can't find the double at position " + index); + } + double d= this.array[index]; + if ( fill!=0 ) { + double check= d/fill; + if ( check>0.999999 && check<1.000001 ) { + return fill; + } + } + return d; + } + + @Override + public int adaptInteger(int index) { + if (index >= this.array.length) { + throw new ArrayIndexOutOfBoundsException("can't find the double at position " + index); + } + double d= this.array[index]; + d= performFuzzyFill(d, fill); + return (int)d; + } + + @Override + public String getString(int index) { + return String.valueOf(adaptInteger(index)); + } + + } + + + private static class DoubleDoubleAdapter extends Adapter { + + double[] array; + double fill; // numerical errors mean we need to make fill data canonical + + private DoubleDoubleAdapter(double[] array,double fill) { + this.fill= fill; + this.array = array; + } + + @Override + public double adaptDouble(int index) { + if (index >= this.array.length) { + throw new ArrayIndexOutOfBoundsException("can't find the double at position " + index); + } + double d= this.array[index]; + d= performFuzzyFill(d, fill); + return d; + } + + @Override + public int[] adaptIntegerArray(int index) { // hapi/data?id=VG1_PWS_WF@1&time.min=2025-104T17:27:13Z&time.max=2025-104T17:27:59Z' + return new int[] { (int)adaptDouble(index) }; + } + + + + @Override + public String getString(int index) { + return String.valueOf(adaptDouble(index)); + } + } + + private static 
class DoubleArrayDoubleAdapter extends Adapter { + + double[][] array; + int n; // there's a weird bit of code where the Java library is giving me double arrays containing ints. + int items; // number of items in array + double fill; // fill for extra values + + private DoubleArrayDoubleAdapter(double[][] array, int items,double fill) { + this.items= items; + this.array = array; + if (array.length > 0) { + this.n = array[0].length; + } + this.fill= fill; + } + + @Override + public double[] adaptDoubleArray(int index) { + if ( this.array[index].length==items ) { // TODO: comment where this is not the case + double[] dd= this.array[index]; + performFuzzyFill( dd, fill ); + return dd; + } else { + double[] result= new double[items]; + System.arraycopy( this.array[index], 0, result, 0, n ); + Arrays.fill( result, n, items, fill ); + return result; + } + } + + @Override + public int[] adaptIntegerArray(int index) { + int[] adapt = new int[n]; + double[] rec = this.array[index]; + for (int i = 0; i < n; i++) { + adapt[i] = (int) rec[i]; + } + return adapt; + } + + @Override + public String getString(int index) { + double[] dd= adaptDoubleArray(index); + if ( dd.length>2 ) { + return "["+dd[0]+","+dd[1]+",...]"; + } else if ( dd.length==2 ) { + return "["+dd[0]+","+dd[1]+"]"; + } else { + return "["+dd[0]+"]"; + } + } + + } + + private static class DoubleFloatAdapter extends Adapter { + + float[] array; + double fill; + + private DoubleFloatAdapter(float[] array,double fill) { + this.array = array; + this.fill= fill; + } + + @Override + public double adaptDouble(int index) { + double d= this.array[index]; + d= performFuzzyFill( d, fill ); + return d; + } + + @Override + public String getString(int index) { + return String.valueOf(adaptDouble(index)); + } + } + + private static class IntegerLongAdapter extends Adapter { + + long[] array; + + private IntegerLongAdapter(long[] array) { + this.array = array; + } + + @Override + public int adaptInteger(int index) { + return 
(int) this.array[index]; + } + + @Override + public String getString(int index) { + return String.valueOf(adaptInteger(index)); + } + } + + private static class IntegerIntegerAdapter extends Adapter { + + int[] array; + + private IntegerIntegerAdapter(int[] array) { + this.array = array; + } + + @Override + public int adaptInteger(int index) { + return this.array[index]; + } + + @Override + public String getString(int index) { + return String.valueOf(adaptInteger(index)); + } + } + + private static class IntegerShortAdapter extends Adapter { + + short[] array; + + private IntegerShortAdapter(short[] array) { + this.array = array; + } + + @Override + public int adaptInteger(int index) { + return this.array[index]; + } + + @Override + public String getString(int index) { + return String.valueOf(adaptInteger(index)); + } + } + + private static class IntegerByteAdapter extends Adapter { + + byte[] array; + + private IntegerByteAdapter(byte[] array) { + this.array = array; + } + + @Override + public int adaptInteger(int index) { + return this.array[index]; + } + + @Override + public String getString(int index) { + return String.valueOf(adaptInteger(index)); + } + } + + private static class IntegerArrayIntegerAdapter extends Adapter { + + int[][] array; + + private IntegerArrayIntegerAdapter(int[][] array) { + this.array = array; + } + + @Override + public int[] adaptIntegerArray(int index) { + return this.array[index]; + } + + @Override + public String getString(int index) { + return String.valueOf(adaptInteger(index)); + } + + } + + + /** + * Returns the name of the integer data type, for example, 8 is type 8-byte integer (a.k.a. Java long), and 33 is CDF_TT2000. + * + * @param type the code for data type + * @return a name identifying the type. + * @see https://spdf.gsfc.nasa.gov/pub/software/cdf/doc/cdf380/cdf38ifd.pdf page 33. 
+ */ + public static String nameForType(int type) { + switch (type) { + case 1: + return "CDF_INT1"; + case 41: + return "CDF_BYTE"; // 1-byte signed integer + case 2: + return "CDF_INT2"; + case 4: + return "CDF_INT4"; + case 8: + return "CDF_INT8"; + case 11: + return "CDF_UINT1"; + case 12: + return "CDF_UINT2"; + case 14: + return "CDF_UINT4"; + case 21: + return "CDF_REAL4"; + case 44: + return "CDF_FLOAT"; + case 22: + return "CDF_REAL8"; + case 45: + return "CDF_DOUBLE"; + case 31: + return "CDF_EPOCH"; + case 32: + return "CDF_EPOCH16"; // make of two CDF_REAL8, + case 33: + return "CDF_TT2000"; + case 51: + return "CDF_CHAR"; + case 52: + return "CDF_UCHAR"; + default: + return "???"; + } + } + + /** + * return true if the data contain virtual variables which must be calculated by CDAWeb web services. This is slower than + * reading the files directly. Some virtual variables may be implemented within this server in the future. + * + * @param id the id, for example RBSP-A_DENSITY_EMFISIS-L4 + * @param info info for the parameter. + * @param params the parameters to read + * @return true if web services must be used. + */ + public static boolean mustUseWebServices(String id, JSONObject info, String[] params) { + int iat = id.indexOf("@"); // multiple timetags cdf files will have @\d for each set of timetags. + if (iat > 0) { + id = id.substring(0, iat); + } + if ( hapiServerResolvesId(id) ) return false; + if ( id.equals("AC_OR_SSC") ) { + return true; // uses both rvars and zvars + } + + try { + JSONArray parameters= info.getJSONArray("parameters"); + for ( int i=0; i0 && !VirtualFunctions.virtualFunctionSupported(s) ) { + return true; + } + } + } catch ( JSONException ex ) { + throw new RuntimeException(ex); + } + + return false; + } + + /** + * some virtual variables are easily implemented, so we resolve those within the server + * and data files can be cached. 
+ * @param id the id, for example "AMPTECCE_H0_MEPA" which has only "alternate_view" + * @return true if the id can be resolved. + */ + private static boolean hapiServerResolvesId(String id) { + if ( id.equals("AMPTECCE_H0_MEPA") ) return true; + return false; + } + + /** + * return null or the file which should be used locally. When the server is at Goddard/CDAWeb, + * this is the file in their database. When running the server remotely, this is a mirror + * of their data (in /var/www/cdaweb/htdocs/). + * @param url the file URL + * @return null or the file. + */ + private static File getCdfLocalFile( URL url ) { + if ( url.getHost().equals("cdaweb.gsfc.nasa.gov") && url.getFile().startsWith("/pub/data/") ) { + File f= new File( "/var/www/cdaweb/htdocs/" + url.getFile() ); + return f; + } + if ( url.getHost().equals("cdaweb.gsfc.nasa.gov") && url.getFile().startsWith("/sp_phys/data/") ) { + File f= new File( "/var/www/cdaweb/htdocs/" + url.getFile() ); + return f; + } + return null; + } + + private static String escapeParameters( String s ) { + try { + return URLEncoder.encode(s,"US-ASCII"); + } catch ( UnsupportedEncodingException ex ) { + throw new RuntimeException(ex); + } + } + + /** + * return either the URL of the CDF generated by the web services, or the URL of the CDF file in the https area. Many CDFs + * contain virtual variables which are only computed within the IDL web services. When a file does not contain virtual variables + * (or in the future the virtual variable is trivial to compute), then a reference to the direct file is returned. + * + * @param id the dataset id, such as AC_OR_SSC or RBSP-A_DENSITY_EMFISIS-L4 + * @param info the info object + * @param start the seven-component start time + * @param stop the seven-component stop time + * @param params the list of parameters to read + * @param origFile null or the file which contains the original data (pre Bernie's services) + * @return the URL of the file containing the data. 
+ */ + private static URL getCdfDownloadURL(String id, JSONObject info, int[] start, int[] stop, String[] params, URL origFile) throws MalformedURLException { + logger.entering("CdawebServicesHapiRecordIterator", "getCdfDownloadURL"); + String sstart = String.format("%04d%02d%02dT%02d%02d%02dZ", start[0], start[1], start[2], start[3], start[4], start[5]); + String sstop = String.format("%04d%02d%02dT%02d%02d%02dZ", stop[0], stop[1], stop[2], stop[3], stop[4], stop[5]); + + int iat = id.indexOf("@"); // multiple timetags cdf files will have @\d for each set of timetags. + if (iat > 0) { + id = id.substring(0, iat); + } + + if (origFile == null || origFile.getFile().endsWith(".nc") || mustUseWebServices(id, info, params)) { + + String ss; + if (params.length == 1) { + try { + // special case where we have to request some DATA variable, cannot just request time. + JSONArray parameters = info.getJSONArray("parameters"); + String dependent = parameters.getJSONObject(parameters.length() - 1).getString("name"); + ss = dependent; + } catch (JSONException ex) { + throw new RuntimeException(ex); + } + } else { + ss = String.join(",", Arrays.copyOfRange(params, 1, params.length)); // CDAWeb WS will send time. 
+ } + if (params.length > 2 || (params.length == 2 && !params[0].equals("Time"))) { + ss = "ALL-VARIABLES"; + } + + String surl + = String.format("https://cdaweb.gsfc.nasa.gov/WS/cdasr/1/dataviews/sp_phys/datasets/%s/data/%s,%s/%s?format=cdf", + id, sstart, sstop, escapeParameters(ss) ); + + logger.log(Level.FINER, "request {0}", surl); + + try { + Document doc = SourceUtil.readDocument(new URL(surl)); + XPathFactory factory = XPathFactory.newInstance(); + XPath xpath = (XPath) factory.newXPath(); + String sval = (String) xpath.evaluate("/DataResult/FileDescription/Name/text()", doc, XPathConstants.STRING); + logger.exiting("CdawebServicesHapiRecordIterator", "getCdfDownloadURL"); + return new URL(sval); + } catch ( FileNotFoundException ex ) { + throw new RuntimeException("File not found", ex ); + } catch (XPathExpressionException | SAXException | IOException | ParserConfigurationException ex) { + throw new RuntimeException("unable to handle XML response", ex); + } + + } else { + logger.exiting("CdawebServicesHapiRecordIterator", "getCdfDownloadURL"); + return origFile; + } + + } + + /** + * return the processID (pid), or the fallback if the pid cannot be found. + * + * @param fallback the string (null is okay) to return when the pid cannot be found. + * @return the process id or the fallback provided by the caller. //TODO: Java9 has method for accessing process ID. 
+ */ + public static String getProcessId(final String fallback) { + // Note: may fail in some JVM implementations + // therefore fallback has to be provided + + // something like '@', at least in SUN / Oracle JVMs + final String jvmName = ManagementFactory.getRuntimeMXBean().getName(); + final int index = jvmName.indexOf('@'); + + if (index < 1) { + // part before '@' empty (index = 0) / '@' not found (index = -1) + return fallback; + } + + try { + return Long.toString(Long.parseLong(jvmName.substring(0, index))); + } catch (NumberFormatException e) { + // ignore + } + return fallback; + } + + /** + * flatten 3-D array into 2-D. Thanks, (Google) Bard! + * TODO: review--looks like IBEX_H3_ENA_HI_R13_CG_NOSP_RAM_1YR needs transpose + * @param array + * @return + */ + public static double[][] flatten(double[][][] array) { + int len1 = array[0].length * array[0][0].length; + double[][] flattenedArray = new double[array.length][len1]; + int index; + for (int i = 0; i < array.length; i++) { + index = 0; + for (int j = 0; j < array[i].length; j++) { + System.arraycopy(array[i][j], 0, flattenedArray[i], index, array[i][j].length); + index += array[i][j].length; + } + } + + return flattenedArray; + } + + /** + * flatten 4-D array into 2-D array. Thanks, ChatGPT! 
+ * @param input + * @return + */ + public static double[][] flatten4D(double[][][][] input) { + int dim0 = input.length; + int dim1 = input[0].length; + int dim2 = input[0][0].length; + int dim3 = input[0][0][0].length; + + int cols = dim1 * dim2 * dim3; + double[][] output = new double[dim0][cols]; + + for (int i = 0; i < dim0; i++) { + for (int j = 0; j < dim1; j++) { + for (int k = 0; k < dim2; k++) { + for (int l = 0; l < dim3; l++) { + int colIndex = j * (dim2 * dim3) + k * dim3 + l; + output[i][colIndex] = input[i][j][k][l]; + } + } + } + } + + return output; + } + + + private static double[][] flattenDoubleArray(Object array) { + int numDimensions = 1; + Class componentType = array.getClass().getComponentType(); + while (componentType != double.class) { + numDimensions++; + componentType = componentType.getComponentType(); + } + switch (numDimensions) { + case 2: + return (double[][]) array; + case 3: + return flatten((double[][][]) array); + case 4: + return flatten4D((double[][][][]) array); + default: + throw new IllegalArgumentException("Not supported: rank 4"); + } + } + + /** + * limit the lifespan of locally cached copies of data on spot9 and spot10, Jeremy's + * computers which are used to model the environment at Goddard. Presently these + * just limit the file to one hour, but a future implementation may check URL last modified + * and make better decisions. 
+     *
+     * @param cdfUrl the remote URL of the CDF granule (currently unused; kept for a future freshness check)
+     * @param maybeLocalFile the candidate local copy, or null
+     * @return the file, possibly after deleting a stale copy
+     */
+    private static File checkLocalFileFreshness( URL cdfUrl, File maybeLocalFile ) {
+        try {
+            InetAddress addr = InetAddress.getLocalHost();
+            String hostname= addr.getCanonicalHostName();
+            // the one-hour limit only applies on these two hosts
+            if ( hostname.equals("spot9") || hostname.equals("spot10") ) {
+                if ( maybeLocalFile!=null && maybeLocalFile.exists() ) {
+                    // older than one hour (3600000 ms) -- remove so it is re-downloaded
+                    if ( ( System.currentTimeMillis() - maybeLocalFile.lastModified() ) > 3600000 ) {
+                        logger.log(Level.INFO, "Removing stale copy of {0} on {1}", new Object[]{maybeLocalFile.getName(), hostname});
+                        maybeLocalFile.delete();
+                    }
+                }
+            }
+            return maybeLocalFile;
+        } catch (UnknownHostException ex) {
+            // cannot identify the host; leave the file alone
+            return maybeLocalFile;
+        }
+    }
+
+    /**
+     * return the record iterator for the dataset.  This presumes that start and stop are based on the intervals calculated by
+     * CdawebServicesHapiRecordSource, and an incomplete set of records will be returned if this is not the case. The file,
+     * possibly calculated when figuring out intervals, can be provided as well, so that the web service identifying the file
+     * is only called once.
+     *
+     * @param id the dataset id, such as AC_OR_SSC or RBSP-A_DENSITY_EMFISIS-L4
+     * @param info the info for this id
+     * @param startts the start time
+     * @param stopts the stop time
+     * @param params the parameters to read
+     * @param origFile the file, (or null if not known), of the data.
+     * @param cacheDir staging area where files can be stored for ~ 1 hour for reuse
+     * @return the record iterator.
+     */
+    public static CdawebServicesHapiRecordIterator create(
+            String id, JSONObject info, TimeString startts, TimeString stopts, String[] params, URL origFile, File cacheDir) {
+        try {
+
+            logger.entering(CdawebServicesHapiRecordIterator.class.getCanonicalName(), "constructor");
+
+            String ss = String.join(",", Arrays.copyOfRange(params, 1, params.length)); // CDAWeb WS will send time.
+            // more than one data parameter (or one which is not Time): ask for everything
+            if (params.length > 2 || (params.length == 2 && !params[0].equals("Time"))) {
+                ss = "ALL-VARIABLES";
+            }
+
+            int[] start= startts.toComponents();
+            int[] stop= stopts.toComponents();
+
+            // times formatted as compact ISO8601, e.g. 20230426T000000Z
+            String sstart = String.format("%04d%02d%02dT%02d%02d%02dZ", start[0], start[1], start[2], start[3], start[4], start[5]);
+            String sstop = String.format("%04d%02d%02dT%02d%02d%02dZ", stop[0], stop[1], stop[2], stop[3], stop[4], stop[5]);
+
+            // cache-file name encodes id, interval, and parameter selection
+            String name = String.format("%s_%s_%s_%s", id, sstart, sstop, escapeParameters(ss) );
+
+            String u = System.getProperty("user.name"); // getProcessId("000");
+            File p = cacheDir;
+
+            File cdfFile; // this is the file we'll use to read the data, possibly created by Bernie's web services
+
+            File tmpFile = new File(p, name + ".cdf"); // madness... apparently tomcat can't write to /tmp
+
+            // reuse a cached download that is less than one hour old
+            if ( tmpFile.exists() && (System.currentTimeMillis() - tmpFile.lastModified()) < ( 3600000 )) {
+                logger.fine("no need to download file I already have loaded within the last hour!");
+                cdfFile= tmpFile;
+
+            } else {
+                URL cdfUrl = getCdfDownloadURL(id, info, start, stop, params, origFile); //TODO: must there be a download here?
+                logger.log(Level.FINER, "request {0}", cdfUrl);
+
+                // the URL may resolve to a file on a locally-mounted copy of the archive
+                File maybeLocalFile= getCdfLocalFile( cdfUrl );
+
+                if ( maybeLocalFile!=null ) {
+                    maybeLocalFile= checkLocalFileFreshness( cdfUrl, maybeLocalFile );
+                }
+
+                if ( maybeLocalFile!=null && maybeLocalFile.exists() ) {
+                    cdfFile= maybeLocalFile;
+                    logger.log(Level.FINER, "using local file {0}", cdfFile.toString() );
+                } else {
+                    logger.log(Level.INFO, "Downloading {0}", cdfUrl);
+                    tmpFile = SourceUtil.downloadFileLocking(cdfUrl, tmpFile, tmpFile.toString()+".tmp" );
+
+                    // when the granule is a plain archive file (no web-service processing needed),
+                    // promote the download into the local mirror location for reuse
+                    if ( maybeLocalFile!=null && !mustUseWebServices(id, info, params) ) {
+                        if ( maybeLocalFile.getParentFile().exists() || maybeLocalFile.getParentFile().mkdirs() ) {
+                            Files.move( tmpFile.toPath(), maybeLocalFile.toPath() );
+                            cdfFile= maybeLocalFile;
+                        } else {
+                            logger.log(Level.INFO, "unable to mkdir -p {0}", maybeLocalFile.getParentFile());
+                            cdfFile= tmpFile;
+                        }
+                    } else {
+                        cdfFile= tmpFile;
+                    }
+                    logger.log(Level.FINER, "downloaded {0}", cdfUrl);
+                }
+            }
+
+            if ( cdfFile.getName().endsWith(".nc") ) {
+                throw new IllegalArgumentException("cdfFile ends with .nc and is probably not a CDF file.");
+            }
+            return new CdawebServicesHapiRecordIterator( id, info, start, stop, params, cdfFile);
+
+        } catch (CDFException.ReaderError | JSONException | IOException r ) {
+            throw new RuntimeException(r);
+        }
+    }
+
+    /**
+     * CDF files coming from CDAWeb will not contain characters which
+     * are not allowed in IDL tag names.  # was removed from list, see DE1_2SEC_OA "Orbit_#_9"
+     * @param paramName the CDF variable name
+     * @return the name with disallowed characters replaced by '$'
+     */
+    private static String mungeParameterName( String paramName ) {
+        //['\','/','.','%','!','@','^','&','*','(',')','-','+','=', $ , '`','~','|','?','<','>',' ']
+        String result= paramName.replaceAll("\\/|\\.|\\%|\\!|\\@|\\^|\\&|\\*|\\(|\\)|\\-|\\+|\\=|\\`|\\~|\\?|\\<|\\>|\\ ", "\\$");
+        return result;
+    }
+
+    /**
+     * return true if one of the parameters is virtual.
+     * A virtual parameter is one like "alternate_view" where
+     * a different variable is used (with different display, which is not relevant in HAPI), or "apply_esa_qflag"
+     * where another variable is used to filter.  While most virtual data is resolved using CDAWeb web
+     * services, some are easily implemented here within the HAPI server.
+     * @param info the info response for the dataset, containing the "parameters" array
+     * @param params the parameter names requested
+     * @return true if any of the named parameters is marked virtual in the info
+     * @throws JSONException when the info does not have the expected structure
+     */
+    private static boolean isVirtual( JSONObject info, String[] params ) throws JSONException {
+        // NOTE(review): result is unused; presumably kept so that a missing "parameters"
+        // array fails fast with a JSONException -- confirm before removing.
+        JSONArray parameters= info.getJSONArray("parameters");
+        for ( String s: params ) {
+            JSONObject param= SourceUtil.getParam( info, s );
+            // x_cdf_VIRTUAL marks CDAWeb virtual variables in the info document
+            if ( param.optBoolean( "x_cdf_VIRTUAL", false ) ) return true;
+        }
+        return false;
+    }
+
+    /**
+     * return the sizes for the array.  This can be an array of arrays, too.
+     * Only the first record is examined, so ragged records are not detected.
+     * @param o object which is an array.
+     * @return the JSONArray of sizes for each dimension.
+     */
+    private static JSONArray getSizeFor( Object o ) {
+        if ( !o.getClass().isArray() ) throw new IllegalArgumentException("Expected array");
+        o= Array.get( o, 0 ); // HAPI wants size for each record.
+ List sizes= new ArrayList<>(); + while ( o.getClass().isArray() ) { + sizes.add( Array.getLength(o) ); + o= Array.get( o, 0 ); + } + return new JSONArray(sizes); + } + + private static Adapter getAdapterFor( + CDFReader reader, + JSONObject param1, + String param, + int nrec ) throws CDFException.ReaderError, JSONException { + + Adapter result; + + // BB_xyz_xyz_sr2__C1_CP_STA_SM is crash + //if ( param.equals("BB_xyz_xyz_sr2__C1_CP_STA_SM") ) { + // System.err.println("stop here"); + //} + + int type = reader.getType(param); + Object o = reader.get(param); + if ( o==null || !o.getClass().isArray() ) { + try { + o= makeFillValues( param1, nrec ); + //throw new RuntimeException("didn't get array from reader: "+param+" file: "+tmpFile.toString()); + } catch (JSONException ex) { + throw new RuntimeException(ex); + } + } + if (Array.getLength(o) != nrec) { + if ( nrec==1 ) { // IBEX_H3_ENA_HI_R13_CG_NOSP_RAM_1YR has one record of 30x60 map + Object newo= Array.newInstance( o.getClass(), nrec ); + Array.set(newo, 0, o); + o= newo; + } else if ( nrec==-1 ) { + nrec= Array.getLength(o); + } else { + if (Array.getLength(o) == 1) { + // let's assume they meant for this to non-time varying. + if ( nrec==-1 ) { + return new ConstantAdapter( Array.getDouble(o,0) ); + } else { + Object newO = Array.newInstance(o.getClass().getComponentType(), nrec); + Object v1 = Array.get(o, 0); + for (int irec = 0; irec < nrec; irec++) { + Array.set(newO, irec, v1); + } + o = newO; + } + } else { + throw new IllegalArgumentException("nrec is inconsistent! 
This internal error must be fixed, got "+Array.getLength(o)+" expected "+nrec); + } + } + } + String stype = nameForType(type); + Class c = o.getClass().getComponentType(); + String sfill= param1.getString("fill"); + double fill; + if ( sfill==null ) { + fill= -1e31; + } else { + try { + fill= Double.parseDouble(sfill); + } catch ( NumberFormatException ex ) { + fill= -1e31; + } + } + + if (!c.isArray()) { + if (c == double.class) { + if ( stype.startsWith("CDF_INT") ) { + result = new IntDoubleAdapter((double[]) o,fill); + } else if ( stype.startsWith("CDF_UINT") ) { + result = new IntDoubleAdapter((double[]) o,fill); + } else { + result = new DoubleDoubleAdapter((double[]) o,fill); + } + } else if (c == float.class) { + result = new DoubleFloatAdapter((float[]) o,fill); + } else if (c == int.class) { + result = new IntegerIntegerAdapter((int[]) o); + } else if (c == short.class) { + result = new IntegerShortAdapter((short[]) o); + } else if (c == byte.class) { + result = new IntegerByteAdapter((byte[]) o); + } else if (c == long.class) { + result = new IntegerLongAdapter((long[]) o); + } else if (stype.equals("CDF_UINT2")) { + result = new IntegerIntegerAdapter((int[]) o); + } else if (stype.equals("CDF_UINT1")) { + result = new IntegerShortAdapter((short[]) o); + } else if ( c == String.class ) { + result = new StringAdapter((String[])o); + } else { + throw new IllegalArgumentException("unsupported type"); + } + } else { + c = c.getComponentType(); + if (c == double.class) { + JSONArray size; + size= getSizeFor(o); + int items= size.getInt(0); + for ( int k=1; k 0) { + switch (type) { + case 31: + adapters[i] = new IsotimeEpochAdapter((double[]) o, length); + break; + case 32: + adapters[i] = new IsotimeEpoch16Adapter((double[][]) o, length); + break; + case 33: + adapters[i] = new IsotimeTT2000Adapter((long[]) o, length); + break; + default: + //TODO: epoch16. 
+ throw new IllegalArgumentException("type not supported for column 0 time: "+ nameForType(type) ); + } + nindex = nrec; + } else { + nindex = 0; + } + + } else { + + String param = params[i]; + + adapters[i]= getAdapterFor( reader, param1, param, nrec ); + + } + } + + logger.log(Level.FINER, "calculated adapters"); + + index = 0; + logger.exiting(CdawebServicesHapiRecordIterator.class.getCanonicalName(), "constructor"); + + } catch (CDFException.ReaderError ex) { + ex.printStackTrace(); + throw new RuntimeException(ex); + } + + } + + @Override + public boolean hasNext() { + return index < nindex; + } + + @Override + public HapiRecord next() { + final int j = index; + + index++; + while ( index=0 ) ) { + index++; // typically one increment. + } + + if ( index==nindex ) { + System.err.println("all done"); + } + + return new HapiRecord() { + @Override + public String getIsoTime(int i) { + String s= adapters[i].adaptString(j); + return s; + } + + @Override + public String[] getIsoTimeArray(int i) { + return null; + } + + @Override + public String getString(int i) { + return adapters[i].adaptString(j); + } + + @Override + public String[] getStringArray(int i) { + return null; + } + + @Override + public double getDouble(int i) { + return adapters[i].adaptDouble(j); + } + + @Override + public double[] getDoubleArray(int i) { + return adapters[i].adaptDoubleArray(j); + } + + @Override + public int getInteger(int i) { + return adapters[i].adaptInteger(j); + } + + @Override + public int[] getIntegerArray(int i) { + return adapters[i].adaptIntegerArray(j); + } + + @Override + public String getAsString(int i) { + return adapters[i].getString(j); + } + + @Override + public int length() { + return adapters.length; + } + }; + } + + //RBSP-B_DENSITY_EMFISIS-L4 + public static void mainCase2() { +// CdawebServicesHapiRecordIterator dd= new CdawebServicesHapiRecordIterator( +// "AC_H2_SWE", +// new int[] { 2021, 3, 12, 0, 0, 0, 0 }, +// new int[] { 2021, 3, 13, 0, 0, 0, 0 }, +// 
new String[] { "Time", "Np", "Vp" } ); + CdawebServicesHapiRecordIterator dd = CdawebServicesHapiRecordIterator.create( + "RBSP-B_DENSITY_EMFISIS-L4", + null, + new TimeString( new int[]{2019, 7, 15, 0, 0, 0, 0} ), + new TimeString( new int[]{2019, 7, 16, 0, 0, 0, 0} ), + new String[]{"Time", "fce", "bmag"}, null, new File("/tmp/hapi-server-cache/") ); + while (dd.hasNext()) { + HapiRecord rec = dd.next(); + System.err.println(String.format("%s %.2f %.2f", rec.getIsoTime(0), rec.getDouble(1), rec.getDouble(2))); + } + } + + // array-of-array handling + public static void mainCase3() { + CdawebServicesHapiRecordIterator dd = CdawebServicesHapiRecordIterator.create( + "AC_K0_MFI", + null, + new TimeString( new int[]{2023, 4, 26, 0, 0, 0, 0} ), + new TimeString( new int[]{2023, 4, 27, 0, 0, 0, 0} ), + new String[]{"Time", "BGSEc"}, null, new File("/tmp/hapi-server-cache/")); + while (dd.hasNext()) { + HapiRecord rec = dd.next(); + double[] ds = rec.getDoubleArray(1); + System.err.println(String.format("%s: %.1f %.1f %.1f", rec.getIsoTime(0), ds[0], ds[1], ds[2])); + } + } + + // array-of-array handling + public static void mainCase4() { + CdawebServicesHapiRecordIterator dd = CdawebServicesHapiRecordIterator.create( + "VG1_PWS_WF", + null, + new TimeString( new int[]{1979, 3, 5, 6, 0, 0, 0} ), + new TimeString( new int[]{1979, 3, 5, 7, 0, 0, 0} ), + new String[]{"Time", "Waveform"}, null, new File("/tmp/hapi-server-cache/")); + while (dd.hasNext()) { + HapiRecord rec = dd.next(); + double[] ds = rec.getDoubleArray(1); + System.err.println(String.format("%s: %.1f %.1f %.1f", rec.getIsoTime(0), ds[0], ds[1], ds[2])); + } + } + + // array-of-array handling + public static void mainCase5() { + CdawebServicesHapiRecordIterator dd = CdawebServicesHapiRecordIterator.create( + "AC_H1_SIS", + null, + new TimeString( new int[]{2023, 4, 6, 0, 0, 0, 0} ), + new TimeString( new int[]{2023, 4, 7, 0, 0, 0, 0} ), + new String[]{"Time", "cnt_Si", "cnt_S"}, null, new 
File("/tmp/hapi-server-cache/")); + while (dd.hasNext()) { + HapiRecord rec = dd.next(); + double[] ds1 = rec.getDoubleArray(1); + double[] ds2 = rec.getDoubleArray(2); + System.err.println(String.format("%s: %.1f %.1f %.1f ; %.1f %.1f %.1f", rec.getIsoTime(0), ds1[0], ds1[1], ds1[2], ds2[0], ds2[1], ds2[2])); + } + } + + // large request handling + public static void mainCase6() { + //vap+hapi:http://localhost:8080/HapiServer/hapi?id=AC_H2_CRIS¶meters=Time,flux_B&timerange=2022-12-16+through+2022-12-20 + for (int iday = 16; iday < 21; iday++) { + CdawebServicesHapiRecordIterator dd = CdawebServicesHapiRecordIterator.create( + "AC_H2_CRIS", + null, + new TimeString( new int[]{2022, 12, iday, 0, 0, 0, 0} ), + new TimeString( new int[]{2022, 12, iday + 1, 0, 0, 0, 0} ), + "Time,flux_B".split(",", -2), null, new File("/tmp/hapi-server-cache/")); + while (dd.hasNext()) { + HapiRecord rec = dd.next(); + //double[] ds1= rec.getDoubleArray(1); + //System.err.println( String.format( "%s: %.1e %.1e %.1e %.1e %.1e %.1e %.1e", + // rec.getIsoTime(0), ds1[0], ds1[1], ds1[2], ds1[3], ds1[4], ds1[5], ds1[6] ) ); + } + } + } + + // AC_H2_CRIS gets three months for the sample range. My measurements and calculations have the extra startup per day as + // about three seconds, so this means the request will take an extra 270 seconds. 
+    public static void mainCase7() {
+        long t0 = System.currentTimeMillis();
+        //http://localhost:8080/HapiServer/hapi/data?id=AC_H2_CRIS&parameters=flux_C&start=2022-12-14T22:00Z&stop=2023-02-12T23:00Z
+        int[] start = new int[]{2022, 12, 14, 0, 0, 0, 0};
+        int[] stop = new int[]{2023, 02, 13, 0, 0, 0, 0};
+        // walk the range one day at a time, as the server would for a long request
+        while (TimeUtil.gt(stop, start)) {
+            int[] next = TimeUtil.add(start, new int[]{0, 0, 1, 0, 0, 0, 0});
+            System.err.println("t: " + TimeUtil.formatIso8601Time(start));
+            CdawebServicesHapiRecordIterator dd = CdawebServicesHapiRecordIterator.create(
+                    "AC_H2_CRIS",
+                    null,
+                    new TimeString( start ),
+                    new TimeString( next ),
+                    "Time,flux_B".split(",", -2), null, new File("/tmp/hapi-server-cache/"));
+            // drain the iterator; we are timing the overhead, not inspecting values
+            while (dd.hasNext()) {
+                HapiRecord rec = dd.next();
+                //double[] ds1= rec.getDoubleArray(1);
+                //System.err.println( String.format( "%s: %.1e %.1e %.1e %.1e %.1e %.1e %.1e",
+                //    rec.getIsoTime(0), ds1[0], ds1[1], ds1[2], ds1[3], ds1[4], ds1[5], ds1[6] ) );
+            }
+            start = next;
+        }
+        System.err.println("time (sec): " + (System.currentTimeMillis() - t0) / 1000.);
+    }
+
+    // AC_OR_SSC isn't sending anything over for Bob's sample range.
+    // AC_OR_SSC should format using x_format.
+    public static void mainCase8() {
+        long t0 = System.currentTimeMillis();
+        //http://localhost:8080/HapiServer/hapi/data?id=AC_H2_CRIS&parameters=flux_C&start=2022-12-14T22:00Z&stop=2023-02-12T23:00Z
+        int[] start = new int[]{2023, 1, 1, 0, 0, 0, 0};
+        int[] stop = new int[]{2023, 01, 11, 0, 0, 0, 0};
+        // walk the range one day at a time, counting records per day
+        while (TimeUtil.gt(stop, start)) {
+            int[] next = TimeUtil.add(start, new int[]{0, 0, 1, 0, 0, 0, 0});
+            System.err.println("t: " + TimeUtil.formatIso8601Time(start));
+            CdawebServicesHapiRecordIterator dd = CdawebServicesHapiRecordIterator.create(
+                    "AC_OR_SSC",
+                    null,
+                    new TimeString( start ),
+                    new TimeString( next ),
+                    "Time,XYZ_GSEO".split(",", -2), null, new File("/tmp/hapi-server-cache/"));
+            int nrec = 0;
+            while (dd.hasNext()) {
+                HapiRecord rec = dd.next();
+                nrec++;
+                //double[] ds1= rec.getDoubleArray(1);
+                //System.err.println( String.format( "%s: %.1e %.1e %.1e %.1e %.1e %.1e %.1e",
+                //    rec.getIsoTime(0), ds1[0], ds1[1], ds1[2], ds1[3], ds1[4], ds1[5], ds1[6] ) );
+            }
+            System.err.println("  nrec..." + nrec);
+            start = next;
+        }
+        System.err.println("time (sec): " + (System.currentTimeMillis() - t0) / 1000.);
+    }
+
+    /**
+     * ICON_L2-5_FUV_NIGHT has channels which change size with each file.  The info says there should
+     * be 129,6 but the file might only have 127,6.
+ */ + public static void mainCase10() throws IOException, JSONException { + long t0 = System.currentTimeMillis(); + //http://localhost:8080/HapiServer/hapi/data?id=ICON_L2-5_FUV_NIGHT¶meters=ICON_L25_O_Plus_Density&start=2022-11-23T00:54:54Z&stop=2022-11-23T23:58:38Z + int[] start = new int[]{2022, 11, 23, 0, 54, 54, 0}; + int[] stop = new int[]{2022, 11, 23, 23, 58, 38, 0}; + + JSONObject info= new JSONObject( + CdawebInfoCatalogSource.getInfo( + "https://cottagesystems.com/~jbf/hapi/p/cdaweb/orig_data/info/ICON_L2-5_FUV_NIGHT.json", + "https://cottagesystems.com/~jbf/hapi/p/cdaweb/data/info/ICON_L2-5_FUV_NIGHT.json" ) ); + while (TimeUtil.gt(stop, start)) { + int[] next = TimeUtil.add(start, new int[]{0, 0, 1, 0, 0, 0, 0}); + System.err.println("t: " + TimeUtil.formatIso8601Time(start)); + CdawebServicesHapiRecordIterator dd = CdawebServicesHapiRecordIterator.create( + "ICON_L2-5_FUV_NIGHT", + info, + new TimeString( start ), + new TimeString( next ), + "Time,ICON_L25_O_Plus_Density".split(",", -2), null, new File("/tmp/hapi-server-cache/")); + int nrec = 0; + + double lastT=0; + int irec=0; + while (dd.hasNext()) { + HapiRecord rec = dd.next(); + nrec++; + double[] rec1= rec.getDoubleArray(1); + if ( ( TimeUtil.toMillisecondsSince1970(rec.getIsoTime(0))-lastT ) > 30000 ) { + System.err.print( String.format( "%4d %s: ", irec, rec.getIsoTime(0) ) ); + for ( int i=0; i0?",":"")+String.format("%15.3e",rec1[i]) ); + } + System.err.println(); + } + irec=irec+1; + lastT= TimeUtil.toMillisecondsSince1970(rec.getIsoTime(0)); + if ( irec>575 ) System.exit(0); + } + System.err.println(" nrec..." + nrec); + start = next; + } + System.err.println("time (sec): " + (System.currentTimeMillis() - t0) / 1000.); + } + + /** + * Virtual variable OMNI2_H0_MRG1HR¶meters=SIGMA-ABS_B1800 doesn't load with Nand's. 
+ */ + public static void mainCase9() { + long t0 = System.currentTimeMillis(); + //http://localhost:8080/HapiServer/hapi/data?id=OMNI2_H0_MRG1HR¶meters=SIGMA-ABS_B1800&start=1979-03-03T00:00Z&stop=1979-03-04T00:00Z + int[] start = new int[]{2024, 1, 4, 0, 0, 0, 0}; + int[] stop = new int[]{2024, 1, 5, 0, 0, 0, 0}; + while (TimeUtil.gt(stop, start)) { + int[] next = TimeUtil.add(start, new int[]{0, 0, 1, 0, 0, 0, 0}); + System.err.println("t: " + TimeUtil.formatIso8601Time(start)); + CdawebServicesHapiRecordIterator dd = CdawebServicesHapiRecordIterator.create( + "OMNI2_H0_MRG1HR", + null, + new TimeString( start ), + new TimeString( next ), + "Time,ABS_B1800".split(",", -2), null, new File("/tmp/hapi-server-cache/")); + int nrec = 0; + while (dd.hasNext()) { + HapiRecord rec = dd.next(); + nrec++; + System.err.println( String.format( "%s: %.1e", rec.getIsoTime(0), rec.getDouble(1) ) ); + } + System.err.println(" nrec..." + nrec); + start = next; + } + System.err.println("time (sec): " + (System.currentTimeMillis() - t0) / 1000.); + } + + public static void mainCase1() { +// CdawebServicesHapiRecordIterator dd= new CdawebServicesHapiRecordIterator( +// "AC_H2_SWE", +// new int[] { 2021, 3, 12, 0, 0, 0, 0 }, +// new int[] { 2021, 3, 13, 0, 0, 0, 0 }, +// new String[] { "Time", "Np", "Vp" } ); + CdawebServicesHapiRecordIterator dd = CdawebServicesHapiRecordIterator.create( + "AC_K0_MFI", + null, + new TimeString( new int[]{2023, 4, 26, 0, 0, 0, 0} ), + new TimeString( new int[]{2023, 4, 27, 0, 0, 0, 0} ), + new String[]{"Time", "Magnitude"}, null, new File("/tmp/hapi-server-cache/")); + while (dd.hasNext()) { + HapiRecord rec = dd.next(); + System.err.println(rec.getIsoTime(0)); + } + } + + public static void mainCase11() { +// CdawebServicesHapiRecordIterator dd= new CdawebServicesHapiRecordIterator( +// "AC_H2_SWE", +// new int[] { 2021, 3, 12, 0, 0, 0, 0 }, +// new int[] { 2021, 3, 13, 0, 0, 0, 0 }, +// new String[] { "Time", "Np", "Vp" } ); + // 
http://localhost:8280/HapiServer/hapi/data?dataset=AC_OR_SSC&start=2023-01-01T00:00Z&stop=2024-01-01T00:00Z + CdawebServicesHapiRecordIterator dd = CdawebServicesHapiRecordIterator.create( + "AC_OR_SSC", + null, + new TimeString( new int[]{2023,01,01,00,00,0,0} ), + new TimeString( new int[]{2024,01,01,00,00,0,0} ), + new String[]{"Time", "Radius"}, null, new File("/tmp/hapi-server-cache/")); + while (dd.hasNext()) { + HapiRecord rec = dd.next(); + System.err.println(rec.getIsoTime(0)); + } + } + + /** + * see if we can implement alt_view and Themis quality without web services + * @throws java.io.IOException + * @throws org.codehaus.jettison.json.JSONException + */ + public static void mainCase12() throws IOException, JSONException { + // http://localhost:8280/HapiServer/hapi/data?dataset=AC_OR_SSC&start=2023-01-01T00:00Z&stop=2024-01-01T00:00Z + String id= "AMPTECCE_H0_MEPA@0"; + String urlorig= "file:/net/spot10/hd1_8t/home/weigel/cdawmeta/data/orig_data/info/"+id+".json"; + String surl= "file:/net/spot10/hd1_8t/home/weigel/cdawmeta/data/hapi/info/"+id+".json"; + + JSONObject info = new JSONObject( CdawebInfoCatalogSource.getInfo(urlorig, surl) ); + CdawebServicesHapiRecordIterator dd = CdawebServicesHapiRecordIterator.create( + id, + info, + new TimeString( new int[]{1988,12,22,0,0,0,0} ), + new TimeString( new int[]{1988,12,22,16,19,0,0} ), + new String[]{"Time", "ION_protons_COUNTS_stack"}, null, new File("/tmp/hapi-server-cache/")); + while (dd.hasNext()) { + HapiRecord rec = dd.next(); + System.err.println(rec.getIsoTime(0)+","+rec.getAsString(1)); + } + } + + public static void main(String[] args) throws Exception { + //mainCase1(); + //mainCase2(); + //mainCase3(); + //mainCase4(); + //mainCase5(); + //mainCase6(); + //mainCase7(); + //mainCase8(); + mainCase9(); + //mainCase10(); + //mainCase11(); + //mainCase12(); + } + +} diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebServicesHapiRecordSource.java 
b/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebServicesHapiRecordSource.java new file mode 100644 index 00000000..8ee6dcad --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/CdawebServicesHapiRecordSource.java @@ -0,0 +1,130 @@ + +package org.hapiserver.source.cdaweb; + +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Iterator; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.codehaus.jettison.json.JSONException; +import org.codehaus.jettison.json.JSONObject; +import org.hapiserver.AbstractHapiRecordSource; +import org.hapiserver.HapiRecord; +import org.hapiserver.TimeString; +import org.hapiserver.TimeUtil; + +/** + * CdawebServicesHapiRecordSource creates a HapiRecord iterator from CDF files, + * using the WebServices or reading directly from files. + * @author jbf + */ +public class CdawebServicesHapiRecordSource extends AbstractHapiRecordSource { + + private static final Logger logger= Logger.getLogger("hapi.cdaweb"); + + private String id; + JSONObject info; + JSONObject data; + AvailabilityIterator availabilityIterator; + String root; + String availRoot; // Root containing "info" and the data granule availability files. + File cache; + + /** + * Constructor for the record source. This reads the CDF files needed from the availability files. 
+ * @param availRoot folder containing orig_data responses, with a file "info/AC_AT_DEF.pkl" + * @param id the id, like "AC_H0_EPM" + * @param info the resolved info configuration object + * @param data the data configuration object + * @param cacheDir cacheDir staging area where files can be stored for ~ 1 hour for reuse + */ + public CdawebServicesHapiRecordSource( String availRoot, String id, JSONObject info, JSONObject data, File cacheDir ) { + logger.entering("CdawebServicesHapiRecordSource","constructor"); + this.id= id; + this.info= info; + this.data= data; + this.availRoot= availRoot; + this.cache= cacheDir; + logger.exiting("CdawebServicesHapiRecordSource","constructor"); + } + + @Override + public boolean hasGranuleIterator() { + return true; + } + + @Override + public Iterator getGranuleIterator(TimeString start, TimeString stop) { + logger.entering("CdawebServicesHapiRecordSource","getGranuleIterator"); + + int ia= id.indexOf("@"); + String availId= ia==-1 ? id : id.substring(0,ia); + + String availInfo= CdawebAvailabilityHapiRecordSource.getInfoAvail( availRoot, availId + "/source" ); + JSONObject infoObject; + try { + infoObject = new JSONObject(availInfo); + } catch (JSONException ex) { + throw new RuntimeException(ex); + } + CdawebAvailabilityHapiRecordSource source= new CdawebAvailabilityHapiRecordSource( availRoot, availId + "/source", infoObject ); + Iterator it = source.getIterator( start, stop ); + this.root= source.getRoot(); + + availabilityIterator= new AvailabilityIterator(it); + logger.exiting("CdawebServicesHapiRecordSource","getGranuleIterator"); + return availabilityIterator; + } + + @Override + public boolean hasParamSubsetIterator() { + return true; + } + + @Override + public Iterator getIterator(TimeString start, TimeString stop, String[] params) { + try { + logger.entering("CdawebServicesHapiRecordSource","getIterator"); + URL f= new URL( this.root + availabilityIterator.getFile() ); + + CdawebServicesHapiRecordIterator result= 
CdawebServicesHapiRecordIterator.create(id, info, start, stop, params, f, cache ); + + logger.exiting("CdawebServicesHapiRecordSource","getIterator"); + return result; + } catch (MalformedURLException ex) { + throw new RuntimeException(ex); + } + } + + public static void main( String[] args ) throws IOException, JSONException { + String origRoot= "file:/net/spot10/hd1_8t/home/weigel/cdawmeta/data/orig_data/"; + String hapiRoot= "file:/net/spot10/hd1_8t/home/weigel/cdawmeta/data/hapi/info/AMPTECCE_H0_MEPA@0.json"; + String id= "AMPTECCE_H0_MEPA@0"; + JSONObject info= new JSONObject( CdawebInfoCatalogSource.getInfo( origRoot, hapiRoot ) ); + CdawebServicesHapiRecordSource crs= new CdawebServicesHapiRecordSource( origRoot, id, info, null, new File("/tmp/hapi-server-cache") ); + + System.err.println("crs: "+ crs); + + int[] start= new int[]{1988,12,22,0,0,0,0}; + int[] stop= new int[]{1988,12,22,16,19,0,0}; + + // This is the "alternate_view" one + String[] params= new String[]{"Time", "ION_protons_COUNTS_stack"}; + + // This is the a non-virtual one + //String[] params= new String[]{"Time", "ION_protons_COUNTS"}; + + Iterator granules= crs.getGranuleIterator( new TimeString( start ), new TimeString( stop ) ); + if ( granules.hasNext() ) { + TimeString[] tr= granules.next(); + Iterator records= crs.getIterator( tr[0], tr[1], params); + while ( records.hasNext() ) { + HapiRecord rec= records.next(); + System.err.println( "next: "+ rec.getIsoTime(0)+" " +rec.getAsString(1) ); + } + } + + } +} diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/DemoBugCDFJ.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/DemoBugCDFJ.java new file mode 100644 index 00000000..8865c9de --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/DemoBugCDFJ.java @@ -0,0 +1,20 @@ + +package org.hapiserver.source.cdaweb; + +import gov.nasa.gsfc.spdf.cdfj.CDFException; +import gov.nasa.gsfc.spdf.cdfj.CDFReader; +import java.io.File; + +/** + * Demo bug where runtime error occurs 
when trying to read FEDU with reader.get(vname). Note + * Autoplot uses NIO to access the data, and this works fine. + * @author jbf + */ +public class DemoBugCDFJ { + public static void main( String[] args ) throws CDFException.ReaderError { + File tmpFile= new File("/var/www/cdaweb/htdocs/sp_phys/data/erg/mepe/l2/3dflux/2024/erg_mepe_l2_3dflux_20240830_v01_01.cdf"); + CDFReader reader = new CDFReader(tmpFile.toString()); + reader.getBuffer( "FEDU", "float", new int[] { 0, 8910 }, true ); + reader.get("FEDU"); + } +} diff --git a/HapiServerBase/src/org/hapiserver/source/cdaweb/LeapSecondsConverter.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/LeapSecondsConverter.java similarity index 100% rename from HapiServerBase/src/org/hapiserver/source/cdaweb/LeapSecondsConverter.java rename to CDAWebServer/src/org/hapiserver/source/cdaweb/LeapSecondsConverter.java diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/Add1800.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/Add1800.java new file mode 100644 index 00000000..d4f65810 --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/Add1800.java @@ -0,0 +1,39 @@ +package org.hapiserver.source.cdaweb.adapters; + +import org.hapiserver.TimeUtil; +import org.hapiserver.source.cdaweb.Adapter; + +/** + * Add 1800 seconds to the times. 
+ * @author jbf + */ +public class Add1800 extends Adapter { + + Adapter base; + + public Add1800( Adapter base ) { + this.base= base; + } + + @Override + public String adaptString(int index) { + String s= base.adaptString(index); + if ( s.charAt(14)=='0' && s.charAt(15)=='0' ) { + s= s.substring(0,14) + "30" + s.substring(16); + } else { + throw new IllegalArgumentException("add1800 assumes there are no minutes!"); + } + return s; + //return base.adaptString(index); + } + + + @Override + public String getString(int index) { + String time= base.getString(index); + return time; + } + + + +} diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/ApplyEsaQflag.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/ApplyEsaQflag.java new file mode 100644 index 00000000..c401981d --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/ApplyEsaQflag.java @@ -0,0 +1,61 @@ + +package org.hapiserver.source.cdaweb.adapters; + +import org.hapiserver.source.cdaweb.Adapter; + +/** + * Implements by returning parameter only where the flag is zero, fill otherwise. + *
    + *
  • vap+hapi:http://localhost:8280/HapiServer/hapi?id=THE_L2_GMOM@5¶meters=Time,the_ptebb_avgtempQ&timerange=2025-09-07 + *
+ * @author jbf + */ +public class ApplyEsaQflag extends Adapter { + + Adapter param; + Adapter flag; + double fill; + double[] ffill; + + public ApplyEsaQflag( Adapter param, Adapter flag, double fill ) { + this.param= param; + this.flag= flag; + this.fill= fill; + } + + @Override + public String getString(int index) { + throw new UnsupportedOperationException("Not supported."); + } + + @Override + public double adaptDouble(int index) { + double d= param.adaptDouble(index); + int i= flag.adaptInteger(index); + if ( i==0 ) { + return d; + } else { + return fill; + } + } + + @Override + public double[] adaptDoubleArray(int index) { + double[] d= param.adaptDoubleArray(index); + int i= flag.adaptInteger(index); + if ( i==0 ) { + return d; + } else { + if ( ffill==null ) { // initialize once, now that we know the size of param. + ffill= new double[d.length]; + for ( int j=0; j=v; + break; + case "gt": + filt= f>v; + break; + case "le": + filt= f<=v; + break; + case "lt": + filt= f=v; + break; + case "gt": + filt= f>v; + break; + case "le": + filt= f<=v; + break; + case "lt": + filt= f + *
  • https://github.com/autoplot/cdfj/blob/virtual_variable_descriptions/virtual/apply_rtn_qflag.md + * + * @author jbf + */ +public class ApplyRtnQflag extends Adapter { + + Adapter data; + Adapter quality; + double fill; + double[] ffill; + + public ApplyRtnQflag( Adapter data, Adapter quality, double fill ) { + this.data= data; + this.quality= quality; + this.fill= fill; + } + + @Override + public String getString(int index) { + throw new UnsupportedOperationException("Not supported."); + } + + @Override + public double adaptDouble(int index) { + double d= data.adaptDouble(index); + int i= quality.adaptInteger(index); + if ( i==222 || i==223 ) { + return d; + } else { + return fill; + } + } + + @Override + public double[] adaptDoubleArray(int index) { + double[] d= data.adaptDoubleArray(index); + int i= quality.adaptInteger(index); + if ( i==222 || i==223 ) { + return d; + } else { + if ( ffill==null ) { // initialize once, now that we know the size of param. + ffill= new double[d.length]; + for ( int j=0; j + *
  • vap+hapi:http://localhost:8280/HapiServer/hapi?id=FA_ESA_L2_EEB¶meters=Time,energy_median&timerange=2009-04-30T05:32:25Z/2009-04-30T05:47:04Z + *
  • vap+hapi:http://localhost:8280/HapiServer/hapi?id=MVN_SWE_L2_SVYPAD¶meters=Time, + * + * @author jbf + */ +public class ArrSlice extends Adapter { + + Adapter slicable; + int sliceIndex; // ARR_INDEX in CDF file + int sliceDim; // ARR_DIM in CDF file + int offs1; + int len1; + int offs0; + int len0; + double[] buf; + + public ArrSlice( Adapter slicable, int[] qube, int sliceDim, int sliceIndex ) { + this.slicable= slicable; + this.sliceDim= sliceDim; + this.sliceIndex= sliceIndex; + if ( this.sliceDim==2 ) { + int len0= 1; + for ( int i=1; i + *
  • .../hapi/data?id=THG_L1_ASK@8&timerange=2025-09-08+0:00+to+12:55 + * + * @author jbf + */ +public class CompThemisEpoch extends Adapter { + + double[] base; + double[] offset; + + public CompThemisEpoch( double[] base, double[] offset ) { + this.base= base; + this.offset= offset; + } + + @Override + public double adaptDouble(int index) { + double d= base[0]; + double d2= offset[index]; + return d+d2*1000; + } + + @Override + public String adaptString(int index) { + double d= adaptDouble(index); + //TODO: there might be a better implementation of this. + return new CdawebServicesHapiRecordIterator.IsotimeEpochAdapter( new double[] { d }, 30 ).adaptString(0); + } + + + + @Override + public String getString(int index) { + throw new UnsupportedOperationException("Not supported yet."); // Generated from nbfs://nbhost/SystemFileSystem/Templates/Classes/Code/GeneratedMethodBody + } + +} diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/CompThemisEpoch16.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/CompThemisEpoch16.java new file mode 100644 index 00000000..ffd78154 --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/CompThemisEpoch16.java @@ -0,0 +1,45 @@ + +package org.hapiserver.source.cdaweb.adapters; + +import org.hapiserver.source.cdaweb.Adapter; +import org.hapiserver.source.cdaweb.CdawebServicesHapiRecordIterator; + +/** + * Implements by returning parameter only where the flag is (?) nonzero. + *
      + *
    • .../hapi/data?id=THG_L1_ASK@8&timerange=2025-09-08+0:00+to+12:55 + *
    + * @author jbf + */ +public class CompThemisEpoch16 extends Adapter { + + double[] base; + double[] offset; + + public CompThemisEpoch16( double[] base, double[] offset ) { + this.base= base; + this.offset= offset; + } + + @Override + public double adaptDouble(int index) { + double d= base[0]; + double d2= offset[index]; + return d+d2*1000; + } + + @Override + public String adaptString(int index) { + double d= adaptDouble(index); + //TODO: there might be a better implementation of this. + return new CdawebServicesHapiRecordIterator.IsotimeEpochAdapter( new double[] { d }, 30 ).adaptString(0); + } + + + + @Override + public String getString(int index) { + throw new UnsupportedOperationException("Not supported yet."); // Generated from nbfs://nbhost/SystemFileSystem/Templates/Classes/Code/GeneratedMethodBody + } + +} diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/ConstantAdapter.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/ConstantAdapter.java new file mode 100644 index 00000000..18a318c8 --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/ConstantAdapter.java @@ -0,0 +1,29 @@ + +package org.hapiserver.source.cdaweb.adapters; + +import org.hapiserver.source.cdaweb.Adapter; + +/** + * virtual variable component is just single record. 
+ * THA_L2_GMOM@2¶meters=Time,tha_ptirf_sc_potQ + * @author jbf + */ +public class ConstantAdapter extends Adapter { + + private final double value; + + public ConstantAdapter( double value ) { + this.value= value; + } + + @Override + public String getString(int index) { + return String.valueOf(this.value); + } + + @Override + public double adaptDouble(int index) { + return value; + } + +} diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/ConvertLog10.java b/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/ConvertLog10.java new file mode 100644 index 00000000..0e11ddec --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/adapters/ConvertLog10.java @@ -0,0 +1,43 @@ + +package org.hapiserver.source.cdaweb.adapters; + +import org.hapiserver.source.cdaweb.Adapter; + +/** + * return log10 of another dataset. + *
      + *
    • .../hapi/data?id=IM_K0_WIC¶meters=Time,WIC_PIXELS_LOG&timerange=2005-12-17+2:32+to+23:55 + *
    + * @author jbf + */ +public class ConvertLog10 extends Adapter { + + Adapter base; + double[] stage; + + public ConvertLog10( Adapter base ) { + this.base= base; + } + @Override + public String getString(int index) { + throw new UnsupportedOperationException("Not supported yet."); // Generated from nbfs://nbhost/SystemFileSystem/Templates/Classes/Code/GeneratedMethodBody + } + + @Override + public double adaptDouble(int index) { + return Math.log10( base.adaptDouble(index) ); + } + + @Override + public double[] adaptDoubleArray(int index) { + double[] result= base.adaptDoubleArray(index); + // it seems likely that mutating the original data is a bad idea, so make a copy. + if ( stage==null ) stage= new double[result.length]; + for ( int i=0; i + *
  • .../hapi/data?id=IM_K0_WIC¶meters=Time,WIC_PIXELS_LOG&timerange=2005-12-17+2:32+to+23:55 + *
  • https://github.com/autoplot/cdfj/blob/virtual_variable_descriptions/virtual/convert_log10_flip_vert.md + * + * This is a nice example because we'll need to know the sizes to implement this. + * @author jbf + */ +public class ConvertLogFlipVert extends Adapter { + + Adapter base; + double[] stage; + + public ConvertLogFlipVert( Adapter base ) { + this.base= base; + } + @Override + public String getString(int index) { + throw new UnsupportedOperationException("Not supported yet."); // Generated from nbfs://nbhost/SystemFileSystem/Templates/Classes/Code/GeneratedMethodBody + } + + @Override + public double adaptDouble(int index) { + return Math.log10( base.adaptDouble(index) ); + } + + @Override + public double[] adaptDoubleArray(int index) { + if (true) { + throw new IllegalArgumentException("not implemented"); + } else { + double[] result= base.adaptDoubleArray(index); + // it seems likely that mutating the original data is a bad idea, so make a copy. + if ( stage==null ) stage= new double[result.length]; + for ( int i=0; i virtualFunctions; + static { + virtualFunctions= new HashSet<>(); + virtualFunctions.add("add_1800"); // verified + virtualFunctions.add("apply_esa_qflag"); // verified vap+hapi:http://localhost:8280/HapiServer/hapi?id=MMS1_FGM_BRST_L2@0¶meters=mms1_fgm_b_gse_brst_l2_clean&timerange=2025-08-16+14:19:43+to+14:22:13 + virtualFunctions.add("alternate_view"); // verified vap+hapi:http://localhost:8280/HapiServer/hapi?id=AMPTECCE_H0_MEPA@0¶meters=ION_protons_COUNTS_stack&timerange=1988-12-22+0:00+to+16:18 + virtualFunctions.add("apply_esa_qflag"); // vap+hapi:http://localhost:8280/HapiServer/hapi?id=MMS1_FGM_BRST_L2@0¶meters=mms1_fgm_b_gse_brst_l2_clean&timerange=2025-08-16+14:19:43+to+14:22:13 + virtualFunctions.add("apply_fgm_qflag"); // vap+hapi:http://localhost:8280/HapiServer/hapi?id=THA_L2_FGM@0¶meters=tha_fgs_btotalQ&timerange=2025-09-20 + virtualFunctions.add("apply_gmom_qflag"); // 
vap+hapi:http://localhost:8280/HapiServer/hapi?id=THA_L2_GMOM@0¶meters=tha_ptiff_densityQ&timerange=2025-09-19 + virtualFunctions.add("apply_rtn_qflag"); // vap+hapi:http://localhost:8280/HapiServer/hapi?id=MESSENGER_MAG_RTN@0¶meters=B_radial_q&timerange=2015-04-29+0:00+to+23:59 + virtualFunctions.add("comp_themis_epoch"); // Needs more study bc of NetCDF: vap+hapi:http://localhost:8280/HapiServer/hapi?id=DN_MAGN-L2-HIRES_G08¶meters=Time&timerange=2001-12-08+0:00+to+23:59 + virtualFunctions.add("comp_themis_epoch16"); // vap+hapi:http://localhost:8280/HapiServer/hapi?id=THA_L2_FGM@1¶meters=Time&timerange=2025-09-20 + virtualFunctions.add("add_1800"); // vap+hapi:http://localhost:8280/HapiServer/hapi?id=OMNI2_H0_MRG1HR¶meters=Time&timerange=2025-01-01+00:00+to+2025-06-30+23:00 + virtualFunctions.add("apply_filter_flag"); // NOT CHECKED vap+hapi:http://localhost:8280/HapiServer/hapi?id=MMS1_FPI_BRST_L2_DES-DIST¶meters=mms1_des_dist_brst1_even&timerange=2025-07-31+18:29:23+to+18:31:22 + virtualFunctions.add("convert_log10"); // vap+hapi:http://localhost:8280/HapiServer/hapi?id=IM_K0_EUV¶meters=IMAGE_LOG&timerange=2005-12-17+3:14+to+23:40 + virtualFunctions.add("clamp_to_zero"); // error!!!! vap+hapi:http://localhost:8280/HapiServer/hapi?id=RBSPA_REL04_ECT-MAGEIS-L3@0¶meters=FEDU_plasmagram&timerange=2019-10-13+0:00+to+23:59 + virtualFunctions.add("alternate_view"); + virtualFunctions.add("arr_slice"); // error!!! vap+hapi:http://localhost:8280/HapiServer/hapi?id=ERG_MEPE_L2_3DFLUX@0¶meters=FEDU_e2&timerange=2024-08-30+1:14+to+24:00 + + } + + /** + * return true if the virtual function is supported. + * @param funct + * @return true if the virtual function is supported. 
+ */ + public static boolean virtualFunctionSupported( String funct ) { + return virtualFunctions.contains(funct); + } + +} diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/config.json b/CDAWebServer/src/org/hapiserver/source/cdaweb/config.json new file mode 100644 index 00000000..0d552af2 --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/config.json @@ -0,0 +1,82 @@ + { + "options": { + "cdawmetaDir":"https://cottagesystems.com/~jbf/hapi/p/cdaweb/", + "cdawmetaDir_doc":"location for the server", + "cacheDir":"file:///tmp/cdaweb-hapi/cache/", + "cacheDir_doc": "read-write location where files will be downloaded.", + "catalogHome": "${cdawmetaDir}/data/hapi/catalog.json", + "catalogHome_doc": "read-only location of the catalog file", + "infoHome": "${cdawmetaDir}/data/hapi/info/", + "infoHome_doc": "read-only root folder (website or file://...) containing \"info\" directory and \"catalog.json\"", + "metaHome": "${cdawmetaDir}/data/cdfmetafile/", + "metaHome_doc": "read-only root folder (website or file://...) containing \"info\" directory with file listings." 
+ }, + "capabilities": { + "outputFormats": [ + "csv", + "binary", + "json" + ] + }, + "groups": [ + { + "group_id": "cdaweb", + "config": { + "catalog": { + "source": "classpath", + "classpath": "CDAWebServer.jar", + "class": "org.hapiserver.source.cdaweb.CdawebInfoCatalogSource", + "method": "getCatalog", + "args": [ "${catalogHome}" ] + }, + "info": { + "source":"classpath", + "classpath": "CDAWebServer.jar", + "class":"org.hapiserver.source.cdaweb.CdawebInfoCatalogSource", + "method": "getInfo", + "args": [ "${infoHome}${id}.json", "${infoHome}${id}.json" ] + }, + "data": { + "source": "classpath", + "classpath": "CDAWebServer.jar", + "class":"org.hapiserver.source.cdaweb.CdawebHapiRecordSource", + "method": "create", + "args": [ "${metaHome}", "${id}", "${info}", "${data-config}", "${cacheDir}" ] + } + } + }, + { + "group_id": "cdaweb_availability", + "config": { + "catalog": { + "source": "classpath", + "classpath": "CDAWebServer.jar", + "class": "org.hapiserver.source.cdaweb.CdawebAvailabilityHapiRecordSource", + "method": "getAvailabilityCatalog", + "args": [ "${catalogHome}" ] + }, + "info": { + "source":"classpath", + "classpath": "CDAWebServer.jar", + "class":"org.hapiserver.source.cdaweb.CdawebAvailabilityHapiRecordSource", + "method": "getInfoAvail", + "args": [ "${metaHome}", "${id}" ] + }, + "data": { + "source": "classpath", + "classpath": "CDAWebServer.jar", + "class":"org.hapiserver.source.cdaweb.CdawebAvailabilityHapiRecordSource", + "args": [ "${metaHome}", "${id}", "${info}"] + } + } + } + ], + "about": { + "HAPI": "3.2", + "id":"cdaweb", + "title":"CDAWeb HAPI Server", + "contact":"Jeremy Faden ", + "description":"CDF data sets from CDAWeb.", + "x_server_version": "2026-01-30T06:06" + } +} diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/sampleTimes.txt b/CDAWebServer/src/org/hapiserver/source/cdaweb/sampleTimes.txt new file mode 100644 index 00000000..456321fe --- /dev/null +++ 
b/CDAWebServer/src/org/hapiserver/source/cdaweb/sampleTimes.txt @@ -0,0 +1,37 @@ +# see https://github.com/autoplot/dev/blob/master/demos/2023/20230516/unplotCdawebAvail.jy +WI_SW_ION_DIST_SWE_FARADAY 1994-12-31T01:52:02.767Z 2023-05-03T00:18:40.461Z +WI_STRAHL0_SWE 1995-02-05T09:26:27.320Z 2001-07-23T21:43:03.285Z +WI_SOSP_3DP 1995-02-05T09:26:27.320Z 2023-06-08T07:53:05.014Z +WI_SOSD_3DP 1995-02-05T09:26:27.320Z 2023-06-08T07:53:05.014Z +# http://localhost:8080/HapiServer/hapi/data?id=A1_K0_MPA&time.min=2001-08-31T00:03:18Z&time.max=2001-09-01T00:03:18.000Z +# http://localhost:8080/HapiServer/hapi/data?id=availability/A1_K0_MPA&time.min=2001-08-01T00:03:18Z&time.max=2001-09-01T00:00:00.000Z +DE_UV_SAI double[][][] + +This crashes, maybe because it is integer data (DONE): +http://localhost:8080/HapiServer/hapi/data?id=WI_OR_DEF&start=1997-06-30T23:50:00.000Z&stop=1997-07-01T23:50:00.000Z¶meters=Time,CRN_EARTH + +strange bug where fill is mishandled for some data: +http://localhost:8080/HapiServer/hapi/data?id=AC_H3_MFI¶meters=&time.min=2024-06-11T00:00Z&time.max=2024-06-11T23:59:59Z + +But where type adapter is not correctly chosen: +http://localhost:8080/HapiServer/hapi/data?id=WI_H0_SWE¶meters=&time.min=2001-05-30T00:00:20Z&time.max=2001-05-30T23:59:47Z + +Uses web services: +* I8_H0_MITPLASMA0 +* http://localhost:8080/HapiServer/hapi/data?dataset=MMS4_FGM_SRVY_L2@0¶meters=Time,mms4_fgm_b_gse_srvy_l2_clean&start=2024-07-23&stop=2024-07-24 apply_esa_qflag + +Doesn't use web services: +* http://localhost:8080/HapiServer/hapi/data?dataset=A1_K0_MPA&start=2008-01-02T00:02:16Z&stop=2008-01-02T23:57:19Z + +Gets the type wrong: +* http://localhost:8080/HapiServer/hapi/data?dataset=MMS1_EDI_SRVY_L2_EFIELD&start=2023-08-21T17:33:02Z&stop=2023-08-21T22:57:02Z +* http://localhost:8080/HapiServer/hapi/data?dataset=MMS1_EDP_FAST_L2_DCE&start=2024-07-09T02:12:03Z&stop=2024-07-09T23:59:58Z +* 
http://localhost:8080/HapiServer/hapi/data?id=MMS1_EPD-EIS_SRVY_L2_ELECTRONENERGY¶meters=&time.min=2024-06-26T10:22:21Z&time.max=2024-06-26T13:06Z +* http://localhost:8080/HapiServer/hapi/data?id=MMS1_EPD-EIS_BRST_L2_PHXTOF¶meters=&time.min=2024-07-25T20:14:03Z&time.max=2024-07-25T20:16:22Z +* http://localhost:8080/HapiServer/hapi/data?id=MMS1_EPD-EIS_BRST_L2_EXTOF¶meters=&time.min=2024-07-25T20:14:03Z&time.max=2024-07-25T20:16:22Z + +Timetags don't match: +* http://localhost:8080/HapiServer/hapi/data?id=MMS1_EDI_SRVY_L2_AMB@0¶meters=&time.min=2015-12-26T00:00Z&time.max=2015-12-26T02:23:36Z + +Timetags repeat: +* wget -O - -o /dev/null 'http://localhost:8080/HapiServer/hapi/data?id=MMS1_EDI_SRVY_L2_AMB@0¶meters=&time.min=2015-12-26T00:00Z&time.max=2015-12-26T00:01:00Z' diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/skips.txt b/CDAWebServer/src/org/hapiserver/source/cdaweb/skips.txt new file mode 100644 index 00000000..b6b545e9 --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/skips.txt @@ -0,0 +1,2 @@ +# This should include a list of patterns which when the dataset id matches, the id will be skipped. +C[1-4]_CP_STA_SM # includes rank 5 data which is not supported by CDFJ library. 
diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/test-locking.bash b/CDAWebServer/src/org/hapiserver/source/cdaweb/test-locking.bash new file mode 100755 index 00000000..296c3ec3 --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/test-locking.bash @@ -0,0 +1,6 @@ +wget -O 1.csv -o /dev/null 'http://localhost:8280/HapiServer/hapi/data?dataset=AC_OR_SSC&start=2023-01-01T00:00Z&stop=2024-01-01T00:00Z' & +wget -O 2.csv -o /dev/null 'http://localhost:8280/HapiServer/hapi/data?dataset=AC_OR_SSC&start=2023-01-01T00:00Z&stop=2024-01-01T00:00Z' & +wget -O 3.csv -o /dev/null 'http://localhost:8280/HapiServer/hapi/data?dataset=AC_OR_SSC&start=2023-01-01T00:00Z&stop=2024-01-01T00:00Z' & +wget -O 4.csv -o /dev/null 'http://localhost:8280/HapiServer/hapi/data?dataset=AC_OR_SSC&start=2023-01-01T00:00Z&stop=2024-01-01T00:00Z' & +# wget -O 5.csv -o /dev/null 'http://localhost:8280/HapiServer/hapi/data?dataset=AC_OR_SSC&start=2023-01-01T00:00Z&stop=2024-01-01T00:00Z' & +# wget -O 6.csv -o /dev/null 'http://localhost:8280/HapiServer/hapi/data?dataset=AC_OR_SSC&start=2023-01-01T00:00Z&stop=2024-01-01T00:00Z' & diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/urls_to_verify.txt b/CDAWebServer/src/org/hapiserver/source/cdaweb/urls_to_verify.txt new file mode 100644 index 00000000..a0c7fd40 --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/urls_to_verify.txt @@ -0,0 +1,39 @@ +# Shows possible problem with assumptions about parameter numbers: +wget -O - 'http://localhost:8280/HapiServer/hapi/data?id=BAR_1D_L2_HKPG¶meters=T11_Solar4&time.min=2013-01-19T23:59:27Z&time.max=2013-01-20T23:59:18Z' + +wget -O - 'http://localhost:8280/HapiServer/hapi/data?id=AC_OR_SSC¶meters=XYZ_GSEO&start=2023-01-01T00:00Z&stop=2024-01-01T00:00Z' + +# JSON double array: +wget -O - 'http://localhost:8280/HapiServer/hapi/data?id=RBSP-A-RBSPICE_LEV-2_TOFXPHHLEHT&start=2014-10-02T17:30:00.000Z&stop=2014-10-02T17:30:10.000Z¶meters=Time,FPDU' + +# extra variable 
"sc_pos_syngci" +https://cottagesystems.com/~jbf/hapi/p/cdaweb/data/hapi/info/A2_K0_MPA.json + +# strange error "sc_pos_syngci" +wget -O - 'http://localhost:8280/HapiServer/hapi/data?id=A2_K0_MPA&time.min=2008-002T00:01:48Z&time.max=2008-002T23:58:16Z +Nand's server returns this, but different timetags are returned. + +# FH has many more records in +wget -O - 'http://localhost:8280/HapiServer/hapi/data?id=ALOUETTE2_AV_LIM&time.min=1967-015T12:58:30Z&time.max=1967-015T12:59:04Z + +# Bernie's services claim a file exists when server returns file not found. +wget -O - 'http://localhost:8280/HapiServer/hapi/data?id=ALOUETTE2_AV_LIM&time.min=1967-01-15T12:47:00.000Z&time.max=1967-01-15T13:05:00.000Z¶meters=Time,ampl' + + +# info is missing fill value +wget -O - 'http://localhost:8280/HapiServer/hapi/data?id=DE1_1MIN_RIMS¶meters=flags_z_hi_Oplus&time.min=1984-11-01T13:23:44Z&time.max=1984-11-25T07:57:26Z' + +# this is strange, because it is handing times from 1983, not just the one day 1991-02-17. +wget -o /dev/null -O - 'http://localhost:8280/HapiServer/hapi/data?id=DE1_62MS_MAGA-GMS/source&time.min=1991-02-17T05:30:18Z&time.max=1991-02-17T06:20:32Z' +wget -o /dev/null -O - 'http://localhost:8280/HapiServer/hapi/data?id=DE1_62MS_MAGA-GMS¶meters=alt&time.min=1991-02-17T05:30:18Z&time.max=1991-02-17T06:20:32Z' +# The gist is that there's a granule which has an unrealistic stop time. +# in https://cottagesystems.com/~jbf/hapi/p/cdaweb//data/cdfmetafile/info/DE1_62MS_MAGA-GMS.json: +# { +# "Name": "https://cdaweb.gsfc.nasa.gov/sp_phys/data/de/de1/magnetic_fields_maga/62ms_magagms_cdaweb/1983/de1_62ms_maga-gms_19831220_v01.cdf", +# "StartTime": "1983-12-20T10:10:19Z", +# "EndTime": "1991-12-20T23:35:11Z" +# }, +# See ticket https://github.com/hapi-server/server-java/issues/73 + +# Too many fields -- Autoplot can't plot this variable, so virtual variable? 
+wget -O - 'http://localhost:8280/HapiServer/hapi/data?dataset=IM_K0_HENA&time.min=2005-12-17T00:01:12Z&time.max=2005-12-17T23:50:55Z' | head | tail -n 1 \ No newline at end of file diff --git a/HapiServerBase/src/org/hapiserver/source/cdaweb/virtualVariables.txt b/CDAWebServer/src/org/hapiserver/source/cdaweb/virtualVariables.txt similarity index 52% rename from HapiServerBase/src/org/hapiserver/source/cdaweb/virtualVariables.txt rename to CDAWebServer/src/org/hapiserver/source/cdaweb/virtualVariables.txt index 8ab7f123..ceda9e11 100644 --- a/HapiServerBase/src/org/hapiserver/source/cdaweb/virtualVariables.txt +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/virtualVariables.txt @@ -1,6 +1,7 @@ # 1 indicates the dataset id uses virtual variables # dataset name, has virt, example virt name -AC_OR_SSC 1 ALTERNATE_VIEW +# See https://github.com/autoplot/dev/blob/master/rfe/sf/672/printVirtualVariables.jy, an Autoplot script. +AC_OR_SSC 1 alternate_view AC_OR_DEF 0 AC_AT_DEF 0 AC_H2_CRIS 0 @@ -35,7 +36,7 @@ AC_H2_ULE 0 AEROCUBE-6-A_DOSIMETER_L2 0 AEROCUBE-6-B_DOSIMETER_L2 0 AIM_CIPS_SCI_3A 0 -AMPTECCE_H0_MEPA 0 +AMPTECCE_H0_MEPA 1 alternate_view APOLLO12_SWS_1HR 0 APOLLO12_SWS_28S 0 APOLLO15_SWS_1HR 0 @@ -413,6 +414,20 @@ BAR_5A_L2_RCNT 0 BAR_5A_L2_SSPC 0 BAR_5A_L2_USPC 0 BAR_5A_L2_XSPC 0 +BAR_6A_L2_EPHM 0 +BAR_6A_L2_FSPC 0 +BAR_6A_L2_HKPG 0 +BAR_6A_L2_MAGN 0 +BAR_6A_L2_MSPC 0 +BAR_6A_L2_RCNT 0 +BAR_6A_L2_SSPC 0 +BAR_7A_L2_EPHM 0 +BAR_7A_L2_FSPC 0 +BAR_7A_L2_HKPG 0 +BAR_7A_L2_MAGN 0 +BAR_7A_L2_MSPC 0 +BAR_7A_L2_RCNT 0 +BAR_7A_L2_SSPC 0 BEPICOLOMBO_HELIO1DAY_POSITION 0 CASSINI_MAG_1MIN_MAGNETIC_FIELD 0 C1_PP_ASP 0 @@ -433,10 +448,207 @@ C1_PP_EFW 0 C2_PP_EFW 0 C3_PP_EFW 0 C4_PP_EFW 0 +C1_CP_CIS-CODIF_H1_1D_PEF 0 +C3_CP_CIS-CODIF_H1_1D_PEF 0 +C4_CP_CIS-CODIF_H1_1D_PEF 0 +C1_CP_CIS-CODIF_HS_H1_PEF 0 +C3_CP_CIS-CODIF_HS_H1_PEF 0 +C4_CP_CIS-CODIF_HS_H1_PEF 0 +C1_CP_CIS-CODIF_HS_H1_PF 0 +C3_CP_CIS-CODIF_HS_H1_PF 0 +C4_CP_CIS-CODIF_HS_H1_PF 0 +C1_CP_CIS-CODIF_HS_H1_PSD 0 
+C3_CP_CIS-CODIF_HS_H1_PSD 0 +C4_CP_CIS-CODIF_HS_H1_PSD 0 +C1_CP_CIS-CODIF_HS_HE1_PF 0 +C3_CP_CIS-CODIF_HS_HE1_PF 0 +C4_CP_CIS-CODIF_HS_HE1_PF 0 +C1_CP_CIS-CODIF_HS_HE1_PSD 0 +C3_CP_CIS-CODIF_HS_HE1_PSD 0 +C4_CP_CIS-CODIF_HS_HE1_PSD 0 +C1_CP_CIS-CODIF_HS_O1_PEF 0 +C3_CP_CIS-CODIF_HS_O1_PEF 0 +C4_CP_CIS-CODIF_HS_O1_PEF 0 +C1_CP_CIS-CODIF_HS_O1_PF 0 +C3_CP_CIS-CODIF_HS_O1_PF 0 +C4_CP_CIS-CODIF_HS_O1_PF 0 +C1_CP_CIS-CODIF_HS_O1_PSD 0 +C3_CP_CIS-CODIF_HS_O1_PSD 0 +C4_CP_CIS-CODIF_HS_O1_PSD 0 +C1_CP_CIS-CODIF_HE1_1D_PEF 0 +C3_CP_CIS-CODIF_HE1_1D_PEF 0 +C4_CP_CIS-CODIF_HE1_1D_PEF 0 +C4_CP_CIS-CODIF_HE1_DENSITY_CORRECTED 0 +C4_CP_CIS-CODIF_LS_H1_PEF 0 +C4_CP_CIS-CODIF_LS_H1_PF 0 +C4_CP_CIS-CODIF_LS_H1_PSD 0 +C4_CP_CIS-CODIF_LS_HE1_PEF 0 +C4_CP_CIS-CODIF_LS_HE1_PF 0 +C4_CP_CIS-CODIF_LS_O1_PEF 0 +C4_CP_CIS-CODIF_LS_O1_PF 0 +C4_CP_CIS-CODIF_LS_O1_PSD 0 +C1_CP_CIS-CODIF_O1_1D_PEF 0 +C3_CP_CIS-CODIF_O1_1D_PEF 0 +C4_CP_CIS-CODIF_O1_1D_PEF 0 +C1_CP_CIS-CODIF_PAD_HS_H1_PF 0 +C3_CP_CIS-CODIF_PAD_HS_H1_PF 0 +C4_CP_CIS-CODIF_PAD_HS_H1_PF 0 +C1_CP_CIS-CODIF_PAD_HS_HE1_PF 0 +C3_CP_CIS-CODIF_PAD_HS_HE1_PF 0 +C4_CP_CIS-CODIF_PAD_HS_HE1_PF 0 +C1_CP_CIS-CODIF_PAD_HS_O1_PF 0 +C3_CP_CIS-CODIF_PAD_HS_O1_PF 0 +C4_CP_CIS-CODIF_PAD_HS_O1_PF 0 +C4_CP_CIS-CODIF_PAD_LS_H1_PF 0 +C4_CP_CIS-CODIF_PAD_LS_HE1_PF 0 +C4_CP_CIS-CODIF_PAD_LS_O1_PF 0 +C1_CP_CIS-HIA_HS_1D_PEF 0 +C3_CP_CIS-HIA_HS_1D_PEF 0 +C1_CP_CIS-HIA_HS_MAG_IONS_PEF 0 +C3_CP_CIS-HIA_HS_MAG_IONS_PEF 0 +C1_CP_CIS-HIA_HS_MAG_IONS_PF 0 +C3_CP_CIS-HIA_HS_MAG_IONS_PF 0 +C1_CP_CIS-HIA_HS_MAG_IONS_PSD 0 +C3_CP_CIS-HIA_HS_MAG_IONS_PSD 0 +C1_CP_CIS-HIA_HS_SW_IONS_PEF 0 +C3_CP_CIS-HIA_HS_SW_IONS_PEF 0 +C1_CP_CIS-HIA_HS_SW_IONS_PF 0 +C3_CP_CIS-HIA_HS_SW_IONS_PF 0 +C1_CP_CIS-HIA_HS_SW_IONS_PSD 0 +C3_CP_CIS-HIA_HS_SW_IONS_PSD 0 +C1_CP_CIS-HIA_LS_1D_PEF 0 +C3_CP_CIS-HIA_LS_1D_PEF 0 +C1_CP_CIS-HIA_LS_SW_IONS_PEF 0 +C3_CP_CIS-HIA_LS_SW_IONS_PEF 0 +C1_CP_CIS-HIA_LS_SW_IONS_PF 0 +C3_CP_CIS-HIA_LS_SW_IONS_PF 0 +C1_CP_CIS-HIA_LS_SW_IONS_PSD 0 
+C3_CP_CIS-HIA_LS_SW_IONS_PSD 0 +C1_CP_CIS-HIA_PAD_HS_MAG_IONS_PF 0 +C3_CP_CIS-HIA_PAD_HS_MAG_IONS_PF 0 +C1_CP_EDI_AEDC 0 +C2_CP_EDI_AEDC 0 +C3_CP_EDI_AEDC 0 +C1_CP_EDI_MP 0 +C2_CP_EDI_MP 0 +C3_CP_EDI_MP 0 +C1_CP_EDI_QZC 0 +C2_CP_EDI_QZC 0 +C3_CP_EDI_QZC 0 +C1_CP_EDI_SPIN 0 +C2_CP_EDI_SPIN 0 +C3_CP_EDI_SPIN 0 +C1_CP_EFW_L3_E3D_INERT 0 +C2_CP_EFW_L3_E3D_INERT 0 +C3_CP_EFW_L3_E3D_INERT 0 +C4_CP_EFW_L3_E3D_INERT 0 +C1_CP_EFW_L3_P 0 +C2_CP_EFW_L3_P 0 +C3_CP_EFW_L3_P 0 +C4_CP_EFW_L3_P 0 +C1_CP_EFW_L3_V3D_INERT 0 +C2_CP_EFW_L3_V3D_INERT 0 +C3_CP_EFW_L3_V3D_INERT 0 +C4_CP_EFW_L3_V3D_INERT 0 C1_CP_FGM_SPIN 0 C2_CP_FGM_SPIN 0 C3_CP_FGM_SPIN 0 C4_CP_FGM_SPIN 0 +C1_CP_FGM_5VPS 0 +C2_CP_FGM_5VPS 0 +C3_CP_FGM_5VPS 0 +C4_CP_FGM_5VPS 0 +C1_CP_RAP_E3DD 0 +C2_CP_RAP_E3DD 0 +C3_CP_RAP_E3DD 0 +C4_CP_RAP_E3DD 0 +C1_CP_RAP_ESPCT6 0 +C2_CP_RAP_ESPCT6 0 +C3_CP_RAP_ESPCT6 0 +C4_CP_RAP_ESPCT6 0 +C1_CP_RAP_HSPCT 0 +C2_CP_RAP_HSPCT 0 +C3_CP_RAP_HSPCT 0 +C4_CP_RAP_HSPCT 0 +C1_CP_RAP_I3DM_CNO 0 +C2_CP_RAP_I3DM_CNO 0 +C3_CP_RAP_I3DM_CNO 0 +C4_CP_RAP_I3DM_CNO 0 +C1_CP_RAP_I3DM_H 0 +C2_CP_RAP_I3DM_H 0 +C3_CP_RAP_I3DM_H 0 +C4_CP_RAP_I3DM_H 0 +C1_CP_RAP_I3DM_HE 0 +C2_CP_RAP_I3DM_HE 0 +C3_CP_RAP_I3DM_HE 0 +C4_CP_RAP_I3DM_HE 0 +C1_CP_RAP_ISPCT_CNO 0 +C2_CP_RAP_ISPCT_CNO 0 +C3_CP_RAP_ISPCT_CNO 0 +C4_CP_RAP_ISPCT_CNO 0 +C1_CP_RAP_ISPCT_HE 0 +C2_CP_RAP_ISPCT_HE 0 +C3_CP_RAP_ISPCT_HE 0 +C4_CP_RAP_ISPCT_HE 0 +C1_CP_RAP_L3DD 0 +C2_CP_RAP_L3DD 0 +C3_CP_RAP_L3DD 0 +C4_CP_RAP_L3DD 0 +C1_CP_RAP_PAD_CNO 0 +C3_CP_RAP_PAD_CNO 0 +C4_CP_RAP_PAD_CNO 0 +C1_CP_RAP_PAD_E3DD 0 +C2_CP_RAP_PAD_E3DD 0 +C3_CP_RAP_PAD_E3DD 0 +C4_CP_RAP_PAD_E3DD 0 +C1_CP_RAP_PAD_H 0 +C2_CP_RAP_PAD_H 0 +C3_CP_RAP_PAD_H 0 +C4_CP_RAP_PAD_H 0 +C1_CP_RAP_PAD_HE 0 +C2_CP_RAP_PAD_HE 0 +C3_CP_RAP_PAD_HE 0 +C4_CP_RAP_PAD_HE 0 +C1_CP_RAP_PAD_L3DD 0 +C2_CP_RAP_PAD_L3DD 0 +C3_CP_RAP_PAD_L3DD 0 +C4_CP_RAP_PAD_L3DD 0 +C1_CP_STA_CWF_GSE 0 +C2_CP_STA_CWF_GSE 0 +C3_CP_STA_CWF_GSE 0 +C4_CP_STA_CWF_GSE 0 +C1_CP_STA_PPP 0 +C2_CP_STA_PPP 0 +C3_CP_STA_PPP 0 
+C4_CP_STA_PPP 0 +C1_CP_STA_PSD 0 +C2_CP_STA_PSD 0 +C3_CP_STA_PSD 0 +C4_CP_STA_PSD 0 +C1_CP_STA_SM 0 +C2_CP_STA_SM 0 +C3_CP_STA_SM 0 +C4_CP_STA_SM 0 +C1_CP_WHI_ACTIVE 0 +C2_CP_WHI_ACTIVE 0 +C3_CP_WHI_ACTIVE 0 +C4_CP_WHI_ACTIVE 0 +C1_CP_WHI_ELECTRON_DENSITY 0 +C2_CP_WHI_ELECTRON_DENSITY 0 +C3_CP_WHI_ELECTRON_DENSITY 0 +C4_CP_WHI_ELECTRON_DENSITY 0 +C1_CP_WHI_NATURAL 0 +C2_CP_WHI_NATURAL 0 +C3_CP_WHI_NATURAL 0 +C4_CP_WHI_NATURAL 0 +C1_CP_WHI_PASSIVE_ACTIVE 0 +C2_CP_WHI_PASSIVE_ACTIVE 0 +C3_CP_WHI_PASSIVE_ACTIVE 0 +C4_CP_WHI_PASSIVE_ACTIVE 0 +C1_CP_WHI_WAVE_FORM_ENERGY 0 +C2_CP_WHI_WAVE_FORM_ENERGY 0 +C3_CP_WHI_WAVE_FORM_ENERGY 0 +C4_CP_WHI_WAVE_FORM_ENERGY 0 C1_UP_FGM 0 C2_UP_FGM 0 C3_UP_FGM 0 @@ -486,10 +698,10 @@ CNOFS_VEFI_LD_500MS 0 CNOFS_VEFI_EFIELD_1SEC 0 CNOFS_VEFI_BFIELD_1SEC 0 CNOFS_PLP_PLASMA_1SEC 0 -CRRES_H0_MEA 0 +CRRES_H0_MEA 1 alternate_view CSSWE_REPTILE_6SEC-COUNTS-L1 0 CSSWE_REPTILE_6SEC-FLUX-L2 0 -DE_UV_SAI 0 +DE_UV_SAI 1 alternate_view DE_VS_EICS 0 DE1_6SEC_MAGAGMS 0 DE1_1MIN_RIMS 0 @@ -507,15 +719,16 @@ DE2_WIND2S_WATS 0 DE2_62MS_VEFIMAGB 0 DE2_AC500MS_VEFI 0 DE2_DCA500MS_VEFI 0 -DMSP-F16_SSJ_PRECIPITATING-ELECTRONS-IONS 0 -DMSP-F17_SSJ_PRECIPITATING-ELECTRONS-IONS 0 -DMSP-F18_SSJ_PRECIPITATING-ELECTRONS-IONS 0 +DMSP-F13_SSJ_PRECIPITATING-ELECTRONS-IONS 0 +DMSP-F16_SSJ_PRECIPITATING-ELECTRONS-IONS 1 alternate_view +DMSP-F17_SSJ_PRECIPITATING-ELECTRONS-IONS 1 alternate_view +DMSP-F18_SSJ_PRECIPITATING-ELECTRONS-IONS 1 alternate_view DMSP-F16_SSM_MAGNETOMETER 0 DMSP-F17_SSM_MAGNETOMETER 0 DMSP-F18_SSM_MAGNETOMETER 0 -DMSP-F16_SSIES-3_THERMAL-PLASMA 0 -DMSP-F17_SSIES-3_THERMAL-PLASMA 0 -DMSP-F18_SSIES-3_THERMAL-PLASMA 0 +DMSP-F16_SSIES-3_THERMAL-PLASMA 1 alternate_view +DMSP-F17_SSIES-3_THERMAL-PLASMA 1 alternate_view +DMSP-F18_SSIES-3_THERMAL-PLASMA 1 alternate_view DN_K0_GBAY 0 DN_K0_HANK 0 DN_K0_ICEW 0 @@ -527,15 +740,16 @@ DSCOVR_ORBIT_PRE 0 DSCOVR_AT_DEF 0 DSCOVR_AT_PRE 0 DSCOVR_H0_MAG 0 -DSCOVR_H1_FC 0 +DSCOVR_H1_FC 1 alternate_view 
+DYNAMO-2_DESA_NX02A-ESA-FLUX 0 ELA_L1_STATE_PRED 0 ELB_L1_STATE_PRED 0 ELA_L1_STATE_DEFN 0 ELB_L1_STATE_DEFN 0 -ELA_L1_EPDEF 0 -ELB_L1_EPDEF 0 -ELA_L1_EPDIF 0 -ELB_L1_EPDIF 0 +ELA_L1_EPDEF 1 alternate_view +ELB_L1_EPDEF 1 alternate_view +ELA_L1_EPDIF 1 alternate_view +ELB_L1_EPDIF 1 alternate_view ENDURANCE_EPHEMERIS_DEF 0 ERG_ORB_L3 0 ERG_ORB_L2 0 @@ -543,13 +757,13 @@ ERG_ORB_LPRE_L2 0 ERG_ORB_MPRE_L2 0 ERG_ORB_PRE_L2 0 ERG_ORB_SPRE_L2 0 -ERG_HEP_L2_OMNIFLUX 0 -ERG_MEPE_L2_OMNIFLUX 0 +ERG_HEP_L2_OMNIFLUX 1 alternate_view +ERG_MEPE_L2_OMNIFLUX 1 alternate_view ERG_MEPE_L2_3DFLUX 1 arr_slice -ERG_MEPI_L2_OMNIFLUX 0 +ERG_MEPI_L2_OMNIFLUX 1 alternate_view ERG_MEPI_L2_3DFLUX 1 arr_slice -ERG_LEPE_L2_OMNIFLUX 0 -ERG_LEPI_L2_OMNIFLUX 0 +ERG_LEPE_L2_OMNIFLUX 1 alternate_view +ERG_LEPI_L2_OMNIFLUX 1 alternate_view ERG_MGF_L2_8SEC 0 ERG_PWE_EFD_L2_E_SPIN 0 ERG_PWE_EFD_L2_POT 0 @@ -557,7 +771,7 @@ ERG_PWE_OFA_L2_SPEC 0 ERG_PWE_HFA_L2_SPEC_HIGH 1 arr_slice ERG_PWE_HFA_L2_SPEC_LOW 1 arr_slice ERG_PWE_HFA_L2_SPEC_MONIT 1 arr_slice -ERG_XEP_L2_OMNIFLUX 0 +ERG_XEP_L2_OMNIFLUX 1 alternate_view EQ_PP_EDI 0 EQ_PP_EPI 0 EQ_PP_ICI 0 @@ -566,11 +780,11 @@ EQ_PP_PCD 0 EQ_SP_SFD 0 EQ_PP_AUX 0 FAST_HR_DCB 0 -FAST_OR_SSC 0 +FAST_OR_SSC 1 alternate_view FAST_TEAMS_PA_L2 0 -FA_K0_ACF 0 -FA_K0_TMS 0 -FM_K0_KILP 0 +FA_K0_ACF 1 alternate_view +FA_K0_TMS 1 alternate_view +FM_K0_KILP 1 alternate_view FORMOSAT5_AIP_IDN 0 GE_HPAMOM_CPI 0 GE_EDA3SEC_MGF 0 @@ -579,15 +793,15 @@ GE_EDA12SEC_LEP 0 GE_EDB12SEC_LEP 0 GE_1MIN_MAG_PLASMA_SW_ONLY 0 GE_K0_MGF 0 -GE_SW_CPI 0 +GE_SW_CPI 1 alternate_view GE_H0_CPI 0 GE_K0_CPI 0 GE_K0_EFD 0 -GE_K0_EPI 0 +GE_K0_EPI 1 alternate_view GE_K0_LEP 0 GE_K0_PWI 0 GE_OR_DEF 0 -GE_OR_PRE 0 +GE_OR_PRE 1 alternate_view GE_AT_DEF 0 GE_AT_PRE 0 GE_K0_SPHA 0 @@ -595,12 +809,12 @@ GENESIS_3DL2_GIM 0 GOES15_EPEAD-SCIENCE-ELECTRONS-E13EW_1MIN 0 GOES14_EPEAD-SCIENCE-ELECTRONS-E13EW_1MIN 0 GOES13_EPEAD-SCIENCE-ELECTRONS-E13EW_1MIN 0 -GOES15_EPS-MAGED_1MIN 0 -GOES14_EPS-MAGED_1MIN 
0 -GOES13_EPS-MAGED_1MIN 0 -GOES15_EPS-MAGED_5MIN 0 -GOES14_EPS-MAGED_5MIN 0 -GOES13_EPS-MAGED_5MIN 0 +GOES15_EPS-MAGED_1MIN 1 alternate_view +GOES14_EPS-MAGED_1MIN 1 alternate_view +GOES13_EPS-MAGED_1MIN 1 alternate_view +GOES15_EPS-MAGED_5MIN 1 alternate_view +GOES14_EPS-MAGED_5MIN 1 alternate_view +GOES13_EPS-MAGED_5MIN 1 alternate_view GOES15_EPS-PITCH-ANGLES_1MIN 0 GOES14_EPS-PITCH-ANGLES_1MIN 0 GOES13_EPS-PITCH-ANGLES_1MIN 0 @@ -632,31 +846,31 @@ G6_K0_MAG 0 GOES12_K0_MAG 0 GOES11_K0_MAG 0 GOES18_EPHEMERIS_SSC 0 -GOES17_EPHEMERIS_SSC 0 -GOES16_EPHEMERIS_SSC 0 -GOES15_EPHEMERIS_SSC 0 -GOES14_EPHEMERIS_SSC 0 -GOES13_EPHEMERIS_SSC 0 -GOES12_EPHEMERIS_SSC 0 -GOES11_EPHEMERIS_SSC 0 -GOES10_EPHEMERIS_SSC 0 -GOES9_EPHEMERIS_SSC 0 -GOES8_EPHEMERIS_SSC 0 -GOLD_L2_NMAX 0 +GOES17_EPHEMERIS_SSC 1 alternate_view +GOES16_EPHEMERIS_SSC 1 alternate_view +GOES15_EPHEMERIS_SSC 1 alternate_view +GOES14_EPHEMERIS_SSC 1 alternate_view +GOES13_EPHEMERIS_SSC 1 alternate_view +GOES12_EPHEMERIS_SSC 1 alternate_view +GOES11_EPHEMERIS_SSC 1 alternate_view +GOES10_EPHEMERIS_SSC 1 alternate_view +GOES9_EPHEMERIS_SSC 1 alternate_view +GOES8_EPHEMERIS_SSC 1 alternate_view +GOLD_L2_NMAX 1 alternate_view GOLD_L2_O2DEN 0 GOLD_L2_ON2 0 GOLD_L2_TDISK 0 -GPS_TEC2HR_IGS 0 -GPS_TEC1HR_IGS 0 -GPS_TEC15MIN_IGS 0 -GPS_ROTI15MIN_JPL 0 -GPS_RF_LANL-VTEC-1HR 0 +GPS_TEC2HR_IGS 1 alternate_view +GPS_TEC1HR_IGS 1 alternate_view +GPS_TEC15MIN_IGS 1 alternate_view +GPS_ROTI15MIN_JPL 1 alternate_view +GPS_RF_LANL-VTEC-1HR 1 alternate_view HEL1_6SEC_NESSMAG 0 HEL2_6SEC_NESSMAG 0 -HELIOS1_E6_KUNOW_1HOUR_PARTICLE-FLUX 0 -HELIOS2_E6_KUNOW_1HOUR_PARTICLE-FLUX 0 -HELIOS1_E6_1HOUR_PARTICLE_FLUX 0 -HELIOS2_E6_1HOUR_PARTICLE_FLUX 0 +HELIOS1_E6_KUNOW_1HOUR_PARTICLE-FLUX 1 alternate_view +HELIOS2_E6_KUNOW_1HOUR_PARTICLE-FLUX 1 alternate_view +HELIOS1_E6_1HOUR_PARTICLE_FLUX 1 alternate_view +HELIOS2_E6_1HOUR_PARTICLE_FLUX 1 alternate_view HELIOS1_COHO1HR_MERGED_MAG_PLASMA 0 HELIOS2_COHO1HR_MERGED_MAG_PLASMA 0 
HELIOS2_40SEC_MAG-PLASMA 0 @@ -670,8 +884,8 @@ ALOUETTE2_AV_QUI 0 ALOUETTE2_AV_LIM 0 ALOUETTE2_AV_SNT 0 ALOUETTE2_AV_SOL 0 -ISS_DOSANL_TEPC 0 ISS_SP_FPMU 0 +ISS_DOSANL_TEPC 0 ISS_27DAY-AVERAGES_AMS-02 0 I1_AV2_OTT 0 I1_AV2_QUI 0 @@ -717,90 +931,180 @@ I2_AV_ULA 0 I2_AV_WNK 0 I2_NEPROF_TOPS 0 I2_NEPROF_TOPIST 0 -I8_OR_SSC 0 +I8_OR_SSC 1 alternate_view I8_320MSEC_MAG 0 -I8_15SEC_MAG 0 -I8_H0_GME 0 -I8_H0_MITPLASMA 1 region_filt +I8_15SEC_MAG 1 alternate_view +I8_H0_GME 1 alternate_view +I8_H0_MITPLASMA 1 alternate_view,region_filt IA_K0_MFI 0 IA_K0_ENF 0 IA_K0_EPI 0 IA_K0_ICD 0 IA_OR_DEF 0 -IBEX_OR_SSC 0 -IBEX_H3_ENA_LO_R17_CG_NOSP_OMNI_1YR 0 -IBEX_H3_ENA_LO_R17_CG_SP_OMNI_1YR 0 -IBEX_H3_ENA_LO_R17_CG_NOSP_ANTIRAM_1YR 0 -IBEX_H3_ENA_LO_R17_CG_NOSP_RAM_1YR 0 -IBEX_H3_ENA_LO_R17_CG_SP_ANTIRAM_1YR 0 -IBEX_H3_ENA_LO_R17_CG_SP_RAM_1YR 0 -IBEX_H3_ENA_LO_R17_NOCG_NOSP_RAM_1YR 0 -IBEX_H3_ENA_HI_R16_NOCG_SP_RAM_EQUA_11YR 0 -IBEX_H3_ENA_HI_R16_NOCG_SP_RAM_GALA_11YR 0 -IBEX_H3_ENA_HI_R16_CG_NOSP_OMNI_11YR 0 -IBEX_H3_ENA_HI_R16_CG_SP_OMNI_11YR 0 -IBEX_H3_ENA_HI_R16_NOCG_NOSP_OMNI_11YR 0 -IBEX_H3_ENA_HI_R16_NOCG_SP_OMNI_11YR 0 -IBEX_H3_ENA_HI_R16_CG_NOSP_RAM_11YR 0 -IBEX_H3_ENA_HI_R16_CG_SP_RAM_11YR 0 -IBEX_H3_ENA_HI_R16_NOCG_NOSP_RAM_11YR 0 -IBEX_H3_ENA_HI_R16_NOCG_SP_RAM_11YR 0 -IBEX_H3_ENA_HI_R16_CG_SP_ANTIRAM_11YR 0 -IBEX_H3_ENA_HI_R16_NOCG_NOSP_ANTIRAM_11YR 0 -IBEX_H3_ENA_HI_R16_NOCG_SP_ANTIRAM_11YR 0 -IBEX_H3_ENA_HI_R16_NOCG_NOSP_RAM_1YR 0 -IBEX_H3_ENA_HI_R16_NOCG_SP_RAM_1YR 0 -IBEX_H3_ENA_HI_R16_CG_SP_RAM_1YR 0 -IBEX_H3_ENA_HI_R16_CG_NOSP_RAM_1YR 0 -IBEX_H3_ENA_HI_R16_NOCG_NOSP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R16_CG_SP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R16_CG_NOSP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R16_CG_SP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R16_NOCG_NOSP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R16_NOCG_SP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R16_CG_SP_OMNI_2YR 0 -IBEX_H3_ENA_HI_R16_CG_SP_OMNI_3YR 0 -IBEX_H3_ENA_HI_R15_CG_SP_RIBCEN_1YR 0 -IBEX_H3_ENA_HI_R15_CG_SP_RIBCEN_2YR 0 +IBEX_OR_SSC 1 alternate_view 
+IBEX_H3_ENA_HI_R18_CG_NOSP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_NOSP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_NOSP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_NOSP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_UPPER_RIBBON_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_RAM_UPPER_RIBBON_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_LOWER_RIBBON_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_MEDIAN_RIBBON_1YR 1 alternate_view +IBEX_H3_ENA_R18_STATE_VECTOR 0 +IBEX_H3_ENA_HI_R18_CG_NOSP_ANTIRAM_MONO_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_MONO_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_NOSP_OMNI_MONO_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_OMNI_MONO_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_NOSP_RAM_MONO_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_MONO_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_OMNI_MONO_6MO 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_NOSP_OMNI_MONO_6MO 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_NOSP_ANTIRAM_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_NOSP_RAM_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_RAM_EQUA_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_RAM_GALA_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_NOSP_OMNI_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_NOSP_OMNI_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_OMNI_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_OMNI_14YR 1 alternate_view 
+IBEX_H3_ENA_HI_R18_CG_SP_RAM_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_NOSP_RAM_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_RAM_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_NOSP_RAM_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_NOSP_ANTIRAM_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_NOSP_ANTIRAM_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_ANTIRAM_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_14YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_LOWER_GDF_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_LOWER_GDF_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_MEDIAN_GDF_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_MEDIAN_GDF_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_UPPER_GDF_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_UPPER_GDF_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_LOWER_GDF_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_LOWER_GDF_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_MEDIAN_GDF_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_MEDIAN_GDF_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_UPPER_GDF_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_UPPER_GDF_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_ANTIRAM_LOWER_GDF_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_ANTIRAM_MEDIAN_GDF_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_ANTIRAM_UPPER_GDF_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_RAM_LOWER_GDF_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_RAM_MEDIAN_GDF_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_RAM_UPPER_GDF_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_RAM_LOWER_RIBBON_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_RAM_MEDIAN_RIBBON_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_LOWER_RIBBON_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_ANTIRAM_LOWER_RIBBON_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_MEDIAN_RIBBON_1YR 1 alternate_view 
+IBEX_H3_ENA_HI_R18_NOCG_SP_ANTIRAM_MEDIAN_RIBBON_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_UPPER_RIBBON_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_NOCG_SP_ANTIRAM_UPPER_RIBBON_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_LOWER_RIBBON_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_LOWER_RIBBON_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_MEDIAN_RIBBON_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_MEDIAN_RIBBON_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_ANTIRAM_UPPER_RIBBON_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R18_CG_SP_RAM_UPPER_RIBBON_MONO_1YR 1 alternate_view +IBEX_H3_ENA_LO_R17_CG_NOSP_OMNI_1YR 1 alternate_view +IBEX_H3_ENA_LO_R17_CG_SP_OMNI_1YR 1 alternate_view +IBEX_H3_ENA_LO_R17_CG_NOSP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_LO_R17_CG_NOSP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_LO_R17_CG_SP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_LO_R17_CG_SP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_LO_R17_NOCG_NOSP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_NOSP_ANTIRAM_MONO_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_NOSP_RAM_MONO_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_ANTIRAM_MONO_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_RAM_MONO_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_NOSP_ANTIRAM_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_ANTIRAM_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_NOSP_RAM_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_NOSP_ANTIRAM_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_NOSP_OMNI_MONO_6MO 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_OMNI_MONO_6MO 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_NOSP_OMNI_MONO_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_OMNI_MONO_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_SP_RAM_EQUA_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_SP_RAM_GALA_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_NOSP_OMNI_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_OMNI_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_NOSP_OMNI_11YR 1 
alternate_view +IBEX_H3_ENA_HI_R16_NOCG_SP_OMNI_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_NOSP_RAM_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_RAM_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_NOSP_RAM_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_SP_RAM_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_ANTIRAM_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_NOSP_ANTIRAM_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_SP_ANTIRAM_11YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_NOSP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_RAM_MONO_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_SP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_NOSP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_SP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_NOSP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_NOSP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R16_NOCG_SP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_OMNI_2YR 1 alternate_view +IBEX_H3_ENA_HI_R16_CG_SP_OMNI_3YR 1 alternate_view +IBEX_H3_ENA_HI_R15_CG_SP_RIBCEN_1YR 1 alternate_view +IBEX_H3_ENA_HI_R15_CG_SP_RIBCEN_2YR 1 alternate_view IBEX_H3_ENA_HI_R15_CG_SP_RIBCEN_3YR 0 -IBEX_H3_ENA_HI_R14_PRESS-SLOPE_CG_NOSP_RAM_3YR 0 -IBEX_H3_ENA_HI_R13_CG_NOSP_RAM_1YR 0 -IBEX_H3_ENA_HI_R13_CG_NOSP_ANTIRAM_1YR 0 +IBEX_H3_ENA_HI_R14_PRESS-SLOPE_CG_NOSP_RAM_3YR 1 alternate_view +IBEX_H3_ENA_HI_R13_CG_NOSP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R13_CG_NOSP_ANTIRAM_1YR 1 alternate_view IBEX_H3_ENA_HI_R12_MAGNETOSPHERE_24MIN 0 IBEX_H3_ENA_LO_R11_SCHWADRON-INTERSTELLAR-O 0 -IBEX_H3_ENA_LO_R10_NOCG_NOSP_OMNI_6MO 0 -IBEX_H3_ENA_LO_R10_NOCG_SP_OMNI_6MO 0 -IBEX_H3_ENA_LO_R10_NOCG_NOSP_OMNI_7YR 0 -IBEX_H3_ENA_LO_R10_NOCG_SP_OMNI_7YR 0 
-IBEX_H3_ENA_HI_R10_CG_SP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R10_NOCG_NOSP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R10_NOCG_SP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R10_CG_NOSP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R10_CG_NOSP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R10_CG_NOSP_RAM_1YR 0 -IBEX_H3_ENA_HI_R10_CG_SP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R10_CG_SP_RAM_1YR 0 -IBEX_H3_ENA_HI_R10_NOCG_NOSP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R10_NOCG_NOSP_RAM_1YR 0 -IBEX_H3_ENA_HI_R10_NOCG_SP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R10_NOCG_SP_RAM_1YR 0 -IBEX_H3_ENA_HI_R10_CG_NOSP_RAM_7YR 0 -IBEX_H3_ENA_HI_R10_CG_NOSP_ANTIRAM_7YR 0 -IBEX_H3_ENA_HI_R10_CG_NOSP_OMNI_7YR 0 -IBEX_H3_ENA_HI_R10_CG_SP_OMNI_7YR 0 -IBEX_H3_ENA_HI_R10_CG_SP_ANTIRAM_7YR 0 -IBEX_H3_ENA_HI_R10_CG_SP_RAM_7YR 0 -IBEX_H3_ENA_HI_R10_NOCG_NOSP_ANTIRAM_7YR 0 -IBEX_H3_ENA_HI_R10_NOCG_NOSP_OMNI_7YR 0 -IBEX_H3_ENA_HI_R10_NOCG_SP_ANTIRAM_7YR 0 -IBEX_H3_ENA_HI_R10_NOCG_SP_OMNI_7YR 0 +IBEX_H3_ENA_LO_R10_NOCG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_LO_R10_NOCG_SP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_LO_R10_NOCG_NOSP_OMNI_7YR 1 alternate_view +IBEX_H3_ENA_LO_R10_NOCG_SP_OMNI_7YR 1 alternate_view +IBEX_H3_ENA_HI_R10_CG_SP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R10_NOCG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R10_NOCG_SP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R10_CG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R10_CG_NOSP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R10_CG_NOSP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R10_CG_SP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R10_CG_SP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R10_NOCG_NOSP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R10_NOCG_NOSP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R10_NOCG_SP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R10_NOCG_SP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R10_CG_NOSP_RAM_7YR 1 alternate_view +IBEX_H3_ENA_HI_R10_CG_NOSP_ANTIRAM_7YR 1 alternate_view +IBEX_H3_ENA_HI_R10_CG_NOSP_OMNI_7YR 1 alternate_view +IBEX_H3_ENA_HI_R10_CG_SP_OMNI_7YR 1 alternate_view 
+IBEX_H3_ENA_HI_R10_CG_SP_ANTIRAM_7YR 1 alternate_view +IBEX_H3_ENA_HI_R10_CG_SP_RAM_7YR 1 alternate_view +IBEX_H3_ENA_HI_R10_NOCG_NOSP_ANTIRAM_7YR 1 alternate_view +IBEX_H3_ENA_HI_R10_NOCG_NOSP_OMNI_7YR 1 alternate_view +IBEX_H3_ENA_HI_R10_NOCG_SP_ANTIRAM_7YR 1 alternate_view +IBEX_H3_ENA_HI_R10_NOCG_SP_OMNI_7YR 1 alternate_view IBEX_H3_ENA_LO_R09_HEIRTZLER-H_7DAY 0 IBEX_H3_ENA_LO_R09_SCHWADRON-INTERSTELLAR-HE_1YR 0 -IBEX_H3_ENA_LO_R09_PARK_OMAP_STAT_3YR 0 -IBEX_H3_ENA_LO_R09_PARK_OMAP_6MO 0 -IBEX_H3_ENA_LO_R09_PARK_OMAP_3YR 0 -IBEX_H3_ENA_LO_R09_PARK_OMAP_3YR-ODD-MAPS 0 -IBEX_H3_ENA_LO_R09_PARK_OMAP_3YR-EVEN-MAPS 0 +IBEX_H3_ENA_LO_R09_PARK_OMAP_STAT_3YR 1 alternate_view +IBEX_H3_ENA_LO_R09_PARK_OMAP_6MO 1 alternate_view +IBEX_H3_ENA_LO_R09_PARK_OMAP_3YR 1 alternate_view +IBEX_H3_ENA_LO_R09_PARK_OMAP_3YR-ODD-MAPS 1 alternate_view +IBEX_H3_ENA_LO_R09_PARK_OMAP_3YR-EVEN-MAPS 1 alternate_view IBEX_H3_ENA_LO_R08_OMNI_F2-RIBBON-MAPS-GAL_5YR 0 IBEX_H3_ENA_LO_R08_OMNI_F2-RIBBON-MAPS-J2000_5YR 0 IBEX_H3_ENA_LO_R08_OMNI_F2-RIBBON-MAPS_5YR 0 @@ -813,91 +1117,97 @@ IBEX_H3_ENA_HI_R08_OMNI_F2-RIBBON-MAPS_5YR 0 IBEX_H3_ENA_HI_R08_OMNI_F3-GDF-MAPS-GALACTIC_5YR 0 IBEX_H3_ENA_HI_R08_OMNI_F3-GDF-MAPS-J2000_5YR 0 IBEX_H3_ENA_HI_R08_OMNI_F3-GDF-MAPS_5YR 0 -IBEX_H3_ENA_LO_R07_NOCG_SP_OMNI_6MO 0 -IBEX_H3_ENA_LO_R07_NOCG_NOSP_OMNI_6MO 0 -IBEX_H3_ENA_LO_R07_NOCG_SP_OMNI_VESCF_10MP 0 -IBEX_H3_ENA_LO_R07_NOCG_NOSP_OMNI_VESCF_10MP 0 -IBEX_H3_ENA_HI_R07_NOCG_SP_RAM_VESCF_5YR 0 -IBEX_H3_ENA_HI_R07_NOCG_SP_OMNI_VESCF_10MP 0 -IBEX_H3_ENA_HI_R07_NOCG_SP_ANTIRAM_VESCF_5YR 0 -IBEX_H3_ENA_HI_R07_NOCG_NOSP_RAM_VEIF_5YR 0 -IBEX_H3_ENA_HI_R07_NOCG_NOSP_OMNI_VESCF_10MP 0 -IBEX_H3_ENA_HI_R07_NOCG_NOSP_ANTIRAM_VESCF_5YR 0 -IBEX_H3_ENA_HI_R07_CG_SP_RAM_VEIF_5YR 0 -IBEX_H3_ENA_HI_R07_CG_SP_OMNI_VEIF_10MP 0 -IBEX_H3_ENA_HI_R07_CG_SP_ANTIRAM_VEIF_5YR 0 -IBEX_H3_ENA_HI_R07_CG_NOSP_RAM_VEIF_5YR 0 -IBEX_H3_ENA_HI_R07_CG_NOSP_ANTIRAM_VEIF_5YR 0 -IBEX_H3_ENA_HI_R07_CG_NOSP_OMNI_MIF_10MP 0 
-IBEX_H3_ENA_HI_R07_CG_SP_OMNI_MIF_10MP 0 -IBEX_H3_ENA_HI_R07_CG_NOSP_ANTIRAM_MIF_5YR 0 -IBEX_H3_ENA_HI_R07_CG_SP_ANTIRAM_MIF_5YR 0 -IBEX_H3_ENA_HI_R07_CG_SP_RAM_MIF_5YR 0 -IBEX_H3_ENA_HI_R07_CG_NOSP_RAM_MIF_5YR 0 -IBEX_H3_ENA_HI_R07_CG_NOSP_OMNI_VEIF_10MP 0 -IBEX_H3_ENA_HI_R07_CG_NOSP_RAM_1YR 0 -IBEX_H3_ENA_HI_R07_NOCG_NOSP_RAM_1YR 0 -IBEX_H3_ENA_HI_R07_CG_SP_RAM_1YR 0 -IBEX_H3_ENA_HI_R07_NOCG_SP_RAM_1YR 0 -IBEX_H3_ENA_HI_R07_NOCG_SP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R07_CG_SP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R07_NOCG_NOSP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R07_CG_NOSP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R07_CG_SP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R07_NOCG_NOSP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R07_NOCG_SP_OMNI_6MO 0 +IBEX_H3_ENA_LO_R07_NOCG_SP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_LO_R07_NOCG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_LO_R07_NOCG_SP_OMNI_VESCF_10MP 1 alternate_view +IBEX_H3_ENA_LO_R07_NOCG_NOSP_OMNI_VESCF_10MP 1 alternate_view +IBEX_H3_ENA_HI_R07_NOCG_SP_RAM_VESCF_5YR 1 alternate_view +IBEX_H3_ENA_HI_R07_NOCG_SP_OMNI_VESCF_10MP 1 alternate_view +IBEX_H3_ENA_HI_R07_NOCG_SP_ANTIRAM_VESCF_5YR 1 alternate_view +IBEX_H3_ENA_HI_R07_NOCG_NOSP_RAM_VEIF_5YR 1 alternate_view +IBEX_H3_ENA_HI_R07_NOCG_NOSP_OMNI_VESCF_10MP 1 alternate_view +IBEX_H3_ENA_HI_R07_NOCG_NOSP_ANTIRAM_VESCF_5YR 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_SP_RAM_VEIF_5YR 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_SP_OMNI_VEIF_10MP 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_SP_ANTIRAM_VEIF_5YR 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_NOSP_RAM_VEIF_5YR 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_NOSP_ANTIRAM_VEIF_5YR 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_NOSP_OMNI_MIF_10MP 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_SP_OMNI_MIF_10MP 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_NOSP_ANTIRAM_MIF_5YR 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_SP_ANTIRAM_MIF_5YR 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_SP_RAM_MIF_5YR 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_NOSP_RAM_MIF_5YR 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_NOSP_OMNI_VEIF_10MP 1 alternate_view 
+IBEX_H3_ENA_HI_R07_CG_NOSP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R07_NOCG_NOSP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_SP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R07_NOCG_SP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R07_NOCG_SP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_SP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R07_NOCG_NOSP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R07_CG_SP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R07_NOCG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R07_NOCG_SP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_LO_R06_STATE_VECTORS 0 IBEX_H3_ENA_LO_R06_SCHWADRON-H 0 IBEX_H3_ENA_LO_R06_SCHWADRON-HE 0 IBEX_H3_ENA_R05_MARINER-H-HE 0 -IBEX_H3_ENA_LO_R04_CG_SP_RAM_3YR 0 -IBEX_H3_ENA_LO_R04_CG_SP_ANTIRAM_3YR 0 +IBEX_H3_ENA_LO_R04_CG_SP_RAM_3YR 1 alternate_view +IBEX_H3_ENA_LO_R04_CG_SP_ANTIRAM_3YR 1 alternate_view IBEX_H3_ENA_HI_R04_LOSS_SCF_7DAY 0 IBEX_H3_ENA_HI_R04_LOSS_1AU_7DAY 0 IBEX_H3_ENA_HI_R04_DEFLECTION_SCF_7DAY 0 IBEX_H3_ENA_HI_R04_DEFLECTION_1AU_7DAY 0 IBEX_H3_ENA_HI_R04_SURVPRO_SCF_7DAY 0 -IBEX_H3_ENA_HI_R04_NOCG_NOSP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R04_CG_NOSP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R04_CG_SP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R04_NOCG_SP_ANTIRAM_1YR 0 -IBEX_H3_ENA_HI_R04_CG_NOSP_RAM_1YR 0 -IBEX_H3_ENA_HI_R04_CG_SP_RAM_1YR 0 -IBEX_H3_ENA_HI_R04_NOCG_NOSP_RAM_1YR 0 -IBEX_H3_ENA_HI_R04_NOCG_SP_RAM_1YR 0 -IBEX_H3_ENA_HI_R04_CG_SP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R04_CG_NOSP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R04_NOCG_SP_OMNI_6MO 0 -IBEX_H3_ENA_HI_R04_NOCG_NOSP_OMNI_6MO 0 IBEX_H3_ENA_HI_R04_SURVPRO_1AU_7DAY 0 +IBEX_H3_ENA_HI_R04_NOCG_NOSP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R04_CG_NOSP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R04_CG_SP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R04_NOCG_SP_ANTIRAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R04_CG_NOSP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R04_CG_SP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R04_NOCG_NOSP_RAM_1YR 1 
alternate_view +IBEX_H3_ENA_HI_R04_NOCG_SP_RAM_1YR 1 alternate_view +IBEX_H3_ENA_HI_R04_CG_SP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R04_CG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R04_NOCG_SP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R04_NOCG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R04_NOCG_NOSP_OMNI_3YR 1 alternate_view +IBEX_H3_ENA_HI_R04_NOCG_NOSP_OMNI_6MP 1 alternate_view IBEX_H3_ENA_LO_R03_ANGULAR_ANALYSIS_7DAY 0 IBEX_H3_ENA_LO_R03_CT_RATE_ANALYSIS_7DAY 0 -ICON_L2-1_MIGHTI-A_LOS-WIND-GREEN 0 -ICON_L2-1_MIGHTI-B_LOS-WIND-GREEN 0 -ICON_L2-1_MIGHTI-A_LOS-WIND-RED 0 -ICON_L2-1_MIGHTI-B_LOS-WIND-RED 0 -ICON_L2-2_MIGHTI_VECTOR-WIND-GREEN 0 -ICON_L2-2_MIGHTI_VECTOR-WIND-RED 0 -ICON_L2-3_MIGHTI-A_TEMPERATURE 0 -ICON_L2-3_MIGHTI-B_TEMPERATURE 0 -ICON_L2-4_FUV_DAY 0 -ICON_L2-5_FUV_NIGHT 0 -ICON_L2-6_EUV 0 -ICON_L2-7_IVM-A 0 -ICON_L2-7_IVM-B 0 +IBEX_H3_ENA_HI_R02_NOCG_NOSP_OMNI_6MO 1 alternate_view +IBEX_H3_ENA_HI_R02_CG_NOSP_OMNI_6MO 1 alternate_view +ICON_L2-1_MIGHTI-A_LOS-WIND-GREEN 1 +ICON_L2-1_MIGHTI-B_LOS-WIND-GREEN 1 +ICON_L2-1_MIGHTI-A_LOS-WIND-RED 1 +ICON_L2-1_MIGHTI-B_LOS-WIND-RED 1 +ICON_L2-2_MIGHTI_VECTOR-WIND-GREEN 1 +ICON_L2-2_MIGHTI_VECTOR-WIND-RED 1 +ICON_L2-3_MIGHTI-A_TEMPERATURE 1 alternate_view +ICON_L2-3_MIGHTI-B_TEMPERATURE 1 alternate_view +ICON_L2-4_FUV_DAY 1 +ICON_L2-4_FUV_DAY-LIMB 1 +ICON_L2-5_FUV_NIGHT 1 alternate_view +ICON_L2-6_EUV 1 alternate_view +ICON_L2-7_IVM-A 1 +ICON_L2-7_IVM-B 1 IG_K0_PCI 0 IMAGE_M2_EUV 0 -IM_K0_EUV 1 convert_log10 -IM_K0_SIE 1 convert_log10 -IM_K0_SIP 1 convert_log10 -IM_K0_WIC 1 convert_log10 +IM_K0_EUV 1 alternate_view,convert_log10 +IM_K0_SIE 1 alternate_view,convert_log10 +IM_K0_SIP 1 alternate_view,convert_log10 +IM_K0_WIC 1 alternate_view,convert_log10 IM_K0_HENA 0 -IM_K0_LENA 0 -IM_K0_MENA 0 -IM_K0_RPI 0 +IM_K0_LENA 1 alternate_view +IM_K0_MENA 1 alternate_view +IM_K0_RPI 1 alternate_view IM_K1_RPI 0 IM_ELECTRON_DENSITY_RPI 0 -IM_OR_DEF 1 ALTERNATE_VIEW -IM_OR_PRE 1 ALTERNATE_VIEW +IM_OR_DEF 1 
alternate_view +IM_OR_PRE 1 alternate_view IM_HK_ADS 0 IM_HK_AST 0 IM_HK_COM 0 @@ -941,6 +1251,8 @@ L7_K0_MPA 0 L7_K0_SPA 0 A1_K0_MPA 0 A2_K0_MPA 0 +MGS_MAG_LOW 0 +MGS_MAG_HIGH 0 MSL_HELIO1DAY_POSITION 0 MSL_RAD_OBS-L2 0 MSL_RAD_OBS-L1 0 @@ -948,18 +1260,18 @@ MVN_MAG_L2-SUNSTATE-1SEC 0 MVN_INSITU_KP-4SEC 0 MVN_STA_L2_D8-12R1E 0 MVN_STA_L2_D9-12R64E 0 -MVN_STA_L2_DA-1R64E 0 +MVN_STA_L2_DA-1R64E 1 alternate_view MVN_SWI_L2_ONBOARDSVYMOM 0 -MVN_SWI_L2_ONBOARDSVYSPEC 0 -MVN_SWI_L2_FINEARC3D 0 -MVN_SWI_L2_COARSEARC3D 0 -MVN_SWI_L2_COARSESVY3D 0 -MVN_SWI_L2_FINESVY3D 0 -MVN_SWE_L2_SVY3D 0 -MVN_SWE_L2_SVYSPEC 0 -MVN_SWE_L2_ARC3D 0 -MVN_SWE_L2_ARCPAD 1 arr_slice -MVN_SWE_L2_SVYPAD 1 arr_slice +MVN_SWI_L2_ONBOARDSVYSPEC 1 alternate_view +MVN_SWI_L2_FINEARC3D 1 alternate_view +MVN_SWI_L2_COARSEARC3D 1 alternate_view +MVN_SWI_L2_COARSESVY3D 1 alternate_view +MVN_SWI_L2_FINESVY3D 1 alternate_view +MVN_SWE_L2_SVY3D 1 alternate_view +MVN_SWE_L2_SVYSPEC 1 alternate_view +MVN_SWE_L2_ARC3D 1 alternate_view +MVN_SWE_L2_ARCPAD 1 alternate_view,arr_slice +MVN_SWE_L2_SVYPAD 1 alternate_view,arr_slice MVN_SEP_L2_S1-CAL-SVY-FULL 0 MVN_SEP_L2_S2-CAL-SVY-FULL 0 MESSENGER_MAG_RTN 0 @@ -983,34 +1295,34 @@ MMS1_DSP_SLOW_L2_EPSD 0 MMS2_DSP_SLOW_L2_EPSD 0 MMS3_DSP_SLOW_L2_EPSD 0 MMS4_DSP_SLOW_L2_EPSD 0 -MMS1_EDI_SRVY_L2_AMB 0 -MMS2_EDI_SRVY_L2_AMB 0 -MMS3_EDI_SRVY_L2_AMB 0 -MMS4_EDI_SRVY_L2_AMB 0 -MMS1_EDI_SRVY_L2_AMB-PM2 0 -MMS2_EDI_SRVY_L2_AMB-PM2 0 -MMS3_EDI_SRVY_L2_AMB-PM2 0 -MMS4_EDI_SRVY_L2_AMB-PM2 0 -MMS1_EDI_SRVY_L2_EFIELD 0 -MMS2_EDI_SRVY_L2_EFIELD 0 -MMS3_EDI_SRVY_L2_EFIELD 0 -MMS4_EDI_SRVY_L2_EFIELD 0 +MMS1_EDI_SRVY_L2_AMB 1 alternate_view +MMS2_EDI_SRVY_L2_AMB 1 alternate_view +MMS3_EDI_SRVY_L2_AMB 1 alternate_view +MMS4_EDI_SRVY_L2_AMB 1 alternate_view +MMS1_EDI_SRVY_L2_AMB-PM2 1 alternate_view +MMS2_EDI_SRVY_L2_AMB-PM2 1 alternate_view +MMS3_EDI_SRVY_L2_AMB-PM2 1 alternate_view +MMS4_EDI_SRVY_L2_AMB-PM2 1 alternate_view +MMS1_EDI_SRVY_L2_EFIELD 1 alternate_view 
+MMS2_EDI_SRVY_L2_EFIELD 1 alternate_view +MMS3_EDI_SRVY_L2_EFIELD 1 alternate_view +MMS4_EDI_SRVY_L2_EFIELD 1 alternate_view MMS1_EDI_SRVY_L2_Q0 0 MMS2_EDI_SRVY_L2_Q0 0 MMS3_EDI_SRVY_L2_Q0 0 MMS4_EDI_SRVY_L2_Q0 0 -MMS1_EDI_BRST_L2_AMB 0 -MMS2_EDI_BRST_L2_AMB 0 -MMS3_EDI_BRST_L2_AMB 0 -MMS4_EDI_BRST_L2_AMB 0 -MMS1_EDI_BRST_L2_AMB-PM2 0 -MMS2_EDI_BRST_L2_AMB-PM2 0 -MMS3_EDI_BRST_L2_AMB-PM2 0 -MMS4_EDI_BRST_L2_AMB-PM2 0 -MMS1_EDI_BRST_L2_EFIELD 0 -MMS2_EDI_BRST_L2_EFIELD 0 -MMS3_EDI_BRST_L2_EFIELD 0 -MMS4_EDI_BRST_L2_EFIELD 0 +MMS1_EDI_BRST_L2_AMB 1 alternate_view +MMS2_EDI_BRST_L2_AMB 1 alternate_view +MMS3_EDI_BRST_L2_AMB 1 alternate_view +MMS4_EDI_BRST_L2_AMB 1 alternate_view +MMS1_EDI_BRST_L2_AMB-PM2 1 alternate_view +MMS2_EDI_BRST_L2_AMB-PM2 1 alternate_view +MMS3_EDI_BRST_L2_AMB-PM2 1 alternate_view +MMS4_EDI_BRST_L2_AMB-PM2 1 alternate_view +MMS1_EDI_BRST_L2_EFIELD 1 alternate_view +MMS2_EDI_BRST_L2_EFIELD 1 alternate_view +MMS3_EDI_BRST_L2_EFIELD 1 alternate_view +MMS4_EDI_BRST_L2_EFIELD 1 alternate_view MMS1_EDI_BRST_L2_Q0 0 MMS2_EDI_BRST_L2_Q0 0 MMS3_EDI_BRST_L2_Q0 0 @@ -1083,38 +1395,38 @@ MMS1_FEEPS_BRST_L2_ION 0 MMS2_FEEPS_BRST_L2_ION 0 MMS3_FEEPS_BRST_L2_ION 0 MMS4_FEEPS_BRST_L2_ION 0 -MMS1_FPI_BRST_L2_DES-DIST 0 -MMS2_FPI_BRST_L2_DES-DIST 0 -MMS3_FPI_BRST_L2_DES-DIST 0 -MMS4_FPI_BRST_L2_DES-DIST 0 -MMS1_FPI_BRST_L2_DIS-DIST 0 -MMS2_FPI_BRST_L2_DIS-DIST 0 -MMS3_FPI_BRST_L2_DIS-DIST 0 -MMS4_FPI_BRST_L2_DIS-DIST 0 -MMS1_FPI_FAST_L2_DES-DIST 0 -MMS2_FPI_FAST_L2_DES-DIST 0 -MMS3_FPI_FAST_L2_DES-DIST 0 -MMS4_FPI_FAST_L2_DES-DIST 0 -MMS1_FPI_FAST_L2_DIS-DIST 0 -MMS2_FPI_FAST_L2_DIS-DIST 0 -MMS3_FPI_FAST_L2_DIS-DIST 0 -MMS4_FPI_FAST_L2_DIS-DIST 0 -MMS1_FPI_FAST_L2_DIS-MOMS 0 -MMS2_FPI_FAST_L2_DIS-MOMS 0 -MMS3_FPI_FAST_L2_DIS-MOMS 0 -MMS4_FPI_FAST_L2_DIS-MOMS 0 -MMS1_FPI_BRST_L2_DIS-MOMS 0 -MMS2_FPI_BRST_L2_DIS-MOMS 0 -MMS3_FPI_BRST_L2_DIS-MOMS 0 -MMS4_FPI_BRST_L2_DIS-MOMS 0 -MMS1_FPI_FAST_L2_DES-MOMS 0 -MMS2_FPI_FAST_L2_DES-MOMS 0 -MMS3_FPI_FAST_L2_DES-MOMS 0 
-MMS4_FPI_FAST_L2_DES-MOMS 0 -MMS1_FPI_BRST_L2_DES-MOMS 0 -MMS2_FPI_BRST_L2_DES-MOMS 0 -MMS3_FPI_BRST_L2_DES-MOMS 0 -MMS4_FPI_BRST_L2_DES-MOMS 0 +MMS1_FPI_BRST_L2_DES-DIST 1 alternate_view +MMS2_FPI_BRST_L2_DES-DIST 1 alternate_view +MMS3_FPI_BRST_L2_DES-DIST 1 alternate_view +MMS4_FPI_BRST_L2_DES-DIST 1 alternate_view +MMS1_FPI_BRST_L2_DIS-DIST 1 alternate_view +MMS2_FPI_BRST_L2_DIS-DIST 1 alternate_view +MMS3_FPI_BRST_L2_DIS-DIST 1 alternate_view +MMS4_FPI_BRST_L2_DIS-DIST 1 alternate_view +MMS1_FPI_FAST_L2_DES-DIST 1 alternate_view +MMS2_FPI_FAST_L2_DES-DIST 1 alternate_view +MMS3_FPI_FAST_L2_DES-DIST 1 alternate_view +MMS4_FPI_FAST_L2_DES-DIST 1 alternate_view +MMS1_FPI_FAST_L2_DIS-DIST 1 alternate_view +MMS2_FPI_FAST_L2_DIS-DIST 1 alternate_view +MMS3_FPI_FAST_L2_DIS-DIST 1 alternate_view +MMS4_FPI_FAST_L2_DIS-DIST 1 alternate_view +MMS1_FPI_FAST_L2_DIS-MOMS 1 alternate_view +MMS2_FPI_FAST_L2_DIS-MOMS 1 alternate_view +MMS3_FPI_FAST_L2_DIS-MOMS 1 alternate_view +MMS4_FPI_FAST_L2_DIS-MOMS 1 alternate_view +MMS1_FPI_BRST_L2_DIS-MOMS 1 alternate_view +MMS2_FPI_BRST_L2_DIS-MOMS 1 alternate_view +MMS3_FPI_BRST_L2_DIS-MOMS 1 alternate_view +MMS4_FPI_BRST_L2_DIS-MOMS 1 alternate_view +MMS1_FPI_FAST_L2_DES-MOMS 1 alternate_view +MMS2_FPI_FAST_L2_DES-MOMS 1 alternate_view +MMS3_FPI_FAST_L2_DES-MOMS 1 alternate_view +MMS4_FPI_FAST_L2_DES-MOMS 1 alternate_view +MMS1_FPI_BRST_L2_DES-MOMS 1 alternate_view +MMS2_FPI_BRST_L2_DES-MOMS 1 alternate_view +MMS3_FPI_BRST_L2_DES-MOMS 1 alternate_view +MMS4_FPI_BRST_L2_DES-MOMS 1 alternate_view MMS1_FPI_FAST_L2_DIS-PARTMOMS 0 MMS2_FPI_FAST_L2_DIS-PARTMOMS 0 MMS3_FPI_FAST_L2_DIS-PARTMOMS 0 @@ -1143,22 +1455,22 @@ MMS1_HPCA_BRST_L2_MOMENTS 0 MMS2_HPCA_BRST_L2_MOMENTS 0 MMS3_HPCA_BRST_L2_MOMENTS 0 MMS4_HPCA_BRST_L2_MOMENTS 0 -MMS1_HPCA_BRST_L2_ION 0 -MMS2_HPCA_BRST_L2_ION 0 -MMS3_HPCA_BRST_L2_ION 0 -MMS4_HPCA_BRST_L2_ION 0 +MMS1_HPCA_BRST_L2_ION 1 alternate_view +MMS2_HPCA_BRST_L2_ION 1 alternate_view +MMS3_HPCA_BRST_L2_ION 1 
alternate_view +MMS4_HPCA_BRST_L2_ION 1 alternate_view MMS1_HPCA_SRVY_L2_MOMENTS 0 MMS2_HPCA_SRVY_L2_MOMENTS 0 MMS3_HPCA_SRVY_L2_MOMENTS 0 MMS4_HPCA_SRVY_L2_MOMENTS 0 -MMS1_HPCA_SRVY_L2_ION 0 -MMS2_HPCA_SRVY_L2_ION 0 -MMS3_HPCA_SRVY_L2_ION 0 -MMS4_HPCA_SRVY_L2_ION 0 -MMS1_HPCA_SRVY_L2_TOF-COUNTS 0 -MMS2_HPCA_SRVY_L2_TOF-COUNTS 0 -MMS3_HPCA_SRVY_L2_TOF-COUNTS 0 -MMS4_HPCA_SRVY_L2_TOF-COUNTS 0 +MMS1_HPCA_SRVY_L2_ION 1 alternate_view +MMS2_HPCA_SRVY_L2_ION 1 alternate_view +MMS3_HPCA_SRVY_L2_ION 1 alternate_view +MMS4_HPCA_SRVY_L2_ION 1 alternate_view +MMS1_HPCA_SRVY_L2_TOF-COUNTS 1 alternate_view +MMS2_HPCA_SRVY_L2_TOF-COUNTS 1 alternate_view +MMS3_HPCA_SRVY_L2_TOF-COUNTS 1 alternate_view +MMS4_HPCA_SRVY_L2_TOF-COUNTS 1 alternate_view MMS1_MEC_BRST_L2_EPHT89D 0 MMS2_MEC_BRST_L2_EPHT89D 0 MMS3_MEC_BRST_L2_EPHT89D 0 @@ -1195,8 +1507,8 @@ MMS1_SCM_BRST_L2_SCHB 0 MMS2_SCM_BRST_L2_SCHB 0 MMS3_SCM_BRST_L2_SCHB 0 MMS4_SCM_BRST_L2_SCHB 0 -MUNIN_M1_MDSE 0 -MUNIN_M1_MDSI 0 +MUNIN_M1_MDSE 1 alternate_view +MUNIN_M1_MDSI 1 alternate_view MUNIN_M1_OA 0 NEW_HORIZONS_SWAP_VALIDSUM 0 NEW_HORIZONS_SWAP_PICKUP-IONS 0 @@ -1208,59 +1520,59 @@ NOAA08_MEPED1MIN_SEM 0 NOAA10_MEPED1MIN_SEM 0 NOAA12_MEPED1MIN_SEM 0 NOAA14_MEPED1MIN_SEM 0 -NOAA15_POES-SEM2_FLUXES-2SEC 0 -NOAA16_POES-SEM2_FLUXES-2SEC 0 -NOAA18_POES-SEM2_FLUXES-2SEC 0 -NOAA19_POES-SEM2_FLUXES-2SEC 0 -METOP1_POES-SEM2_FLUXES-2SEC 0 -METOP2_POES-SEM2_FLUXES-2SEC 0 -METOP3_POES-SEM2_FLUXES-2SEC 0 +NOAA15_POES-SEM2_FLUXES-2SEC 1 alternate_view +NOAA16_POES-SEM2_FLUXES-2SEC 1 alternate_view +NOAA18_POES-SEM2_FLUXES-2SEC 1 alternate_view +NOAA19_POES-SEM2_FLUXES-2SEC 1 alternate_view +METOP1_POES-SEM2_FLUXES-2SEC 1 alternate_view +METOP2_POES-SEM2_FLUXES-2SEC 1 alternate_view +METOP3_POES-SEM2_FLUXES-2SEC 1 alternate_view OMNI_HRO_1MIN 0 OMNI_HRO_5MIN 0 OMNI_HRO2_1MIN 0 OMNI_HRO2_5MIN 0 -OMNI2_H0_MRG1HR 0 +OMNI2_H0_MRG1HR 1 alternate_view OMNI_COHO1HR_MERGED_MAG_PLASMA 0 PIONEER10_COHO1HR_MERGED_MAG_PLASMA 0 
PIONEER11_COHO1HR_MERGED_MAG_PLASMA 0 PIONEERVENUS_COHO1HR_MERGED_MAG_PLASMA 0 PIONEERVENUS_MERGED_SOLAR-WIND_10M 0 PMC-TURBO_L1_BOLIDE_VBC 0 -PO_K0_MFE 0 +PO_K0_MFE 1 alternate_view PO_H0_CAM 0 -PO_K0_CAM 0 -PO_K0_CEP 0 +PO_K0_CAM 1 alternate_view +PO_K0_CEP 1 alternate_view PO_K0_EFI 0 PO_10MINATT_EFI 0 POLAR_HYDRA_MOMENTS-14SEC 0 PO_HYD_ENERGY_FLUX 0 PO_H0_HYD 0 -PO_K0_HYD 0 -PO_K0_PIX 0 +PO_K0_HYD 1 alternate_view +PO_K0_PIX 1 alternate_view PO_H0_PWI 0 PO_H1_PWI 0 PO_H2_PWI 0 PO_H3_PWI 0 PO_H4_PWI 0 PO_H5_PWI 0 -PO_H7_PWI 1 fftPowerDelta512 +PO_H7_PWI 1 fftpowerdelta512 PO_H8_PWI 0 PO_H9_PWI 0 -PO_K0_PWI 0 +PO_K0_PWI 1 alternate_view PO_H0_TID 0 PO_H1_TID 0 -PO_H0_TIM 1 apply_qflag +PO_H0_TIM 1 apply_qflag,alternate_view PO_H2_TIM 0 PO_K1_TIM 0 -PO_LEVEL1_UVI 0 -PO_K0_UVI 0 -PO_VIS_EARTH-CAMERA-CALIBRATED 0 -PO_VIS_VISIBLE-IMAGER-CALIBRATED 0 -PO_EJ_VIS 0 -PO_K0_VIS 0 -PO_K1_VIS 0 +PO_LEVEL1_UVI 1 alternate_view +PO_K0_UVI 1 alternate_view +PO_VIS_EARTH-CAMERA-CALIBRATED 1 alternate_view +PO_VIS_VISIBLE-IMAGER-CALIBRATED 1 alternate_view +PO_EJ_VIS 1 alternate_view +PO_K0_VIS 1 alternate_view +PO_K1_VIS 1 alternate_view PO_OR_DEF 0 -PO_OR_PRE 0 +PO_OR_PRE 1 alternate_view PO_AT_DEF 0 PO_AT_PRE 0 PO_K0_SPHA 0 @@ -1279,13 +1591,15 @@ PSP_SWP_SPA_SF0_L2_16AX8DX32E 0 PSP_SWP_SPA_SF1_L2_32E 0 PSP_SWP_SPB_SF0_L2_16AX8DX32E 0 PSP_SWP_SPB_SF1_L2_32E 0 -PSP_SWP_SPE_SF0_L3_PAD 0 -PSP_SWP_SPA_SF0_L3_PAD 0 -PSP_SWP_SPB_SF0_L3_PAD 0 +PSP_SWP_SPE_SF0_L3_PAD 1 alternate_view +PSP_SWP_SPA_SF0_L3_PAD 1 alternate_view +PSP_SWP_SPB_SF0_L3_PAD 1 alternate_view PSP_FLD_L3_MERGED_SCAM_WF 0 PSP_FLD_L3_SQTN_RFS_V1V2 0 PSP_FLD_L3_RFS_LFR_QTN 0 PSP_FLD_L3_DUST 0 +PSP_FLD_L3_RFS_HFR 0 +PSP_FLD_L3_RFS_LFR 0 PSP_FLD_L2_MAG_RTN 0 PSP_FLD_L2_MAG_RTN_1MIN 0 PSP_FLD_L2_MAG_RTN_4_SA_PER_CYC 0 @@ -1354,96 +1668,98 @@ PSP_ISOIS-EPIHI_L2-LET2-RATES60 0 PSP_ISOIS-EPIHI_L2-LET2-RATES300 0 PSP_ISOIS-EPIHI_L2-LET2-RATES3600 0 PSP_ISOIS-EPILO_L2-IC 1 arr_slice -PSP_ISOIS-EPILO_L2-PE 0 
+PSP_ISOIS-EPILO_L2-PE 1 arr_slice PSP_ISOIS_L2-EPHEM 0 RS_K0_IPEI 0 -RBSPA_REL04_ECT-HOPE-MOM-L3 0 -RBSPB_REL04_ECT-HOPE-MOM-L3 0 -RBSPA_REL04_ECT-HOPE-PA-L3 1 apply_esa_qflag -RBSPB_REL04_ECT-HOPE-PA-L3 1 apply_esa_qflag -RBSPA_REL04_ECT-MAGEIS-L3 0 -RBSPB_REL04_ECT-MAGEIS-L3 0 +RBSPA_REL04_ECT-HOPE-MOM-L3 1 alternate_view +RBSPB_REL04_ECT-HOPE-MOM-L3 1 alternate_view +RBSPA_REL04_ECT-HOPE-PA-L3 1 apply_esa_qflag,alternate_view +RBSPB_REL04_ECT-HOPE-PA-L3 1 apply_esa_qflag,alternate_view +RBSPA_REL04_ECT-MAGEIS-L3 1 alternate_view +RBSPB_REL04_ECT-MAGEIS-L3 1 alternate_view RBSP_ECT-REPT-SCI-L3-SELESNICK-MODEL 1 arr_slice -RBSPA_REL03_ECT-REPT-SCI-L3 0 -RBSPB_REL03_ECT-REPT-SCI-L3 0 -RBSPA_REL04_ECT-HOPE-SCI-L2SA 0 -RBSPB_REL04_ECT-HOPE-SCI-L2SA 0 -RBSPA_REL04_ECT-HOPE-SCI-L2 0 -RBSPB_REL04_ECT-HOPE-SCI-L2 0 -RBSPA_REL03_ECT-MAGEIS-L2 0 -RBSPB_REL03_ECT-MAGEIS-L2 0 -RBSPA_REL03_ECT-REPT-SCI-L2 0 -RBSPB_REL03_ECT-REPT-SCI-L2 0 -RBSP-A-RBSPICE_LEV-3-PAP_TOFXEH 0 -RBSP-B-RBSPICE_LEV-3-PAP_TOFXEH 0 -RBSP-A-RBSPICE_LEV-3-PAP_TOFXEHE-0 0 -RBSP-A-RBSPICE_LEV-3-PAP_TOFXEHE 0 -RBSP-B-RBSPICE_LEV-3-PAP_TOFXEHE-0 0 -RBSP-B-RBSPICE_LEV-3-PAP_TOFXEHE 0 -RBSP-A-RBSPICE_LEV-3-PAP_TOFXEO-0 0 -RBSP-A-RBSPICE_LEV-3-PAP_TOFXEO 0 -RBSP-B-RBSPICE_LEV-3-PAP_TOFXEO-0 0 -RBSP-B-RBSPICE_LEV-3-PAP_TOFXEO 0 -RBSP-A-RBSPICE_LEV-3-PAP_TOFXEION 0 -RBSP-B-RBSPICE_LEV-3-PAP_TOFXEION 0 -RBSP-A-RBSPICE_LEV-3-PAP_TOFXPHHHELT 0 -RBSP-B-RBSPICE_LEV-3-PAP_TOFXPHHHELT 0 -RBSP-A-RBSPICE_LEV-3-PAP_TOFXPHHLEHT 0 -RBSP-B-RBSPICE_LEV-3-PAP_TOFXPHHLEHT 0 -RBSP-A-RBSPICE_LEV-3-PAP_TOFXPHOHELT 0 -RBSP-B-RBSPICE_LEV-3-PAP_TOFXPHOHELT 0 -RBSP-A-RBSPICE_LEV-3-PAP_TOFXPHOLEHT 0 -RBSP-B-RBSPICE_LEV-3-PAP_TOFXPHOLEHT 0 -RBSP-A-RBSPICE_LEV-3-PAP_ESRLEHT 0 -RBSP-B-RBSPICE_LEV-3-PAP_ESRLEHT 0 -RBSP-A-RBSPICE_LEV-3-PAP_ESRHELT 0 -RBSP-B-RBSPICE_LEV-3-PAP_ESRHELT 0 -RBSP-A-RBSPICE_LEV-3_TOFXEH 0 -RBSP-B-RBSPICE_LEV-3_TOFXEH 0 -RBSP-A-RBSPICE_LEV-3_TOFXEION 0 -RBSP-B-RBSPICE_LEV-3_TOFXEION 0 
-RBSP-A-RBSPICE_LEV-3_TOFXENONH 0 -RBSP-B-RBSPICE_LEV-3_TOFXENONH 0 -RBSP-A-RBSPICE_LEV-3_TOFXPHHHELT 0 -RBSP-B-RBSPICE_LEV-3_TOFXPHHHELT 0 -RBSP-A-RBSPICE_LEV-3_TOFXPHHLEHT 0 -RBSP-B-RBSPICE_LEV-3_TOFXPHHLEHT 0 -RBSP-A-RBSPICE_LEV-3_ESRHELT 0 -RBSP-B-RBSPICE_LEV-3_ESRHELT 0 -RBSP-A-RBSPICE_LEV-3_ESRLEHT 0 -RBSP-B-RBSPICE_LEV-3_ESRLEHT 0 -RBSP-A-RBSPICE_LEV-3_ISRHELT 0 -RBSP-B-RBSPICE_LEV-3_ISRHELT 0 -RBSP-A-RBSPICE_LEV-2_TOFXEH 0 -RBSP-B-RBSPICE_LEV-2_TOFXEH 0 -RBSP-A-RBSPICE_LEV-2_TOFXEION 0 -RBSP-B-RBSPICE_LEV-2_TOFXEION 0 -RBSP-A-RBSPICE_LEV-2_TOFXENONH 0 -RBSP-B-RBSPICE_LEV-2_TOFXENONH 0 -RBSP-A-RBSPICE_LEV-2_TOFXPHHHELT 0 -RBSP-B-RBSPICE_LEV-2_TOFXPHHHELT 0 -RBSP-A-RBSPICE_LEV-2_TOFXPHHLEHT 0 -RBSP-B-RBSPICE_LEV-2_TOFXPHHLEHT 0 -RBSP-A-RBSPICE_LEV-2_ESRHELT 0 -RBSP-B-RBSPICE_LEV-2_ESRHELT 0 -RBSP-A-RBSPICE_LEV-2_ESRLEHT 0 -RBSP-B-RBSPICE_LEV-2_ESRLEHT 0 -RBSP-A-RBSPICE_LEV-2_ISRHELT 0 -RBSP-B-RBSPICE_LEV-2_ISRHELT 0 +RBSPA_REL03_ECT-REPT-SCI-L3 1 alternate_view +RBSPB_REL03_ECT-REPT-SCI-L3 1 alternate_view +RBSPA_REL04_ECT-HOPE-SCI-L2SA 1 alternate_view +RBSPB_REL04_ECT-HOPE-SCI-L2SA 1 alternate_view +RBSPA_REL04_ECT-HOPE-SCI-L2 1 alternate_view +RBSPB_REL04_ECT-HOPE-SCI-L2 1 alternate_view +RBSPA_REL03_ECT-MAGEIS-L2 1 alternate_view +RBSPB_REL03_ECT-MAGEIS-L2 1 alternate_view +RBSPA_REL03_ECT-REPT-SCI-L2 1 alternate_view +RBSPB_REL03_ECT-REPT-SCI-L2 1 alternate_view +RBSP-A-RBSPICE_LEV-3-PAP_TOFXEH 1 alternate_view +RBSP-B-RBSPICE_LEV-3-PAP_TOFXEH 1 alternate_view +RBSP-A-RBSPICE_LEV-3-PAP_TOFXEHE-0 1 alternate_view +RBSP-A-RBSPICE_LEV-3-PAP_TOFXEHE 1 alternate_view +RBSP-B-RBSPICE_LEV-3-PAP_TOFXEHE-0 1 alternate_view +RBSP-B-RBSPICE_LEV-3-PAP_TOFXEHE 1 alternate_view +RBSP-A-RBSPICE_LEV-3-PAP_TOFXEO-0 1 alternate_view +RBSP-A-RBSPICE_LEV-3-PAP_TOFXEO 1 alternate_view +RBSP-B-RBSPICE_LEV-3-PAP_TOFXEO-0 1 alternate_view +RBSP-B-RBSPICE_LEV-3-PAP_TOFXEO 1 alternate_view +RBSP-A-RBSPICE_LEV-3-PAP_TOFXEION 1 alternate_view +RBSP-B-RBSPICE_LEV-3-PAP_TOFXEION 1 
alternate_view +RBSP-A-RBSPICE_LEV-3-PAP_TOFXPHHHELT 1 alternate_view +RBSP-B-RBSPICE_LEV-3-PAP_TOFXPHHHELT 1 alternate_view +RBSP-A-RBSPICE_LEV-3-PAP_TOFXPHHLEHT 1 alternate_view +RBSP-B-RBSPICE_LEV-3-PAP_TOFXPHHLEHT 1 alternate_view +RBSP-A-RBSPICE_LEV-3-PAP_TOFXPHOHELT 1 alternate_view +RBSP-B-RBSPICE_LEV-3-PAP_TOFXPHOHELT 1 alternate_view +RBSP-A-RBSPICE_LEV-3-PAP_TOFXPHOLEHT 1 alternate_view +RBSP-B-RBSPICE_LEV-3-PAP_TOFXPHOLEHT 1 alternate_view +RBSP-A-RBSPICE_LEV-3-PAP_ESRLEHT 1 alternate_view +RBSP-B-RBSPICE_LEV-3-PAP_ESRLEHT 1 alternate_view +RBSP-A-RBSPICE_LEV-3-PAP_ESRHELT 1 alternate_view +RBSP-B-RBSPICE_LEV-3-PAP_ESRHELT 1 alternate_view +RBSP-A-RBSPICE_LEV-3_TOFXEH 1 alternate_view +RBSP-B-RBSPICE_LEV-3_TOFXEH 1 alternate_view +RBSP-A-RBSPICE_LEV-3_TOFXEION 1 alternate_view +RBSP-B-RBSPICE_LEV-3_TOFXEION 1 alternate_view +RBSP-A-RBSPICE_LEV-3_TOFXENONH 1 alternate_view +RBSP-B-RBSPICE_LEV-3_TOFXENONH 1 alternate_view +RBSP-A-RBSPICE_LEV-3_TOFXPHHHELT 1 alternate_view +RBSP-B-RBSPICE_LEV-3_TOFXPHHHELT 1 alternate_view +RBSP-A-RBSPICE_LEV-3_TOFXPHHLEHT 1 alternate_view +RBSP-B-RBSPICE_LEV-3_TOFXPHHLEHT 1 alternate_view +RBSP-A-RBSPICE_LEV-3_ESRHELT 1 alternate_view +RBSP-B-RBSPICE_LEV-3_ESRHELT 1 alternate_view +RBSP-A-RBSPICE_LEV-3_ESRLEHT 1 alternate_view +RBSP-B-RBSPICE_LEV-3_ESRLEHT 1 alternate_view +RBSP-A-RBSPICE_LEV-3_ISRHELT 1 alternate_view +RBSP-B-RBSPICE_LEV-3_ISRHELT 1 alternate_view +RBSP-A-RBSPICE_LEV-2_TOFXEH 1 alternate_view +RBSP-B-RBSPICE_LEV-2_TOFXEH 1 alternate_view +RBSP-A-RBSPICE_LEV-2_TOFXEION 1 alternate_view +RBSP-B-RBSPICE_LEV-2_TOFXEION 1 alternate_view +RBSP-A-RBSPICE_LEV-2_TOFXENONH 1 alternate_view +RBSP-B-RBSPICE_LEV-2_TOFXENONH 1 alternate_view +RBSP-A-RBSPICE_LEV-2_TOFXPHHHELT 1 alternate_view +RBSP-B-RBSPICE_LEV-2_TOFXPHHHELT 1 alternate_view +RBSP-A-RBSPICE_LEV-2_TOFXPHHLEHT 1 alternate_view +RBSP-B-RBSPICE_LEV-2_TOFXPHHLEHT 1 alternate_view +RBSP-A-RBSPICE_LEV-2_ESRHELT 1 alternate_view +RBSP-B-RBSPICE_LEV-2_ESRHELT 1 
alternate_view +RBSP-A-RBSPICE_LEV-2_ESRLEHT 1 alternate_view +RBSP-B-RBSPICE_LEV-2_ESRLEHT 1 alternate_view +RBSP-A-RBSPICE_LEV-2_ISRHELT 1 alternate_view +RBSP-B-RBSPICE_LEV-2_ISRHELT 1 alternate_view RBSPA_EFW-L2_E-HIRES-UVW 0 RBSPB_EFW-L2_E-HIRES-UVW 0 -RBSPA_EFW-L2_E-SPINFIT-MGSE 0 -RBSPB_EFW-L2_E-SPINFIT-MGSE 0 +RBSPA_EFW-L2_E-SPINFIT-MGSE 1 alternate_view +RBSPB_EFW-L2_E-SPINFIT-MGSE 1 alternate_view RBSPA_EFW-L2_ESVY_DESPUN 0 RBSPB_EFW-L2_ESVY_DESPUN 0 -RBSPA_EFW-L2_FBK 0 -RBSPB_EFW-L2_FBK 0 +RBSPA_EFW-L2_FBK 1 alternate_view +RBSPB_EFW-L2_FBK 1 alternate_view RBSPA_EFW-L2_SPEC 0 RBSPB_EFW-L2_SPEC 0 RBSPA_EFW-L2_VSVY-HIRES 0 RBSPB_EFW-L2_VSVY-HIRES 0 RBSP-A_DENSITY_EMFISIS-L4 0 RBSP-B_DENSITY_EMFISIS-L4 0 +RBSP-A_WNA-SURVEY-SHEATH-CORRECTED-E_EMFISIS-L4 0 +RBSP-B_WNA-SURVEY-SHEATH-CORRECTED-E_EMFISIS-L4 0 RBSP-A_MAGNETOMETER_4SEC-GEI_EMFISIS-L3 0 RBSP-B_MAGNETOMETER_4SEC-GEI_EMFISIS-L3 0 RBSP-A_MAGNETOMETER_4SEC-GEO_EMFISIS-L3 0 @@ -1488,16 +1804,48 @@ RBSP-A_WFR-SPECTRAL-MATRIX-DIAGONAL_EMFISIS-L2 1 sum_values RBSP-B_WFR-SPECTRAL-MATRIX-DIAGONAL_EMFISIS-L2 1 sum_values RBSP-A_WFR-SPECTRAL-MATRIX-DIAGONAL-MERGED_EMFISIS-L2 1 sum_values RBSP-B_WFR-SPECTRAL-MATRIX-DIAGONAL-MERGED_EMFISIS-L2 1 sum_values -RBSP-A_HFR-WAVEFORM_EMFISIS-L2 1 fftPower -RBSP-B_HFR-WAVEFORM_EMFISIS-L2 1 fftPower -RBSP-A_WFR-WAVEFORM_EMFISIS-L2 1 fftPower -RBSP-B_WFR-WAVEFORM_EMFISIS-L2 1 fftPower -RBSP-A_WFR-WAVEFORM-CONTINUOUS-BURST_EMFISIS-L2 1 fftPower -RBSP-B_WFR-WAVEFORM-CONTINUOUS-BURST_EMFISIS-L2 1 fftPower +RBSP-A_HFR-WAVEFORM_EMFISIS-L2 1 alternate_view,fftpower +RBSP-B_HFR-WAVEFORM_EMFISIS-L2 1 alternate_view,fftpower +RBSP-A_WFR-WAVEFORM_EMFISIS-L2 1 fftpower +RBSP-B_WFR-WAVEFORM_EMFISIS-L2 1 fftpower +RBSP-A_WFR-WAVEFORM-CONTINUOUS-BURST_EMFISIS-L2 1 fftpower +RBSP-B_WFR-WAVEFORM-CONTINUOUS-BURST_EMFISIS-L2 1 fftpower RBSPA_L2_PSBR-RPS 0 RBSPB_L2_PSBR-RPS 0 RBSPA_L2-1MIN_PSBR-RPS 0 RBSPB_L2-1MIN_PSBR-RPS 0 +REACH-VID-101_DOSIMETER-L1C 0 +REACH-VID-102_DOSIMETER-L1C 0 
+REACH-VID-105_DOSIMETER-L1C 0 +REACH-VID-108_DOSIMETER-L1C 0 +REACH-VID-113_DOSIMETER-L1C 0 +REACH-VID-114_DOSIMETER-L1C 0 +REACH-VID-115_DOSIMETER-L1C 0 +REACH-VID-116_DOSIMETER-L1C 0 +REACH-VID-133_DOSIMETER-L1C 0 +REACH-VID-134_DOSIMETER-L1C 0 +REACH-VID-135_DOSIMETER-L1C 0 +REACH-VID-136_DOSIMETER-L1C 0 +REACH-VID-137_DOSIMETER-L1C 0 +REACH-VID-138_DOSIMETER-L1C 0 +REACH-VID-139_DOSIMETER-L1C 0 +REACH-VID-140_DOSIMETER-L1C 0 +REACH-VID-148_DOSIMETER-L1C 0 +REACH-VID-149_DOSIMETER-L1C 0 +REACH-VID-162_DOSIMETER-L1C 0 +REACH-VID-163_DOSIMETER-L1C 0 +REACH-VID-164_DOSIMETER-L1C 0 +REACH-VID-165_DOSIMETER-L1C 0 +REACH-VID-166_DOSIMETER-L1C 0 +REACH-VID-169_DOSIMETER-L1C 0 +REACH-VID-170_DOSIMETER-L1C 0 +REACH-VID-171_DOSIMETER-L1C 0 +REACH-VID-172_DOSIMETER-L1C 0 +REACH-VID-173_DOSIMETER-L1C 0 +REACH-VID-175_DOSIMETER-L1C 0 +REACH-VID-176_DOSIMETER-L1C 0 +REACH-VID-180_DOSIMETER-L1C 0 +REACH-VID-181_DOSIMETER-L1C 0 RENU2_H0_EFIELD 0 RENU2_H0_EPLAS 0 RENU2_H0_ERPAMAIN 0 @@ -1521,7 +1869,7 @@ SO_OR_DEF 0 SO_OR_PRE 0 SO_AT_DEF 0 SOHO_CELIAS-SEM_15S 0 -SOHO_CELIAS-SEM_1DAY 0 +SOHO_CELIAS-SEM_1DAY 1 alternate_view SOHO_CELIAS-PM_30S 0 SOHO_CELIAS-PM_5MIN 0 SOHO_COSTEP-EPHIN_L3I-1MIN 0 @@ -1532,18 +1880,14 @@ SOHO_COSTEP-EPHIN_L3I-1HR 0 SOHO_COSTEP-EPHIN_L3I-1DAY 0 SOHO_ERNE-LED_L2-1MIN 0 SOHO_ERNE-HED_L2-1MIN 0 -SOLO_L3_SWA-HIS-COMP-10MIN 0 -SOLO_L3_RPW-BIA-DENSITY-10-SECONDS 0 -SOLO_L3_RPW-BIA-DENSITY 0 -SOLO_L3_RPW-BIA-EFIELD-10-SECONDS 0 -SOLO_L3_RPW-BIA-EFIELD 0 -SOLO_L3_RPW-BIA-SCPOT-10-SECONDS 0 -SOLO_L3_RPW-BIA-SCPOT 0 +SOHO_ERNE_HEAVY-ION-1HR 0 +SOHO_ERNE_HEAVY-ION-5MIN 0 SOLO_COHO1HR_MERGED_MAG_PLASMA 0 SOLO_HELIO1DAY_POSITION 0 +SOLO_L3_SWA-HIS-COMP-10MIN 0 SOLO_L2_SWA-PAS-GRND-MOM 0 SOLO_L2_SWA-PAS-EFLUX 0 -SOLO_L2_SWA-PAS-VDF 0 +SOLO_L2_SWA-PAS-VDF 1 alternate_view SOLO_L2_SWA-EAS-PAD-DNF 0 SOLO_L2_SWA-EAS-PAD-DEF 0 SOLO_L2_SWA-EAS1-SS-PSD 0 @@ -1557,7 +1901,7 @@ SOLO_L2_SWA-EAS2-SS-DNF 0 SOLO_L2_SWA-EAS2-SS-DEF 0 SOLO_L2_SWA-EAS2-NM3D-PSD 0 
SOLO_L2_SWA-EAS2-NM3D-DNF 0 -SOLO_L2_SWA-EAS2-NM3D-DEF 0 +SOLO_L2_SWA-EAS2-NM3D-DEF 1 alternate_view SOLO_L2_MAG-RTN-BURST 0 SOLO_L2_MAG-RTN-NORMAL 0 SOLO_L2_MAG-RTN-NORMAL-1-MINUTE 0 @@ -1569,6 +1913,12 @@ SOLO_L2_MAG-SRF-LL 0 SOLO_L2_MAG-VSO-BURST 0 SOLO_L2_MAG-VSO-NORMAL 0 SOLO_L2_MAG-VSO-NORMAL-1-MINUTE 0 +SOLO_L3_RPW-BIA-DENSITY-10-SECONDS 0 +SOLO_L3_RPW-BIA-DENSITY 0 +SOLO_L3_RPW-BIA-EFIELD-10-SECONDS 0 +SOLO_L3_RPW-BIA-EFIELD 0 +SOLO_L3_RPW-BIA-SCPOT-10-SECONDS 0 +SOLO_L3_RPW-BIA-SCPOT 0 SOLO_L2_RPW-LFR-SURV-BP1 0 SOLO_L2_RPW-LFR-SURV-BP2 0 SOLO_L2_RPW-LFR-SURV-CWF-B 0 @@ -1576,7 +1926,8 @@ SOLO_L2_RPW-LFR-SURV-CWF-E 0 SOLO_L2_RPW-TDS-SURV-HIST1D 0 SOLO_L2_RPW-TDS-SURV-HIST2D 0 SOLO_L2_RPW-TDS-SURV-STAT 0 -SOLO_L2_RPW-LFR-SURV-ASM 0 +SOLO_L2_RPW-HFR-SURV 0 +SOLO_L2_RPW-LFR-SURV-ASM 1 alternate_view SOLO_L2_EPD-STEP-MAIN 0 SOLO_L2_EPD-STEP-BURST 0 SOLO_L2_EPD-STEP-HCAD 0 @@ -1631,6 +1982,7 @@ SOLO_LL02_EPD-HET-ASUN-RATES 0 SOLO_LL02_EPD-HET-SUN-RATES 0 SOLO_LL02_EPD-HET-NORTH-RATES 0 SOLO_LL02_EPD-HET-SOUTH-RATES 0 +STPSAT-6_FALCON_SEED-L1 1 alternate_view ST5-155_1SEC_MAG 0 ST5-224_1SEC_MAG 0 ST5-094_1SEC_MAG 0 @@ -1641,18 +1993,18 @@ STA_L1_IMPACT_BURST 0 STB_L1_IMPACT_BURST 0 STA_L1_IMPACT_HKP 0 STB_L1_IMPACT_HKP 0 -STA_L1_SWEA_DIST 0 -STB_L1_SWEA_DIST 0 -STA_L1_SWEA_DISB 0 -STB_L1_SWEA_DISB 0 -STA_L1_SWEA_SPEC 0 -STB_L1_SWEA_SPEC 0 +STA_L1_SWEA_DIST 1 alternate_view +STB_L1_SWEA_DIST 1 alternate_view +STA_L1_SWEA_DISB 1 alternate_view +STB_L1_SWEA_DISB 1 alternate_view +STA_L1_SWEA_SPEC 1 alternate_view +STB_L1_SWEA_SPEC 1 alternate_view STA_L2_SWEA_PAD 0 STB_L2_SWEA_PAD 0 -STA_L1_LET 0 -STB_L1_LET 0 -STA_L1_HET 0 -STB_L1_HET 0 +STA_L1_LET 1 alternate_view +STB_L1_LET 1 alternate_view +STA_L1_HET 1 alternate_view +STB_L1_HET 1 alternate_view STA_L1_SEPT 0 STB_L1_SEPT 0 STA_L1_SIT 0 @@ -1682,8 +2034,8 @@ STB_L2_MAGPLASMA_1M 0 STA_L3_PLA_HEPLUS_24HR 0 STA_L3_PLA_HEPLUS_F_VSW_01HR 0 STA_L3_PLA_HEPLUS_F_VSW_24HR 0 -STA_L3_PLA_HE2PL_F_VSW_01HR 0 
-STA_L3_PLA_HEPLUS_F_VSW_10MIN 0 +STA_L3_PLA_HE2PL_F_VSW_01HR 1 alternate_view +STA_L3_PLA_HEPLUS_F_VSW_10MIN 1 alternate_view STA_L3_PLA_HEPLUS_SW_VELCTDIST_5MIN 0 STA_L3_WAV_HFR 0 STB_L3_WAV_HFR 0 @@ -1693,13 +2045,13 @@ SX_K0_30F 0 SX_K0_POF 0 STA_COHO1HR_MERGED_MAG_PLASMA 0 STB_COHO1HR_MERGED_MAG_PLASMA 0 -STEREO_LEVEL2_SWAVES 0 +STEREO_LEVEL2_SWAVES 1 alternate_view TOOWINDY_E_NE 0 -THA_OR_SSC 0 -THB_OR_SSC 0 -THC_OR_SSC 0 -THD_OR_SSC 0 -THE_OR_SSC 0 +THA_OR_SSC 1 alternate_view +THB_OR_SSC 1 alternate_view +THC_OR_SSC 1 alternate_view +THD_OR_SSC 1 alternate_view +THE_OR_SSC 1 alternate_view THA_L2_EFI 0 THB_L2_EFI 0 THC_L2_EFI 0 @@ -1752,21 +2104,21 @@ THB_L2_MOM 1 apply_esa_qflag THC_L2_MOM 1 apply_esa_qflag THD_L2_MOM 1 apply_esa_qflag THE_L2_MOM 1 apply_esa_qflag -THA_L1_STATE 0 -THB_L1_STATE 0 -THC_L1_STATE 0 -THD_L1_STATE 0 -THE_L1_STATE 0 +THA_L1_STATE 1 alternate_view +THB_L1_STATE 1 alternate_view +THC_L1_STATE 1 alternate_view +THD_L1_STATE 1 alternate_view +THE_L1_STATE 1 alternate_view THG_L1_ASK 0 -THG_L2_MAG_HOV 0 -THG_L2_MAG_JAN 0 +THG_L2_MAG_HOV 1 alternate_view +THG_L2_MAG_JAN 1 alternate_view THG_L2_MAG_NRD 0 THG_L2_MAG_DMH 0 THG_L2_MAG_DNB 0 -THG_L2_MAG_LRV 0 +THG_L2_MAG_LRV 1 alternate_view THG_L2_MAG_SCO 0 THG_L2_MAG_AMK 0 -THG_L2_MAG_SUM 0 +THG_L2_MAG_SUM 1 alternate_view THG_L2_MAG_NAQ 0 THG_L2_MAG_NRSQ 0 THG_L2_MAG_FHB 0 @@ -1788,7 +2140,7 @@ THG_L2_MAG_SCHF 0 THG_L2_MAG_KUUJ 0 THG_L2_MAG_IQA 0 THG_L2_MAG_CRVR 0 -THG_L2_MAG_TAB 0 +THG_L2_MAG_TAB 1 alternate_view THG_L2_MAG_THL 0 THG_L2_MAG_STFL 0 THG_L2_MAG_CHBG 0 @@ -1822,47 +2174,47 @@ THG_L2_MAG_PKS 0 THG_L2_MAG_EKAT 0 THG_L2_MAG_SNAP 0 THG_L2_MAG_FSMI 0 -THG_L2_MAG_LETH 0 -THG_L2_MAG_ATHA 0 -THG_L2_MAG_MEA 0 -THG_L2_MAG_REDR 0 -THG_L2_MAG_ROTH 0 -THG_L2_MAG_YKNF 0 -THG_L2_MAG_YKC 0 +THG_L2_MAG_LETH 1 alternate_view +THG_L2_MAG_ATHA 1 alternate_view +THG_L2_MAG_MEA 1 alternate_view +THG_L2_MAG_REDR 1 alternate_view +THG_L2_MAG_ROTH 1 alternate_view +THG_L2_MAG_YKNF 1 alternate_view 
+THG_L2_MAG_YKC 1 alternate_view THG_L2_MAG_FSJ 0 -THG_L2_MAG_FSIM 0 +THG_L2_MAG_FSIM 1 alternate_view THG_L2_MAG_FTN 0 -THG_L2_MAG_PGEO 0 -THG_L2_MAG_VIC 0 -THG_L2_MAG_PTRS 0 -THG_L2_MAG_INUV 0 +THG_L2_MAG_PGEO 1 alternate_view +THG_L2_MAG_VIC 1 alternate_view +THG_L2_MAG_PTRS 1 alternate_view +THG_L2_MAG_INUV 1 alternate_view THG_L2_MAG_WHS 0 -THG_L2_MAG_WHIT 0 -THG_L2_MAG_SIT 0 -THG_L2_MAG_EAGL 0 -THG_L2_MAG_KAKO 0 -THG_L2_MAG_GAKO 0 -THG_L2_MAG_FYKN 0 -THG_L2_MAG_ARCT 0 -THG_L2_MAG_POKR 0 -THG_L2_MAG_CMO 0 -THG_L2_MAG_CIGO 0 -THG_L2_MAG_DED 0 -THG_L2_MAG_TOOL 0 -THG_L2_MAG_HLMS 0 -THG_L2_MAG_TRAP 0 -THG_L2_MAG_HOMR 0 -THG_L2_MAG_KENA 0 -THG_L2_MAG_BETT 0 -THG_L2_MAG_MCGR 0 -THG_L2_MAG_BRW 0 -THG_L2_MAG_KIAN 0 -THG_L2_MAG_SHU 0 -THG_L2_MAG_PBK 0 -THG_L2_MAG_TIK 0 -THG_L2_MAG_DIK 0 +THG_L2_MAG_WHIT 1 alternate_view +THG_L2_MAG_SIT 1 alternate_view +THG_L2_MAG_EAGL 1 alternate_view +THG_L2_MAG_KAKO 1 alternate_view +THG_L2_MAG_GAKO 1 alternate_view +THG_L2_MAG_FYKN 1 alternate_view +THG_L2_MAG_ARCT 1 alternate_view +THG_L2_MAG_POKR 1 alternate_view +THG_L2_MAG_CMO 1 alternate_view +THG_L2_MAG_CIGO 1 alternate_view +THG_L2_MAG_DED 1 alternate_view +THG_L2_MAG_TOOL 1 alternate_view +THG_L2_MAG_HLMS 1 alternate_view +THG_L2_MAG_TRAP 1 alternate_view +THG_L2_MAG_HOMR 1 alternate_view +THG_L2_MAG_KENA 1 alternate_view +THG_L2_MAG_BETT 1 alternate_view +THG_L2_MAG_MCGR 1 alternate_view +THG_L2_MAG_BRW 1 alternate_view +THG_L2_MAG_KIAN 1 alternate_view +THG_L2_MAG_SHU 1 alternate_view +THG_L2_MAG_PBK 1 alternate_view +THG_L2_MAG_TIK 1 alternate_view +THG_L2_MAG_DIK 1 alternate_view THG_L2_MAG_VIZ 0 -THG_L2_MAG_AMD 0 +THG_L2_MAG_AMD 1 alternate_view THG_L2_MAG_LOZ 0 THG_L2_MAG_MEK 0 THG_L2_MAG_IVA 0 @@ -1871,71 +2223,71 @@ THG_L2_MAG_KEV 0 THG_L2_MAG_HAN 0 THG_L2_MAG_TAR 0 THG_L2_MAG_RAN 0 -THG_L2_MAG_NOR 0 -THG_L2_MAG_HOP 0 +THG_L2_MAG_NOR 1 alternate_view +THG_L2_MAG_HOP 1 alternate_view THG_L2_MAG_NUR 0 THG_L2_MAG_PEL 0 THG_L2_MAG_MAS 0 THG_L2_MAG_MUO 0 -THG_L2_MAG_SOR 
0 +THG_L2_MAG_SOR 1 alternate_view THG_L2_MAG_KIL 0 -THG_L2_MAG_BJN 0 -THG_L2_MAG_TRO 0 -THG_L2_MAG_JCK 0 -THG_L2_MAG_AND 0 -THG_L2_MAG_LYR 0 +THG_L2_MAG_BJN 1 alternate_view +THG_L2_MAG_TRO 1 alternate_view +THG_L2_MAG_JCK 1 alternate_view +THG_L2_MAG_AND 1 alternate_view +THG_L2_MAG_LYR 1 alternate_view THG_L2_MAG_BBG 0 -THG_L2_MAG_DON 0 -THG_L2_MAG_NAL 0 -THG_L2_MAG_BFE 0 -THG_L2_MAG_RVK 0 -THG_L2_MAG_DOB 0 -THG_L2_MAG_ROE 0 -THG_L2_MAG_KAR 0 -THG_L2_MAG_SOL 0 +THG_L2_MAG_DON 1 alternate_view +THG_L2_MAG_NAL 1 alternate_view +THG_L2_MAG_BFE 1 alternate_view +THG_L2_MAG_RVK 1 alternate_view +THG_L2_MAG_DOB 1 alternate_view +THG_L2_MAG_ROE 1 alternate_view +THG_L2_MAG_KAR 1 alternate_view +THG_L2_MAG_SOL 1 alternate_view THG_L2_MAG_PG5 0 THG_L2_MAG_PG4 0 THG_L2_MAG_PG3 0 THG_L2_MAG_PG2 0 THG_L2_MAG_PG1 0 THG_L2_MAG_PG0 0 -THG_L2_MAG_SJG 0 -THG_L2_MAG_DRBY 0 -THG_L2_MAG_WLPS 0 -THG_L2_MAG_LREL 0 -THG_L2_MAG_FRD 0 -THG_L2_MAG_LOYS 0 -THG_L2_MAG_BMLS 0 -THG_L2_MAG_RMUS 0 -THG_L2_MAG_SWNO 0 -THG_L2_MAG_BSL 0 -THG_L2_MAG_HRIS 0 -THG_L2_MAG_WRTH 0 -THG_L2_MAG_BENN 0 -THG_L2_MAG_AMER 0 +THG_L2_MAG_SJG 1 alternate_view +THG_L2_MAG_DRBY 1 alternate_view +THG_L2_MAG_WLPS 1 alternate_view +THG_L2_MAG_LREL 1 alternate_view +THG_L2_MAG_FRD 1 alternate_view +THG_L2_MAG_LOYS 1 alternate_view +THG_L2_MAG_BMLS 1 alternate_view +THG_L2_MAG_RMUS 1 alternate_view +THG_L2_MAG_SWNO 1 alternate_view +THG_L2_MAG_BSL 1 alternate_view +THG_L2_MAG_HRIS 1 alternate_view +THG_L2_MAG_WRTH 1 alternate_view +THG_L2_MAG_BENN 1 alternate_view +THG_L2_MAG_AMER 1 alternate_view THG_L2_MAG_GLYN 0 THG_L2_MAG_RICH 0 -THG_L2_MAG_PCEL 0 -THG_L2_MAG_LYFD 0 -THG_L2_MAG_SATX 0 +THG_L2_MAG_PCEL 1 alternate_view +THG_L2_MAG_LYFD 1 alternate_view +THG_L2_MAG_SATX 1 alternate_view THG_L2_MAG_LRES 0 -THG_L2_MAG_FYTS 0 -THG_L2_MAG_PINE 0 -THG_L2_MAG_PBLO 0 -THG_L2_MAG_BOU 0 -THG_L2_MAG_TUC 0 -THG_L2_MAG_HOTS 0 -THG_L2_MAG_NEW 0 -THG_L2_MAG_UKIA 0 -THG_L2_MAG_FRN 0 -THG_L2_MAG_CCNV 0 -THG_L2_MAG_STFD 0 
-THG_L2_MAG_KODK 0 -THG_L2_MAG_HON 0 -THG_L2_MAG_GUA 0 +THG_L2_MAG_FYTS 1 alternate_view +THG_L2_MAG_PINE 1 alternate_view +THG_L2_MAG_PBLO 1 alternate_view +THG_L2_MAG_BOU 1 alternate_view +THG_L2_MAG_TUC 1 alternate_view +THG_L2_MAG_HOTS 1 alternate_view +THG_L2_MAG_NEW 1 alternate_view +THG_L2_MAG_UKIA 1 alternate_view +THG_L2_MAG_FRN 1 alternate_view +THG_L2_MAG_CCNV 1 alternate_view +THG_L2_MAG_STFD 1 alternate_view +THG_L2_MAG_KODK 1 alternate_view +THG_L2_MAG_HON 1 alternate_view +THG_L2_MAG_GUA 1 alternate_view THG_L2_MAG_BRN 0 -THG_L2_MAG_TDC 0 -TIMED_L1BV20_SABER 0 +THG_L2_MAG_TDC 1 alternate_view +TIMED_L1BV20_SABER 1 alternate_view TIMED_L2A_SABER 0 TIMED_L3A_SEE 0 TIMED_WINDVECTORSNCAR_TIDI 0 @@ -2135,15 +2487,15 @@ TSS-1R_M1_SPSA 0 TSS-1R_M1_SPSB 0 TSS-1R_M1_SPSC 0 TSS-1R_M1_SPSD 0 -TWINS_M2_ENA 0 -TWINS1_TISTORM_IMAGER 0 -TWINS2_TISTORM_IMAGER 0 -TWINS1_L1_IMAGER 0 -TWINS2_L1_IMAGER 0 +TWINS_M2_ENA 1 alternate_view +TWINS1_TISTORM_IMAGER 1 alternate_view +TWINS2_TISTORM_IMAGER 1 alternate_view +TWINS1_L1_IMAGER 1 alternate_view +TWINS2_L1_IMAGER 1 alternate_view TWINS1_L1_LAD 0 TWINS2_L1_LAD 0 -TWINS1_OR_DEF 0 -TWINS2_OR_DEF 0 +TWINS1_OR_DEF 1 alternate_view +TWINS2_OR_DEF 1 alternate_view TWINS1_AT_DEF 0 TWINS2_AT_DEF 0 TWINS1_HK_1SEC 0 @@ -2188,22 +2540,23 @@ UY_ALPHA-MOMENTS_SWOOPS 0 UY_PROTON-DISTRIBUTIONS_SWOOPS 0 UY_PROTON-FIT-PARAMETERS_SWOOPS 0 UY_PROTON-MOMENTS_SWOOPS 0 -VG1_PWS_LR 0 -VG2_PWS_LR 0 +VG1_PWS_LR 1 alternate_view +VG2_PWS_LR 1 alternate_view VG1_PWS_WF 0 VG2_PWS_WF 0 VOYAGER1_48S_MAG-VIM 0 VOYAGER2_48S_MAG-VIM 0 -VOYAGER1_2S_MAG 0 -VOYAGER1_10S_MAG 0 -VOYAGER1_48S_MAG 0 -VOYAGER2_2S_MAG 0 -VOYAGER2_10S_MAG 0 -VOYAGER2_48S_MAG 0 +VOYAGER1_2S_MAG 1 alternate_view +VOYAGER1_10S_MAG 1 alternate_view +VOYAGER1_48S_MAG 1 alternate_view +VOYAGER2_2S_MAG 1 alternate_view +VOYAGER2_10S_MAG 1 alternate_view +VOYAGER2_48S_MAG 1 alternate_view VOYAGER1_COHO1HR_MERGED_MAG_PLASMA 0 VOYAGER2_COHO1HR_MERGED_MAG_PLASMA 0 
VOYAGER1_PLS_HIRES_PLASMA_DATA 0 VOYAGER2_PLS_HIRES_PLASMA_DATA 0 +VOYAGER2_PLS_HIRES_PLASMA_DATA_HSH 0 VOYAGER1_PLS_COMPOSITION 0 VOYAGER2_PLS_COMPOSITION 0 VOYAGER1_PLS_ELECTRONS_E1 0 @@ -2216,15 +2569,14 @@ VOYAGER2_PLS_IONS_L 0 VOYAGER2_PLS_IONS_M 0 VOYAGER-1_LECP_ELEC-BGND-COR-1D 0 VOYAGER-2_LECP_ELEC-BGND-COR-1D 0 -WI_L3-DUSTIMPACT_WAVES 0 WI_L2-1HOUR-SEP_EPACT-APE_B 0 -WI_L2-1HOUR-SEP_EPACT-LEMT 0 -WI_L2-5MIN-SEP_EPACT-LEMT 0 +WI_L2-1HOUR-SEP_EPACT-LEMT 1 alternate_view +WI_L2-5MIN-SEP_EPACT-LEMT 1 alternate_view WI_EPACT_STEP-DIFFERENTIAL-ION-FLUX-1HR 1 arr_slice -WI_EPACT_STEP-DIRECTIONAL-DIFF-CNO-FLUX-10MIN 1 arr_slice -WI_EPACT_STEP-DIRECTIONAL-DIFF-FE-FLUX-10MIN 1 arr_slice -WI_EPACT_STEP-DIRECTIONAL-DIFF-HE-FLUX-10MIN 1 arr_slice -WI_EPACT_STEP-DIRECTIONAL-DIFF-H-FLUX-10MIN 1 arr_slice +WI_EPACT_STEP-DIRECTIONAL-DIFF-CNO-FLUX-10MIN 1 arr_slice,alternate_view +WI_EPACT_STEP-DIRECTIONAL-DIFF-FE-FLUX-10MIN 1 arr_slice,alternate_view +WI_EPACT_STEP-DIRECTIONAL-DIFF-HE-FLUX-10MIN 1 arr_slice,alternate_view +WI_EPACT_STEP-DIRECTIONAL-DIFF-H-FLUX-10MIN 1 alternate_view,arr_slice WI_L2-30MIN_SMS-STICS-AFM-MAGNETOSPHERE 0 WI_L2-30MIN_SMS-STICS-AFM-SOLARWIND 0 WI_L2-30MIN_SMS-STICS-ERPA-MAGNETOSPHERE 0 @@ -2233,43 +2585,45 @@ WI_L2_3MIN_SMS-STICS-NVT-MAGNETOSPHERE 0 WI_L2_3MIN_SMS-STICS-NVT-SOLARWIND 0 WI_L2-3MIN_SMS-STICS-VDF-MAGNETOSPHERE 1 arr_slice WI_L2-3MIN_SMS-STICS-VDF-SOLARWIND 1 arr_slice -WI_H0_MFI 0 +WI_H0_MFI 1 alternate_view WI_H2_MFI 0 WI_H3-RTN_MFI 0 WI_H4-RTN_MFI 0 -WI_K0_MFI 0 -WI_K0_SWE 1 calc_p +WI_K0_MFI 1 alternate_view +WI_K0_SWE 1 alternate_view,calc_p WI_H0_SWE 0 -WI_H1_SWE 0 -WI_SW-ION-DIST_SWE-FARADAY 0 -WI_STRAHL0_SWE 0 -WI_H3_SWE 0 -WI_H4_SWE 0 +WI_H1_SWE 1 alternate_view +WI_SW-ION-DIST_SWE-FARADAY 1 alternate_view +WI_STRAHL0_SWE 1 alternate_view +WI_H3_SWE 1 alternate_view +WI_H4_SWE 1 alternate_view WI_H5_SWE 0 -WI_M0_SWE 0 -WI_M2_SWE 0 +WI_M0_SWE 1 alternate_view +WI_M2_SWE 1 alternate_view WI_PM_3DP 0 -WI_PLSP_3DP 0 
-WI_SOSP_3DP 0 -WI_SOPD_3DP 0 +WI_PLSP_3DP 1 alternate_view +WI_SOSP_3DP 1 alternate_view +WI_SOPD_3DP 1 alternate_view WI_EM_3DP 0 WI_EMFITS_E0_3DP 0 WI_ELM2_3DP 0 -WI_ELSP_3DP 0 -WI_ELPD_3DP 0 -WI_EHSP_3DP 0 -WI_EHPD_3DP 0 -WI_SFSP_3DP 0 -WI_SFPD_3DP 0 +WI_ELSP_3DP 1 alternate_view +WI_ELPD_3DP 1 alternate_view +WI_EHSP_3DP 1 alternate_view +WI_EHPD_3DP 1 alternate_view +WI_SFSP_3DP 1 alternate_view +WI_SFPD_3DP 1 alternate_view WIND_3DP_ECHSFITS_E0-YR 0 WI_K0_3DP 0 WI_K0_EPA 0 -WI_K0_SMS 0 +WI_K0_SMS 1 alternate_view WI_H0_WAV 0 WI_H1_WAV 0 -WI_K0_WAV 0 +WI_K0_WAV 1 alternate_view +WI_WA_RAD1_L3_DF 0 +WI_L3-DUSTIMPACT_WAVES 0 WI_OR_DEF 0 -WI_OR_PRE 0 +WI_OR_PRE 1 alternate_view WI_AT_DEF 0 WI_AT_PRE 0 WI_K0_SPHA 0 diff --git a/CDAWebServer/src/org/hapiserver/source/cdaweb/x_landing.json b/CDAWebServer/src/org/hapiserver/source/cdaweb/x_landing.json new file mode 100644 index 00000000..a9e55f1b --- /dev/null +++ b/CDAWebServer/src/org/hapiserver/source/cdaweb/x_landing.json @@ -0,0 +1,4 @@ +{ + "$schema": "https://raw.githubusercontent.com/hapi-server/server-java/main/HapiServer/src/java/templates/x-landing-schema.json" + +} diff --git a/HapiServer/lib/UriTemplatesJava.jar b/HapiServer/lib/UriTemplatesJava.jar index dc6d655b..f5439f99 100644 Binary files a/HapiServer/lib/UriTemplatesJava.jar and b/HapiServer/lib/UriTemplatesJava.jar differ diff --git a/HapiServer/nbproject/ant-deploy.xml b/HapiServer/nbproject/ant-deploy.xml index 55566c8e..785fa0b8 100644 --- a/HapiServer/nbproject/ant-deploy.xml +++ b/HapiServer/nbproject/ant-deploy.xml @@ -1,42 +1,23 @@ @@ -59,18 +40,30 @@ Contributor(s): - - + + + + + + + + - - + + + + + + + + diff --git a/HapiServer/nbproject/build-impl.xml b/HapiServer/nbproject/build-impl.xml index bc0b30cd..8c3c84d3 100644 --- a/HapiServer/nbproject/build-impl.xml +++ b/HapiServer/nbproject/build-impl.xml @@ -74,6 +74,36 @@ + + + + + + + + + + + + + + + + + + + Must set platform.home + Must set platform.bootcp + Must set platform.java + 
Must set platform.javac + + The J2SE Platform is not correctly set up. + Your active platform is: ${platform.active}, but the corresponding property "platforms.${platform.active}.home" is not found in the project's properties files. + Either open the project in the IDE and setup the Platform with the same name or add it manually. + For example like this: + ant -Duser.properties.file=<path_to_property_file> jar (where you put the property "platforms.${platform.active}.home" in a .properties file) + or ant -Dplatforms.${platform.active}.home=<path_to_JDK_home> jar (where no properties file is used) + @@ -215,15 +245,6 @@ - - - - - - - - - @@ -318,7 +339,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -355,7 +376,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -432,7 +453,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -455,7 +476,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -495,7 +516,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -573,7 +594,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -582,8 +603,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - - + @@ -598,7 +618,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -615,8 +635,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - - + @@ -722,7 +741,7 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + @@ -754,6 +773,9 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f + + + @@ -774,18 +796,6 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - - - - - - - - - - - - @@ -800,10 +810,9 @@ or ant -Dj2ee.platform.classpath=<server_classpath> (where no properties f - + - - + @@ -1031,7 +1040,6 @@ exists or 
setup the property manually. For example like this: - @@ -1039,7 +1047,6 @@ exists or setup the property manually. For example like this: - @@ -1207,6 +1214,9 @@ exists or setup the property manually. For example like this: + + + @@ -1329,7 +1339,7 @@ exists or setup the property manually. For example like this: --> - + @@ -1446,6 +1456,7 @@ exists or setup the property manually. For example like this: + diff --git a/HapiServer/nbproject/genfiles.properties b/HapiServer/nbproject/genfiles.properties index d0df6ce7..8d3975c8 100644 --- a/HapiServer/nbproject/genfiles.properties +++ b/HapiServer/nbproject/genfiles.properties @@ -1,8 +1,8 @@ -build.xml.data.CRC32=34fb3e36 +build.xml.data.CRC32=da256322 build.xml.script.CRC32=ee888dac -build.xml.stylesheet.CRC32=651128d4@1.77.1.1 +build.xml.stylesheet.CRC32=1707db4f@1.94.0.1 # This file is used by a NetBeans-based IDE to track changes in generated files such as build-impl.xml. # Do not edit this file. You may delete it but then the IDE will never regenerate such files for you. 
-nbproject/build-impl.xml.data.CRC32=34fb3e36 -nbproject/build-impl.xml.script.CRC32=080d917e -nbproject/build-impl.xml.stylesheet.CRC32=99ea4b56@1.77.1.1 +nbproject/build-impl.xml.data.CRC32=da256322 +nbproject/build-impl.xml.script.CRC32=a628628a +nbproject/build-impl.xml.stylesheet.CRC32=334708a0@1.94.0.1 diff --git a/HapiServer/nbproject/project.properties b/HapiServer/nbproject/project.properties index f91fd70c..dfab7221 100644 --- a/HapiServer/nbproject/project.properties +++ b/HapiServer/nbproject/project.properties @@ -30,18 +30,16 @@ endorsed.classpath=\ excludes= file.reference.cdfj.jar=lib/cdfj.jar file.reference.jettison-1.4.1.jar=lib/jettison-1.4.1.jar -file.reference.UriTemplatesJava.jar=lib/UriTemplatesJava.jar includes=** j2ee.compile.on.save=true j2ee.copy.static.files.on.save=true j2ee.deploy.on.save=true j2ee.platform=1.7-web -j2ee.platform.classpath=${j2ee.server.home}/lib/annotations-api.jar:${j2ee.server.home}/lib/catalina-ant.jar:${j2ee.server.home}/lib/catalina-ha.jar:${j2ee.server.home}/lib/catalina-ssi.jar:${j2ee.server.home}/lib/catalina-storeconfig.jar:${j2ee.server.home}/lib/catalina-tribes.jar:${j2ee.server.home}/lib/catalina.jar:${j2ee.server.home}/lib/ecj-4.20.jar:${j2ee.server.home}/lib/el-api.jar:${j2ee.server.home}/lib/jasper-el.jar:${j2ee.server.home}/lib/jasper.jar:${j2ee.server.home}/lib/jaspic-api.jar:${j2ee.server.home}/lib/jsp-api.jar:${j2ee.server.home}/lib/servlet-api.jar:${j2ee.server.home}/lib/tomcat-api.jar:${j2ee.server.home}/lib/tomcat-coyote.jar:${j2ee.server.home}/lib/tomcat-dbcp.jar:${j2ee.server.home}/lib/tomcat-i18n-cs.jar:${j2ee.server.home}/lib/tomcat-i18n-de.jar:${j2ee.server.home}/lib/tomcat-i18n-es.jar:${j2ee.server.home}/lib/tomcat-i18n-fr.jar:${j2ee.server.home}/lib/tomcat-i18n-ja.jar:${j2ee.server.home}/lib/tomcat-i18n-ko.jar:${j2ee.server.home}/lib/tomcat-i18n-pt-BR.jar:${j2ee.server.home}/lib/tomcat-i18n-ru.jar:${j2ee.server.home}/lib/tomcat-i18n-zh-CN.jar:${j2ee.server.home}/lib/tomcat-jdbc.jar:${j2ee.s
erver.home}/lib/tomcat-jni.jar:${j2ee.server.home}/lib/tomcat-util-scan.jar:${j2ee.server.home}/lib/tomcat-util.jar:${j2ee.server.home}/lib/tomcat-websocket.jar:${j2ee.server.home}/lib/websocket-api.jar +j2ee.platform.classpath=${j2ee.server.home}/bin/tomcat-juli.jar:${j2ee.server.home}/lib/annotations-api.jar:${j2ee.server.home}/lib/catalina-ant.jar:${j2ee.server.home}/lib/catalina-ha.jar:${j2ee.server.home}/lib/catalina-ssi.jar:${j2ee.server.home}/lib/catalina-storeconfig.jar:${j2ee.server.home}/lib/catalina-tribes.jar:${j2ee.server.home}/lib/catalina.jar:${j2ee.server.home}/lib/ecj-4.20.jar:${j2ee.server.home}/lib/el-api.jar:${j2ee.server.home}/lib/jasper-el.jar:${j2ee.server.home}/lib/jasper.jar:${j2ee.server.home}/lib/jaspic-api.jar:${j2ee.server.home}/lib/jsp-api.jar:${j2ee.server.home}/lib/servlet-api.jar:${j2ee.server.home}/lib/tomcat-api.jar:${j2ee.server.home}/lib/tomcat-coyote.jar:${j2ee.server.home}/lib/tomcat-dbcp.jar:${j2ee.server.home}/lib/tomcat-i18n-cs.jar:${j2ee.server.home}/lib/tomcat-i18n-de.jar:${j2ee.server.home}/lib/tomcat-i18n-es.jar:${j2ee.server.home}/lib/tomcat-i18n-fr.jar:${j2ee.server.home}/lib/tomcat-i18n-ja.jar:${j2ee.server.home}/lib/tomcat-i18n-ko.jar:${j2ee.server.home}/lib/tomcat-i18n-pt-BR.jar:${j2ee.server.home}/lib/tomcat-i18n-ru.jar:${j2ee.server.home}/lib/tomcat-i18n-zh-CN.jar:${j2ee.server.home}/lib/tomcat-jdbc.jar:${j2ee.server.home}/lib/tomcat-jni.jar:${j2ee.server.home}/lib/tomcat-util-scan.jar:${j2ee.server.home}/lib/tomcat-util.jar:${j2ee.server.home}/lib/tomcat-websocket.jar:${j2ee.server.home}/lib/websocket-api.jar j2ee.server.type=Tomcat jar.compress=false javac.classpath=\ ${file.reference.jettison-1.4.1.jar}:\ - ${file.reference.UriTemplatesJava.jar}:\ ${reference.HapiServerBase.jar}:\ ${file.reference.cdfj.jar} # Space-separated list of extra javac options @@ -73,7 +71,7 @@ javadoc.version=false javadoc.windowtitle= lib.dir=${web.docbase.dir}/WEB-INF/lib persistence.xml.dir=${conf.dir} 
-platform.active=default_platform +platform.active=JDK_8__System_ project.HapiServerBase=../HapiServerBase reference.HapiServerBase.jar=${project.HapiServerBase}/dist/HapiServerBase.jar resource.dir=setup diff --git a/HapiServer/nbproject/project.xml b/HapiServer/nbproject/project.xml index 1a9d51a6..6441ba9a 100644 --- a/HapiServer/nbproject/project.xml +++ b/HapiServer/nbproject/project.xml @@ -5,15 +5,12 @@ HapiServer 1.6.5 + ${file.reference.jettison-1.4.1.jar} WEB-INF/lib - - ${file.reference.UriTemplatesJava.jar} - WEB-INF/lib - ${reference.HapiServerBase.jar} WEB-INF/lib diff --git a/HapiServer/src/java/org/hapiserver/AboutServlet.java b/HapiServer/src/java/org/hapiserver/AboutServlet.java index a5955290..19cf0c5d 100644 --- a/HapiServer/src/java/org/hapiserver/AboutServlet.java +++ b/HapiServer/src/java/org/hapiserver/AboutServlet.java @@ -3,6 +3,9 @@ import java.io.IOException; import java.io.PrintWriter; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.TimeZone; import java.util.logging.Level; import java.util.logging.Logger; import javax.servlet.ServletException; @@ -77,9 +80,16 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re response.setHeader("Access-Control-Allow-Headers","Content-Type" ); try { - JSONObject about= HapiServerSupport.getAbout(HAPI_HOME); + JSONObject content= HapiServerSupport.getAbout(HAPI_HOME); + String modificationDate= content.optString("x_modificationDate",""); + if ( modificationDate.length()>0 ) { + SimpleDateFormat sdf = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z"); + sdf.setTimeZone(TimeZone.getTimeZone("GMT")); + String rfc2616= sdf.format( new Date( TimeUtil.toMillisecondsSince1970(modificationDate) ) ); + response.setHeader("Last-Modified", rfc2616 ); + } try (PrintWriter out = response.getWriter()) { - String s= about.toString(4); + String s= content.toString(4); out.write(s); } catch ( JSONException ex ) { throw new ServletException(ex); diff --git 
a/HapiServer/src/java/org/hapiserver/CapabilitiesServlet.java b/HapiServer/src/java/org/hapiserver/CapabilitiesServlet.java index 58d83554..e5bb2d1a 100644 --- a/HapiServer/src/java/org/hapiserver/CapabilitiesServlet.java +++ b/HapiServer/src/java/org/hapiserver/CapabilitiesServlet.java @@ -6,6 +6,9 @@ import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.TimeZone; import java.util.logging.Level; import java.util.logging.Logger; import javax.servlet.ServletException; @@ -80,9 +83,16 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re response.setHeader("Access-Control-Allow-Headers","Content-Type" ); try { - JSONObject about= HapiServerSupport.getCapabilities(HAPI_HOME); + JSONObject content= HapiServerSupport.getCapabilities(HAPI_HOME); + String modificationDate= content.optString("x_modificationDate",""); + if ( modificationDate.length()>0 ) { + SimpleDateFormat sdf = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z"); + sdf.setTimeZone(TimeZone.getTimeZone("GMT")); + String rfc2616= sdf.format( new Date( TimeUtil.toMillisecondsSince1970(modificationDate) ) ); + response.setHeader("Last-Modified", rfc2616 ); + } try (PrintWriter out = response.getWriter()) { - String s= about.toString(4); + String s= content.toString(4); out.write(s); } catch ( JSONException ex ) { throw new ServletException(ex); diff --git a/HapiServer/src/java/org/hapiserver/CatalogServlet.java b/HapiServer/src/java/org/hapiserver/CatalogServlet.java index 3fdeb019..c704a1d6 100644 --- a/HapiServer/src/java/org/hapiserver/CatalogServlet.java +++ b/HapiServer/src/java/org/hapiserver/CatalogServlet.java @@ -3,6 +3,9 @@ import java.io.IOException; import java.io.PrintWriter; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.TimeZone; import java.util.logging.Level; import java.util.logging.Logger; import 
javax.servlet.ServletException; @@ -80,9 +83,22 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re response.setHeader("Access-Control-Allow-Headers","Content-Type" ); JSONObject catalog= HapiServerSupport.getCatalog(HAPI_HOME); + String modificationDate= catalog.optString("x_modificationDate",""); + if ( modificationDate.length()>0 ) { + SimpleDateFormat sdf = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z"); + sdf.setTimeZone(TimeZone.getTimeZone("GMT")); + String rfc2616= sdf.format( new Date( TimeUtil.toMillisecondsSince1970(modificationDate) ) ); + response.setHeader("Last-Modified", rfc2616 ); + } + catalog = new JSONObject(catalog.toMap()); // force a shallow copy + + //TODO: we need to remove x_groups and x_dataset_to_group without modifying the database. + catalog.remove("x_groups"); + catalog.remove("x_dataset_to_group"); try (PrintWriter out = response.getWriter()) { String s= catalog.toString(4); + s= s.replace("\\/", "/"); // a small performance hit. We can worry about this later... 
out.write(s); } catch ( JSONException ex ) { throw new ServletException(ex); diff --git a/HapiServer/src/java/org/hapiserver/CsvHapiRecordConverter.java b/HapiServer/src/java/org/hapiserver/CsvHapiRecordConverter.java index 5128c166..c66dd1d0 100644 --- a/HapiServer/src/java/org/hapiserver/CsvHapiRecordConverter.java +++ b/HapiServer/src/java/org/hapiserver/CsvHapiRecordConverter.java @@ -45,7 +45,7 @@ public HapiRecord convert( String record ) { String[] ff= new String[params.length()]; int i=0; if ( params.length()>fields.length ) { - throw new IndexOutOfBoundsException( "not enough fields found in record, or too many records in info."); + throw new IndexOutOfBoundsException( String.format( "too few fields (%d) found in record, or too many parameters (%d) in info.", fields.length, params.length() ) ); } for ( int j=0; j request, String name, String deft, } private static final Pattern PATTERN_TRUE_FALSE = Pattern.compile("(|true|false)"); - private static final Pattern PATTERN_FORMAT = Pattern.compile("(|csv|binary)"); + private static final Pattern PATTERN_FORMAT = Pattern.compile("(|csv|binary|json)"); private static final Pattern PATTERN_INCLUDE = Pattern.compile("(|header)"); /** @@ -83,7 +84,7 @@ private boolean check1405TimeRange( JSONObject info, String start, String stop ) if ( startTime.length()==0 ) throw new IllegalArgumentException("info must contain startDate"); if ( stopTime.length()==0 ) throw new IllegalArgumentException("info must contain stopDate"); if ( stop.compareTo(start)<=0 ) { - throw new HapiException( 1404, "start time equal to or after stop time" ); + throw new HapiException( 1404, "Bad request - start equal to or after stop" ); } try { start= TimeUtil.reformatIsoTime( startTime, start ); @@ -126,6 +127,31 @@ private boolean check1405TimeRange( JSONObject info, String start, String stop ) return true; } + /** + * throw exception when parameter is not found + * @param info info for the parameter, which contains parameter list + * @param 
parameters "" or the parameters + * @return true if everything is okay, throw HapiException otherwise + */ + private boolean check1407Parameters( JSONObject info, String sparameters ) throws HapiException, JSONException { + JSONArray parameters= info.getJSONArray("parameters"); + if ( sparameters.trim().length()>0 ) { + String[] pps= sparameters.split(","); + for ( String p : pps ) { + boolean found=false; + for ( int i=0; found==false && iGET and POST methods. * @@ -138,48 +164,93 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re throws ServletException, IOException { Map params= new HashMap<>( request.getParameterMap() ); - String id= getParam( params,"id",null,"The identifier for the resource.", null ); - String timeMin= getParam( params, "time.min", "", "The earliest value of time to include in the response.", null ); - String timeMax= - getParam( params, "time.max", "", "Include values of time up to but not including this time in the response.", null ); - if ( timeMin.length()==0 ) { // support 3.0 - timeMin= getParam( params, "start", null, "The earliest value of time to include in the response.", null ); - timeMax= - getParam( params, "stop", null, "Include values of time up to but not including this time in the response.", null ); + + String dataset; + String start; + String stop; + + // HAPI 3.0 servers must accept both old and new parameters. 
+ dataset= getParam( params,"id","","The identifier for the resource.", null ); + if ( dataset.equals("") ) { + dataset= getParam( params,"dataset",null,"The identifier for the resource.", null ); // allowed in 3.0 } + start= getParam( params, "time.min", "", "The earliest value of time to include in the response.", null ); + stop= getParam( params, "time.max", "", "Include values of time up to but not including this time in the response.", null ); + if ( start.length()==0 ) { + start= getParam( params, "start", "", "The earliest value of time to include in the response.", null ); + stop= getParam( params, "stop", "", "Include values of time up to but not including this time in the response.", null ); + } + + if ( start.equals("") || stop.equals("") ) { + Util.raiseError( 1400, "Bad request - user input error (start or stop is missing)", + response, null ); + return; + } + String parameters= getParam( params, "parameters", "", "The comma separated list of parameters to include in the response ", null ); - if ( parameters!=null ) { - parameters= parameters.replaceAll(" ","+"); - } String include= getParam(params, "include", "", "include header at the top", PATTERN_INCLUDE); - String format= getParam(params, "format", "", "The desired format for the data stream.", PATTERN_FORMAT); + String sresolveReferences= getParam( params, "resolve_references", "true", "resolve references in included header", null ); + + String format; + try { + format = getParam(params, "format", "csv", "The desired format for the data stream.", PATTERN_FORMAT); + } catch ( IllegalArgumentException ex ) { + Util.raiseError( 1409, "Bad request - unsupported output format", response, null ); + return; + } if ( !params.isEmpty() ) { - Util.raiseError( 1401, "Bad request - unknown API parameter name " + params.entrySet().iterator().next().getKey(), - response, response.getOutputStream() ); + Util.raiseError( 1401, "Bad request - unknown API parameter name", + response, null ); + return; + } + + try 
{ + TimeUtil.parseISO8601Time(start); + } catch ( IllegalArgumentException | ParseException ex ) { + Util.raiseError( 1402, "Bad request - syntax error in start time", response, null ); + return; + } + + try { + TimeUtil.parseISO8601Time(stop); + } catch ( IllegalArgumentException | ParseException ex ) { + Util.raiseError( 1403, "Bad request - syntax error in stop time", response, null ); return; } - if ( timeMax.length()>timeMin.length() ) { - timeMin= TimeUtil.reformatIsoTime( timeMax, timeMin ); - } else if ( timeMin.length()>timeMax.length() ) { - timeMax= TimeUtil.reformatIsoTime( timeMin, timeMax ); + if ( stop.length()>start.length() ) { + start= TimeUtil.reformatIsoTime( stop, start ); + } else if ( start.length()>stop.length() ) { + stop= TimeUtil.reformatIsoTime( start, stop ); } - logger.log(Level.FINE, "data request for {0} {1}/{2}", new Object[]{id, timeMin, timeMax}); + logger.log(Level.FINE, "data request for {0} {1}/{2}", new Object[]{dataset, start, stop}); DataFormatter dataFormatter; - if ( format.equals("binary") ) { - response.setContentType("application/binary"); - dataFormatter= new BinaryDataFormatter(); - response.setHeader("Content-disposition", "attachment; filename=" - + Util.fileSystemSafeName(id).replaceAll("\\/", "_" ) + "_"+timeMin+ "_"+timeMax + ".bin" ); - } else { - response.setContentType("text/csv;charset=UTF-8"); - dataFormatter= new CsvDataFormatter(); - response.setHeader("Content-disposition", "attachment; filename=" - + Util.fileSystemSafeName(id).replaceAll("\\/", "_" ) + "_"+timeMin+ "_"+timeMax + ".csv" ); + switch (format) { + case "binary": + response.setContentType("application/binary"); + dataFormatter= new BinaryDataFormatter(); + response.setHeader("Content-disposition", "attachment; filename=" + + Util.fileSystemSafeName(dataset).replaceAll("\\/", "_" ) + "_"+start+ "_"+stop + ".bin" ); + break; + case "json": + response.setContentType("application/json"); + dataFormatter= new JsonDataFormatter(); + 
response.setHeader("Content-disposition", "attachment; filename=" + + Util.fileSystemSafeName(dataset).replaceAll("\\/", "_" ) + "_"+start+ "_"+stop + ".bin" ); + break; + case "csv": + case "": + response.setContentType("text/csv;charset=UTF-8"); + dataFormatter= new CsvDataFormatter(); + response.setHeader("Content-disposition", "attachment; filename=" + + Util.fileSystemSafeName(dataset).replaceAll("\\/", "_" ) + "_"+start+ "_"+stop + ".csv" ); + break; + default: + throw new IllegalArgumentException("bad format"); // shouldn't get here } @@ -191,28 +262,42 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re JSONObject jo; try { - jo= HapiServerSupport.getInfo( HAPI_HOME, id ); + jo= HapiServerSupport.getInfo( HAPI_HOME, dataset ); } catch ( BadRequestIdException ex ) { - Util.raiseError( ex, response, response.getOutputStream() ); + Util.raiseError( ex, response, null ); return; } catch (JSONException | HapiException ex) { throw new RuntimeException(ex); } try { - check1405TimeRange( jo, timeMin, timeMax ); + check1405TimeRange( jo, start, stop ); + } catch ( HapiException ex ) { + try (ServletOutputStream out = response.getOutputStream()) { + Util.raiseError( ex.getCode(), ex.getMessage(), response, null ); + return; + } + } + + try { + check1407Parameters( jo, parameters ); } catch ( HapiException ex ) { try (ServletOutputStream out = response.getOutputStream()) { - Util.raiseError( ex.getCode(), ex.getMessage(), response, out ); + Util.raiseError( ex.getCode(), ex.getMessage(), response, null ); return; } + } catch ( JSONException ex ) { + throw new ServletException(ex); } OutputStream out = response.getOutputStream(); - + + // we're going to do some special (kludgy) things to make this format. 
+ boolean jsonFormat= dataFormatter instanceof JsonDataFormatter; + int[] dr; try { - dr = TimeUtil.createTimeRange( TimeUtil.parseISO8601Time(timeMin), TimeUtil.parseISO8601Time(timeMax) ); + dr = TimeUtil.createTimeRange( TimeUtil.parseISO8601Time(start), TimeUtil.parseISO8601Time(stop) ); } catch ( ParseException ex ) { throw new RuntimeException(ex); //TODO: HAPI Exceptions } @@ -228,21 +313,21 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re HapiRecordSource source; try { - source= SourceRegistry.getInstance().getSource(HAPI_HOME, id, jo); + source= SourceRegistry.getInstance().getSource(HAPI_HOME, dataset, jo); } catch ( BadRequestIdException ex ) { - Util.raiseError( 1406, "HAPI error 1406: unknown dataset id", response, response.getOutputStream() ); + Util.raiseError( 1406, "HAPI error 1406: unknown dataset id", response, null); return; } catch ( HapiException ex ) { throw new RuntimeException(ex); } - + String ifModifiedSince= request.getHeader("If-Modified-Since"); if ( ifModifiedSince!=null ) { - String ts= source.getTimeStamp( dr, TimeUtil.getStopTime(dr) ); + TimeString ts= source.getTimeStamp( new TimeString(dr), new TimeString(dr) ); if ( ts!=null ) { // this will often be null. 
try { String clientModifiedTime= parseTime(ifModifiedSince); - if ( clientModifiedTime.compareTo(ts)>=0 ) { + if ( clientModifiedTime.compareTo(ts.toIsoTime())>=0 ) { response.setStatus( HttpServletResponse.SC_NOT_MODIFIED ); //304 out.close(); return; @@ -258,16 +343,24 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re String[] parametersArray= HapiServerSupport.getAllParameters( jo ); dataNeedsParameterSubsetting= false; if ( source.hasGranuleIterator() ) { - dsiter= new AggregatingIterator( source, dr, TimeUtil.getStopTime(dr), parametersArray ); + dsiter= new AggregatingIterator( source, + TimeString.getStartTime(dr), + TimeString.getStopTime(dr), + parametersArray ); } else { - dsiter= source.getIterator( TimeUtil.getStartTime(dr), TimeUtil.getStopTime(dr), parametersArray ); + dsiter= source.getIterator( new TimeString(TimeUtil.getStartTime(dr)), + TimeString.getStopTime(dr), + parametersArray ); } } else { dataNeedsParameterSubsetting= false; if ( source.hasGranuleIterator() ) { - dsiter= new AggregatingIterator( source, TimeUtil.getStartTime(dr), TimeUtil.getStopTime(dr) ); + dsiter= new AggregatingIterator( source, + TimeString.getStartTime(dr), + TimeString.getStopTime(dr) ); } else { - dsiter= source.getIterator( TimeUtil.getStartTime(dr), TimeUtil.getStopTime(dr) ); + dsiter= source.getIterator( + TimeString.getStartTime(dr), TimeString.getStopTime(dr) ); } } } else { @@ -275,24 +368,24 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re dataNeedsParameterSubsetting= false; String[] parametersSplit= HapiServerSupport.splitParams( jo, parameters ); if ( source.hasGranuleIterator() ) { - dsiter= new AggregatingIterator( source, TimeUtil.getStartTime(dr), TimeUtil.getStopTime(dr), parametersSplit ); + dsiter= new AggregatingIterator( source, TimeString.getStartTime(dr), TimeString.getStopTime(dr), parametersSplit ); } else { - dsiter= source.getIterator( TimeUtil.getStartTime(dr), 
TimeUtil.getStopTime(dr), parametersSplit ); + dsiter= source.getIterator( TimeString.getStartTime(dr), TimeString.getStopTime(dr), parametersSplit ); } } else { dataNeedsParameterSubsetting= true; if ( source.hasGranuleIterator() ) { - dsiter= new AggregatingIterator( source, TimeUtil.getStartTime(dr), TimeUtil.getStopTime(dr) ); + dsiter= new AggregatingIterator( source, TimeString.getStartTime(dr), TimeString.getStopTime(dr) ); } else { - dsiter= source.getIterator( TimeUtil.getStartTime(dr), TimeUtil.getStopTime(dr) ); + dsiter= source.getIterator( TimeString.getStartTime(dr), TimeString.getStopTime(dr) ); } } } if ( dsiter==null ) { - Util.raiseError( 1500, "HAPI error 1500: internal server error, id has no reader " + id, - response, response.getOutputStream() ); + Util.raiseError( 1500, "HAPI error 1500: internal server error, id has no reader " + dataset, + response, null ); source.doFinalize(); return; } @@ -311,12 +404,12 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re try { - jo0= HapiServerSupport.getInfo( HAPI_HOME, id ); + jo0= HapiServerSupport.getInfo( HAPI_HOME, dataset ); int[] indexMap; if ( !parameters.equals("") ) { jo= Util.subsetParams( jo0, parameters ); - indexMap= (int[])jo.get("x_indexmap"); + indexMap= (int[])jo.remove("x_indexmap"); if ( dataNeedsParameterSubsetting ) { dsiter= new SubsetFieldsDataSetIterator( dsiter, indexMap ); } @@ -332,6 +425,9 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re boolean sendHeader= include.equals("header"); + // The following line can be used to eavesdrop on the output stream. 
+ //out= new DebugOutputStream(out); + try { assert dsiter!=null; if ( dsiter.hasNext() ) { @@ -341,18 +437,30 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re logger.log(Level.FINER, "first record read from source: {0}", first.getIsoTime(0)); + try { + JSONArray pp= jo.getJSONArray("parameters"); + if ( pp.length()!=first.length() ) { + throw new IllegalStateException("Info parameters.length() does not equal the number of parameters in the HAPIRecord"); + } + } catch (JSONException ex) { + Logger.getLogger(DataServlet.class.getName()).log(Level.SEVERE, null, ex); + } + dataFormatter.initialize( jo, out, first ); doVerify(dataFormatter, first, jo); // format time boundaries so they are in the same format as the data, and simple string comparisons can be made. - String startTime= TimeUtil.reformatIsoTime( first.getIsoTime(0), timeMin ); - String stopTime= TimeUtil.reformatIsoTime( first.getIsoTime(0), timeMax ); + String startTime= TimeUtil.reformatIsoTime( first.getIsoTime(0), start ); + String stopTime= TimeUtil.reformatIsoTime( first.getIsoTime(0), stop ); if ( first.getIsoTime(0).compareTo( startTime )>=0 && first.getIsoTime(0).compareTo( stopTime )<0 ) { if ( sentSomething==false ) { - if ( sendHeader ) { + if ( sendHeader && !jsonFormat ) { try { + if ( sresolveReferences.equals("true") ) { + jo= HapiServerSupport.resolveReferences(jo); + } sendHeader( jo, format, out); } catch (JSONException | UnsupportedEncodingException ex) { logger.log(Level.SEVERE, null, ex); @@ -367,14 +475,18 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re String isoTime= record.getIsoTime(0); if ( isoTime.compareTo( startTime )>=0 && isoTime.compareTo( stopTime )<0 ) { //TODO: repeat code, consider do..while if ( sentSomething==false ) { - if ( sendHeader ) { + if ( sendHeader && !jsonFormat ) { try { - sendHeader( jo, format, out); + sendHeader( jo, format, out ); } catch (JSONException | 
UnsupportedEncodingException ex) { logger.log(Level.SEVERE, null, ex); } } sentSomething= true; + } else { + if ( jsonFormat ) { + out.write(",\n".getBytes(CHARSET)); + } } dataFormatter.sendRecord( out,record ); } @@ -382,9 +494,12 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re } if ( !sentSomething ) { - Util.raiseError( 1201, "HAPI error 1201: no data found " + id, response, out ); + Util.raiseError( 1201, "HAPI error 1201: no data found " + dataset, response, out ); } + if ( jsonFormat ) { + out.write("\n".getBytes(CHARSET)); + } dataFormatter.finalize(out); } catch ( RuntimeException ex ) { @@ -411,11 +526,14 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re */ private void doVerify(DataFormatter dataFormatter, HapiRecord first, JSONObject jo) throws IOException { ByteArrayOutputStream testOut= new ByteArrayOutputStream(1024); - dataFormatter.sendRecord( testOut, first); + dataFormatter.sendRecord(testOut, first); byte[] bb= testOut.toByteArray(); try { int len= jo.getJSONArray("parameters").getJSONObject(0).getInt("length"); - if ( bb[len-1]!='Z' ) { + if ( dataFormatter instanceof JsonDataFormatter ) { + len= len+2; // because it has [" + } + if ( bb[len-1]!='Z' ) { logger.log(Level.WARNING, "time is not the correct length or Z is missing, expected Z at byte offset {0}", len); } @@ -428,6 +546,9 @@ private void sendHeader(JSONObject jo, String format, OutputStream out) throws J ByteArrayOutputStream boas= new ByteArrayOutputStream(10000); PrintWriter pw= new PrintWriter(boas); jo.put( "format", format ); // Thanks Bob's verifier for catching this. 
+ JSONObject okayStatus= new JSONObject("{ \"code\": 1200, \"message\": \"OK\" }"); + jo.put( "status", okayStatus ); + jo.put( "HAPI", Util.hapiVersion() ); pw.write( jo.toString(4) ); pw.close(); boas.close(); diff --git a/HapiServer/src/java/org/hapiserver/EmptyServlet.java b/HapiServer/src/java/org/hapiserver/EmptyServlet.java index 06fc7d73..b20e08c9 100644 --- a/HapiServer/src/java/org/hapiserver/EmptyServlet.java +++ b/HapiServer/src/java/org/hapiserver/EmptyServlet.java @@ -9,7 +9,12 @@ import javax.servlet.http.HttpServletResponse; /** - * redirect the browser to the hapi landing page. + * redirect the browser to the page without the slash. So,
      + *
    • .../hapi/info/?dataset=AC_H0_SWE/availability → .../hapi/info?dataset=AC_H0_SWE/availability + *
    • .../hapi/ → .../hapi + *
    • .../ → .../hapi + *
    • .../happy → .../hapi + *
    * @author jbf */ @WebServlet(name = "EmptyServlet", urlPatterns = {"/"}) @@ -25,9 +30,24 @@ public class EmptyServlet extends HttpServlet { */ protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { - - response.sendRedirect( "hapi"); - + String url= request.getRequestURI(); + if ( request.getQueryString()!=null ) { + response.sendRedirect( url.substring(0,url.length()-1) + "?" + request.getQueryString() ); + } else { + if ( url.endsWith("/hapi/") ) { + response.sendRedirect( url.substring(0,url.length()-1) ); + } else if ( url.endsWith("/hapi/hapi") ) { + response.sendRedirect( url.substring(0,url.length()-5) ); + } else if ( !url.endsWith("/hapi") ) { + int lastSlash= url.lastIndexOf("/"); + if ( lastSlash>-1 ) { + url= url.substring(0,lastSlash+1); + } + response.sendRedirect( url + "hapi" ); + } else { + response.sendRedirect( url.substring(0,url.length()-1) ); + } + } } // diff --git a/HapiServer/src/java/org/hapiserver/HapiServerSupport.java b/HapiServer/src/java/org/hapiserver/HapiServerSupport.java index a74e27c7..b37bb982 100644 --- a/HapiServer/src/java/org/hapiserver/HapiServerSupport.java +++ b/HapiServer/src/java/org/hapiserver/HapiServerSupport.java @@ -4,25 +4,33 @@ import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.File; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.text.ParseException; +import java.text.SimpleDateFormat; +import 
java.util.Collections; +import java.util.Date; import java.util.HashMap; +import java.util.LinkedHashSet; import java.util.Map; +import java.util.Set; +import java.util.TimeZone; import java.util.logging.Level; import java.util.logging.Logger; +import java.util.regex.Pattern; import java.util.stream.Collectors; import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONException; @@ -130,72 +138,180 @@ public static String[] getAllParameters(JSONObject jo) { } } - private static Map catalogCache= new HashMap<>(); + private static final Map catalogCache= new HashMap<>(); - private static JSONObject resolveCatalog(JSONObject jo) throws JSONException { - JSONArray catalog= jo.getJSONArray("catalog"); + /** + * New 2025: the config.json file can contain node "options" which are a + * set of definitions for the node. Note options' namespace is the same + * as the servers, so "${id}", "${info}", and "${data-config}" are all + * reserved words. This will only look at the "args" of the "config" node. + * @param options + * @param item item with either catalog, info, or data node. 
+ * @return + */ + private static JSONObject resolveOptions( Map options, JSONObject item ) { + JSONObject itemConfig; + JSONObject config= item.optJSONObject("config"); + if ( config==null ) return item; + String[] confs= new String[] { "catalog", "info", "data" }; + for (String s: confs ) { + itemConfig= config.optJSONObject(s); + if ( itemConfig==null ) continue; + if ( "classpath".equals(itemConfig.opt("source")) ) { + JSONArray args= itemConfig.optJSONArray("args"); + if (args!=null ) { + for ( int i=0; i optionsMap; + if ( options==null ) { + optionsMap= Collections.emptyMap(); + } else { + // go through and resolve any internal references within the options + optionsMap = new HashMap( options.toMap() ); + Set kk= optionsMap.keySet(); + for ( String k1 : kk ) { + Object o= optionsMap.get(k1); + if ( o instanceof String ) { + String v1= (String)o; + if ( v1.contains("${") ) { + String[] ss= v1.split("\\$\\{"); + for ( int j=1; j datasetIds= new LinkedHashSet<>(); Map infoCache= new HashMap<>(); Map configCache = new HashMap<>(); Map dataConfigCache = new HashMap<>(); @@ -257,7 +399,7 @@ public ConfigData( JSONObject config, long infoTimeStamp ) { * @throws java.io.IOException */ public static JSONObject getCatalogFromSpawnCommand( String command ) throws IOException { - logger.log(Level.INFO, "spawn command {0}", command); + logger.log(Level.FINE, "spawn command {0}", command); String[] ss= command.split("\\s+"); ProcessBuilder pb= new ProcessBuilder( ss ); @@ -281,7 +423,7 @@ public static JSONObject getCatalogFromSpawnCommand( String command ) throws IOE * @throws IOException */ public static JSONObject getInfoFromSpawnCommand( JSONObject jo, String HAPI_HOME, String id ) throws IOException { - logger.log(Level.INFO, "getInfoFromSpawnCommand {0}", id); + logger.log(Level.FINE, "getInfoFromSpawnCommand {0}", id); try { String command = SpawnRecordSource.doMacros( HAPI_HOME, id, jo.getString("command") ); @@ -311,7 +453,7 @@ public static JSONObject 
getInfoFromSpawnCommand( JSONObject jo, String HAPI_HOM * @throws java.io.IOException */ public static JSONObject getCatalogFromClasspath( JSONObject jo, String HAPI_HOME ) throws IOException { - logger.log(Level.INFO, "getCatalogFromClasspath" ); + logger.log(Level.FINE, "getCatalogFromClasspath" ); ClassLoader cl=null; String s= jo.optString( "classpath", jo.optString("x_classpath","") ); @@ -324,12 +466,7 @@ public static JSONObject getCatalogFromClasspath( JSONObject jo, String HAPI_HOM if ( s.length()>0 ) { try { s= SpawnRecordSource.doMacros( HAPI_HOME, "", s ); - URL url; - if ( s.startsWith("http://") || s.startsWith("https://") || s.startsWith("file:") ) { - url= new URL( s ); - } else { - url= new File(s).toURI().toURL(); - } + URL url= getClasspath(HAPI_HOME, s); cl= new URLClassLoader( new URL[] { url }, SourceRegistry.class.getClassLoader()); cl.getParent(); } catch (MalformedURLException ex) { @@ -348,8 +485,19 @@ public static JSONObject getCatalogFromClasspath( JSONObject jo, String HAPI_HOM } else { c= Class.forName(clas); } - Object o; JSONArray args= jo.optJSONArray("args"); + if ( args==null ) { + if ( jo.has("x_args") ) { + try { + if ( !( jo.get("x_args") instanceof JSONArray ) ) { + throw new IllegalArgumentException("x_args should be an array"); + } + } catch (JSONException ex) { + Logger.getLogger(HapiServerSupport.class.getName()).log(Level.SEVERE, null, ex); + } + } + args= jo.optJSONArray("x_args"); + } if ( args==null ) { Method method= c.getMethod( methodString ); if ( method.getReturnType()!=String.class ) { @@ -379,7 +527,7 @@ public static JSONObject getCatalogFromClasspath( JSONObject jo, String HAPI_HOM } } Method method= c.getMethod( methodString, cc ); - String infoString= (String)method.invoke( null ); + String infoString= (String)method.invoke( null, oo ); // method is static try { return new JSONObject(infoString); } catch ( JSONException ex ) { @@ -393,6 +541,27 @@ public static JSONObject getCatalogFromClasspath( JSONObject 
jo, String HAPI_HOM } + /** + * return the URL identified by s, which can be relative to the HAPI_HOME/config directory. + * @param HAPI_HOME home of config and resolved responses, often /tmp/hapi-server/. + * @param s the path for the jar file. + * @return the resolved URL. + * @throws MalformedURLException + */ + protected static URL getClasspath( String HAPI_HOME, String s ) throws MalformedURLException { + URL url; + if ( s.startsWith("http://") || s.startsWith("https://") || s.startsWith("file:") ) { + url= new URL( s ); + } else { + if ( s.contains("/") ) { + url= new File(s).toURI().toURL(); + } else { + url= new File(HAPI_HOME + "/config/",s).toURI().toURL(); + } + } + return url; + } + /** * Allow a java call to produce the info for a dataset id. The JSONObject should * have the tags "class" and "method" which identify a static method which takes the @@ -415,12 +584,7 @@ public static JSONObject getInfoFromClasspath( JSONObject jo, String HAPI_HOME, try { String s= classpath; s= SpawnRecordSource.doMacros( HAPI_HOME, id, s ); - URL url; - if ( s.startsWith("http://") || s.startsWith("https://") || s.startsWith("file:") ) { - url= new URL( s ); - } else { - url= new File(s).toURI().toURL(); - } + URL url= getClasspath(HAPI_HOME, s); cl= new URLClassLoader( new URL[] { url }, SourceRegistry.class.getClassLoader()); cl.getParent(); } catch (MalformedURLException ex) { @@ -435,7 +599,6 @@ public static JSONObject getInfoFromClasspath( JSONObject jo, String HAPI_HOME, } else { c= Class.forName(clas); } - Object o; JSONArray args= jo.optJSONArray("args"); if ( args==null ) args= jo.optJSONArray("x_args"); if ( args==null ) { @@ -443,7 +606,12 @@ public static JSONObject getInfoFromClasspath( JSONObject jo, String HAPI_HOME, if ( method.getReturnType()!=String.class ) { throw new IllegalArgumentException("method should return String: " + clas + "."+ methodString ); } - String infoString= (String)method.invoke( id ); + String infoString; + if ( 
Modifier.isStatic(method.getModifiers()) ) { + infoString= (String)method.invoke( null, id ); + } else { + infoString= (String)method.invoke( id ); + } try { return new JSONObject(infoString); } catch ( JSONException ex ) { @@ -467,6 +635,7 @@ public static JSONObject getInfoFromClasspath( JSONObject jo, String HAPI_HOME, } } Method method= c.getMethod( methodString, cc ); + logger.log(Level.FINER, "about to call into method to get info: {0}", method); String infoString= (String)method.invoke( id, oo ); try { return new JSONObject(infoString); @@ -489,33 +658,47 @@ public static JSONObject getInfoFromClasspath( JSONObject jo, String HAPI_HOME, * @throws JSONException */ public static JSONObject getLandingConfig( String HAPI_HOME ) throws IOException, JSONException { - logger.info("getLandingConfig"); + logger.fine("getLandingConfig"); - JSONObject result= loadAndCheckConfig( HAPI_HOME, "x-landing.json", null ); + JSONObject result= loadAndCheckConfig( HAPI_HOME, "x-landing.json", new JSONObject() ); return result; } + private static JSONObject DEFAULT_ABOUT; + + static { + String ss= Util.getTemplateAsString("about.json"); + try { + DEFAULT_ABOUT= new JSONObject(ss); + } catch (JSONException ex) { + throw new RuntimeException(ex); + } + } + /** - * read the about file from the config directory if it has been modified. + * read the about file from the config directory if it has been modified. The + * x_buildTime is set using Util.buildTime() and HAPI version is set with Util.hapiVersion() * @param HAPI_HOME - * @return JSON for the about file. + * @return JSON for the about response. 
* @throws IOException * @throws JSONException */ public static JSONObject getAbout( String HAPI_HOME ) throws IOException, JSONException { - logger.info("getAbout"); + logger.fine("getAbout"); - JSONObject result= loadAndCheckConfig( HAPI_HOME, "about.json" ); + JSONObject result= loadAndCheckConfig( HAPI_HOME, "about.json", DEFAULT_ABOUT ); result.put( "x_buildTime", Util.buildTime() ); + result.put( "HAPI", Util.hapiVersion() ); return result; } /** - * read the relations file from the config directory if it has been modified. + * read the semantics file from the config directory if it has been modified. + * Note this was an experimental feature which was never accepted. * @param HAPI_HOME * @return JSON for the about file. * @throws IOException @@ -523,11 +706,11 @@ public static JSONObject getAbout( String HAPI_HOME ) throws IOException, JSONEx */ public static JSONObject getSemantics( String HAPI_HOME ) throws IOException, JSONException { - logger.info("getSemantics"); + logger.fine("getSemantics"); String ff= "semantics.json"; - JSONObject jo= loadAndCheckConfig(HAPI_HOME, ff); + JSONObject jo= loadAndCheckConfig(HAPI_HOME, ff, new JSONObject() ); return jo; } @@ -535,6 +718,8 @@ public static JSONObject getSemantics( String HAPI_HOME ) throws IOException, JS /** * read the relations file from the config directory if it has been modified. + * Note this was an experimental feature which was never accepted. + * * @param HAPI_HOME * @return JSON for the about file. 
* @throws IOException @@ -542,15 +727,26 @@ public static JSONObject getSemantics( String HAPI_HOME ) throws IOException, JS */ public static JSONObject getRelations( String HAPI_HOME ) throws IOException, JSONException { - logger.info("getRelations"); + logger.fine("getRelations"); String ff= "relations.json"; - JSONObject jo= loadAndCheckConfig(HAPI_HOME, ff); + JSONObject jo= loadAndCheckConfig(HAPI_HOME, ff, new JSONObject() ); return jo; } + private static JSONObject DEFAULT_CAPABILITIES; + + static { + String ss= Util.getTemplateAsString("capabilities.json"); + try { + DEFAULT_CAPABILITIES= new JSONObject(ss); + } catch (JSONException ex) { + throw new RuntimeException(ex); + } + } + /** * read the capabilities file from the config directory if it has been modified. * @param HAPI_HOME @@ -560,11 +756,11 @@ public static JSONObject getRelations( String HAPI_HOME ) throws IOException, JS */ public static JSONObject getCapabilities( String HAPI_HOME ) throws IOException, JSONException { - logger.info("getCapabilities"); + logger.fine("getCapabilities"); String ff= "capabilities.json"; - JSONObject jo= loadAndCheckConfig(HAPI_HOME, ff); + JSONObject jo= loadAndCheckConfig(HAPI_HOME, ff, DEFAULT_CAPABILITIES ); return jo; } @@ -577,72 +773,96 @@ private static JSONObject loadAndCheckConfig(String HAPI_HOME, String ff) throws * load the file, checking to see if there's a newer version in the config area, and loading the * initial version from the templates area or deft object. * @param HAPI_HOME - * @param ff the name of the file, one of "about.json", "x-landing.json", "semantics.json", or "relations.json" + * @param typeFileName the name of the file, one of "about.json", "x-landing.json", "semantics.json", or "relations.json" * @param deft a deft value to use, if null then load from templates area. - * @return the JSON object for the file. + * @return the JSON object for the file, or deft if the configuration is not found. 
* @throws IOException * @throws JSONException */ - private static JSONObject loadAndCheckConfig(String HAPI_HOME, String ff, JSONObject deft ) throws IOException, JSONException { + private static JSONObject loadAndCheckConfig(String HAPI_HOME, String typeFileName, JSONObject deft ) throws IOException, JSONException { Initialize.maybeInitialize( HAPI_HOME ); - if ( ff.contains("..") ) { + if ( typeFileName.contains("..") ) { throw new IllegalArgumentException("ff cannot contain .."); } - File releaseFile= new File( HAPI_HOME, ff ); + if ( !Pattern.matches("[a-z\\-]+.json", typeFileName) ) { + throw new IllegalArgumentException("ff must match [a-z]+"); + } + + File releaseFile= new File( HAPI_HOME, typeFileName ); long releaseFileTimeStamp= releaseFile.exists() ? releaseFile.lastModified() : 0; File configDir= new File( HAPI_HOME, "config" ); - File configFile= new File( configDir, ff ); - if ( !configFile.exists() ) { - if ( deft==null ) { + File typeFile= new File( configDir, typeFileName ); // typeFile is the file in the config directory, when config.json is not used. 
+ File catalogConfigFile= getConfigFile(HAPI_HOME); + + if ( !typeFile.exists() ) { + + if ( !catalogConfigFile.exists() ) { + throw new IOException("config directory should contain config.json or "+ catalogConfigFile); + } + + if ( catalogConfigFile.lastModified() > releaseFileTimeStamp ) { + + byte[] bb= Files.readAllBytes( Paths.get( catalogConfigFile.toURI() ) ); + String s= new String( bb, Charset.forName("UTF-8") ); try { - InputStream ins= Util.getTemplateAsStream(ff); - File tmpFile = new File( configDir, "_"+ff ); - Util.transfer( ins, new FileOutputStream(tmpFile), true ); - if ( !tmpFile.renameTo(configFile) ) { - logger.log(Level.SEVERE, "Unable to write to {0}", configFile); - throw new IllegalArgumentException("unable to write file"); - } else { - logger.log(Level.FINE, "wrote config file {0}", configFile); - } - } catch ( NullPointerException ex ) { - throw new IOException("templates directory should contain "+ff); - } - } else { - if ( configFile.getParentFile().canWrite() ) { - Files.write( configFile.toPath(), deft.toString(4).getBytes(CHARSET) ); - } else { - logger.log(Level.WARNING, "writing to server read area: {0}", releaseFile); - if ( releaseFile.getParentFile().canWrite() ) { - Files.write( releaseFile.toPath(), deft.toString(4).getBytes(CHARSET) ); - releaseFileTimeStamp= releaseFile.exists() ? releaseFile.lastModified() : 0; + JSONObject jo= Util.newJSONObject(s); + + int i= typeFileName.indexOf("."); + String item= typeFileName.substring(0,i); + if ( jo.has(item) ) { + deft= jo.getJSONObject(item); // the item is defined in config.json } + typeFile= catalogConfigFile; + } catch ( JSONException ex ) { + warnWebMaster(ex); } } } - logger.log(Level.INFO, " configFile.lastModified(): {0}", configFile.lastModified()); - logger.log(Level.INFO, " latestTimeStamp: {0}", releaseFileTimeStamp); - if ( configFile.lastModified() > releaseFileTimeStamp ) { // verify that it can be parsed and then copy it. 
//TODO: synchronized - byte[] bb= Files.readAllBytes( Paths.get( configFile.toURI() ) ); - String s= new String( bb, CHARSET ); - try { - logger.log(Level.INFO, "read {0} from config", ff); - JSONObject jo= Util.newJSONObject(s); - jo.put("x_hapi_home",HAPI_HOME); - - try ( InputStream ins= new ByteArrayInputStream(jo.toString(4).getBytes(CHARSET) ) ) { - logger.log(Level.INFO, "write resolved config to {0}", releaseFile.getPath()); - Files.copy( ins, - releaseFile.toPath(), StandardCopyOption.REPLACE_EXISTING ); + + logger.log(Level.FINE, " configFile.lastModified(): {0}", typeFile.lastModified()); + logger.log(Level.FINE, " latestTimeStamp: {0}", releaseFileTimeStamp); + if ( typeFile.lastModified() > releaseFileTimeStamp ) { // verify that it can be parsed and then copy it. + synchronized (HapiServerSupport.class) { + if ( typeFile.lastModified() > releaseFileTimeStamp ) { + byte[] bb= Files.readAllBytes( Paths.get( typeFile.toURI() ) ); + String s= new String( bb, CHARSET ); + try { + logger.log(Level.INFO, "read {0} from config", typeFile); + JSONObject jo= Util.newJSONObject(s); + jo.put("x_hapi_home",HAPI_HOME); + + if ( typeFile.getName().endsWith("config.json") ) { + int i= typeFileName.indexOf("."); + String item= typeFileName.substring(0,i); + if ( jo.has(item) ) { + deft= jo.getJSONObject(item); + } + } else { + deft= jo; + } + + if ( deft==null ) { + return deft; + } + + try ( InputStream ins= new ByteArrayInputStream(deft.toString().getBytes(CHARSET) ) ) { + logger.log(Level.INFO, "write resolved config to {0}", releaseFile.getPath()); + File tmpFile= getTmpFile(HAPI_HOME,typeFileName); + Files.copy( ins, + tmpFile.toPath(), StandardCopyOption.REPLACE_EXISTING ); + Files.move( tmpFile.toPath(), releaseFile.toPath(), StandardCopyOption.ATOMIC_MOVE ); + } + } catch ( JSONException ex ) { + warnWebMaster(ex); + throw ex; + } } - releaseFileTimeStamp= releaseFile.lastModified(); - } catch ( JSONException ex ) { - warnWebMaster(ex); - throw ex; } } - 
logger.log(Level.INFO, "reading config json from {0}", releaseFile ); + + logger.log(Level.FINE, "reading config json from {0}", releaseFile ); byte[] bb= Files.readAllBytes( Paths.get( releaseFile.toURI() ) ); String s= new String( bb, Charset.forName("UTF-8") ); JSONObject jo= Util.newJSONObject(s); @@ -652,14 +872,36 @@ private static JSONObject loadAndCheckConfig(String HAPI_HOME, String ff, JSONOb status.put( "message", "OK request successful"); jo.put( "status", status ); - jo.put( "HAPI", "3.1" ); + jo.put("HAPI", HAPI_VERSION); + jo.put("x_modificationDate", TimeUtil.fromMillisecondsSince1970(releaseFileTimeStamp) ); + return jo; } + /** + * return the file which was once catalog.json and is now config.json. This + * file contains either the literal catalog response, or a configuration + * which will generate the catalog response. If HAPI_HOME/config/config.json + * does not exist, then HAPI_HOME/config/catalog.json is returned. + * @param HAPI_HOME + * @return + */ + private static File getConfigFile( String HAPI_HOME ) { + File configDirectory= new File( HAPI_HOME, "config" ); + + File configFile= new File( configDirectory, "config.json" ); + if ( configFile.exists() ) { + return configFile; + } else { + File catalogFile= new File( configDirectory, "catalog.json" ); + return catalogFile; + } + } /** * keep and monitor a cached version of the catalog in memory. + * TODO: remove x_groups, which shows some server internals. This should be done in the servlet code, before it leaves the server. * @param HAPI_HOME the location of the server definition * @return the JSONObject for the catalog. 
* @throws java.io.IOException @@ -667,30 +909,33 @@ private static JSONObject loadAndCheckConfig(String HAPI_HOME, String ff, JSONOb */ public static JSONObject getCatalog( String HAPI_HOME ) throws IOException, JSONException { - logger.info("getCatalog"); + logger.fine("getCatalog"); Initialize.maybeInitialize( HAPI_HOME ); + boolean caching= true; + File catalogFile= new File( HAPI_HOME, "catalog.json" ); CatalogData cc= catalogCache.get( HAPI_HOME ); long latestTimeStamp= catalogFile.lastModified(); - File catalogConfigFile= new File( new File( HAPI_HOME, "config" ), "catalog.json" ); + File catalogConfigFile= getConfigFile(HAPI_HOME); if ( !catalogConfigFile.exists() ) { - throw new IOException("config directory should contain catalog.json"); + throw new IOException("config directory should contain config.json or catalog.json"); } - if ( catalogConfigFile.lastModified() > latestTimeStamp ) { // verify that it can be parsed and then copy it. //TODO: synchronized + if ( !caching || ( catalogConfigFile.lastModified() > latestTimeStamp ) ) { // verify that it can be parsed and then copy it. 
//TODO: synchronized byte[] bb= Files.readAllBytes( Paths.get( catalogConfigFile.toURI() ) ); String s= new String( bb, Charset.forName("UTF-8") ); try { JSONObject jo= Util.newJSONObject(s); logger.info("resolveCatalog"); - jo= resolveCatalog( jo ); - + jo= resolveCatalog( HAPI_HOME, jo ); + jo.put("x_modificationDate", TimeUtil.fromMillisecondsSince1970(catalogConfigFile.lastModified()) ); + try ( InputStream ins= new ByteArrayInputStream(jo.toString(4).getBytes(CHARSET) ) ) { logger.log(Level.INFO, "write resolved catalog to {0}", catalogFile.getPath()); Files.copy( ins, @@ -719,13 +964,28 @@ public static JSONObject getCatalog( String HAPI_HOME ) throws IOException, JSON jo.put( "status", status ); - jo.put( "HAPI", "3.1" ); + jo.put("HAPI", HAPI_VERSION); cc= new CatalogData(jo,latestTimeStamp); + + JSONArray ids= jo.getJSONArray("catalog"); + for ( int i=0; i 0 ) { // verify that it can be parsed and then copy it. + if ( !caching || ( configTimeStamp - latestTimeStamp > 0 ) ) { // verify that it can be parsed and then copy it. 
+ cachedInfoJsonObject= null; + JSONObject jo; if ( config==null ) { byte[] bb= Files.readAllBytes( Paths.get( infoConfigFile.toURI() ) ); @@ -1060,66 +1426,81 @@ public static JSONObject getInfo( String HAPI_HOME, String id ) throws IOExcepti if ( t instanceof JSONObject ) { jo= (JSONObject)t; } else { - throw new IllegalArgumentException("info node is not JSONObject it is "+t.getClass()); + if ( t==null ) { + throw new IllegalArgumentException("info node is not found."); + } else { + throw new IllegalArgumentException("info node is not JSONObject, it is "+t.getClass()); + } } String source= jo.optString("source",jo.optString("x_source","") ); if ( source.length()>0 ) { - if ( source.equals("spawn") ) { - jo= getInfoFromSpawnCommand( jo, HAPI_HOME, id ); - try ( InputStream ins= new ByteArrayInputStream(jo.toString(4).getBytes(CHARSET) ) ) { - File parentFile= infoFile.getParentFile(); - if ( !parentFile.exists() ) { - if ( !parentFile.mkdirs() ) { - throw new IllegalArgumentException("unable to make directory for info"); + switch (source) { + case "spawn": + jo= getInfoFromSpawnCommand( jo, HAPI_HOME, id ); + caching= jo.optBoolean("x_info_caching",true); + if ( caching ) { + try ( InputStream ins= new ByteArrayInputStream(jo.toString(4).getBytes(CHARSET) ) ) { + copyStreamSafely( ins, infoFile ); + } catch ( Exception ex ) { + throw ex; } } - Files.copy( ins, - infoFile.toPath(), StandardCopyOption.REPLACE_EXISTING ); - } - } else if ( source.equals("classpath") ) { - jo= getInfoFromClasspath( jo, HAPI_HOME, id ); - try ( InputStream ins= new ByteArrayInputStream(jo.toString(4).getBytes(CHARSET) ) ) { - File parentFile= infoFile.getParentFile(); - if ( !parentFile.exists() ) { - if ( !parentFile.mkdirs() ) { - throw new IllegalArgumentException("unable to make directory for info"); + break; + case "classpath": + jo= getInfoFromClasspath( jo, HAPI_HOME, id ); + caching= jo.optBoolean("x_info_caching",true); + if ( caching ) { + try ( InputStream ins= new 
ByteArrayInputStream(jo.toString(4).getBytes(CHARSET) ) ) { + copyStreamSafely( ins, infoFile ); + } catch ( Exception ex ) { + throw ex; } } - Files.copy( ins, - infoFile.toPath(), StandardCopyOption.REPLACE_EXISTING ); - } - } else { - warnWebMaster(new RuntimeException("catalog source can only be spawn or classpath") ); + break; + default: + warnWebMaster(new RuntimeException("catalog source can only be spawn or classpath") ); + break; } } else { validInfoObject(jo); - String infoString= jo.toString(4); - if ( !infoFile.getParentFile().exists() ) { - if ( !infoFile.getParentFile().mkdirs() ) { - throw new RuntimeException("unable to create folder for dataset id: " + id ); + caching= jo.optBoolean("x_info_caching",true); + if ( caching ) { + try ( InputStream ins= new ByteArrayInputStream(jo.toString(4).getBytes(CHARSET) ) ) { + copyStreamSafely( ins, infoFile ); } } - Files.copy( new ByteArrayInputStream( infoString.getBytes(CHARSET) ), infoFile.toPath(), StandardCopyOption.REPLACE_EXISTING ); } - latestTimeStamp= infoFile.lastModified(); + if ( caching ) { + latestTimeStamp= infoFile.lastModified(); + cachedInfoJsonObject= jo; + } else { + latestTimeStamp= new Date().getTime(); + cachedInfoJsonObject= jo; + } } catch ( JSONException | IllegalArgumentException ex ) { warnWebMaster(ex); } } - if ( cc!=null ) { - InfoData infoData= cc.infoCache.get( safeId ); - if ( infoData!=null ) { - if ( infoData.infoTimeStamp==latestTimeStamp ) { - JSONObject jo= infoData.info; - jo= resolveTimes(jo); - return jo; - } + InfoData infoData= cc.infoCache.get( safeId ); + if ( infoData!=null ) { + if ( infoData.infoTimeStamp==latestTimeStamp ) { + JSONObject jo= infoData.info; + jo= resolveTimes(jo); + return jo; } } - byte[] bb= Files.readAllBytes( Paths.get( infoFile.toURI() ) ); - String s= new String( bb, Charset.forName("UTF-8") ); - JSONObject jo= Util.newJSONObject(s); + + JSONObject jo; + if ( cachedInfoJsonObject!=null ) { + jo= cachedInfoJsonObject; + } else { + if ( 
caching==false ) throw new IllegalArgumentException("caching is disabled yet we have no object loaded, this is a server implementation error"); + byte[] bb= Files.readAllBytes( Paths.get( infoFile.toURI() ) ); + String s= new String( bb, Charset.forName("UTF-8") ); + jo= Util.newJSONObject(s); + } + if ( jo.has("modificationDate") ) { String modificationDate= jo.getString("modificationDate"); if ( modificationDate.length()==0 ) { @@ -1134,6 +1515,9 @@ public static JSONObject getInfo( String HAPI_HOME, String id ) throws IOExcepti } } } + + JSONObject jo0= jo; + jo= resolveTimes(jo); JSONObject status= Util.newJSONObject(); @@ -1142,7 +1526,7 @@ public static JSONObject getInfo( String HAPI_HOME, String id ) throws IOExcepti jo.put( "status", status ); - jo.put( "HAPI", "3.1" ); + jo.put("HAPI", HAPI_VERSION); cc= catalogCache.get( HAPI_HOME ); if ( cc==null ) { @@ -1154,7 +1538,7 @@ public static JSONObject getInfo( String HAPI_HOME, String id ) throws IOExcepti throw new IllegalArgumentException("This should not happen"); } long expiresTimeStamp= System.currentTimeMillis()+CONFIG_CACHE_FILE_MAX_LIFE_MILLIS; - InfoData infoData= new InfoData(jo,latestTimeStamp,expiresTimeStamp); + infoData= new InfoData(jo0,latestTimeStamp,expiresTimeStamp); cc.infoCache.put( safeId, infoData ); } return jo; @@ -1232,7 +1616,7 @@ public static String joinParams( JSONObject info, String[] params ) { * @param ex */ private static void warnWebMaster(Exception ex) { - logger.info("warnWebMaster"); + logger.info("warnWebMaster see catalina.out or server log files."); ex.printStackTrace(); } diff --git a/HapiServer/src/java/org/hapiserver/InfoServlet.java b/HapiServer/src/java/org/hapiserver/InfoServlet.java index d48ae2bf..2b2e45f0 100644 --- a/HapiServer/src/java/org/hapiserver/InfoServlet.java +++ b/HapiServer/src/java/org/hapiserver/InfoServlet.java @@ -3,6 +3,11 @@ import java.io.IOException; import java.io.OutputStream; +import java.io.StringWriter; +import java.text.DateFormat; 
+import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.TimeZone; import java.util.logging.Level; import java.util.logging.Logger; import javax.servlet.ServletException; @@ -12,6 +17,8 @@ import javax.servlet.http.HttpServletResponse; import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONObject; +import org.codehaus.jettison.json.JSONWriter; +import org.codehaus.jettison.mapped.Configuration; import org.hapiserver.exceptions.BadRequestIdException; import org.hapiserver.exceptions.BadRequestParameterException; import org.hapiserver.exceptions.HapiException; @@ -46,11 +53,23 @@ public void init() throws ServletException { protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { - String id= request.getParameter("id"); + String dataset; - logger.log(Level.FINE, "info request for {0}", id); + // HAPI 3.x servers must accept both old and new parameters. + dataset= request.getParameter("dataset"); + if ( dataset==null ) { + dataset= request.getParameter("id"); + } + + String sresolveReferences= request.getParameter("resolve_references"); + boolean resolveReferences= sresolveReferences==null || sresolveReferences.equals("true"); + + logger.log(Level.FINE, "info request for {0}", dataset); - if ( id==null ) throw new ServletException("required parameter 'id' is missing from request"); + if ( dataset==null ) { + Util.raiseError( 1400, "HAPI error 1400: Bad request - user input error", response, null ); + return; + } response.setContentType("application/json;charset=UTF-8"); @@ -61,23 +80,29 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re JSONObject jo; try { - jo = HapiServerSupport.getInfo( HAPI_HOME, id ); + jo = HapiServerSupport.getInfo( HAPI_HOME, dataset ); + if ( resolveReferences ) { + jo= HapiServerSupport.resolveReferences(jo); + } - } catch ( BadRequestIdException ex ) { - Util.raiseError( 1406, "HAPI 
error 1406: unknown dataset id", response, response.getOutputStream() ); + if ( jo.has( "modificationDate" ) ) { + SimpleDateFormat sdf = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z"); + sdf.setTimeZone(TimeZone.getTimeZone("GMT")); + String rfc2616= sdf.format( new Date( TimeUtil.toMillisecondsSince1970(jo.getString( "modificationDate" )) ) ); + response.setHeader("Last-Modified", rfc2616 ); + } + + } catch ( BadRequestIdException | java.nio.file.NoSuchFileException ex ) { + Util.raiseError( 1406, "HAPI error 1406: unknown dataset id", response, null ); return; - } catch ( HapiException | JSONException ex ) { + }catch ( HapiException | JSONException ex ) { throw new RuntimeException(ex); - } catch (java.nio.file.NoSuchFileException ex ) { - // don't show server-side information. - Util.raiseError( 1406, "HAPI error 1406: unknown dataset id", response, response.getOutputStream() ); - return; } + // don't show server-side information. try ( OutputStream out = response.getOutputStream() ) { String parameters= request.getParameter("parameters"); if ( parameters!=null) { - parameters= parameters.replaceAll(" ","+"); try { jo= Util.subsetParams(jo,parameters); } catch ( BadRequestParameterException ex2 ) { @@ -85,7 +110,16 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re return; } } + jo.remove("x_indexmap"); + jo.put("HAPI",Util.hapiVersion()); + + JSONObject status= new JSONObject("{ \"code\":1200, \"message\":\"OK\" }"); + jo.put( "status", status ); + //jo.setEscapeForwardSlashAlways(false); This seems to have no effect String s= jo.toString(4); + + s= s.replace("\\/", "/"); // a small performance hit. We can worry about this later... 
+ out.write(s.getBytes( HapiServerSupport.CHARSET )); } catch ( JSONException ex ) { diff --git a/HapiServer/src/java/org/hapiserver/Initialize.java b/HapiServer/src/java/org/hapiserver/Initialize.java index 7ab3e567..9b6903a6 100644 --- a/HapiServer/src/java/org/hapiserver/Initialize.java +++ b/HapiServer/src/java/org/hapiserver/Initialize.java @@ -90,66 +90,66 @@ public static synchronized void initialize( File hapiHome ) { InputStream in; File tmpFile; - // copy about.json to config - File aboutFile= new File( configDir, "about.json" ); - logger.log(Level.INFO, "copy about.json from internal templates to {0}", aboutFile); - in = Util.getTemplateAsStream("about.json"); - tmpFile = new File( configDir, "_about.json" ); - Util.transfer( in, new FileOutputStream(tmpFile), true ); - if ( !tmpFile.renameTo(aboutFile) ) { - logger.log(Level.SEVERE, "Unable to write to {0}", aboutFile); - throw new IllegalArgumentException("unable to write about file"); - } else { - logger.log(Level.FINE, "wrote config about file {0}", aboutFile); - } - - // copy capabilities.json to config - File capabilitiesFile= new File( configDir, "capabilities.json" ); - logger.log(Level.INFO, "copy capabilities.json from internal templates to {0}", capabilitiesFile); - in= Util.getTemplateAsStream("capabilities.json"); - tmpFile= new File( configDir, "_capabilities.json" ); - Util.transfer( in, new FileOutputStream(tmpFile), true ); - if ( !tmpFile.renameTo(capabilitiesFile) ) { - logger.log(Level.SEVERE, "Unable to write to {0}", capabilitiesFile); - throw new IllegalArgumentException("unable to write capabilities file"); - } else { - logger.log(Level.FINE, "wrote config capabilities file {0}", capabilitiesFile); + // copy catalog.json to config. Note either config.json or catalog.json can be used. 
+ File configFile= new File( configDir, "config.json" ); + logger.log(Level.INFO, "copy catalog.json from internal templates to {0}", configFile); + + try { + in= Util.getTemplateAsStream("config.json"); + } catch ( NullPointerException ex ) { + logger.log(Level.INFO, "config.json not found, using catalog.json."); + in= null; } - - // copy catalog.json to config - File catalogFile= new File( configDir, "catalog.json" ); - logger.log(Level.INFO, "copy catalog.json from internal templates to {0}", catalogFile); - in= Util.getTemplateAsStream("catalog.json"); - tmpFile= new File( configDir, "_catalog.json" ); - Util.transfer( in, new FileOutputStream(tmpFile), true ); - if ( !tmpFile.renameTo(catalogFile) ) { - logger.log(Level.SEVERE, "Unable to write to {0}", catalogFile); - throw new IllegalArgumentException("unable to write catalog file"); + if ( in!=null ) { + tmpFile= new File( configDir, "_config.json" ); + Util.transfer( in, new FileOutputStream(tmpFile), true ); + if ( !tmpFile.renameTo(configFile) ) { + logger.log(Level.SEVERE, "Unable to write to {0}", configFile); + throw new IllegalArgumentException("unable to write catalog file"); + } else { + logger.log(Level.FINE, "wrote config file {0}", configFile); + } } else { - logger.log(Level.FINE, "wrote config catalog file {0}", catalogFile); + // copy config.json to config. Note either config.json or catalog.json can be used. 
+ File catalogFile= new File( configDir, "catalog.json" ); + logger.log(Level.INFO, "copy catalog.json from internal templates to {0}", catalogFile); + + in= Util.getTemplateAsStream("catalog.json"); + tmpFile= new File( configDir, "_catalog.json" ); + Util.transfer( in, new FileOutputStream(tmpFile), true ); + if ( !tmpFile.renameTo(catalogFile) ) { + logger.log(Level.SEVERE, "Unable to write to {0}", catalogFile); + throw new IllegalArgumentException("unable to write catalog file"); + } else { + logger.log(Level.FINE, "wrote config catalog file {0}", catalogFile); + } + configFile= catalogFile; } // load each of the template's info files. try { - byte[] bb= Files.readAllBytes( Paths.get( catalogFile.toURI() ) ); + byte[] bb= Files.readAllBytes( Paths.get( configFile.toURI() ) ); JSONObject jo= new JSONObject( new String(bb,"UTF-8") ); - JSONObject jo2= HapiServerSupport.getCatalog(hapiHome.toString()); - JSONArray cat= jo.getJSONArray("catalog"); + JSONArray cat= jo.optJSONArray("groups"); + if ( cat.length()==0 ) cat=jo.optJSONArray("catalog"); + for ( int i=0; i()); + } + + /** + * Recursively expand refs in a node. + */ + private static Object expandNode(Object node, JSONObject root, Set refStack) throws JSONException, IllegalArgumentException { + if (node instanceof JSONObject) { + JSONObject obj = (JSONObject) node; + + // If this object is exactly {"$ref": "..."} or at least contains $ref, + // treat it as a ref object. For schema-processing purposes, this is often + // what people want. Adjust if you want stricter behavior. + if (obj.has("$ref")) { + String ref = obj.getString("$ref"); + + if (refStack.contains(ref)) { + throw new IllegalStateException("Circular $ref detected: " + refStack + " -> " + ref); + } + + refStack.add(ref); + Object target = resolveRef(root, ref); + + // Deep copy before expanding so the resolved object can be reused safely. 
+ Object replacement = deepCopy(target); + Object expanded = expandNode(replacement, root, refStack); + + refStack.remove(ref); + return expanded; + } + + JSONObject out = new JSONObject(); + Iterator keys = obj.keys(); + while (keys.hasNext()) { + String key = (String) keys.next(); + Object value = obj.get(key); + out.put(key, expandNode(value, root, refStack)); + } + return out; + + } else if (node instanceof JSONArray) { + JSONArray arr = (JSONArray) node; + JSONArray out = new JSONArray(); + for (int i = 0; i < arr.length(); i++) { + out.put(expandNode(arr.get(i), root, refStack)); + } + return out; + + } else { + // primitives: String, Number, Boolean, JSONObject.NULL + return node; + } + } + + /** + * Resolve a local JSON Pointer ref like "#/definitions/Foo". + * @param root + * @param ref + * @return + * @throws org.codehaus.jettison.json.JSONException + */ + public static Object resolveRef(JSONObject root, String ref) throws IllegalArgumentException, JSONException { + if (!ref.startsWith("#")) { + throw new IllegalArgumentException("Only local refs are supported: " + ref); + } + + if (ref.equals("#")) { + return root; + } + + String pointer = ref.substring(1); // remove leading '#' + + if (!pointer.startsWith("/")) { + throw new IllegalArgumentException("Invalid JSON Pointer fragment: " + ref); + } + + Object current = root; + String[] parts = pointer.substring(1).split("/"); + + for (String rawPart : parts) { + String part = unescapeJsonPointer(rawPart); + + if (current instanceof JSONObject) { + current = ((JSONObject) current).get(part); + } else if (current instanceof JSONArray) { + int idx = Integer.parseInt(part); + current = ((JSONArray) current).get(idx); + } else { + throw new IllegalArgumentException( + "Cannot descend through non-container while resolving ref " + ref + ": " + current + ); + } + } + + return current; + } + + /** + * Deep copy a JSON value. 
+ */ + private static Object deepCopy(Object value) throws IllegalArgumentException, JSONException { + if (value instanceof JSONObject) { + JSONObject src = (JSONObject) value; + JSONObject dst = new JSONObject(); + Iterator keys = src.keys(); + while (keys.hasNext()) { + String key = (String) keys.next(); + dst.put(key, deepCopy(src.get(key))); + } + return dst; + + } else if (value instanceof JSONArray) { + JSONArray src = (JSONArray) value; + JSONArray dst = new JSONArray(); + for (int i = 0; i < src.length(); i++) { + dst.put(deepCopy(src.get(i))); + } + return dst; + + } else { + // primitives and JSONObject.NULL + return value; + } + } + + /** + * JSON Pointer unescaping: + * ~1 => / + * ~0 => ~ + */ + private static String unescapeJsonPointer(String s) { + return s.replace("~1", "/").replace("~0", "~"); + } +} \ No newline at end of file diff --git a/HapiServer/src/java/org/hapiserver/SourceRegistry.java b/HapiServer/src/java/org/hapiserver/SourceRegistry.java index f7985b20..b967e76a 100644 --- a/HapiServer/src/java/org/hapiserver/SourceRegistry.java +++ b/HapiServer/src/java/org/hapiserver/SourceRegistry.java @@ -5,11 +5,13 @@ import java.io.IOException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.util.logging.Level; import java.util.logging.Logger; +import java.util.stream.Stream; import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONObject; @@ -27,6 +29,8 @@ */ public class SourceRegistry { + private static final Logger logger= Logger.getLogger("hapi"); + private static SourceRegistry instance= new SourceRegistry(); public static SourceRegistry getInstance() { @@ -48,38 +52,33 @@ public static SourceRegistry getInstance() { * @throws org.hapiserver.exceptions.HapiException */ public 
HapiRecordSource getSource( String hapiHome, String id, JSONObject info ) throws HapiException { - JSONObject data; + JSONObject dataConfig; try { - data= HapiServerSupport.getDataConfig( hapiHome, id ); + dataConfig= HapiServerSupport.getDataConfig( hapiHome, id ); } catch ( IOException | JSONException ex ) { throw new RuntimeException(ex); } - String source= data.optString( "source", data.optString("x_source") ); + String source= dataConfig.optString( "source", dataConfig.optString("x_source") ); switch (source) { case "aggregation": - return new AggregationRecordSource( hapiHome, id, info, data ); + return new AggregationRecordSource( hapiHome, id, info, dataConfig ); case "spawn": - return new SpawnRecordSource( hapiHome, id, info, data ); + return new SpawnRecordSource( hapiHome, id, info, dataConfig ); case "hapiserver": - return new HapiWrapperRecordSource( id, info, data ); + return new HapiWrapperRecordSource( id, info, dataConfig ); case "classpath": - String clas= data.optString("class",data.optString("x_class")); + String clas= dataConfig.optString("class",dataConfig.optString("x_class")); if ( clas.endsWith(".java") ) { throw new IllegalArgumentException("class should not end in .java"); } ClassLoader cl=null; - if ( data.has("classpath") || data.has("x_classpath") ) { + if ( dataConfig.has("classpath") || dataConfig.has("x_classpath") ) { try { - String s= data.optString("classpath",data.optString("x_classpath")); + String s= dataConfig.optString("classpath",dataConfig.optString("x_classpath")); s= SpawnRecordSource.doMacros( hapiHome, id, s ); - URL url; - if ( s.startsWith("http://") || s.startsWith("https://") || s.startsWith("file:") ) { - url= new URL( s ); - } else { - url= new File(s).toURI().toURL(); - } + URL url= HapiServerSupport.getClasspath(hapiHome,s); cl= new URLClassLoader( new URL[] { url }, SourceRegistry.class.getClassLoader()); cl.getParent(); } catch (MalformedURLException ex) { @@ -98,13 +97,23 @@ public HapiRecordSource 
getSource( String hapiHome, String id, JSONObject info ) c= Class.forName(clas); } Object o; - JSONArray args= data.optJSONArray("args"); + JSONArray args= dataConfig.optJSONArray("args"); if ( args==null ) { - args= data.optJSONArray("x_args"); + args= dataConfig.optJSONArray("x_args"); } + String method= dataConfig.optString("method",dataConfig.optString("x_method","") ); + if ( args==null ) { // must have constructor that takes hapiHome, id, info, and data. - Constructor constructor= c.getConstructor( String.class, String.class, JSONObject.class, JSONObject.class ); - o= constructor.newInstance( hapiHome, id, info, data ); + try { + Constructor constructor= c.getConstructor( String.class, String.class, JSONObject.class, JSONObject.class ); + o= constructor.newInstance( hapiHome, id, info, dataConfig ); + } catch ( NoSuchMethodException ex ) { + logger.fine("Constructor not found. Found constructors: "); + for ( Constructor constructor : c.getConstructors() ) { + logger.log(Level.FINE, " {0}", constructor.toGenericString()); + } + throw ex; + } } else { Class[] cc= new Class[args.length()]; Object[] oo= new Object[args.length()]; @@ -113,15 +122,49 @@ public HapiRecordSource getSource( String hapiHome, String id, JSONObject info ) oo[i]= args.get(i); cc[i]= oo[i].getClass(); if ( cc[i]==String.class ) { // check for macros - String s= SpawnRecordSource.doMacros( hapiHome, id, (String)oo[i] ); - oo[i]= s; + if ( oo[i].equals("${info}") ) { + oo[i]= info; + cc[i]= JSONObject.class; + } else if ( oo[i].equals("${data-config}") ) { + oo[i]= dataConfig; + cc[i]= JSONObject.class; + } else if ( oo[i].equals("${data}") ) { + oo[i]= dataConfig; + cc[i]= JSONObject.class; + } else { + String s= SpawnRecordSource.doMacros( hapiHome, id, (String)oo[i] ); + oo[i]= s; + } } } catch (JSONException ex) { Logger.getLogger(SourceRegistry.class.getName()).log(Level.SEVERE, null, ex); } } - Constructor constructor= c.getConstructor( cc ); - o= constructor.newInstance( oo ); + if ( 
method.length()>0 ) { // call static method + Method m; + try { + m= c.getMethod( method, cc ); + } catch ( NoSuchMethodException ex ) { + String[] arglist= new String[cc.length]; + for ( int i=0; i getGranuleIterator(int[] start, int[] stop) { + public Iterator getGranuleIterator(TimeString start, TimeString stop) { throw new UnsupportedOperationException("not used"); } @@ -28,15 +29,15 @@ public boolean hasParamSubsetIterator() { } @Override - public abstract Iterator getIterator(int[] start, int[] stop, String[] params); + public abstract Iterator getIterator(TimeString start, TimeString stop, String[] params); @Override - public Iterator getIterator(int[] start, int[] stop) { + public Iterator getIterator(TimeString start, TimeString stop) { throw new UnsupportedOperationException("not used"); } @Override - public String getTimeStamp(int[] start, int[] stop) { + public TimeString getTimeStamp(TimeString start, TimeString stop) { return null; } diff --git a/HapiServer/src/java/org/hapiserver/source/AggregationRecordSource.java b/HapiServer/src/java/org/hapiserver/source/AggregationRecordSource.java index e0838172..aea2fb32 100644 --- a/HapiServer/src/java/org/hapiserver/source/AggregationRecordSource.java +++ b/HapiServer/src/java/org/hapiserver/source/AggregationRecordSource.java @@ -10,7 +10,7 @@ import org.codehaus.jettison.json.JSONObject; import org.hapiserver.CsvHapiRecordConverter; import org.hapiserver.HapiRecord; -import org.hapiserver.TimeUtil; +import org.hapiserver.TimeString; import org.hapiserver.URITemplate; /** @@ -49,7 +49,7 @@ public boolean hasGranuleIterator() { } @Override - public Iterator getGranuleIterator(int[] start, int[] stop) { + public Iterator getGranuleIterator(TimeString start, TimeString stop) { return new AggregationGranuleIterator( fileFormat, start, stop ); } @@ -59,14 +59,14 @@ public boolean hasParamSubsetIterator() { } @Override - public Iterator getIterator(int[] start, int[] stop, String[] params) { + public Iterator 
getIterator(TimeString start, TimeString stop, String[] params) { throw new IllegalArgumentException("not supported"); } @Override - public Iterator getIterator(int[] start, int[] stop) { + public Iterator getIterator(TimeString start, TimeString stop) { - String file= uriTemplate.format( TimeUtil.formatIso8601Time(start), TimeUtil.formatIso8601Time(stop) ); + String file= uriTemplate.format( start.toString(), stop.toString() ); try { Iterator iter; diff --git a/HapiServer/src/java/org/hapiserver/source/HapiWrapperIterator.java b/HapiServer/src/java/org/hapiserver/source/HapiWrapperIterator.java index 7b48cb9b..fae1ea4c 100644 --- a/HapiServer/src/java/org/hapiserver/source/HapiWrapperIterator.java +++ b/HapiServer/src/java/org/hapiserver/source/HapiWrapperIterator.java @@ -16,7 +16,7 @@ import org.hapiserver.CsvHapiRecordConverter; import org.hapiserver.HapiRecord; import org.hapiserver.HapiServerSupport; -import org.hapiserver.TimeUtil; +import org.hapiserver.TimeString; import org.hapiserver.Util; import org.hapiserver.exceptions.BadRequestParameterException; @@ -30,21 +30,15 @@ public class HapiWrapperIterator implements Iterator { boolean initialized= false; - String server; - String id; JSONObject info; URL request; InputStream in; BufferedReader reader; String nextRecord; - String[] params; CsvHapiRecordConverter converter; - public HapiWrapperIterator( String server, String id, JSONObject info, String[] params, int[] start, int[] stop) { - this.server= server; - this.id= id; + public HapiWrapperIterator( String server, String id, JSONObject info, String[] params, TimeString start, TimeString stop) { this.info= info; - this.params= params; String surl; try { JSONArray parameters= info.getJSONArray("parameters"); @@ -57,11 +51,11 @@ public HapiWrapperIterator( String server, String id, JSONObject info, String[] if ( params==null ) { surl= String.format( "%s/data?id=%s&time.min=%s&time.max=%s", - server, id, TimeUtil.formatIso8601Time(start), 
TimeUtil.formatIso8601Time(stop) ); + server, id, start.toString(), stop.toString() ); } else { String sparams= HapiServerSupport.joinParams(info,params); surl= String.format( "%s/data?id=%s&time.min=%s&time.max=%s¶meters=%s", - server, id, TimeUtil.formatIso8601Time(start), TimeUtil.formatIso8601Time(stop), sparams ); + server, id, start.toString(), stop.toString(), sparams ); logger.log(Level.INFO, "upstream url: {0}", surl); } try { @@ -71,7 +65,7 @@ public HapiWrapperIterator( String server, String id, JSONObject info, String[] } } - public HapiWrapperIterator( String server, String id, JSONObject info, int[] start, int[] stop) { + public HapiWrapperIterator( String server, String id, JSONObject info, TimeString start, TimeString stop) { this( server, id, info, null, start, stop ); } diff --git a/HapiServer/src/java/org/hapiserver/source/HapiWrapperRecordSource.java b/HapiServer/src/java/org/hapiserver/source/HapiWrapperRecordSource.java index 3921dd04..c66183d4 100644 --- a/HapiServer/src/java/org/hapiserver/source/HapiWrapperRecordSource.java +++ b/HapiServer/src/java/org/hapiserver/source/HapiWrapperRecordSource.java @@ -4,6 +4,7 @@ import java.util.Iterator; import org.codehaus.jettison.json.JSONObject; import org.hapiserver.HapiRecord; +import org.hapiserver.TimeString; /** * RecordSource which simply wraps another HAPI server. 
Its configuration @@ -43,13 +44,13 @@ public boolean hasParamSubsetIterator() { } @Override - public Iterator getIterator(int[] start, int[] stop, String[] params) { + public Iterator getIterator(TimeString start, TimeString stop, String[] params) { hapiWrapperIterator = new HapiWrapperIterator( hapiServer, id, info, params, start, stop ); return hapiWrapperIterator; } @Override - public Iterator getIterator(int[] start, int[] stop) { + public Iterator getIterator(TimeString start, TimeString stop) { hapiWrapperIterator = new HapiWrapperIterator( hapiServer, id, info, start, stop ); return hapiWrapperIterator; } diff --git a/HapiServer/src/java/org/hapiserver/source/SpawnRecordSource.java b/HapiServer/src/java/org/hapiserver/source/SpawnRecordSource.java index 48cb64f0..50639c33 100644 --- a/HapiServer/src/java/org/hapiserver/source/SpawnRecordSource.java +++ b/HapiServer/src/java/org/hapiserver/source/SpawnRecordSource.java @@ -15,6 +15,7 @@ import org.hapiserver.HapiRecord; import org.hapiserver.HapiRecordSource; import org.hapiserver.HapiServerSupport; +import org.hapiserver.TimeString; import org.hapiserver.TimeUtil; import org.hapiserver.URITemplate; import org.hapiserver.Util; @@ -30,9 +31,10 @@ * "granuleSize": "P1D" * } * - * Here command is the command which is run on the command line, with start, stop, format, parameters, and HAPI_HOME macros. - * And the control timeFormat is used to format the start and stop times. "stepSize" will cause the calls to be broken up - * into separate calls for each step. + * Here command is the command which is run on the command line, with start, stop, + * timeFormat, parameters, and HAPI_HOME macros. + * Where the control timeFormat is used to format the start and stop times. granuleSize + * will cause the calls to be broken up into separate calls for each step. 
* @author jbf */ public class SpawnRecordSource implements HapiRecordSource { @@ -69,7 +71,6 @@ public SpawnRecordSource( String hapiHome, String id, JSONObject info, JSONObjec String tf= dataConfig.optString("timeFormat",""); if ( tf.length()>0 ) { this.timeFormat= tf; - this.uriTemplate= new URITemplate(this.timeFormat); } else { this.timeFormat= null; } @@ -78,10 +79,20 @@ public SpawnRecordSource( String hapiHome, String id, JSONObject info, JSONObjec if ( granuleSize.length()>0 ) { try { this.granuleSize= TimeUtil.parseISO8601Duration( granuleSize ); + if ( this.timeFormat==null ) { + if ( granuleSize.equals("P1Y") ) { + this.timeFormat="$Y"; + } else { + throw new IllegalArgumentException("timeFormat must be specified when granuleSize is not P1Y"); + } + } } catch (ParseException ex) { throw new RuntimeException(ex); } } + if ( this.timeFormat!=null ) { + this.uriTemplate= new URITemplate(this.timeFormat); + } } @Override @@ -90,12 +101,13 @@ public boolean hasGranuleIterator() { } @Override - public Iterator getGranuleIterator(int[] start, int[] stop) { + public Iterator getGranuleIterator(TimeString start, TimeString stopts) { + int[] stop= stopts.toComponents(); try { - String time0= this.uriTemplate.format( TimeUtil.formatIso8601Time(start), TimeUtil.formatIso8601Time(start) ); + String time0= this.uriTemplate.format( start.toString(), start.toString() ); int[] time= this.uriTemplate.parse( time0 ); - return new Iterator() { + return new Iterator() { int[] timeIt= time; @Override @@ -104,10 +116,12 @@ public boolean hasNext() { } @Override - public int[] next() { + public TimeString[] next() { int[] result= timeIt; timeIt = TimeUtil.nextRange( timeIt ); - return result; + TimeString starts= new TimeString( TimeUtil.getStartTime(result) ); + TimeString stops= new TimeString( TimeUtil.getStopTime(result) ); + return new TimeString[] { starts, stops }; } }; @@ -122,7 +136,7 @@ public boolean hasParamSubsetIterator() { } @Override - public Iterator 
getIterator(int[] start, int[] stop, String[] params) { + public Iterator getIterator(TimeString start, TimeString stop, String[] params) { try { JSONObject infoSubset= Util.subsetParams( info, HapiServerSupport.joinParams( info, params ) ); iter= new SpawnRecordSourceIterator( hapiHome, id, infoSubset, command, start, stop, params ); @@ -133,18 +147,21 @@ public Iterator getIterator(int[] start, int[] stop, String[] params } @Override - public Iterator getIterator(int[] start, int[] stop) { + public Iterator getIterator(TimeString start, TimeString stop) { iter = new SpawnRecordSourceIterator( hapiHome, id, info, command, start, stop, null ); return iter; } @Override - public String getTimeStamp(int[] start, int[] stop) { + public TimeString getTimeStamp(TimeString start, TimeString stop) { return null; } /** - * add code for implementing hapiHome and id macros. + * add code for implementing hapiHome and id macros. The following macros are supported:
      + *
    • ${id} -- the data id + *
    • ${HAPI_HOME} -- the server home + *
    * @param hapiHome * @param id * @param command @@ -187,7 +204,8 @@ private class SpawnRecordSourceIterator implements Iterator { * @param stop the seven-component stop time * @param params null or the parameters which should be sent */ - public SpawnRecordSourceIterator( String hapiHome, String id, JSONObject info, String command, int[] start, int[] stop, String[] params ) { + public SpawnRecordSourceIterator( + String hapiHome, String id, JSONObject info, String command, TimeString start, TimeString stop, String[] params ) { try { String[] ss= command.split("\\$\\{"); for ( int i=0; i5 && s1.charAt(5)=='}' ) { if ( uriTemplate!=null ) { - String s= TimeUtil.formatIso8601Time( start ); + String s= start.toIsoTime(); ss[i] = uriTemplate.format( s, s ) + ss[i].substring(6) ; } else { - ss[i]= TimeUtil.formatIso8601Time( start ) + ss[i].substring(6); + ss[i]= start.toIsoTime() + ss[i].substring(6); } } else { if ( s1.substring(5).startsWith(";format=" ) ) { @@ -210,10 +228,10 @@ public SpawnRecordSourceIterator( String hapiHome, String id, JSONObject info, S } else if ( s1.startsWith("stop") ) { if ( s1.length()>4 && s1.charAt(4)=='}' ) { if ( uriTemplate!=null ) { - String s= TimeUtil.formatIso8601Time( stop ); + String s= stop.toIsoTime(); ss[i] = uriTemplate.format( s, s ) + ss[i].substring(5) ; } else { - ss[i]= TimeUtil.formatIso8601Time( stop ) + ss[i].substring(5); + ss[i]= stop.toIsoTime() + ss[i].substring(5); } } else { throw new IllegalArgumentException("not supported: "+command); diff --git a/HapiServer/src/java/templates/about.json b/HapiServer/src/java/templates/about.json index ac9a3ff4..1a6e6cd5 100644 --- a/HapiServer/src/java/templates/about.json +++ b/HapiServer/src/java/templates/about.json @@ -1,6 +1,5 @@ { - "$schema": "https://raw.githubusercontent.com/hapi-server/data-specification-schema/jon-jeremy-mess-3.0/3.1/about.json", - "HAPI": "3.1", + "HAPI": "3.3", "id":"Java-Demo-Server", "title":"Demo Server", "contact":"Jeremy Faden ", diff 
--git a/HapiServer/src/java/templates/capabilities.json b/HapiServer/src/java/templates/capabilities.json index a6849f61..234dc304 100644 --- a/HapiServer/src/java/templates/capabilities.json +++ b/HapiServer/src/java/templates/capabilities.json @@ -1,6 +1,5 @@ { - "$schema": "https://raw.githubusercontent.com/hapi-server/data-specification-schema/jon-jeremy-mess-3.0/3.1/capabilities.json", - "HAPI": "3.1", + "HAPI": "3.3", "outputFormats": [ "csv", "binary" diff --git a/HapiServer/src/java/templates/catalog-esac.json b/HapiServer/src/java/templates/catalog-esac.json new file mode 100644 index 00000000..3a2ecbd4 --- /dev/null +++ b/HapiServer/src/java/templates/catalog-esac.json @@ -0,0 +1,43 @@ + { + "HAPI": "3.3", + "catalog": [ + { + "x_group_id": "csa", + "x_source": "classpath", + "x_class": "org.hapiserver.source.tap.CsaInfoCatalogSource", + "x_method": "getCatalog", + "x_config": { + "info": { + "x_source":"classpath", + "x_class":"org.hapiserver.source.tap.CsaInfoCatalogSource", + "x_method": "getInfo", + "x_args": [ "${id}" ] + }, + "data": { + "source": "classpath", + "class":"org.hapiserver.source.tap.TAPDataSource", + "args":["https://csa.esac.esa.int/csa-sl-tap/","${id}","${info}"] + } + } + }, + { + "x_group_id": "csa-availability", + "x_source": "classpath", + "x_class": "org.hapiserver.source.tap.TAPAvailabilityDataSource", + "x_method": "getCatalog", + "x_config": { + "info": { + "x_source":"classpath", + "x_class":"org.hapiserver.source.tap.TAPAvailabilityDataSource", + "x_method": "getInfo", + "x_args": [ "${id}" ] + }, + "data": { + "source": "classpath", + "class":"org.hapiserver.source.tap.TAPAvailabilityDataSource", + "args":["https://csa.esac.esa.int/csa-sl-tap/","${id}","${info}"] + } + } + } + ] + } \ No newline at end of file diff --git a/HapiServer/src/java/templates/catalog.json b/HapiServer/src/java/templates/catalog.json deleted file mode 100644 index 14b53341..00000000 --- a/HapiServer/src/java/templates/catalog.json +++ /dev/null @@ 
-1,29 +0,0 @@ - { - "$schema": "https://raw.githubusercontent.com/hapi-server/data-specification-schema/jon-jeremy-mess-3.0/3.1/catalog.json", - "HAPI": "3.0", - "catalog": [ - { - "x_group_id": "csa", - "x_source": "classpath", - "x_class": "org.hapiserver.source.tap.CsaInfoCatalogSource", - "x_method": "getCatalog", - "x_config": { - "info": { - "x_source":"classpath", - "x_class":"org.hapiserver.source.tap.CsaInfoCatalogSource", - "x_method": "getInfo", - "x_args": [ "${id}" ] - }, - "data": { - "source": "classpath", - "class":"org.hapiserver.source.tap.TAPDataSource", - "args":["https://csa.esac.esa.int/csa-sl-tap/","${id}"] - } - } - } - ], - "status": { - "code": 1200, - "message": "OK request successful" - } - } \ No newline at end of file diff --git a/HapiServer/src/java/templates/config.json b/HapiServer/src/java/templates/config.json new file mode 100644 index 00000000..b81743db --- /dev/null +++ b/HapiServer/src/java/templates/config.json @@ -0,0 +1,79 @@ + { + "options": { + "cdawmetaDir":"https://cottagesystems.com/~jbf/hapi/p/cdaweb/", + "cdawmetaDir_doc":"location for the server", + "cacheDir":"file:///tmp/cdaweb-hapi/cache/", + "cacheDir_doc": "read-write location where files will be downloaded.", + "catalogHome": "${cdawmetaDir}/data/hapi/catalog.json", + "catalogHome_doc": "read-only location of the catalog file", + "infoHome": "${cdawmetaDir}/data/hapi/info/", + "infoHome_doc": "read-only root folder (website or file://...) containing \"info\" directory and \"catalog.json\"", + "metaHome": "${cdawmetaDir}/data/cdfmetafile/", + "metaHome_doc": "read-only root folder (website or file://...) containing \"info\" directory with file listings." 
+ }, + "groups": [ + { + "group_id": "cdaweb", + "config": { + "catalog": { + "source": "classpath", + "classpath": "CDAWebServer.jar", + "class": "org.hapiserver.source.cdaweb.CdawebInfoCatalogSource", + "method": "getCatalog", + "args": [ "${catalogHome}" ] + }, + "info": { + "source":"classpath", + "classpath": "CDAWebServer.jar", + "class":"org.hapiserver.source.cdaweb.CdawebInfoCatalogSource", + "method": "getInfo", + "args": [ "${infoHome}${id}.json", "${infoHome}${id}.json" ] + }, + "data": { + "source": "classpath", + "classpath": "CDAWebServer.jar", + "class":"org.hapiserver.source.cdaweb.CdawebHapiRecordSource", + "method": "create", + "args": [ "${metaHome}", "${id}", "${info}", "${data-config}", "${cacheDir}" ] + } + } + }, + { + "group_id": "cdaweb_availability", + "config": { + "catalog": { + "source": "classpath", + "classpath": "CDAWebServer.jar", + "class": "org.hapiserver.source.cdaweb.CdawebAvailabilityHapiRecordSource", + "method": "getAvailabilityCatalog", + "args": [ "${catalogHome}" ] + }, + "info": { + "source":"classpath", + "classpath": "CDAWebServer.jar", + "class":"org.hapiserver.source.cdaweb.CdawebAvailabilityHapiRecordSource", + "method": "getInfoAvail", + "args": [ "${metaHome}", "${id}" ] + }, + "data": { + "source": "classpath", + "classpath": "CDAWebServer.jar", + "class":"org.hapiserver.source.cdaweb.CdawebAvailabilityHapiRecordSource", + "args": [ "${metaHome}", "${id}", "${info}"] + } + } + } + ], + "capabilities": { + "outputFormats": [ "csv", "binary" ] + }, + "about": { + "_": "This is not used, see ticket https://github.com/hapi-server/server-java/issues/64", + "HAPI": "3.3", + "id":"cdaweb", + "title":"CDAWeb HAPI Server", + "contact":"Jeremy Faden ", + "description":"CDF data sets from CDAWeb.", + "x_server_version": "2025-10-31T11:03" + } +} diff --git a/HapiServer/src/java/templates/pool_temperature.json b/HapiServer/src/java/templates/pool_temperature.json index e03a4103..38ee567c 100644 --- 
a/HapiServer/src/java/templates/pool_temperature.json +++ b/HapiServer/src/java/templates/pool_temperature.json @@ -1,6 +1,6 @@ { "info":{ - "HAPI": "3.0", + "HAPI": "3.3", "x_createdAt": "2017-02-21T17:27Z", "modificationDate": "lastyear", "parameters": [ diff --git a/HapiServer/src/java/templates/x-landing-schema.json b/HapiServer/src/java/templates/x-landing-schema.json index d754fb66..52c3dae7 100644 --- a/HapiServer/src/java/templates/x-landing-schema.json +++ b/HapiServer/src/java/templates/x-landing-schema.json @@ -21,6 +21,10 @@ "type": "string" }, "uniqueItems": true + }, + "style" : { + "description" : "Link to style sheet for the landing page.", + "type" : "string" } } } diff --git a/HapiServer/src/java/templates/relations.json b/HapiServer/src/java/templates/x-relations.json similarity index 100% rename from HapiServer/src/java/templates/relations.json rename to HapiServer/src/java/templates/x-relations.json diff --git a/HapiServer/web/WEB-INF/lib/UriTemplatesJava.jar b/HapiServer/web/WEB-INF/lib/UriTemplatesJava.jar deleted file mode 100644 index 93bb9af4..00000000 Binary files a/HapiServer/web/WEB-INF/lib/UriTemplatesJava.jar and /dev/null differ diff --git a/HapiServer/web/WEB-INF/lib/guava-30.1-jre.jar b/HapiServer/web/WEB-INF/lib/guava-30.1-jre.jar deleted file mode 100644 index 4244e8a2..00000000 Binary files a/HapiServer/web/WEB-INF/lib/guava-30.1-jre.jar and /dev/null differ diff --git a/HapiServer/web/WEB-INF/lib/jettison-1.4.1.jar b/HapiServer/web/WEB-INF/lib/jettison-1.4.1.jar deleted file mode 100644 index e6e593b0..00000000 Binary files a/HapiServer/web/WEB-INF/lib/jettison-1.4.1.jar and /dev/null differ diff --git a/HapiServer/web/WEB-INF/web.xml b/HapiServer/web/WEB-INF/web.xml index 5ca15528..e0f02617 100644 --- a/HapiServer/web/WEB-INF/web.xml +++ b/HapiServer/web/WEB-INF/web.xml @@ -31,7 +31,7 @@ DataServlet org.hapiserver.DataServlet - + Redirect to landing servlet EmptyServlet @@ -70,22 +70,49 @@ experimental and proposed for 3.2 
DataServlet /hapi/data - + LandingServlet /hapi - + + + LandingServlet + /hapi/ + EmptyServlet - / + /hapi/info/ + + EmptyServlet + /hapi/data/ + + + EmptyServlet + / + + + DirectoryListing + org.apache.catalina.servlets.DefaultServlet + + listings + true + + 1 + + + + DirectoryListing + /d1/ + + 30 diff --git a/HapiServer/web/hapi/index.jsp b/HapiServer/web/hapi/index.jsp index f71121fd..2c67bb34 100644 --- a/HapiServer/web/hapi/index.jsp +++ b/HapiServer/web/hapi/index.jsp @@ -4,6 +4,11 @@ Author : jbf --%> +<%@page import="org.hapiserver.SourceRegistry"%> +<%@page import="java.net.URLClassLoader"%> +<%@page import="java.net.URL"%> +<%@page import="org.hapiserver.source.SpawnRecordSource"%> +<%@page import="java.lang.reflect.Method"%> <%@page import="java.util.List"%> <%@page import="java.util.ArrayList"%> <%@page import="java.util.ArrayList"%> @@ -23,11 +28,30 @@ <%@page import="java.io.File"%> <%@page contentType="text/html" pageEncoding="UTF-8"%> + + <% + String HAPI_HOME= Initialize.getHapiHome(getServletContext()); + + String uri = request.getRequestURI(); + + boolean withinHapi= uri.endsWith("/hapi/"); // if true, then references must be relative + String h="hapi/"; + if ( withinHapi ) h=""; + + JSONObject landingConfig= HapiServerSupport.getLandingConfig(HAPI_HOME); + JSONObject about= HapiServerSupport.getAbout(HAPI_HOME); + %> - HAPI Server + <%= about.optString("title","Basic HAPI Server") %> + <% + if ( landingConfig.has("style") ) { + String styleUrl= landingConfig.getString("style"); + out.println(""); + } + %> @@ -35,41 +59,27 @@ final int MAX_PARAMETERS=10; final int MAX_DATASETS=10; - String HAPI_HOME= Initialize.getHapiHome(getServletContext()); - Initialize.maybeInitialize( HAPI_HOME ); - - JSONObject about= HapiServerSupport.getAbout(HAPI_HOME); - - JSONObject landingConfig= HapiServerSupport.getLandingConfig(HAPI_HOME); Logger logger= Util.getLogger(); %> -

    <%= about.optString("title","Basic HAPI Server") %>

    More information about this type of server is found at GitHub. - This implementation of the HAPI server uses plug-in readers to load data. Discussion and more about this - server can be found here. +

    <%= about.optString("title","Basic HAPI Server") %>

    + <% + String defaultDescription="More information about this type of server is found at " + + "GitHub." + + " This implementation of the HAPI server uses plug-in readers to load data. Discussion and more about this " + + " server can be found here."; + %> + <%= about.optString("description",defaultDescription) %>

    Some example requests:

    - About More about this server, like contact info.
    - Capabilities Capabilities of the server.
    - Catalog Show the catalog of available data sets.
    - <% - boolean hasSemantics= false; - try { - JSONObject json= HapiServerSupport.getSemantics(HAPI_HOME); - hasSemantics= true; - } catch ( IOException ex ) { - } - if ( hasSemantics ) { - %> - Semantics Show declared relationships of data sets.
    - <% - } - %> + About More about this server, like contact info.
    + Capabilities Capabilities of the server.
    + Catalog Show the catalog of available data sets.

    @@ -209,10 +219,10 @@ TimeUtil.formatIso8601TimeBrief( TimeUtil.getStopTime(exampleRange) ) ); out.println( String.format( "

    %s

    ", title ) ); if ( exampleRange!=null ) { - out.println( String.format("[Info] [Data]", + out.println( String.format("[Info] [Data]", id, id, exampleTimeRange ) ); } else { - out.println( String.format("[Info] [Data]", + out.println( String.format("[Info] [Data]", id, id ) ); } @@ -233,7 +243,7 @@ if ( j>0 ) out.print(" "); try { String pname= parameters.getJSONObject(j).getString("name"); - out.print( String.format( "%s", id, pname, exampleTimeRange, labels[j] ) ); + out.print( String.format( "%s", id, pname, exampleTimeRange, labels[j] ) ); if ( j>0 && sparklines ) { //sparklines // vap +hapi :https ://jfaden.net /HapiServerDemo /hapi ?id=?parameters=Temperature //?url=vap%2Bhapi%3Ahttps%3A%2F%2Fjfaden.net%2FHapiServerDemo%2Fhapi%3Fid%3DpoolTemperature%26timerange%3D2020-08-06&format=image%2Fpng&width=70&height=20&column=0%2C100%25&row=0%2C100%25&timeRange=2003-mar&renderType=&color=%23000000&symbolSize=&fillColor=%23aaaaff&foregroundColor=%23000000&backgroundColor=none @@ -267,9 +277,9 @@ } } catch ( Exception ex ) { out.println( String.format( "

    %s

    ", title ) ); - out.println( "

    Unable to load info for dataset: "+id+", log files should notify the server host.

    " ) ; + out.println( "

    Unable to load info for dataset: "+id+", log files should notify the server host.

    " ) ; Util.logError(ex); - //out.println( "ex: " + ex ); //TODO: security!!! + //out.println( "ex: " ;+ ex ); //TODO: security!!! } } if ( numDataSets

    build id: "+Util.buildTime()+""); + JSONObject footer= (JSONObject)landingConfig.opt("x_footer"); + if ( footer!=null ) { + String s= footer.optString( "classpath", footer.optString("x_classpath","") ); + String clas= footer.getString("x_class"); + String method= footer.getString("x_method"); + if ( clas!=null && method!=null ) { + s= SpawnRecordSource.doMacros( HAPI_HOME, "", s ); + ClassLoader cl= new URLClassLoader( new URL[] { new URL( s ) }, SourceRegistry.class.getClassLoader() ); + cl.getParent(); + Class c= Class.forName(clas,true,cl); + Method m = c.getMethod( method ); + String sfooter= (String)m.invoke(null); + out.println(""+sfooter+""); + } + } + %> diff --git a/HapiServerBase/lib/CopyLibs/org-netbeans-modules-java-j2seproject-copylibstask.jar b/HapiServerBase/lib/CopyLibs/org-netbeans-modules-java-j2seproject-copylibstask.jar index 1258902c..91e787a6 100644 Binary files a/HapiServerBase/lib/CopyLibs/org-netbeans-modules-java-j2seproject-copylibstask.jar and b/HapiServerBase/lib/CopyLibs/org-netbeans-modules-java-j2seproject-copylibstask.jar differ diff --git a/HapiServerBase/lib/UriTemplatesJava.jar b/HapiServerBase/lib/UriTemplatesJava.jar index b5946bea..f5439f99 100644 Binary files a/HapiServerBase/lib/UriTemplatesJava.jar and b/HapiServerBase/lib/UriTemplatesJava.jar differ diff --git a/HapiServerBase/lib/nblibraries.properties b/HapiServerBase/lib/nblibraries.properties index 6d0afb59..b5bd56c4 100644 --- a/HapiServerBase/lib/nblibraries.properties +++ b/HapiServerBase/lib/nblibraries.properties @@ -1,4 +1,4 @@ libs.CopyLibs.classpath=\ ${base}/CopyLibs/org-netbeans-modules-java-j2seproject-copylibstask.jar libs.CopyLibs.displayName=CopyLibs Task -libs.CopyLibs.prop-version=2.0 +libs.CopyLibs.prop-version=3.0 diff --git a/HapiServerBase/nbproject/build-impl.xml b/HapiServerBase/nbproject/build-impl.xml index 0e38fe23..fb185ffa 100644 --- a/HapiServerBase/nbproject/build-impl.xml +++ b/HapiServerBase/nbproject/build-impl.xml @@ -19,7 +19,7 @@ 
is divided into following sections: - cleanup --> - + @@ -71,15 +71,116 @@ is divided into following sections: - - + +
    - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must set platform.home + Must set platform.bootcp + Must set platform.java + Must set platform.javac + + The J2SE Platform is not correctly set up. + Your active platform is: ${platform.active}, but the corresponding property "platforms.${platform.active}.home" is not found in the project's properties files. + Either open the project in the IDE and setup the Platform with the same name or add it manually. + For example like this: + ant -Duser.properties.file=<path_to_property_file> jar (where you put the property "platforms.${platform.active}.home" in a .properties file) + or ant -Dplatforms.${platform.active}.home=<path_to_JDK_home> jar (where no properties file is used) + @@ -101,7 +202,9 @@ is divided into following sections: - + + + @@ -112,15 +215,6 @@ is divided into following sections: - - - - - - - - - @@ -147,12 +241,12 @@ is divided into following sections: - + - + @@ -209,20 +303,6 @@ is divided into following sections: - - - - - - - - - - - - - - @@ -255,8 +335,8 @@ is divided into following sections: - Must set src.dir - Must set test.src.dir + Must set src.java.dir + Must set test.java.dir Must set build.dir Must set dist.dir Must set build.classes.dir @@ -275,11 +355,80 @@ is divided into following sections:
    - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + + + @@ -292,7 +441,7 @@ is divided into following sections: - + @@ -316,11 +465,13 @@ is divided into following sections: - + - + + + @@ -332,7 +483,7 @@ is divided into following sections: - + @@ -349,9 +500,9 @@ is divided into following sections: - + - + @@ -404,59 +555,99 @@ is divided into following sections: - - + + - - - + - - + + + + + + + + - + + - - + + - - - + - - - - - - - - - + + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -470,17 +661,21 @@ is divided into following sections: - + - + + + + + @@ -535,10 +730,6 @@ is divided into following sections: - - - - @@ -546,74 +737,20 @@ is divided into following sections: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - + - - - + + + + + + @@ -632,14 +769,14 @@ is divided into following sections: - + - + @@ -666,14 +803,10 @@ is divided into following sections: - - - - - + - + @@ -753,13 +886,20 @@ is divided into following sections: + + + + + + + @@ -775,18 +915,6 @@ is divided into following sections: - - - - - - - - - - - - @@ -796,21 +924,79 @@ is divided into following sections: + + + + + + + + + + + + + + + + + + + + + - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -820,14 +1006,16 @@ is divided into following sections: - + + + - + @@ -845,6 +1033,7 @@ is divided into following sections: + @@ -934,7 +1123,9 @@ is divided into following sections: - + + + @@ -949,12 +1140,12 @@ is divided into following sections: - + - + @@ -975,7 +1166,7 @@ is divided into following sections: Must select some files in the IDE or set javac.includes - + @@ -995,6 +1186,25 @@ is divided into following sections: + + + + + + + + + + + + + + + + + + + @@ -1021,21 +1231,61 @@ is divided into following sections: - + + + + 
+ + + + + + + + + + + To run this application from the command line without Ant, try: - java -jar "${dist.jar.resolved}" + ${platform.java} -jar "${dist.jar.resolved}" - + + + + + + + + + - + + + + + + + + + + + + + + + + + + + @@ -1054,8 +1304,73 @@ is divided into following sections: - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + - - + + + + + + - + @@ -1293,12 +1666,16 @@ is divided into following sections: - + Must select some files in the IDE or set javac.includes - + + + + + - + @@ -1314,7 +1691,7 @@ is divided into following sections: - + @@ -1326,14 +1703,14 @@ is divided into following sections: - + Must select some files in the IDE or set test.includes Some tests failed; see details above. - + Must select some files in the IDE or set test.class Must select some method in the IDE or set test.method @@ -1342,7 +1719,7 @@ is divided into following sections: Some tests failed; see details above. - + $(subsec;places=4) + * $(enum,values=01,02,03,id=foo) --> $(enum;values=01,02,03;id=foo) + * $a --> $a + * (subsec,places=4) --> (subsec;places=4) + * @param qualifiers + * @return + */ + public static String makeQualifiersCanonical( String qualifiers ) { + boolean noDelimiters= true; + for ( int i=0; noDelimiters && iistart; i-- ) { + result[i]= qualifiers.charAt(i); + char ch= qualifiers.charAt(i); + if ( ch=='=' ) expectSemi=true; + else if ( ch==',' && expectSemi ) { + result[i]= ';' ; + } else if ( ch==';' ) { + expectSemi= false; + } + } + String rr= new String(result); + if ( !result.equals(qualifiers) ) { + logger.log(Level.FINE, "qualifiers are made canonical: {0}->{1}", new Object[]{qualifiers, rr}); + } + return rr; + } + + /** + * create the array if it hasn't been created already. 
+ * @param digits + * @return + */ + private static int[] maybeInitialize( int[] digits ) { + if ( digits==null ) { + return new int[TimeUtil.TIME_DIGITS]; + } else { + return digits; + } + } + + /** + * return the digit used to store the number associated with + * the code. For example, Y is the year, so it is stored in the 0th + * position, H is hour and is stored in the 3rd position. + * @param code one of YmjdHMS. + * @return the digit 0-6, or -1 for none. + */ + private static int digitForCode( char code ) { + switch (code) { + case 'Y': + return 0; + case 'm': + return 1; + case 'j': + return 2; + case 'd': + return 2; + case 'H': + return 3; + case 'M': + return 4; + case 'S': + return 5; + default: + return -1; + } + } + + /** + * use own floorDiv since JavaScript doesn't have floorDiv function. + * Note that in Python, floorDiv is just "//". Java also truncates when doing + * integer division. + * +---------------+--------+ + * | expression | result | + * +---------------+--------+ + * | floorDiv(0,7) | 0 | + * | floorDiv(1,7) | 0 | + * | floorDiv(7,7) | 1 | + * | floorDiv(-1,7)| -1 | + * | floorDiv(-7,7)| -1 | + * | floorDiv(-8,7)| -2 | + * +---------------+--------+ + * + * @param ndays + * @param timeWidth + * @return the integer number of widths which result in a number below ndays. + */ + protected static int floorDiv( int ndays, int timeWidth ) { + int ncycles; + if ( ndays<0 ) { + ncycles= ( ndays + 1 ) / timeWidth - 1; + } else { + ncycles= ndays / timeWidth; + } + return ncycles; + } + + /** + * set the explicit width + * @param spec specification like "4" or "4H" for four hours. 
+ */ + private void handleWidth( String fc, String spec ) { + int span; + int n= spec.length()-1; + if ( Character.isDigit( spec.charAt(n) ) ) { + span= Integer.parseInt(spec); + int digit= digitForCode(fc.charAt(0)); + this.timeWidth[digit]= span; + } else { + span= Integer.parseInt( spec.substring(0,n) ); + int digit= digitForCode(spec.charAt(n)); + this.timeWidth[digit]= span; + } + timeWidthIsExplicit= true; + } + + /** + * create a new URITemplate for parsing and formatting. + * @param formatString URI template spec as in /tmp/data.$Y$m$d.txt + */ + public URITemplate( String formatString ) { + + this.fieldHandlers= new HashMap<>(); + + this.fieldHandlers.put("subsec",new SubsecFieldHandler()); + this.fieldHandlers.put("hrinterval",new HrintervalFieldHandler()); + this.fieldHandlers.put("periodic",new PeriodicFieldHandler()); + this.fieldHandlers.put("enum",new EnumFieldHandler()); + this.fieldHandlers.put("x",new IgnoreFieldHandler()); + this.fieldHandlers.put("v",new VersionFieldHandler()); + + logger.log(Level.FINE, "new TimeParser({0},...)", formatString); + + int[] startTime = new int[NUM_TIME_DIGITS]; + startTime[0]= MIN_VALID_YEAR; + startTime[1]= 1; + startTime[2]= 1; + + stopTimeDigit = AFTERSTOP_INIT; + + int[] stopTime = new int[NUM_TIME_DIGITS]; + stopTime[0]= MAX_VALID_YEAR; + stopTime[1]= 1; + stopTime[2]= 1; + + //result.fieldHandlers = fieldHandlers; + + this.fieldHandlersById= new HashMap(); + + formatString= makeCanonical(formatString); + this.spec= formatString; + //this.formatString = formatString; + + String[] ss = formatString.split("\\$"); + fc = new String[ss.length]; + qualifiers= new String[ss.length]; + + String[] delim = new String[ss.length + 1]; + + ndigits = ss.length; + + StringBuilder regex1 = new StringBuilder(100); + regex1.append(ss[0].replaceAll("\\+","\\\\+"));//TODO: I thought we did this already. 
+ + lengths = new int[ndigits]; + for (int i = 0; i < lengths.length; i++) lengths[i] = -1; // -1 indicates not known, but we'll figure out as many as we can. + + startShift= null; + stopShift= null; + + this.qualifiersMaps= new HashMap[ndigits]; + + this.phasestart= null; + + delim[0] = ss[0]; + for (int i = 1; i < ndigits; i++) { + int pp = 0; + String ssi= ss[i]; + while ( ssi.length()>pp && ( Character.isDigit(ssi.charAt(pp)) || ssi.charAt(pp) == '-') ) { + pp+=1; + } + if (pp > 0) { // Note length ($5Y) is not supported in https://github.com/hapi-server/uri-templates/wiki/Specification, but is in this library. + lengths[i] = Integer.parseInt(ssi.substring(0, pp)); + } else { + lengths[i] = 0; // determine later by field type + } + + ssi= makeQualifiersCanonical(ssi); + + logger.log( Level.FINE, "ssi={0}", ss[i] ); + if ( ssi.charAt(pp)!='(' ) { + fc[i] = ssi.substring(pp, pp + 1); + delim[i] = ssi.substring(pp + 1); + } else if ( ssi.charAt(pp) == '(') { + int endIndex = ssi.indexOf(')', pp); + if ( endIndex==-1 ) { + throw new IllegalArgumentException("opening paren but no closing paren in \"" + ssi+ "\""); + } + int semi= ssi.indexOf(";", pp ); + if ( semi != -1) { + fc[i] = ssi.substring(pp + 1, semi ); + qualifiers[i]= ssi.substring( semi+1,endIndex ); + } else { + fc[i] = ssi.substring(pp + 1, endIndex); + } + delim[i] = ssi.substring(endIndex + 1); + } + } + + handlers = new int[ndigits]; + offsets = new int[ndigits]; + + int pos = 0; + offsets[0] = pos; + + lsd = -1; + int lsdMult= 1; +//TODO: We want to add $Y_1XX/$j/WAV_$Y$jT$(H,span=5)$M$S_REC_V01.PKT + context= new int[NUM_TIME_DIGITS]; + System.arraycopy( startTime, 0, context, 0, NUM_TIME_DIGITS ); + externalContext= NUM_TIME_DIGITS; // this will lower and will typically be 0. 
+ + timeWidth = new int[NUM_TIME_DIGITS]; + + boolean haveHour= false; + + for (int i = 1; i < ndigits; i++) { + if (pos != -1) { + pos += delim[i - 1].length(); + } + int handler = 9999; + + for (int j = 0; j < valid_formatCodes.length; j++) { + if (valid_formatCodes[j].equals(fc[i])) { + handler = j; + break; + } + } + + if ( fc[i].equals("H") ) { + haveHour= true; + } else if ( fc[i].equals("p") ) { + if ( !haveHour ) { + throw new IllegalArgumentException("$H must preceed $p"); + } + } + + if (handler == 9999) { + if ( !fieldHandlers.containsKey(fc[i]) ) { + throw new IllegalArgumentException("bad format code: \"" + fc[i] + "\" in \""+ formatString + "\""); + } else { + handler = 100; + handlers[i] = 100; + offsets[i] = pos; + if (lengths[i] < 1 || pos == -1) { // 0->indetermined as well, allows user to force indeterminate + pos = -1; + lengths[i] = -1; + } else { + pos += lengths[i]; + } + FieldHandler fh= fieldHandlers.get(fc[i]); + String args= qualifiers[i]; + Map argv= new HashMap(); + if ( args!=null ) { + String[] ss2= args.split(";",-2); + for (String ss21 : ss2) { + int i3 = ss21.indexOf("="); + if (i3==-1) { + argv.put(ss21.trim(), ""); + } else { + argv.put(ss21.substring(0, i3).trim(), ss21.substring(i3+1).trim()); + } + } + } + String errm= fh.configure(argv); + if ( errm!=null ) { + throw new IllegalArgumentException(errm); + } + + String id= getArg( argv, "id", null ); + if ( id!=null ) { + fieldHandlersById.put( id,fh ); + } + + } + } else { + handlers[i] = handler; + if (lengths[i] == 0) { + lengths[i] = formatCode_lengths[handler]; + } + offsets[i] = pos; + } + + int span=1; + int div=1; + + if ( qualifiers[i]!=null ) { + String[] ss2= qualifiers[i].split(";"); + qualifiersMaps[i]= new HashMap<>(); + for ( String ss21 : ss2 ) { //TODO: handle end before shift. 
+ boolean okay=false; + String qual = ss21.trim(); + if ( qual.equals("startTimeOnly") ) { + startTimeOnly= fc[i].charAt(0); + okay= true; + } + int idx= qual.indexOf("="); + if ( !okay && idx>-1 ) { + String name= qual.substring(0,idx).trim(); + String val= qual.substring(idx+1).trim(); + qualifiersMaps[i].put(name, val); + //FieldHandler fh= (FieldHandler) fieldHandlers.get(name); + //fh.parse( val, context, timeWidth ); + switch (name) { + case "Y": + context[YEAR]= Integer.parseInt(val); + externalContext= Math.min( externalContext, 0 ); + break; + case "m": + context[MONTH]= Integer.parseInt(val); + externalContext= Math.min( externalContext, 1 ); + break; + case "d": + context[DAY]= Integer.parseInt(val); + externalContext= Math.min( externalContext, 2 ); + break; + case "j": + context[MONTH]= 1; + context[DAY]= Integer.parseInt(val); + externalContext= Math.min( externalContext, 1 ); + break; + case "H": + context[HOUR]= Integer.parseInt(val); + externalContext= Math.min( externalContext, 3 ); + break; + case "M": + context[MINUTE]= Integer.parseInt(val); + externalContext= Math.min( externalContext, 4 ); + break; + case "S": + context[SECOND]= Integer.parseInt(val); + externalContext= Math.min( externalContext, 5 ); + break; + case "cadence": + span= Integer.parseInt(val); + handleWidth(fc[i],val); + timeWidthIsExplicit= true; + break; + case "span": + span= Integer.parseInt(val); // not part of uri_templates + handleWidth(fc[i],val); + timeWidthIsExplicit= true; + break; + case "delta": + span= Integer.parseInt(val); // see http://tsds.org/uri_templates + handleWidth(fc[i],val); + timeWidthIsExplicit= true; + break; + case "resolution": + span= Integer.parseInt(val); + handleWidth(fc[i],val); + timeWidthIsExplicit= true; + break; + case "period": + if ( val.startsWith("P") ) { + try { + int[] r= TimeUtil.parseISO8601Duration(val); + for ( int j=0; j0 ) { + lsd= j; + lsdMult= r[j]; + logger.log(Level.FINER, "lsd is now {0}, width={1}", new Object[]{lsd, 
lsdMult}); + break; + } + } + } catch (ParseException ex) { + logger.log(Level.SEVERE, null, ex); + } + } else { + char code= val.charAt(val.length()-1); + switch (code) { + case 'Y': + lsd=0; + break; + case 'm': + lsd=1; + break; + case 'd': + lsd=2; + break; + case 'j': + lsd=2; + break; + case 'H': + lsd=3; + break; + case 'M': + lsd=4; + break; + case 'S': + lsd=5; + break; + case 'N': + lsd=6; + break; + default: + break; + } + lsdMult= Integer.parseInt(val.substring(0,val.length()-1) ); + logger.log(Level.FINER, "lsd is now {0}, width={1}", new Object[]{lsd, lsdMult}); + } break; + case "id": + break; //TODO: orbit plug in handler... + case "places": + break; //TODO: this all needs to be redone... + case "phasestart": + try { + phasestart= TimeUtil.isoTimeToArray(val); + } catch (IllegalArgumentException ex) { + logger.log(Level.SEVERE, null, ex); + } + break; + case "start": + if ( handler==1 ) { + twoDigitYearStart= Integer.parseInt(val); + } + break; + case "shift": + //TODO: handle end before shift. + if ( val.length()==0 ) throw new IllegalArgumentException("shift is empty"); + char possibleUnit= val.charAt(val.length()-1); + int digit; + if ( Character.isAlphabetic(possibleUnit) ) { + digit= digitForCode(possibleUnit); + val= val.substring(0,val.length()-1); + } else { + digit= digitForCode(fc[i].charAt(0)); + } + if ( i lsd && lsdMult==1 ) { // omni2_h0_mrg1hr_$Y$(m,span=6)$d_v01.cdf. Essentially we ignore the $d. 
+ lsd = precision[handler]; + lsdMult= Math.max( span,div ); + logger.log(Level.FINER, "lsd is now {0}, width={1}", new Object[]{lsd, lsdMult}); + } + } + + String dots = "........."; + if (lengths[i] == -1) { + regex1.append("(.*)"); + } else { + regex1.append("(").append(dots.substring(0, lengths[i])).append(")"); + } + regex1.append(delim[i].replaceAll("\\+","\\\\+")); + + } + + switch (lsd) { // see https://sourceforge.net/p/autoplot/bugs/1506/ + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + if ( !timeWidthIsExplicit ) { + timeWidth[lsd] = lsdMult; + } + break; + case -1: + timeWidth[0]= 8000; + break; + case 100: /* do nothing */ break; //TODO: handler needs to report it's lsd, if it affects. + } + + if ( logger.isLoggable(Level.FINE) ) { + StringBuilder canonical= new StringBuilder( delim[0] ); + for (int i = 1; i < ndigits; i++) { + canonical.append("$"); + if ( qualifiers[i]==null ) { + canonical.append(fc[i]); + } else { + canonical.append("(").append(fc[i]).append(";").append(qualifiers[i]).append(")"); + } + canonical.append(delim[i]); + } + logger.log( Level.FINE, "Canonical: {0}", canonical.toString()); + } + + // if the stop time is not in the spec, then both start and stop are shifted. + if ( this.stopTimeDigit==AFTERSTOP_INIT ) { + if ( this.startShift!=null ) { + this.stopShift= this.startShift; + } + } + + this.delims = delim; + this.regex = regex1.toString(); + + } + + /** + * return the timeString, parsed into start time and stop time. + * The result is a 14-element array, with the first 7 the start time + * and the last 7 the stop time. + * @param timeString the template string to be parsed. + * @return 14 element array [ Y, m, d, H, M, S, nano, Y, m, d, H, M, S, nano ] + * @throws ParseException when a number is expected, or patterned not matched. 
+ * @see #parse(java.lang.String, java.util.Map) + */ + public int[] parse( String timeString ) throws ParseException { + return parse( timeString, new HashMap<>() ); + } + + /** + * return the timeString, parsed into start time and stop time. + * The result is a 14-element array, with the first 7 the start time + * and the last 7 the stop time. The output will be decomposed into + * year, month, and day even if year, day-of-year are in the time string. + * @param timeString string in the format described by the template. + * @param extra extension results, like $(x,name=sc) appear here. + * @return 14 element array [ Y, m, d, H, M, S, nano, Y, m, d, H, M, S, nano ] + * @throws ParseException when a number is expected, or patterned not matched. + * @see TimeUtil#dayOfYear(int, int, int) if day-of-year is needed. + * @see #parse(java.lang.String) which can be used when extra arguments are not needed. + */ + public int[] parse( String timeString, Map extra ) throws ParseException { + logger.log(Level.FINER, "parse {0}", timeString); + + int offs = 0; + int length = 0; + + int[] time; + + int[] startTime, stopTime; + + startTime= new int[NUM_TIME_DIGITS]; + stopTime= new int[NUM_TIME_DIGITS]; + + time= startTime; + + System.arraycopy( context, 0, time, 0, NUM_TIME_DIGITS ); + + int lastOffset=0; + int lastLength=0; + + for (int idigit = 1; idigit < ndigits; idigit++) { + + if ( idigit==stopTimeDigit ) { + logger.finer("switching to parsing end time"); + System.arraycopy( time, 0, stopTime, 0, NUM_TIME_DIGITS ); + time= stopTime; + } + + if (offsets[idigit] != -1) { // note offsets[0] is always known + offs = offsets[idigit]; + } else { + offs += length + this.delims[idigit - 1].length(); + } + if (lengths[idigit] != -1) { + length = lengths[idigit]; + } else { + if (this.delims[idigit].equals("")) { + if (idigit == ndigits - 1) { + length = timeString.length() - offs; + } else { + throw new IllegalArgumentException("No delimiter specified after unknown length field, 
\"" + formatName[handlers[idigit]] + "\", field number=" + (1 + idigit) + ""); + } + } else { + while ( offs=timeString.length() ) { + throw new ParseException( "expected delimiter \"" + this.delims[idigit] + "\" but reached end of string", offs); + } + int i = timeString.indexOf(this.delims[idigit], offs); + if (i == -1) { + throw new ParseException("expected delimiter \"" + this.delims[idigit] + "\"", offs); + } + length = i - offs; + if ( length<0 ) { + throw new IllegalArgumentException("bad state, length should never be less than zero."); + } + } + } + + String foundDelim= timeString.substring(lastOffset+lastLength,offs); + if ( !foundDelim.equals(delims[idigit-1]) ) { + throw new ParseException("Expected \""+delims[idigit-1]+"\" before $" +fc[idigit]+", got: "+foundDelim,lastOffset); + } + + lastOffset= offs; + lastLength= length; + + if ( timeString.length() qual= this.qualifiersMaps[idigit]; + if (handlers[idigit] < 10) { + int digit; + digit= Integer.parseInt(field); + if ( qual!=null ) { + String s= getArg( qual, "div", null ); + if ( s!=null ) { + int div= (int)Float.parseFloat(s); // TODO: we really have to parse this each time? 
+ digit= digit*div; + } + } + switch (handlers[idigit]) { + case 0: + time[YEAR] = digit; + break; + case 1: + int mod= twoDigitYearStart % 100; + int cent= twoDigitYearStart / 100; + if ( digit>=mod ) { + time[YEAR] = cent * 100 + digit; + } else { + time[YEAR] = (cent+1) * 100 + digit; + } + break; + case 2: + time[MONTH] = 1; + time[DAY] = digit; + break; + case 3: + time[MONTH] = digit; + break; + case 4: + time[DAY] = digit; + break; + case 5: + time[HOUR] = digit; + break; + case 6: + time[MINUTE] = digit; + break; + case 7: + time[SECOND] = digit; + break; + case 8: + time[NANOSECOND] = digit; + break; + default: + throw new IllegalArgumentException("handlers[idigit] was not expected value (which shouldn't happen)"); + } + } else if (handlers[idigit] == 100) { + FieldHandler handler = (FieldHandler) fieldHandlers.get(fc[idigit]); + handler.parse(timeString.substring(offs, offs + length), time, timeWidth, extra ); + + } else if (handlers[idigit] == 10) { // AM/PM -- code assumes hour has been read already + char ch = timeString.charAt(offs); + if (ch == 'P' || ch == 'p') { + if ( time[HOUR]==12 ) { + // do nothing + } else { + time[HOUR] += 12; + } + } else if (ch == 'A' || ch == 'a') { + if ( time[HOUR]==12 ) { + time[HOUR] -= 12; + } else { + // do nothing + } + } + } else if (handlers[idigit] == 11) { // TimeZone is not supported, see code elsewhere. + int offset; + offset= Integer.parseInt(timeString.substring(offs, offs + length)); + time[HOUR] -= offset / 100; // careful! 
+ + time[MINUTE] -= offset % 100; + } else if (handlers[idigit] == 12) { // $(ignore) + if ( length>=0 ) { + extra.put( "ignore", timeString.substring(offs, offs + length) ); + } + } else if (handlers[idigit] == 13) { // month name + time[MONTH] = TimeUtil.monthNumber(timeString.substring(offs, offs + length)); + + } else if (handlers[idigit] == 14) { // "X" + if ( length>=0 ) { + extra.put( "X", timeString.substring(offs, offs + length) ); + } + } else if (handlers[idigit] == 15) { // "x" + String name; + if ( qual!=null ) { + name= getArg( qual, "name", "x" ); + } else { + name= "x"; + } + if ( length>=0 ) { + extra.put( name, timeString.substring(offs, offs + length) ); + } + } + } catch ( NumberFormatException ex ) { + throw new ParseException( String.format( "fail to parse digit number %d: %s", idigit, field ), offs ); + } + + } + + String foundDelim= timeString.substring(lastOffset+lastLength); + if ( !foundDelim.equals(delims[ndigits-1]) ) { + throw new ParseException("Expected \""+delims[ndigits-1]+"\" after $" +fc[ndigits-1]+", got: "+foundDelim,lastOffset+lastLength); + } + + if ( this.phasestart!=null ) { + if ( timeWidth==null ) { + logger.warning("phasestart cannot be used for month or year resolution"); + } else { + if ( timeWidth[1]>0 ) { + startTime[1]= ( ( startTime[1] - this.phasestart[1] ) / timeWidth[1] ) * timeWidth[1] + this.phasestart[1]; + } else if ( timeWidth[0]>0 ) { + startTime[0]= ( ( startTime[0] - this.phasestart[0] ) / timeWidth[0] ) * timeWidth[0] + this.phasestart[0]; + } else if ( timeWidth[2]>1 ) { + int phaseStartJulian= TimeUtil.julianDay( phasestart[0], phasestart[1], phasestart[2] ); + int ndays= TimeUtil.julianDay( startTime[0], startTime[1], startTime[2] ) - phaseStartJulian; + int ncycles= floorDiv( ndays, timeWidth[2] ); + startTime= TimeUtil.fromJulianDay( phaseStartJulian + ncycles * timeWidth[2] ); + } else { + logger.log(Level.WARNING, + "phasestart can only be used when step size is integer number of days greater 
than 1: {0}", + TimeUtil.formatIso8601Duration(timeWidth)); + } + stopTime= TimeUtil.add( startTime, this.timeWidth ); + } + } else { + if ( stopTimeDigit==AFTERSTOP_INIT ) { + if ( disallowCarryForStopTime ) { + stopTime= TimeUtil.add( startTime, this.timeWidth ); + if ( this.timeWidth[0]==0 && this.timeWidth[1]==0 && this.timeWidth[2]>1 ) { + stopTime[1]= 1; + stopTime[2]= 1; + } + } else { + stopTime= TimeUtil.add( startTime, this.timeWidth ); + } + } + } + + int [] result= new int[NUM_TIME_DIGITS*2]; + + boolean noShift; + noShift = this.startShift==null; + if ( noShift ) { + System.arraycopy(startTime, 0, result, 0, NUM_TIME_DIGITS); + TimeUtil.normalizeTime(result); + } else { + for ( int i=0; i extra ) throws ParseException { + URITemplate ut= new URITemplate(template); + ArrayList result= new ArrayList<>(); + String s1; + String sptr= TimeUtil.isoTimeFromArray( TimeUtil.isoTimeToArray(startTimeStr) ); + int[] stopDigits= TimeUtil.isoTimeToArray(stopTimeStr); + String stop= TimeUtil.isoTimeFromArray( stopDigits ); + if ( sptr.compareTo(stop)>0 ) { + throw new IllegalArgumentException("start time must be before or equal to stop time."); + } + int i=0; + int externalContext= ut.getExternalContext(); + if ( externalContext>0 ) { + int[] context= new int[TimeUtil.TIME_DIGITS]; + System.arraycopy(stopDigits, 0, context, 0, externalContext); + ut.setContext(context); + } + + boolean firstLoop= true; + while ( sptr.compareTo(stop)<0 ) { + String sptr0= sptr; + s1= ut.format( sptr, sptr, extra ); + int [] tta= ut.parse( s1, new HashMap<>() ); + if ( firstLoop ) { + sptr= TimeUtil.isoTimeFromArray( TimeUtil.getStartTime(tta) ); + s1= ut.format( sptr, sptr, extra ); + firstLoop= false; + } + //test for special case where start and stop are in the template, so there is no looping. 
+ if ( Arrays.equals( TimeUtil.getStartTime(tta), TimeUtil.getStopTime(tta) ) ) { + result.add( ut.format( startTimeStr, stopTimeStr ) ); + break; + } else { + result.add( s1 ); + } + sptr= TimeUtil.isoTimeFromArray( TimeUtil.getStopTime(tta) ); + if ( sptr0.equals(sptr) ) { + throw new IllegalArgumentException("template fails to advance"); + } + i=i+1; + } + return result.toArray( new String[result.size()] ); + } + + /** + * return a the formatted name, using the spec and the given time range. + * @param startTimeStr iso8601 formatted time. + * @param stopTimeStr iso8601 formatted time. + * @return formatted time, often a resolvable URI. + */ + public String format( String startTimeStr, String stopTimeStr ) { + return format( startTimeStr, stopTimeStr, new HashMap<>() ); + } + + /** + * return a the formatted name, using the spec and the given time range. + * @param startTimeStr iso8601 formatted time. + * @param stopTimeStr iso8601 formatted time. + * @param extra extra parameters + * @return formatted time, often a resolvable URI. + */ + public String format( String startTimeStr, String stopTimeStr, + Map extra ) { + + int[] startTime= TimeUtil.isoTimeToArray( startTimeStr ); + int[] stopTime; + if ( timeWidthIsExplicit ) { + stopTime = TimeUtil.add( startTime, timeWidth ); + } else { + stopTime = TimeUtil.isoTimeToArray( stopTimeStr ); + } + return formatStartStopRange( startTime, stopTime, extra ); + } + + /** + * return the formatted name, using the spec and the given time range. + * @param timeRange fourteen-component time range + * @return formatted time, often a resolvable URI. + */ + public String formatTimeRange( int[] timeRange ) { + return formatTimeRange( timeRange, Collections.emptyMap() ); + } + + /** + * return the formatted name, using the spec and the given time range. + * @param timeRange fourteen-component time range + * @param extra extra parameters + * @return formatted time, often a resolvable URI. 
+ */ + public String formatTimeRange( int[] timeRange, Map extra ) { + int[] start= TimeUtil.getStartTime(timeRange); + int[] stop= TimeUtil.getStopTime(timeRange); + return formatStartStopRange( start, stop, extra ); + } + + /** + * return the formatted name, using the spec and the given time range. + * @param startTime seven-component start time + * @param stopTime seven-component stop time + * @return formatted time, often a resolvable URI. + */ + public String formatStartStopRange( int[] startTime, int[] stopTime ) { + return formatStartStopRange( startTime, stopTime, Collections.emptyMap() ); + } + + /** + * return the formatted name, using the spec and the given time range. + * @param startTime seven-component start time + * @param stopTime seven-component stop time + * @param extra extra parameters + * @return formatted time, often a resolvable URI. + */ + public String formatStartStopRange( int[] startTime, int[] stopTime, Map extra ) { + + int[] timeWidthl; + if ( timeWidthIsExplicit ) { + timeWidthl= timeWidth; + } else { + timeWidthl = TimeUtil.subtract( stopTime, startTime ); + } + + if ( startShift!=null ) { + startTime= TimeUtil.subtract( startTime, startShift ); + } + if ( stopShift!=null ) { + stopTime= TimeUtil.subtract( stopTime, stopShift ); + } + + if ( timeWidthIsExplicit ) { + if ( this.phasestart!=null && timeWidth[2]>0 ) { + int phaseStartJulian= TimeUtil.julianDay( phasestart[0], phasestart[1], phasestart[2] ); + int ndays= TimeUtil.julianDay( startTime[0], startTime[1], startTime[2] ) - phaseStartJulian; + int ncycles= floorDiv( ndays, timeWidth[2] ); + int[] tnew= TimeUtil.fromJulianDay(phaseStartJulian+ncycles*timeWidth[2]); + startTime[0]= tnew[0]; + startTime[1]= tnew[1]; + startTime[2]= tnew[2]; + stopTime = TimeUtil.add( startTime, timeWidth ); + } + } + + int[] timel= startTime; + + StringBuilder result = new StringBuilder(100); + + int offs = 0; + int length; + + String[] nf = new String[5]; + nf[1] = "%1d"; + nf[2] = "%02d"; + 
nf[3] = "%03d"; + nf[4] = "%04d"; + + for (int idigit = 1; idigit < ndigits; idigit++) { + if ( idigit==stopTimeDigit ) { + timel= stopTime; + } + + result.insert(offs, this.delims[idigit - 1]); + if (offsets[idigit] != -1) { // note offsets[0] is always known + + offs = offsets[idigit]; + } else { + offs += this.delims[idigit - 1].length(); + } + if (lengths[idigit] != -1) { + length = lengths[idigit]; + } else { + length = -9999; // the field handler will tell us. + + } + if (handlers[idigit] < 10) { + Map qualm= qualifiersMaps[idigit]; + int digit; + int delta=1; + if ( qualm!=null ) { + String ddelta= getArg( qualm, "delta", null ); + if ( ddelta!=null ) { + delta= Integer.parseInt(ddelta); + } else { + ddelta= getArg( qualm, "span", null ); + if ( ddelta!=null ) { + delta= Integer.parseInt(ddelta); + } + } + } + switch (handlers[idigit]) { + case 0: + digit = timel[0]; + break; + case 1: + if ( timel[0]<2000 ) { + digit = timel[0] - 1900; + } else { + digit = timel[0] - 2000; + } + break; + case 2: + digit = TimeUtil.dayOfYear( timel[0], timel[1], timel[2] ); + break; + case 3: + digit = timel[1]; + break; + case 4: + digit = timel[2]; + break; + case 5: + digit = timel[3]; + break; + case 6: + digit = timel[4]; + break; + case 7: + digit = timel[5]; + break; + case 8: + digit = timel[6]; + break; + case 9: + digit = timel[6]/1000; //TODO verify + break; + default: + throw new RuntimeException("shouldn't get here"); + } + if ( delta>1 ) { + int h= handlers[idigit]; + switch (h) { + case 2: + case 3: + // $j, $m all start with 1. 
+ digit= ( ( ( digit-1) / delta ) * delta ) + 1; + break; + case 4: + if ( phasestart!=null ) { + int phaseStartJulian= TimeUtil.julianDay( phasestart[0], phasestart[1], phasestart[2] ); + int ndays= TimeUtil.julianDay( timel[0], timel[1], timel[2] ) - phaseStartJulian; + int ncycles= floorDiv( ndays, timeWidth[2] ); + + int[] tnew= TimeUtil.fromJulianDay(phaseStartJulian+ncycles*delta); + timel[0]= tnew[0]; + timel[1]= tnew[1]; + timel[2]= tnew[2]; + + } else { + throw new IllegalArgumentException("phasestart not set for delta days"); + } + break; + default: + digit= ( digit / delta ) * delta; + break; + } + } + if ( length<0 ) { + String ss= String.valueOf(digit); + result.insert(offs, ss); + offs+= ss.length(); + } else { + if ( this.qualifiersMaps[idigit]!=null ) { + // TODO: suboptimal + String div= getArg( this.qualifiersMaps[idigit], "div", null ); + if ( div!=null ) { + digit= digit / (int)Float.parseFloat(div); // use parseFloat to support 1E6 + } + String pad= getArg( this.qualifiersMaps[idigit], "pad", null ); + if ( pad==null || pad.equals("zero") ) { + result.insert(offs, String.format(nf[length],digit) ); + offs+= length; + } else { + if ( digit<10 ) { + switch (pad) { + case "space": + result.insert( offs, " " ); + result.insert(offs, String.valueOf(digit) ); + offs+= 2; + break; + case "underscore": + result.insert( offs, "_" ); + result.insert(offs, String.valueOf(digit) ); + offs+= 2; + break; + // do nothing. 
+ case "none": + result.insert(offs, String.valueOf(digit) ); + offs+= 1; + break; + default: + result.insert(offs, String.format( nf[length], digit) ); + offs+= length; + break; + } + + } else { + result.insert(offs, String.format( nf[length], digit) ); + offs+= length; + } + } + } else { + result.insert(offs, String.format( nf[length], digit) ); + offs += length; + } + } + + } else if (handlers[idigit] == 13) { // month names + String cas= getArg( this.qualifiersMaps[idigit], "case", null ); + String fmt= getArg( this.qualifiersMaps[idigit], "fmt", null ); + String ins; + + if ( "full".equals(fmt) ) { + ins= TimeUtil.monthNameFull(timel[1]); + } else { + ins= TimeUtil.monthNameAbbrev(timel[1]); + } + + if ( cas==null || cas.equals("lc") ) { + ins= ins.toLowerCase(); + } else if ( cas.equals("cap") ) { + // nothing more + } else if ( cas.equals("uc") ) { + ins= ins.toUpperCase(); + } + + result.insert(offs, ins); + offs += ins.length(); + + } else if (handlers[idigit] == 12 || handlers[idigit]==14 ) { // ignore + throw new RuntimeException("cannot format spec containing ignore"); + + } else if (handlers[idigit] == 100) { + if ( fc[idigit].equals("v") ) { // kludge for version. TODO: This can probably use the code below now. 
+ String ins= getArg( extra, "v", "00" ); + if ( length>-1 ) { + if ( length>20 ) throw new IllegalArgumentException("version lengths>20 not supported"); + ins= "00000000000000000000".substring(0,length); + } + result.insert( offs, ins ); + offs+= ins.length(); + } else { + FieldHandler fh1= fieldHandlers.get(fc[idigit]); + int[] timeEnd = stopTime; + String ins= fh1.format( timel, TimeUtil.subtract(timeEnd, timel), length, extra ); + int[] startTimeTest= new int[NUM_TIME_DIGITS]; + System.arraycopy(timel, 0, startTimeTest, 0, NUM_TIME_DIGITS); + int[] timeWidthTest= new int[NUM_TIME_DIGITS]; + System.arraycopy(timeWidthl, 0, timeWidthTest, 0, NUM_TIME_DIGITS); + + try { + fh1.parse( ins, startTimeTest, timeWidthTest, extra ); + System.arraycopy(startTimeTest, 0, timel, 0, NUM_TIME_DIGITS); + System.arraycopy(timeWidthTest, 0, timeWidthl, 0, NUM_TIME_DIGITS); + System.arraycopy(TimeUtil.add( timel, timeWidthl ), 0, stopTime, 0, NUM_TIME_DIGITS); + + } catch (ParseException ex) { + logger.log(Level.SEVERE, null, ex); + } + if ( length>-1 && ins.length()!=length ) { + String p= getArg( this.qualifiersMaps[idigit], "pad", null ); + if ( p==null ) { + throw new IllegalArgumentException("length of fh is incorrect, should be "+length+", got \""+ins+"\", and pad is not defined."); + } + if ( length] --template= [--name=]"); + System.err.println("java -jar UriTemplatesJava.jar --formatRange --range=1999-01-01/1999-01-03 --template='http://example.com/data_$(d;pad=none).dat'"); + System.err.println("java -jar UriTemplatesJava.jar --parse --template='data_$(d;pad=none;Y=1999; m=5).dat' --name=data_1.dat"); + System.err.println(" --formatRange time ranges will be formatted into names"); + System.err.println(" --parse names will be parsed into time ranges"); + System.err.println(" --range is an iso8601 range, or - for ranges from stdin"); + System.err.println(" --name is has been formatted by the template, or - for names from stdin"); + } + + /** + * Usage: java -jar 
dist/UriTemplatesJava.jar --formatRange --range='1999-01-01/1999-01-03' --template='http://example.com/data_$(d;pad=none).dat' + * @param args the command line arguments. + */ + public static void main( String[] args ) { + if ( args.length==0 || args[1].equals("--help") ) { + printUsage(); + System.exit(-1); + } + Map argsm= new HashMap<>(); + for (String a : args) { + String[] aa= a.split("=",2); + if ( aa.length==1 ) { + argsm.put( aa[0], "" ); + } else { + argsm.put( aa[0], aa[1] ); + } + } + if ( argsm.containsKey("--formatRange") ) { + argsm.remove("--formatRange"); + String template= argsm.remove("--template"); + if ( template==null ) { + printUsage(); + System.err.println("need --template parameter"); + System.exit(-2); + } + String timeRange= argsm.remove("--range"); + if ( timeRange==null ) { + printUsage(); + System.err.println("need --range parameter"); + System.exit(-3); + } + if ( timeRange.equals("-") ) { + String tr1=null; + try ( BufferedReader r= new BufferedReader( new InputStreamReader(System.in) ) ) { + tr1= r.readLine(); + while ( tr1!=null ) { + int[] itimeRange; + itimeRange= TimeUtil.parseISO8601TimeRange(tr1); + String[] result= URITemplate.formatRange( template, + TimeUtil.isoTimeFromArray( TimeUtil.getStartTime( itimeRange ) ), + TimeUtil.isoTimeFromArray( TimeUtil.getStopTime( itimeRange ) ) ); + for ( String s: result ) { + System.out.println(s); + } + tr1= r.readLine(); + } + } catch (ParseException ex) { + printUsage(); + System.err.println("range is misformatted: "+tr1); + System.exit(-3); + } catch ( IOException ex ) { + System.err.println("IOException"); + System.exit(-4); + } + + } else { + int[] itimeRange; + try { + itimeRange= TimeUtil.parseISO8601TimeRange(timeRange); + String[] result= URITemplate.formatRange( template, + TimeUtil.isoTimeFromArray( TimeUtil.getStartTime( itimeRange ) ), + TimeUtil.isoTimeFromArray( TimeUtil.getStopTime( itimeRange ) ) ); + for ( String s: result ) { + System.out.println(s); + } + } catch 
(ParseException ex) { + printUsage(); + System.err.println("range is misformatted"); + System.exit(-3); + } + } + + } else if ( argsm.containsKey("--parse" ) ) { + argsm.remove("--parse"); + String template= argsm.remove("--template"); + if ( template==null ) { + printUsage(); + System.err.println("need --template parameter"); + System.exit(-2); + } + String name= argsm.remove("--name"); + if ( name==null ) { + printUsage(); + System.err.println("need --name parameter"); + System.exit(-3); + } + if ( name.equals("-") ) { + String filen1=null; + try ( BufferedReader r= new BufferedReader( new InputStreamReader(System.in) ) ) { + filen1= r.readLine(); + while ( filen1!=null ) { + URITemplate ut= new URITemplate(template); + int[] itimeRange= ut.parse( filen1, argsm ); + System.out.print( TimeUtil.isoTimeFromArray( TimeUtil.getStartTime( itimeRange ) ) ); + System.out.print( "/" ); + System.out.println( TimeUtil.isoTimeFromArray( TimeUtil.getStopTime( itimeRange ) ) ); + filen1= r.readLine(); + } + + } catch ( IOException ex ) { + + } catch ( ParseException ex ) { + printUsage(); + System.err.println("parseException from "+filen1); + System.exit(-3); + } + + } else { + try { + URITemplate ut= new URITemplate(template); + int[] itimeRange= ut.parse( name, argsm ); + System.out.print( TimeUtil.isoTimeFromArray( TimeUtil.getStartTime( itimeRange ) ) ); + System.out.print( "/" ); + System.out.println( TimeUtil.isoTimeFromArray( TimeUtil.getStopTime( itimeRange ) ) ); + } catch ( ParseException ex ) { + printUsage(); + System.err.println("parseException from ?"); + System.exit(-3); + } + } + + } + } +} diff --git a/HapiServerBase/src/org/hapiserver/exceptions/InconsistentDataException.java b/HapiServerBase/src/main/java/org/hapiserver/exceptions/InconsistentDataException.java similarity index 100% rename from HapiServerBase/src/org/hapiserver/exceptions/InconsistentDataException.java rename to 
HapiServerBase/src/main/java/org/hapiserver/exceptions/InconsistentDataException.java diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/AggregatingIterator.java b/HapiServerBase/src/main/java/org/hapiserver/source/AggregatingIterator.java new file mode 100644 index 00000000..3fd16a67 --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/AggregatingIterator.java @@ -0,0 +1,129 @@ + +package org.hapiserver.source; + +import java.util.Iterator; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.hapiserver.HapiRecord; +import org.hapiserver.HapiRecordSource; +import org.hapiserver.TimeString; +import org.hapiserver.TimeUtil; + +/** + * Often we have granules of data which when "aggregated" together form the + * entire data set. For example, data might be stored in daily files, and to + * implement the HAPI server we must read each one. This class creates a + * HapiRecord iterator for any time range by combining each of the granules, + * so the reader can be simple. + * @author jbf + */ +public class AggregatingIterator implements Iterator { + + private static final Logger logger= Logger.getLogger("hapi.agg"); + + TimeString[] granule; + Iterator granuleIterator; + Iterator hapiRecordIterator; + String[] parameters; + HapiRecordSource source; + TimeString stop; + + /** + * construct an iterator which will use the source create and go through a set of iterators, one for each granule. + * @param source the source of data + * @param start the start time + * @param stop the stop time + */ + public AggregatingIterator( HapiRecordSource source, TimeString start, TimeString stop ) { + this( source, start, stop, null ); + } + + /** + * construct an iterator which will use the source create and go through a set of iterators, one for each granule. + * @param source the source of data + * @param start the start time + * @param stop the stop time + * @param parameters null or the parameters to subset. 
+ */ + public AggregatingIterator( HapiRecordSource source, TimeString start, TimeString stop, String[] parameters ) { + this.source= source; + //this.granuleIterator= source.getGranuleIterator(start, stop); + //System.err.println("=====" + parameters[0] ); + //while ( this.granuleIterator.hasNext() ) { + // System.err.println( TimeUtil.formatIso8601TimeRange( this.granuleIterator.next() ) ); + //} + //System.err.println("====="); + this.granuleIterator= source.getGranuleIterator(start, stop); + this.stop= stop; + this.parameters= parameters; + if ( granuleIterator.hasNext() ) { + this.granule= granuleIterator.next(); + int count=0; + while ( start.gt(this.granule[1]) && granuleIterator.hasNext() ) { + logger.log(Level.FINER, "skipping {0}", this.granule); + count++; + this.granule= granuleIterator.next(); + } + logger.log(Level.FINE, "skipped over {0} granules", count); + if ( this.granule[0].gt( stop ) ) { + logger.log(Level.FINER, "finished {0}", this.granule); + this.granule= null; + } else { + if ( this.granule.length!=2 ) { + throw new IllegalArgumentException("implementation error, granule iterator did not return 14 time range digits"); + } + if ( this.parameters==null ) { + this.hapiRecordIterator= source.getIterator( granule[0], granule[1] ); + } else { + // Here we make the iterator for the next granule. 
+ this.hapiRecordIterator= source.getIterator( granule[0], granule[1], this.parameters ); + } + findNextRecord(); + } + } else { + this.granule= null; + } + } + + private void findNextRecord() { + while ( !this.hapiRecordIterator.hasNext() ) { + if ( !granuleIterator.hasNext() ) { + granule=null; // we're done + break; + } else { + granule= granuleIterator.next(); + if ( granule[0].gt(this.stop) ) { + granule= null; + break; + } + } + //TimeUtil.isValidTimeRange(granule); + try { + if ( this.parameters==null ) { + hapiRecordIterator= source.getIterator( granule[0], granule[1] ); + } else { + hapiRecordIterator= source.getIterator( granule[0], granule[1], parameters ); + } + } catch ( RuntimeException ex ) { + logger.log(Level.WARNING, "RuntimeException in AggregatingIterator.findNextRecord: {0}", ex.getMessage()); + this.granule= null; + break; + } + } + } + + @Override + public boolean hasNext() { + return granule!=null && hapiRecordIterator.hasNext(); + } + + @Override + public HapiRecord next() { + HapiRecord next= hapiRecordIterator.next(); + if ( !hapiRecordIterator.hasNext() ) { + findNextRecord(); + } + return next; + } + +} diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/AggregationGranuleIterator.java b/HapiServerBase/src/main/java/org/hapiserver/source/AggregationGranuleIterator.java new file mode 100644 index 00000000..d8cbff60 --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/AggregationGranuleIterator.java @@ -0,0 +1,51 @@ + +package org.hapiserver.source; + +import java.text.ParseException; +import java.util.Arrays; +import java.util.Iterator; +import org.hapiserver.TimeString; +import org.hapiserver.URITemplate; + +/** + * + * @author jbf + */ +public class AggregationGranuleIterator implements Iterator { + + String[] result; + int next=0; + URITemplate uriTemplate; + + public AggregationGranuleIterator( String fileFormat, TimeString start, TimeString stop ) { + this.uriTemplate= new URITemplate(fileFormat); + + 
try { + result= URITemplate.formatRange( fileFormat, start.toString(), stop.toString() ); + } catch (ParseException ex) { + throw new RuntimeException(ex); + } + + + } + @Override + public boolean hasNext() { + return result.length>next; + } + + @Override + public TimeString[] next() { + try { + int i= next; + next++; + int[] rr= uriTemplate.parse(result[i]); + TimeString start= new TimeString( Arrays.copyOfRange( rr, 0, 7 ) ); + TimeString stop= new TimeString( Arrays.copyOfRange( rr, 7, 14 ) ); + return new TimeString[] { start, stop }; + } catch ( ParseException ex ) { + throw new RuntimeException(ex); + } + + } + +} diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/BuildCacheInputStreamProvider.java b/HapiServerBase/src/main/java/org/hapiserver/source/BuildCacheInputStreamProvider.java new file mode 100644 index 00000000..06303d87 --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/BuildCacheInputStreamProvider.java @@ -0,0 +1,68 @@ +package org.hapiserver.source; + +import hapi.cache.InputStreamProvider; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +/** + * InputStreamProvider which builds a cache file as it reads the data + * @author jbf + */ +public class BuildCacheInputStreamProvider implements InputStreamProvider { + + InputStream ins; + OutputStream out; + File cacheFile; + File tmpCacheFile; + + public BuildCacheInputStreamProvider(InputStreamProvider insProvider, File cacheFile) throws FileNotFoundException, IOException { + this.cacheFile = cacheFile; + this.tmpCacheFile = new File(cacheFile.getAbsolutePath() + "." 
+ Thread.currentThread().getName()); + this.out = new FileOutputStream(tmpCacheFile); + this.ins = insProvider.openInputStream(); + } + + @Override + public InputStream openInputStream() { + return new TeeInputStream(); + } + + private class TeeInputStream extends InputStream { + + @Override + public int read() throws IOException { + int i = ins.read(); + out.write(i); + return i; + } + + @Override + public int read(byte[] b) throws IOException { + int bytesRead = ins.read(b); + if (bytesRead > 0) { + out.write(b, 0, bytesRead); + } + return bytesRead; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + int bytesRead = ins.read(b, off, len); + if (bytesRead > 0) { + out.write(b, off, bytesRead); + } + return bytesRead; + } + + @Override + public void close() throws IOException { + ins.close(); + out.close(); + tmpCacheFile.renameTo(cacheFile); + } + } +} diff --git a/HapiServerBase/src/org/hapiserver/source/SourceUtil.java b/HapiServerBase/src/main/java/org/hapiserver/source/SourceUtil.java similarity index 63% rename from HapiServerBase/src/org/hapiserver/source/SourceUtil.java rename to HapiServerBase/src/main/java/org/hapiserver/source/SourceUtil.java index 35d6ac91..ea43fc55 100644 --- a/HapiServerBase/src/org/hapiserver/source/SourceUtil.java +++ b/HapiServerBase/src/main/java/org/hapiserver/source/SourceUtil.java @@ -1,8 +1,10 @@ package org.hapiserver.source; +import hapi.cache.InputStreamProvider; import java.io.BufferedReader; import java.io.File; +import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; @@ -10,6 +12,7 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; +import java.io.StringReader; import java.net.URISyntaxException; import java.net.URL; import java.nio.charset.Charset; @@ -17,10 +20,14 @@ import java.nio.file.Paths; import java.util.Arrays; import java.util.Iterator; +import 
java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.locks.ReentrantLock; +import java.util.logging.Level; import java.util.logging.Logger; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; +import org.codehaus.jettison.json.JSONArray; import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONObject; import org.hapiserver.HapiRecord; @@ -38,6 +45,9 @@ public class SourceUtil { private static final Logger logger= Logger.getLogger("hapi"); private static boolean lineStartsWithTimeTag( String line ) { + if ( line.startsWith("\"") ) { + line= line.substring(1); + } if ( line.length()<2 ) { return false; } else if ( line.charAt(0)=='1' ) { @@ -124,6 +134,9 @@ public String next() { try { String t= line; line= reader.readLine(); + while ( line!=null && line.length()==0 ) { + line= reader.readLine(); + } return t; } catch (IOException ex) { try { @@ -137,7 +150,8 @@ public String next() { } /** - * return an iterator for each line of the ASCII data file. + * return an iterator for each line of the ASCII data file, only returning the records which + * start with timetags or quoted timetags. * @param f a file containing timetags. * @return an iterator for the lines. * @throws FileNotFoundException @@ -148,7 +162,8 @@ public static Iterator getFileLines( File f ) throws FileNotFoundExcepti } /** - * return an iterator for each line of the ASCII data URL. + * return an iterator for each line of the ASCII data URL, only returning the records which + * start with timetags or quoted timetags. * @param url a URL pointing to an ASCII file containing timetags. * @return an iterator for the lines. * @throws FileNotFoundException @@ -292,8 +307,8 @@ public static String[] stringSplit( String s ) { /** * read the XML document from a remote site. 
- * @param url - * @return + * @param url the XML document + * @return the XML document * @throws org.xml.sax.SAXException * @throws java.io.IOException * @throws javax.xml.parsers.ParserConfigurationException @@ -308,6 +323,115 @@ public static Document readDocument( URL url ) throws SAXException, IOException } } + private static final File CACHE_HOME; + static { + String s= System.getProperty("HAPI_SERVER_CACHE_HOME","/tmp/HapiServerCache/"); + CACHE_HOME= new File( s ); + } + + /** + * return an input stream for the resource, possibly using a cached copy which is no older than ageSeconds. + * Note this should only be used for testing, and not in production use. + * @param url the URL + * @param ageSeconds the maximum allowed age in seconds, if 0 then disable caching. + * @return an InputStream + * @throws java.io.IOException + */ + public static InputStream getInputStream( URL url, int ageSeconds ) throws IOException { + InputStream result; + boolean allowCaching= ageSeconds>0; + if ( allowCaching ) { + String hash = Integer.toHexString(url.hashCode()); + File cacheFile= new File( CACHE_HOME, url.getHost() + "/" + hash ); + if ( !cacheFile.getParentFile().exists() ) { + if ( !cacheFile.getParentFile().mkdirs() ) { + logger.log(Level.FINE, "unable to mkdirs for {0}", cacheFile); + } + } + if ( cacheFile.exists() ) { + long timeTag= cacheFile.lastModified(); + double cacheAgeSeconds= ( System.currentTimeMillis() - timeTag ) / 1000.; + if ( cacheAgeSeconds < ( ageSeconds * 1000 ) ) { + return new FileInputStream(cacheFile); + } + } + InputStreamProvider urlInputStreamProvider= () -> url.openStream(); + InputStreamProvider bcisp= new BuildCacheInputStreamProvider( urlInputStreamProvider, cacheFile ); + + result= bcisp.openInputStream(); + + } else { + result= url.openStream(); + } + return result; + } + + /** + * read the XML document from a remote site, allowing cached response to be used. The cache + * of files is kept in /tmp/HapiServerCache/. 
+ * @param url the XML document + * @param ageSeconds the age of the document allowed, since the last read. + * @return the XML document + * @throws org.xml.sax.SAXException + * @throws java.io.IOException + * @throws javax.xml.parsers.ParserConfigurationException + */ + public static Document readDocument( URL url, int ageSeconds ) throws SAXException, IOException, ParserConfigurationException { + + try ( InputStream is= getInputStream( url, ageSeconds ) ) { + DocumentBuilder builder; + builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); + InputSource source = new InputSource(new InputStreamReader(is)); + Document document = builder.parse(source); + return document; + } + } + + /** + * read the XML document from a String. + * @param src + * @return the XML document + * @throws SAXException + * @throws IOException + * @throws ParserConfigurationException + */ + public static Document readDocument( String src ) throws SAXException, IOException, ParserConfigurationException { + StringReader reader= new StringReader(src); + DocumentBuilder builder; + builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); + InputSource source = new InputSource( reader ); + Document document = builder.parse(source); + return document; + } + + /** + * retrieve the parameter from the info. + * @param info info JSON. + * @param name name of the parameter + * @return parameter JSONObject. + * @throws IllegalArgumentException when the name is not found or JSON object does not follow schema. + */ + public static JSONObject getParam( JSONObject info, String name ) { + try { + JSONArray array= info.getJSONArray("parameters"); + for ( int i=0; i lockMap = new ConcurrentHashMap<>(); + + /** + * Download the file, but if another thread is loading the same URL to the same + * File, then wait for it to complete. + * @param url the URL to load + * @param file the file accepting the result. + * @param tmpFile the file accepting the data as the file is downloaded. 
+ * @return the downloaded file. + */ + public static File downloadFileLocking( URL url, File file, String tmpFile ) throws IOException { + String surl= url.toString(); + String key = surl + "::" + file.toString(); + ReentrantLock lock = lockMap.computeIfAbsent(key, k -> new ReentrantLock()); + + lock.lock(); + try { + + if (file.exists()) { + logger.log(Level.WARNING, "File exists! Someone else must have loaded it! {0}", new Object[]{file}); + return file; + } + + logger.log(Level.FINE, "Downloading {0} to {1}", new Object[]{surl, file}); + try (InputStream in = url.openStream(); + FileOutputStream out = new FileOutputStream(tmpFile)) { + byte[] buffer = new byte[4096]; + int bytesRead; + while ((bytesRead = in.read(buffer)) != -1) { + out.write(buffer, 0, bytesRead); + } + logger.log(Level.FINE, "Download complete: {0}", file); + } + new File(tmpFile).renameTo( file ); + + return file; + } finally { + lock.unlock(); + // Optional: remove lock if no other threads are using it + lockMap.remove(key, lock); + } + } + /** * download the resource to the given file * @param url the URL to load @@ -328,14 +495,16 @@ public static JSONObject readJSONObject( URL url ) throws IOException { */ public static File downloadFile( URL url, File file ) throws IOException { // Get the URL of the file to download. - + // Open a connection to the URL. try ( InputStream inputStream = url.openStream(); OutputStream outputStream = new FileOutputStream(file) ) { // Copy the contents of the input stream to the output stream. 
byte[] buffer = new byte[10240]; int bytesRead; + long totalBytesRead=0; while ((bytesRead = inputStream.read(buffer)) != -1) { outputStream.write(buffer, 0, bytesRead); + totalBytesRead+=bytesRead; } } return file; diff --git a/HapiServerBase/src/org/hapiserver/source/WindSwe2mDataSource.java b/HapiServerBase/src/main/java/org/hapiserver/source/WindSwe2mDataSource.java similarity index 76% rename from HapiServerBase/src/org/hapiserver/source/WindSwe2mDataSource.java rename to HapiServerBase/src/main/java/org/hapiserver/source/WindSwe2mDataSource.java index 53e335e2..6f805114 100644 --- a/HapiServerBase/src/org/hapiserver/source/WindSwe2mDataSource.java +++ b/HapiServerBase/src/main/java/org/hapiserver/source/WindSwe2mDataSource.java @@ -7,6 +7,7 @@ import java.util.Iterator; import org.hapiserver.HapiRecord; import org.hapiserver.HapiRecordSource; +import org.hapiserver.TimeString; /** * Example of class which loads data @@ -32,9 +33,11 @@ public boolean hasGranuleIterator() { } @Override - public Iterator getGranuleIterator(int[] start, int[] stop) { + public Iterator getGranuleIterator( TimeString startts, TimeString stopts) { int stopYear; int stopMonth; + int[] stop= stopts.toComponents(); + int[] start= startts.toComponents(); if ( stop[1]==1 && stop[2]==1 && stop[3]==1 && stop[4]==0 && stop[5]==0 && stop[6]==0 ) { stopYear= stop[0]; stopMonth= stop[1]; @@ -49,7 +52,7 @@ public Iterator getGranuleIterator(int[] start, int[] stop) { int fstopMonth= stopMonth; int fstopYear= stopYear; - return new Iterator() { + return new Iterator() { int currentYear= start[0]; int currentMonth= start[1]; @@ -59,7 +62,7 @@ public boolean hasNext() { } @Override - public int[] next() { + public TimeString[] next() { int m= currentMonth; int y= currentYear; currentMonth++; @@ -67,7 +70,7 @@ public int[] next() { currentMonth-= 12; currentYear++; } - return new int[] { y, m, 1, 0, 0, 0, 0, currentYear, currentMonth, 1, 0, 0, 0, 0}; + return new TimeString[] { new TimeString( y, 
m, 1 ), new TimeString( currentYear, currentMonth, 1 ) }; } }; } @@ -78,18 +81,18 @@ public boolean hasParamSubsetIterator() { } @Override - public Iterator getIterator(int[] start, int[] stop, String[] params) { + public Iterator getIterator(TimeString start, TimeString stop, String[] params) { throw new IllegalArgumentException("not used"); } @Override - public Iterator getIterator(int[] start, int[] stop) { + public Iterator getIterator(TimeString start, TimeString stop) { return new WindSwe2mIterator( this.dataHome, start, stop ); } @Override - public String getTimeStamp(int[] start, int[] stop) { - throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. + public TimeString getTimeStamp(TimeString start, TimeString stop) { + return null; } @Override diff --git a/HapiServerBase/src/org/hapiserver/source/WindSwe2mIterator.java b/HapiServerBase/src/main/java/org/hapiserver/source/WindSwe2mIterator.java similarity index 96% rename from HapiServerBase/src/org/hapiserver/source/WindSwe2mIterator.java rename to HapiServerBase/src/main/java/org/hapiserver/source/WindSwe2mIterator.java index c999eb5b..33d9d4fe 100644 --- a/HapiServerBase/src/org/hapiserver/source/WindSwe2mIterator.java +++ b/HapiServerBase/src/main/java/org/hapiserver/source/WindSwe2mIterator.java @@ -10,6 +10,7 @@ import java.util.logging.Level; import java.util.logging.Logger; import org.hapiserver.HapiRecord; +import org.hapiserver.TimeString; /** * Bootstrap method for getting server going, used to test its functionality in initial development. 
It still serves @@ -44,10 +45,10 @@ public class WindSwe2mIterator implements Iterator { * @param startTime * @param stopTime */ - public WindSwe2mIterator( String dataHome, int[] startTime, int[] stopTime ) { + public WindSwe2mIterator( String dataHome, TimeString startTime, TimeString stopTime ) { - currentYear= startTime[0]; - currentMonth= startTime[1]; + currentYear= startTime.getYear(); + currentMonth= startTime.getMonth(); try { currentUrl= new URL( String.format( dataHome+"/wind_swe_2m_sw%4d%02d.asc", diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/das2server/Das2ServerCatalogSource.java b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/Das2ServerCatalogSource.java new file mode 100644 index 00000000..32b37462 --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/Das2ServerCatalogSource.java @@ -0,0 +1,113 @@ +package org.hapiserver.source.das2server; + +import java.io.IOException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.regex.Pattern; +import org.codehaus.jettison.json.JSONArray; +import org.codehaus.jettison.json.JSONException; +import org.codehaus.jettison.json.JSONObject; +import org.hapiserver.source.SourceUtil; + +/** + * return a catalog + * @author jbf + */ +public class Das2ServerCatalogSource { + + + private static JSONObject makeOne( String id, String description ) throws JSONException { + JSONObject jo= new JSONObject(); + jo.setEscapeForwardSlashAlways(false); + jo.put("id",id); + if ( description.length()>0 ) { + jo.put("description", description ); + } + return jo; + } + /** + * config is a file located relative to this source. 
+ * @param config the configuration json file + * @return the catalog response + * @throws java.io.IOException + * @throws org.codehaus.jettison.json.JSONException + */ + public static String getCatalog( String config ) throws IOException, JSONException { + URL url= Das2ServerCatalogSource.class.getResource(config); + String configJson= SourceUtil.getAllFileLines(url); + JSONObject jo = new JSONObject(configJson); + String das2server= jo.getString("server"); + JSONArray includeRegexArray= jo.getJSONArray("include"); // default is to include everything + JSONArray excludeRegexArray= jo.optJSONArray("exclude"); // default is to exclude nothing + + Pattern[] includePatternArray; + if ( includeRegexArray==null ) { + includePatternArray= new Pattern[0]; + } else { + includePatternArray= new Pattern[includeRegexArray.length()]; + } + for ( int i=0; i lines= SourceUtil.getFileLines( new URL(das2server+"?server=list") ); + ArrayList dsids= new ArrayList<>(); + while ( lines.hasNext() ) { + String line= lines.next(); + String id,description; + int i= line.indexOf("|"); + if ( i>-1 ) { + id = line.substring(0,i); + description= line.substring(i+1); + } else { + id= line.trim(); + description= ""; + } + if ( id.endsWith("/") ) { + continue; + } + + boolean include= false; + if ( includePatternArray.length==0 ) { + include= true; + } else { + for ( Pattern p: includePatternArray ) { + if ( p.matcher(id).matches() ) include=true; + } + } + if ( include ) { + if ( excludePatternArray.length>0 ) { + for ( Pattern p: excludePatternArray ) { + if ( p.matcher(id).matches() ) include=false; + } + } + } + if ( include ) { + dsids.add(makeOne(id,description)); + } + } + JSONObject catalog= new JSONObject(); + catalog.put("catalog", dsids); + + return catalog.toString(); + } + + public static void main( String[] args ) throws IOException, JSONException { + //args= new String[] { "AC_AT_DEF" }; + args= new String[0]; + + if ( args.length==0 ) { + System.out.println( 
Das2ServerCatalogSource.getCatalog("jupiter-d2s.json") ); + } + } +} diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/das2server/Das2ServerDataSource.java b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/Das2ServerDataSource.java new file mode 100644 index 00000000..2c0e7898 --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/Das2ServerDataSource.java @@ -0,0 +1,102 @@ +package org.hapiserver.source.das2server; + +import java.io.IOException; +import java.io.InputStream; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Iterator; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.xpath.XPathExpressionException; +import org.codehaus.jettison.json.JSONException; +import org.codehaus.jettison.json.JSONObject; +import org.hapiserver.AbstractHapiRecordSource; +import org.hapiserver.HapiRecord; +import org.hapiserver.HapiRecordSource; +import org.hapiserver.TimeString; +import org.hapiserver.TimeUtil; +import org.hapiserver.source.SourceUtil; +import org.xml.sax.SAXException; + +/** + * Read data from the Das2Server to create a HapiRecordSource. This source + * only produces complete records (ParamSubset=False). 
+ * + * For reference, here is a Das2Server URL: + * https://jupiter.physics.uiowa.edu/das/server?server=dataset&start_time=2021-04-15T00%3A00%3A00.000Z&end_time=2021-04-17T00%3A00%3A00.000Z&dataset=Juno%2FEphemeris%2FJovicentric + * @author jbf + */ +public class Das2ServerDataSource extends AbstractHapiRecordSource { + + private static final Logger logger = Logger.getLogger("hapi.das2server"); + + private String id; + private String das2server; + + JSONObject info; + JSONObject data; + String root; + + private Das2ServerDataSource(String das2server, String id, JSONObject info) { + logger.entering("CdawebServicesHapiRecordSource", "constructor"); + this.das2server = das2server; + this.id = id; + this.info = info; + logger.exiting("CdawebServicesHapiRecordSource", "constructor"); + } + + /** + * the server will call this method to get the record source. + * + * @param config + * @param id + * @param info + * @return + * @throws java.io.IOException + * @throws org.codehaus.jettison.json.JSONException + */ + public static HapiRecordSource getRecordSource(String config, String id, JSONObject info) throws IOException, JSONException { + URL url = Das2ServerCatalogSource.class.getResource(config); + String configJson = SourceUtil.getAllFileLines(url); + JSONObject jo = new JSONObject(configJson); + String das2server = jo.getString("server"); + return new Das2ServerDataSource(das2server, id, info); + } + + @Override + public boolean hasGranuleIterator() { + return false; + } + + @Override + public boolean hasParamSubsetIterator() { + return false; + } + + @Override + public Iterator getIterator( TimeString start, TimeString stop) { + String sstart= TimeUtil.formatIso8601TimeBrief(start.toComponents()); + String sstop= TimeUtil.formatIso8601TimeBrief(stop.toComponents()); + StringBuilder url= new StringBuilder(this.das2server); + url.append("?server=dataset&dataset=").append(id).append("&start_time=").append(sstart).append("&end_time=").append(sstop); + int interval= 
info.optInt("x_interval",0); + if ( interval>0 ) { + url.append("&interval=").append(interval); + } + url.append("&ascii=true"); + InputStream ins; + try { + URL das2StreamUrl= new URL( url.toString() ); + ins= das2StreamUrl.openStream(); + return new Das2StreamParser(ins).getHapiRecordIterator(); + } catch ( MalformedURLException ex ) { + throw new RuntimeException(ex); + } catch (IOException | SAXException | ParserConfigurationException | XPathExpressionException ex) { + throw new RuntimeException(ex); + } + + + } + +} diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/das2server/Das2ServerInfoSource.java b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/Das2ServerInfoSource.java new file mode 100644 index 00000000..56e35211 --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/Das2ServerInfoSource.java @@ -0,0 +1,114 @@ + +package org.hapiserver.source.das2server; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; +import org.codehaus.jettison.json.JSONException; +import org.codehaus.jettison.json.JSONObject; +import org.hapiserver.source.SourceUtil; +import org.w3c.dom.Document; +import org.xml.sax.SAXException; + +/** + * https://jupiter.physics.uiowa.edu/das/server?server=dataset&dataset=Juno/JED/ElectronSpectra&start_time=2013-10-09&end_time=2013-10-10 + * @author jbf + */ +public class Das2ServerInfoSource { + + public static int STANDARD_STEP_SIZE_SECONDS=30; + + private static String[] parseTimeRange( String timerange ) { + int i= timerange.indexOf("|"); + if ( i>-1 ) { + timerange= timerange.substring(0,i); + } + String[] ss= timerange.split("to"); + return new String[] { ss[0].trim(), ss[1].trim() }; + } + + /** + * this is going to make a request to the Das2Server, and will take the Das2Stream 
response and convert it to + * a HAPI header. + * + * @param config + * @param id the dataset id + * @return + * @throws IOException + * @throws JSONException + * @throws SAXException + * @throws ParserConfigurationException + * @throws XPathExpressionException + */ + public static String getInfo( String config, String id ) throws IOException, JSONException, SAXException, ParserConfigurationException, XPathExpressionException { + URL url= Das2ServerCatalogSource.class.getResource(config); + String configJson= SourceUtil.getAllFileLines(url); + JSONObject jo = new JSONObject(configJson); + String das2server= jo.getString("server"); + String dsdf= SourceUtil.getAllFileLines( new URL(das2server+"?server=dsdf&dataset="+id ) ); + String xml= dsdf.substring(10); + Document doc= SourceUtil.readDocument(xml); + XPath xpath= XPathFactory.newInstance().newXPath(); + String testRange = xpath.compile("//stream/properties/@testRange").evaluate(doc); + String testInterval = xpath.compile("//stream/properties/@testInterval").evaluate(doc); + if ( testRange.length()==0 ) { + testRange= xpath.compile("//stream/properties/@exampleRange").evaluate(doc); + testInterval= xpath.compile("//stream/properties/@exampleInterval").evaluate(doc); + } + if ( testRange.length()==0 ) { + testRange= xpath.compile("//stream/properties/@exampleRange_00").evaluate(doc); + testInterval= xpath.compile("//stream/properties/@exampleInterval_00").evaluate(doc); + } + StringBuilder dataRequest= new StringBuilder(das2server).append("?server=dataset&dataset=").append(id); + if ( testRange.length()>0 ) { + String[] isoTimes= parseTimeRange(testRange); + dataRequest.append("&").append("start_time=").append(isoTimes[0]).append("&end_time=").append(isoTimes[1]); + if ( testInterval.length()>0 ) { + dataRequest.append("&").append("interval=").append(STANDARD_STEP_SIZE_SECONDS); + } + } else { + throw new IllegalArgumentException("unable to identify time range to download example."); + } + 
dataRequest.append("&ascii=true"); // TODO: support for native types + JSONObject result= parseDas2StreamForInfo( new URL(dataRequest.toString()) ); + if ( testInterval.length()>0 ) { + dataRequest.append("&interval=").append(testInterval); + result.put("x_interval",STANDARD_STEP_SIZE_SECONDS); + } + String[] validRange = parseTimeRange( xpath.compile("//stream/properties/@validRange").evaluate(doc) ); + result.put("startDate",validRange[0]); + result.put("stopDate", validRange[1]); + + String[] exampleRange = parseTimeRange( xpath.compile("//stream/properties/@exampleRange_00").evaluate(doc) ); + result.put("sampleStartDate",exampleRange[0]); + result.put("sampleStopDate", exampleRange[1]); + + return result.toString(4); + + } + + /** + * can we make a light-weight das2stream parser which gets the description from the header. For the Das2Servers, the contents + * of the stream is not known until a request is made. + * @param das2StreamUrl + * @return + */ + public static JSONObject parseDas2StreamForInfo( URL das2StreamUrl ) throws IOException, SAXException, ParserConfigurationException, XPathExpressionException { + InputStream ins= das2StreamUrl.openStream(); + JSONObject result= new Das2StreamParser(ins).getInfo(); + return result; + } + + public static void main( String[] args ) throws Exception { + //args= new String[] { "AC_AT_DEF" }; + args= new String[0]; + + if ( args.length==0 ) { + System.out.println( Das2ServerInfoSource.getInfo("jupiter-d2s.json","Juno/Ephemeris/Jovicentric") ); + } + } +} diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/das2server/Das2StreamParser.java b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/Das2StreamParser.java new file mode 100644 index 00000000..dc5e0e44 --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/Das2StreamParser.java @@ -0,0 +1,444 @@ + +package org.hapiserver.source.das2server; + +import java.io.FileInputStream; +import java.io.IOException; +import 
java.io.InputStream; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.nio.charset.Charset; +import java.util.Iterator; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; +import org.codehaus.jettison.json.JSONArray; +import org.codehaus.jettison.json.JSONException; +import org.codehaus.jettison.json.JSONObject; +import org.hapiserver.HapiRecord; +import org.hapiserver.TimeUtil; +import org.hapiserver.source.SourceUtil; +import org.w3c.dom.Document; +import org.w3c.dom.NamedNodeMap; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.SAXException; + +/** + * Simple Das2Stream parser. Note this can only parse Das2Streams with one kind of packet, which is + * also the only sort of stream which can be converted to a HAPI stream. 
+ * @author jbf + */ +public class Das2StreamParser implements Iterator { + + private final ReadableByteChannel channel; + + private Document packetDescriptor; + + private JSONObject infoResponse; + + // The number identifying the packet + private int packetId; + + // The number of bytes in each record + private int recordLengthBytes; + + // the bytes within the hapi record + byte[] recordBytes; + int[] offs; + int[] lens; + String[] d2stypes; + + private static final String UNIT_US2000 = "us2000"; + private static final String UNIT_T1970 = "t1970"; + private static final String UNIT_MS1970 = "ms1970"; + + String units; + + private HapiRecord nextRecord; + + + public Das2StreamParser( InputStream ins ) throws IOException, SAXException, ParserConfigurationException, XPathExpressionException { + this.channel= Channels.newChannel(ins); + byte[] b4bb= new byte[4]; + ByteBuffer b4= ByteBuffer.wrap(b4bb); + this.channel.read(b4); + if ( b4.get(0)!='[' || b4.get(3)!=']' ) { + throw new IllegalArgumentException("It was not a stream, expected [xx]"); + } + byte[] b6bb= new byte[6]; + ByteBuffer b6= ByteBuffer.wrap(b6bb); + this.channel.read(b6); + int len= Integer.parseInt( new String(b6bb) ); + ByteBuffer bn= ByteBuffer.allocateDirect(len); + this.channel.read(bn); + + b4= ByteBuffer.wrap(b4bb); + this.channel.read(b4); + if ( b4.get(0)!='[' || b4.get(3)!=']' ) { + throw new IllegalArgumentException("It was not a stream, expected [xx]"); + } + String spacketId = new String(b4bb,1,2); + packetId= Integer.parseInt( spacketId ); + + b6bb= new byte[6]; + b6= ByteBuffer.wrap(b6bb); + this.channel.read(b6); + len= Integer.parseInt( new String(b6bb) ); + + byte[] bnbb= new byte[len]; + bn= ByteBuffer.wrap(bnbb); + this.channel.read(bn); + + String xmlString= new String(bnbb,Charset.forName("UTF-8")); + packetDescriptor= SourceUtil.readDocument(xmlString); + + XPath xpath= XPathFactory.newInstance().newXPath(); + String u= (String) xpath.evaluate( "//packet/x/units", 
packetDescriptor, XPathConstants.STRING ); + units= UNIT_US2000; + + recordLengthBytes= getRecordLengthBytes(packetDescriptor); + + // read the first record + nextRecord= setUpNextRecord(); + + } + + private HapiRecord setUpNextRecord() throws IOException { + byte[] b4bb= new byte[4]; + ByteBuffer b4= ByteBuffer.wrap(b4bb); + int bytesRead= this.channel.read(b4); + if ( bytesRead==-1 ) { + return null; + } + while ( b4.remaining()>0 ) this.channel.read(b4); + if ( b4.get(0)!=':' || b4.get(3)!=':' ) { + return null; + } + + int recordLength= recordLengthBytes; + recordBytes= new byte[recordLength]; + + ByteBuffer bn= ByteBuffer.wrap(recordBytes); + while ( bn.remaining()>0 ) this.channel.read(bn); + + return parseNextRecord(recordBytes); + + } + /** + * convert the Das2Stream data type to HAPI type + * @param das2streamType + * @return + */ + private String convertType( String das2streamType ) { + if ( das2streamType.startsWith("time") ) { + return "isotime"; + } else if ( das2streamType.startsWith("ascii") ) { + return "double"; + } else if ( das2streamType.startsWith("little_endian_real4") ) { + return "double"; + } else if ( das2streamType.startsWith("little_endian_real8") ) { + return "double"; + } else { + throw new IllegalArgumentException("unsupported type: "+das2streamType); + } + } + + private int getBytesFor( String das2streamType ) { + if ( das2streamType.startsWith("time") ) { + return Integer.parseInt(das2streamType.substring(4)); + } else if ( das2streamType.startsWith("ascii") ) { + return Integer.parseInt(das2streamType.substring(5)); + } else if ( das2streamType.startsWith("little_endian_real4") ) { + return 4; + } else if ( das2streamType.startsWith("little_endian_real8") ) { + return 8; + } else { + throw new IllegalArgumentException("unsupported type: "+das2streamType); + } + } + + private int getRecordLengthBytes( Document packetDescriptor ) throws XPathExpressionException { + XPath xpath= XPathFactory.newInstance().newXPath(); + NodeList nl= 
(NodeList) xpath.evaluate( "//packet/*", packetDescriptor, XPathConstants.NODESET ); + int recordLengthBytes= 0; + + offs= new int[nl.getLength()]; + lens= new int[nl.getLength()]; + d2stypes= new String[nl.getLength()]; + + for ( int i=0; i getHapiRecordIterator() { + return this; + } + + @Override + public boolean hasNext() { + return nextRecord!=null; + } + + @Override + public HapiRecord next() { + HapiRecord r= nextRecord; + + try { + nextRecord = setUpNextRecord(); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + + return r; + + } + + public static void main( String[] args ) throws Exception { + InputStream ins; + String surl; + args= new String[] {"test2"}; + if ( args[0].equals("test1") ) { + surl= "https://jupiter.physics.uiowa.edu/das/server" + + "?server=dataset" + + "&dataset=Juno/Ephemeris/EuropaCoRotational" + + "&start_time=2021-04-15T00:00Z" + + "&end_time=2021-04-17T00:00Z" + + "&interval=600"; + ins= new URL(surl).openStream(); + } else if ( args[0].equals("test2") ) { + // wget -O - 'https://planet.physics.uiowa.edu/das/das2Server?server=dataset&start_time=2000-01-01T00:00:00.000Z&end_time=2000-01-02T00:00:00.000Z&resolution=50.58548009367681&dataset=Voyager/1/PWS/SpecAnalyzer-4s-Efield&ascii=true' + https://planet.physics.uiowa.edu/das/das2Server?server=dataset&start_time=2000-01-01T00%3A00%3A00.000Z&end_time=2000-01-02T00%3A00%3A00.000Z&resolution=50.58548009367681&dataset=Voyager%2F1%2FPWS%2FSpecAnalyzer-4s-Efield + surl= "https://planet.physics.uiowa.edu/das/das2Server" + + "?server=dataset" + + "&dataset=Voyager/1/PWS/SpecAnalyzer-4s-Efield" + + "&start_time=2000-01-01T00:00Z" + + "&end_time=2000-01-02T00:00Z" + + "&ascii=true"; + ins= new URL(surl).openStream(); + } else { + throw new IllegalArgumentException("bad arg1"); + } + Das2StreamParser p= new Das2StreamParser(ins); + System.out.println( p.getInfo().toString(4) ); + Iterator reciter= p.getHapiRecordIterator(); + while ( reciter.hasNext() ) { + HapiRecord 
rec=reciter.next(); + System.out.println(rec.getIsoTime(0)); + } + } +} diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/das2server/catalog.json b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/catalog.json new file mode 100644 index 00000000..de3a34ae --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/catalog.json @@ -0,0 +1,26 @@ +{ + "HAPI": "3.0", + "catalog": [ + { + "x_group_id": "das2server", + "x_source": "classpath", + "x_class": "org.hapiserver.source.das2server.Das2ServerCatalogSource", + "x_method": "getCatalog", + "x_args": [ "planet-d2s.json" ], + "x_config": { + "info": { + "x_source": "classpath", + "x_class": "org.hapiserver.source.das2server.Das2ServerInfoSource", + "x_method": "getInfo", + "x_args": ["planet-d2s.json","${id}"] + }, + "data": { + "x_source": "classpath", + "x_class": "org.hapiserver.source.das2server.Das2ServerDataSource", + "x_method": "getRecordSource", + "x_args": ["planet-d2s.json","${id}","${info}"] + } + } + } + ] +} \ No newline at end of file diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/das2server/dsdfResponse.xml b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/dsdfResponse.xml new file mode 100644 index 00000000..760d763d --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/dsdfResponse.xml @@ -0,0 +1,24 @@ + + + \ No newline at end of file diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/das2server/jupiter-d2s.json b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/jupiter-d2s.json new file mode 100644 index 00000000..69de8840 --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/jupiter-d2s.json @@ -0,0 +1,4 @@ +{ + "server":"https://jupiter.physics.uiowa.edu/das/server", + "include":[ "Juno/Ephemeris/.*", "Juno/JED/ElectronSpectra" ] +} diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/das2server/planet-d2s.json 
b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/planet-d2s.json new file mode 100644 index 00000000..22d1da97 --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/das2server/planet-d2s.json @@ -0,0 +1,4 @@ +{ + "server":"https://planet.physics.uiowa.edu/das/das2Server", + "include":[ "Voyager/1/PWS/SpecAnalyzer-4s-Efield", "Voyager/2/PWS/SpecAnalyzer-4s-Efield" ] +} diff --git a/HapiServerBase/src/org/hapiserver/source/package.html b/HapiServerBase/src/main/java/org/hapiserver/source/package.html similarity index 100% rename from HapiServerBase/src/org/hapiserver/source/package.html rename to HapiServerBase/src/main/java/org/hapiserver/source/package.html diff --git a/HapiServerBase/src/org/hapiserver/source/tap/CefFileIterator.java b/HapiServerBase/src/main/java/org/hapiserver/source/tap/CefFileIterator.java similarity index 89% rename from HapiServerBase/src/org/hapiserver/source/tap/CefFileIterator.java rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/CefFileIterator.java index 3e53dfbe..ea86974f 100644 --- a/HapiServerBase/src/org/hapiserver/source/tap/CefFileIterator.java +++ b/HapiServerBase/src/main/java/org/hapiserver/source/tap/CefFileIterator.java @@ -22,13 +22,15 @@ import java.util.logging.Level; import java.util.logging.Logger; import java.util.zip.GZIPOutputStream; +import org.codehaus.jettison.json.JSONException; +import org.codehaus.jettison.json.JSONObject; import org.hapiserver.AbstractHapiRecord; import org.hapiserver.HapiRecord; import org.hapiserver.source.SourceUtil; /** - * make CEF reader which provides records as the CEF is read in. + * make CEF reader which provides HapiRecords as the CEF is read in. 
* * @author jbf */ @@ -38,13 +40,20 @@ public Cef getCef() { return cef; } - public CefFileIterator(ReadableByteChannel lun) throws IOException { + private JSONObject info; + + public CefFileIterator(ReadableByteChannel lun,JSONObject info) throws IOException { + if ( info==null ) { + throw new NullPointerException("info is null"); + } + for (int i = 0; i < doParse.length; i++) { doParse[i] = true; } this.lun = lun; + this.info= info; CefReaderHeader readerh = new CefReaderHeader(); cef = readerh.read(lun); @@ -65,6 +74,25 @@ public CefFileIterator(ReadableByteChannel lun) throws IOException { private ReadableByteChannel lun; + /** + * tri-state is null if not yet known, then either True or False. + */ + Boolean isIsoRange= null; + + /** + * Check to see if record contains just "END_DATA". + * C3_CQ_EDI_ANOMALY_AE has " END_DATA" + * @param record + * @return true of the record contains just "END_DATA" + */ + private boolean checkEndData(ByteBuffer record) { + byte[] bb= record.duplicate().array(); + String buff= new String(bb,record.position(),record.limit()); + + buff= buff.trim(); + return buff.startsWith("END_DATA"); + } + protected static class GlobalStruct { //String name; @@ -498,7 +526,9 @@ private HapiRecord parseRecord(ByteBuffer record) { ParamStruct p = cef.parameters.get(key); int index= p.cefFieldPos[0]; if (index==-1 ) { // Non-record-varying - columnIndices.add(Collections.singletonList(-i)); + if ( ((String[])p.entries.get("DATA")).length==1 ) { + columnIndices.add(Collections.singletonList(-i)); + } String[] nonRecordVaryingValues= (String[])p.entries.get("DATA"); vfields.put( -i, nonRecordVaryingValues ); } else { @@ -513,102 +543,33 @@ private HapiRecord parseRecord(ByteBuffer record) { componentIndices.add(index+iComponent); ffields.add(f1s[if1s++]); } + columnIndices.add(componentIndices); + + if ( i==0 ) { + try { + if ( isIsoRange==null ) { + if ( info.getJSONArray("parameters").getJSONObject(0).has("x_type") ) { + isIsoRange= 
"ISO_TIME_RANGE".equals( info.getJSONArray("parameters").getJSONObject(0).getString("x_type") ); + } else { + isIsoRange= false; + } + } + if ( isIsoRange ) { + columnIndices.add(componentIndices); // use the same twice + } + } catch (JSONException ex) { + Logger.getLogger(CefFileIterator.class.getName()).log(Level.SEVERE, null, ex); + } + } } } fields= ffields.toArray( new String[ffields.size()] ); - + // columnIndices maps from parameter number to CEF file column(s). If it is // -1, it means the data is constant, and idx will refer to the column in the CEF file. - return new AbstractHapiRecord() { - @Override - public int length() { - return columnIndices.size(); - } - - @Override - public String getIsoTime(int i) { - int idx= columnIndices.get(i).get(0); - String field = fields[idx].trim(); - if (field.length() > 45) { //TODO: kludge for time ranges. See https://github.com/hapi-server/server-java/issues/22 - int is1 = field.indexOf("/"); - if (is1 > 0) { - field = field.substring(0, 24); - if ( !field.endsWith("Z") ) field=field+"Z"; - } - } - return field; - } - - @Override - public String getString(int i) { - return getAsString(i); - } - - @Override - public int getInteger(int i) { - return Integer.parseInt(getAsString(i)); - } - - @Override - public int[] getIntegerArray(int i) { - String[] stringArray = getStringArray(i); - int[] intArray = new int[stringArray.length]; - for (int iField = 0; iField < stringArray.length; iField++) { - intArray[iField] = Integer.parseInt(stringArray[iField]); - } - return intArray; - } - - @Override - public double[] getDoubleArray(int i) { - String[] stringArray = getStringArray(i); - double[] doubleArray = new double[stringArray.length]; - for (int iField = 0; iField < stringArray.length; iField++) { - doubleArray[iField] = Double.parseDouble(stringArray[iField]); - } - return doubleArray; - - } - - @Override - public String[] getStringArray(int i) { - List indices = columnIndices.get(i); - int firstIndex= indices.get(0); - 
if ( firstIndex<0 ) { - return vfields.get(firstIndex); - } else { - String[] vector = new String[indices.size()]; - int lastIndex= firstIndex+indices.size(); - for (int iField = firstIndex; iField < lastIndex; iField++) { - vector[iField-firstIndex] = fields[iField].trim(); - } - return vector; - } - } - - @Override - public double getDouble(int i) { - return Double.parseDouble(getAsString(i)); - } - - @Override - public String getAsString(int i) { - int idx= columnIndices.get(i).get(0); - if ( idx<0 ) { - return vfields.get(idx)[0]; - } else { - return fields[idx].trim(); - } - } - - @Override - public String toString() { - return getAsString(0) + " " + length() + " fields"; - } - - }; + return new CefHapiRecord(columnIndices, ffields, vfields, fields); } private void removeComments(ByteBuffer work_buffer, int work_size) { @@ -670,6 +631,11 @@ private int findDelimeterPosition(ByteBuffer work_buffer) { private HapiRecord nextRecord; + private static String peekWorkBuffer( ByteBuffer work_buffer ) { + byte[] bb= work_buffer.duplicate().array(); + return new String(bb); + } + private void readNextRecord() throws IOException { eor = cef.eor; @@ -680,8 +646,8 @@ private void readNextRecord() throws IOException { this.lun.close(); } else { - - // removeComments(work_buffer, work_buffer.limit() ); + removeComments(work_buffer, work_buffer.limit() ); + //peekWorkBuffer(work_buffer); int last_eor = getLastEor(work_buffer); //*** look for delimeters, EOR, comments, EOL etc int loop = 0; while (last_eor < 0) { // reload the work_buffer @@ -710,8 +676,11 @@ private void readNextRecord() throws IOException { work_buffer.flip(); } - //removeComments(work_buffer, work_buffer.limit() ); + removeComments(work_buffer, work_buffer.limit() ); + //peekWorkBuffer(work_buffer); + last_eor = getLastEor(work_buffer); + if (last_eor < 0) { //No full record available, so prepare the work buffer to read more work_buffer.position(work_buffer.limit()); @@ -724,6 +693,12 @@ private void 
readNextRecord() throws IOException { int stringLength = delimeterPos; ByteBuffer record = work_buffer.slice(); record.limit(stringLength); + //peekWorkBuffer(record); + if ( checkEndData(work_buffer) ) { + eof= true; + nextRecord= null; + return; + } nextRecord = parseRecord(record); logger.log(Level.FINER, "Read: {0}", nextRecord); if ( nextRecord.getIsoTime(0).compareTo("2100")>0 ) { // If it is appearently fill @@ -772,7 +747,7 @@ public static void main(String[] args) throws MalformedURLException, IOException String endDate; String urlString; - if ( false ) { + if ( true ) { dataSet= "C1_CP_FGM_20030303"; startDate = "2003-03-03T00:00:00Z"; endDate = "2003-03-03T02:00:00Z"; @@ -781,11 +756,16 @@ public static void main(String[] args) throws MalformedURLException, IOException startDate, endDate); } - if ( true ) { + if ( false ) { dataSet="CL_SP_WHI"; urlString = "https://csa.esac.esa.int/csa-sl-tap/data?RETRIEVAL_TYPE=product&RETRIEVAL_ACCESS=streamed&DATASET_ID=CL_SP_WHI&START_DATE=2012-12-25T00:00:00Z&END_DATE=2012-12-26T00:00:00Z"; } + if ( false ) { + dataSet="C3_CQ_EDI_ANOMALY_AE"; + urlString= "https://csa.esac.esa.int/csa-sl-tap/data?RETRIEVAL_TYPE=product&RETRIEVAL_ACCESS=streamed&DATASET_ID=C3_CQ_EDI_ANOMALY_AE&START_DATE=2020-12-31T00:00:00Z&END_DATE=2021-01-01T00:00:00Z"; + } + URL uu = new URL(urlString); logger.log(Level.FINE, "Opening connection to: {0}", uu); @@ -794,7 +774,7 @@ public static void main(String[] args) throws MalformedURLException, IOException ReadableByteChannel lun = Channels.newChannel(in); // String dataSet = "C1_CP_FGM_test"; - String outfileString = "/home/jbf/tmp/20230314/" + dataSet + "csv.gz"; + String outfileString = "/home/jbf/tmp/20230314/" + dataSet + ".csv.gz"; FileOutputStream fos = new FileOutputStream(outfileString); GZIPOutputStream gzos = new GZIPOutputStream(fos); @@ -803,7 +783,7 @@ public static void main(String[] args) throws MalformedURLException, IOException System.err.println("begin reading " + uu); long 
t0 = System.currentTimeMillis(); - CefFileIterator iter = new CefFileIterator(lun); + CefFileIterator iter = new CefFileIterator(lun,null); Cef cefSample = iter.getCef(); List headerList = new ArrayList<>(); headerList.add("Epoch"); diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/tap/CefHapiRecord.java b/HapiServerBase/src/main/java/org/hapiserver/source/tap/CefHapiRecord.java new file mode 100644 index 00000000..78469c79 --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/tap/CefHapiRecord.java @@ -0,0 +1,129 @@ +package org.hapiserver.source.tap; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.hapiserver.AbstractHapiRecord; + +/** + * + * @author jbf + */ +public class CefHapiRecord extends AbstractHapiRecord { + + List> columnIndices = new ArrayList<>(); + List ffields = new ArrayList<>(); + Map vfields = new HashMap<>(); + String[] fields; + + public CefHapiRecord(List> columnIndices, List ffields, Map vfields, String[] fields) { + this.columnIndices = columnIndices; + this.ffields = ffields; + this.vfields = vfields; + this.fields = fields; + } + + @Override + public int length() { + return columnIndices.size(); + } + + @Override + public String getIsoTime(int i) { + int idx = columnIndices.get(i).get(0); + String field = fields[idx].trim(); + if (field.length() > 45) { //TODO: kludge for time ranges. 
See https://github.com/hapi-server/server-java/issues/22 + int is1 = field.indexOf("/"); + if (is1 > 0) { + switch (i) { + case 0: + field = field.substring(0, is1); + if (!field.endsWith("Z")) { + field = field + "Z"; + } break; + case 1: + field = field.substring(is1 + 1); + if (!field.endsWith("Z")) { + field = field + "Z"; + } break; + default: + throw new IllegalArgumentException("time ranges only supported for first two fields"); + } + } + } + return field; + } + + @Override + public String getString(int i) { + return getAsString(i); + } + + @Override + public int getInteger(int i) { + return Integer.parseInt(getAsString(i)); + } + + @Override + public int[] getIntegerArray(int i) { + String[] stringArray = getStringArray(i); + int[] intArray = new int[stringArray.length]; + for (int iField = 0; iField < stringArray.length; iField++) { + intArray[iField] = Integer.parseInt(stringArray[iField]); + } + return intArray; + } + + @Override + public double[] getDoubleArray(int i) { + String[] stringArray = getStringArray(i); + double[] doubleArray = new double[stringArray.length]; + for (int iField = 0; iField < stringArray.length; iField++) { + doubleArray[iField] = Double.parseDouble(stringArray[iField]); + } + return doubleArray; + + } + + @Override + public String[] getStringArray(int i) { + List indices = columnIndices.get(i); + int firstIndex = indices.get(0); + if (firstIndex < 0) { + return vfields.get(firstIndex); + } else { + String[] vector = new String[indices.size()]; + int lastIndex = firstIndex + indices.size(); + for (int iField = firstIndex; iField < lastIndex; iField++) { + vector[iField - firstIndex] = fields[iField].trim(); + } + return vector; + } + } + + @Override + public double getDouble(int i) { + return Double.parseDouble(getAsString(i)); + } + + @Override + public String getAsString(int i) { + int idx = columnIndices.get(i).get(0); + if (idx < 0) { + return vfields.get(idx)[0]; + } else { + String s = fields[idx].trim(); + if 
(s.startsWith("\"") && s.endsWith("\"")) { + s = s.substring(1, s.length() - 1); + } + return s; + } + } + + @Override + public String toString() { + return getAsString(0) + " " + length() + " fields"; + } + +} diff --git a/HapiServerBase/src/org/hapiserver/source/tap/CsaCatalogExclude.txt b/HapiServerBase/src/main/java/org/hapiserver/source/tap/CsaCatalogExclude.txt similarity index 100% rename from HapiServerBase/src/org/hapiserver/source/tap/CsaCatalogExclude.txt rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/CsaCatalogExclude.txt diff --git a/HapiServerBase/src/org/hapiserver/source/tap/CsaInfoCatalogSource.java b/HapiServerBase/src/main/java/org/hapiserver/source/tap/CsaInfoCatalogSource.java similarity index 91% rename from HapiServerBase/src/org/hapiserver/source/tap/CsaInfoCatalogSource.java rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/CsaInfoCatalogSource.java index c4755bec..5f564894 100644 --- a/HapiServerBase/src/org/hapiserver/source/tap/CsaInfoCatalogSource.java +++ b/HapiServerBase/src/main/java/org/hapiserver/source/tap/CsaInfoCatalogSource.java @@ -158,6 +158,10 @@ public static int[] roundOut( String timeRange, int digit ) throws ParseExceptio t1[i]=0; t2[i]=0; } + if ( !isRoundUp ) { + boolean same= TimeUtil.eq(t1, t2); + if ( same ) isRoundUp=true; + } if ( isRoundUp ) t2[digit]++; return TimeUtil.createTimeRange( t1, t2 ); } @@ -226,6 +230,10 @@ public static String getInfo(String id) throws IOException { String[] constantData= new String[nl.getLength()]; JSONObject definitions = new JSONObject(); + + // this contains only the definitions used in the info, we discard unused definitions. 
+ JSONObject definitionsOut= new JSONObject(); + boolean hasDefinitions= false; for (int i = 0; i < nl.getLength(); i++) { // scan through looking for non-time-varying data Node p = nl.item(i); @@ -239,7 +247,7 @@ public static String getInfo(String id) throws IOException { String sdata= c.getTextContent(); constantData[i]= sdata; String[] ss= constantData[i].trim().split("\\s+"); - if ( ss.length>1 ) { + if ( ss.length>1 ) { // ytags vs constant like time_width. JSONObject data= new JSONObject(); data.put( "name", name ); JSONArray ja= new JSONArray(); @@ -280,6 +288,7 @@ public static String getInfo(String id) throws IOException { if ( name==null ) throw new IllegalArgumentException("unnamed parameter"); JSONObject parameter = new JSONObject(); + JSONObject secondParameter= null; for (int j = 0; j < n.getLength(); j++) { Node c = n.item(j); // parameter String nodeName= c.getNodeName(); @@ -294,6 +303,10 @@ public static String getInfo(String id) throws IOException { parameter.put("type", "isotime"); parameter.put("x_type", "ISO_TIME_RANGE"); parameter.put("units", "UTC" ); + secondParameter= new JSONObject(); + secondParameter.put("type","isotime"); + secondParameter.put("x_type", "ISO_TIME_RANGE"); + secondParameter.put("units", "UTC" ); break; case "FLOAT": case "DOUBLE": @@ -325,11 +338,21 @@ public static String getInfo(String id) throws IOException { String nodeName= c.getNodeName(); String nodeValue= c.getTextContent(); switch (c.getNodeName()) { + case "DATA": + String sdata= c.getTextContent(); + String[] ss= sdata.trim().split("\\s+"); + if ( ss.length==1 ) { // ytags vs constant like time_width. 
+ parameter.put("x_constant", ss[0] ); + } + break; case "PARAMETER_ID": if ( popLabel ) { nodeValue= nodeValue.substring(0,nodeValue.length()-2-id.length()); } parameter.put("name", nodeValue); + if ( secondParameter!=null ) { + secondParameter.put("name",nodeValue+"_stop"); + } break; case "UNITS": if ( isTime ) { @@ -353,7 +376,8 @@ public static String getInfo(String id) throws IOException { String type = parameter.optString("type", ""); if ( isTime || type.equals("string")) { if (parameter.optString("x_type", "").equals("ISO_TIME_RANGE")) { - parameter.put("length", 25); + parameter.put("length",25); + if ( secondParameter!=null ) secondParameter.put("length",25); } else { parameter.put("length", Integer.parseInt(nodeValue)); } @@ -374,6 +398,7 @@ public static String getInfo(String id) throws IOException { case "FILLVAL": if ( isTime ) { parameter.put("fill", JSONObject.NULL ); + if ( secondParameter!=null ) secondParameter.put("fill", JSONObject.NULL ); } else { parameter.put("fill", nodeValue); } @@ -396,9 +421,10 @@ public static String getInfo(String id) throws IOException { if ( depends.size()==sizes.size() ) { JSONObject bin= new JSONObject(); bin.setEscapeForwardSlashAlways(false); - if ( definitions.has( depends.get(ia) ) ) { + if ( definitions.has( depends.get(ia) ) && !definitions.has(name) ) { //TODO: I can't figure out why it always escapes the backslashes here bin.put( "$ref", "#/definitions/"+ depends.get(ia) ); + definitionsOut.put( depends.get(ia), definitions.get(depends.get(ia)) ); } else { bin.put( "name", depends.get(ia)+"__ref" ); bin.put( "centers", depends.get(ia) ); @@ -422,12 +448,15 @@ public static String getInfo(String id) throws IOException { } else { parameters.put(parameters.length(), parameter); + if (secondParameter!=null) { + parameters.put(parameters.length(),secondParameter); + } } } - if ( hasDefinitions ) { - jo.put( "definitions", definitions ); + if ( definitionsOut.length()>0 ) { + jo.put( "definitions", definitionsOut 
); } jo.put("parameters", parameters); @@ -451,7 +480,17 @@ public static String getInfo(String id) throws IOException { String sampleStartDate= ss[0]; String sampleStopDate= ss[1]; sampleStartDate= TimeUtil.reformatIsoTime( startDate, sampleStartDate ); - sampleStopDate= TimeUtil.reformatIsoTime( stopDate, sampleStopDate ); + sampleStopDate= TimeUtil.reformatIsoTime( startDate, sampleStopDate ); + if ( sampleStopDate.equals(sampleStartDate) ) { + try { + int[] hour= new int[7]; + hour[TimeUtil.COMPONENT_HOUR]=1; + sampleStartDate= TimeUtil.formatIso8601Time( TimeUtil.subtract(TimeUtil.parseISO8601Time(sampleStopDate),hour) ); + sampleStartDate= TimeUtil.reformatIsoTime( startDate, sampleStartDate); + } catch ( ParseException ex ) { + throw new RuntimeException(ex); + } + } if ( sampleStopDate.compareTo(startDate)<0 ) sampleStartDate= startDate; if ( sampleStopDate.compareTo(stopDate)>0 ) sampleStopDate= stopDate; try { // make sure sample times are no greater than one or two days. diff --git a/HapiServerBase/src/org/hapiserver/source/tap/TapAvailabilitySource.java b/HapiServerBase/src/main/java/org/hapiserver/source/tap/TAPAvailabilityDataSource.java similarity index 87% rename from HapiServerBase/src/org/hapiserver/source/tap/TapAvailabilitySource.java rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/TAPAvailabilityDataSource.java index 27bbd93b..f44a9341 100644 --- a/HapiServerBase/src/org/hapiserver/source/tap/TapAvailabilitySource.java +++ b/HapiServerBase/src/main/java/org/hapiserver/source/tap/TAPAvailabilityDataSource.java @@ -16,6 +16,7 @@ import org.hapiserver.AbstractHapiRecordSource; import org.hapiserver.CsvDataFormatter; import org.hapiserver.HapiRecord; +import org.hapiserver.TimeString; import org.hapiserver.TimeUtil; import org.hapiserver.source.AggregationGranuleIterator; import org.hapiserver.source.SourceUtil; @@ -24,7 +25,7 @@ * Source for availability * @author jbf */ -public class TapAvailabilitySource extends 
AbstractHapiRecordSource { +public class TAPAvailabilityDataSource extends AbstractHapiRecordSource { private static final Logger logger = Logger.getLogger("hapi.cef"); @@ -124,10 +125,18 @@ public static String getInfo( String idavail ) { } String id; + String tapServerURL; - public TapAvailabilitySource( String idavail ) { + /** + * creates a TapAvailabilitySource. Note info is ignored but may be used in the future. + * @param tapServerURL + * @param idavail + * @param info + */ + public TAPAvailabilityDataSource( String tapServerURL, String idavail, JSONObject info ) { int i= idavail.indexOf("/"); id= idavail.substring(0,i); + this.tapServerURL= tapServerURL; } @Override @@ -141,16 +150,16 @@ public boolean hasParamSubsetIterator() { } @Override - public Iterator getGranuleIterator(int[] start, int[] stop) { + public Iterator getGranuleIterator(TimeString start, TimeString stop) { return new AggregationGranuleIterator( "$Y-$m", start, stop ); } @Override - public Iterator getIterator(int[] start, int[] stop) { - String templ= "https://csa.esac.esa.int/csa-sl-tap/tap/sync?REQUEST=doQuery&LANG=ADQL&FORMAT=CSV&QUERY=SELECT+start_time,end_time,num_instances+FROM+csa.v_dataset_inventory+WHERE+dataset_id='%s'+AND+start_time>='%s'+AND+start_time<'%s'+AND+num_instances>0+ORDER+BY+start_time"; - - String startStr= TimeUtil.formatIso8601TimeBrief(start); - String stopStr= TimeUtil.formatIso8601TimeBrief(stop); + public Iterator getIterator(TimeString start, TimeString stop) { + String templ= tapServerURL + "tap/sync?REQUEST=doQuery&LANG=ADQL&FORMAT=CSV&QUERY=SELECT+start_time,end_time,num_instances+FROM+csa.v_dataset_inventory+WHERE+dataset_id='%s'+AND+start_time>='%s'+AND+start_time<'%s'+AND+num_instances>0+ORDER+BY+start_time"; + + String startStr= start.toString(); + String stopStr= stop.toString(); String url= String.format( templ, id, startStr, stopStr ); logger.log(Level.INFO, "readData URL: {0}", url); @@ -211,7 +220,7 @@ public int getInteger(int i) { * @return * 
@throws IOException */ - public static Iterator getData( String idavail, String start, String stop ) throws IOException { + public static Iterator getDataForTesting( String idavail, String start, String stop ) throws IOException { String templ= "https://csa.esac.esa.int/csa-sl-tap/tap/sync?REQUEST=doQuery&LANG=ADQL&FORMAT=CSV&QUERY=SELECT+start_time,end_time,num_instances+FROM+csa.v_dataset_inventory+WHERE+dataset_id='%s'+AND+start_time>='%s'+AND+start_time<'%s'+AND+num_instances>0+ORDER+BY+start_time"; @@ -246,7 +255,7 @@ public static void main( String[] args ) throws IOException { System.out.println( getInfo(args[0]) ); break; case 3: - Iterator iter = TapAvailabilitySource.getData( args[0], args[1], args[2] ); + Iterator iter = TAPAvailabilityDataSource.getDataForTesting( args[0], args[1], args[2] ); if ( iter.hasNext() ) { CsvDataFormatter format= new CsvDataFormatter(); try { diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/tap/TAPDataSource.java b/HapiServerBase/src/main/java/org/hapiserver/source/tap/TAPDataSource.java new file mode 100644 index 00000000..6d4c1a38 --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/tap/TAPDataSource.java @@ -0,0 +1,107 @@ +package org.hapiserver.source.tap; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.util.Iterator; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.codehaus.jettison.json.JSONObject; +import org.hapiserver.AbstractHapiRecordSource; +import org.hapiserver.HapiRecord; +import org.hapiserver.TimeString; +import org.hapiserver.TimeUtil; +import org.hapiserver.source.SourceUtil; + +/** + * provide the data stream from the ESAC TAP server + * @author jbf + */ +public class TAPDataSource extends AbstractHapiRecordSource { + + private static final Logger logger = Logger.getLogger("hapi.cef"); + + private final String 
tapServerURL; + private final String id; + private final JSONObject info; + + private InputStream in=null; + + public TAPDataSource(String tapServerURL, String id, JSONObject info) { + if ( info==null ) { + throw new NullPointerException("info is null, check configuration to make sure info is passed in."); + } + this.tapServerURL = tapServerURL; + this.id = id; + this.info= info; + } + + public TAPDataSource(String tapServerURL, String id) { + this(tapServerURL,id,null); + } + + @Override + public boolean hasGranuleIterator() { + return false; + } + + @Override + public boolean hasParamSubsetIterator() { + return false; + } + + @Override + public Iterator getIterator(TimeString start, TimeString stop) { + String startTimeString; + String stopTimeString; + int minimumDurationNs=200000000; + int[] duration = TimeUtil.subtract( stop.toComponents(), start.toComponents() ); + if ( duration[0]==0 && duration[1]==0 && duration[2]==0 + && duration[3]==0 && duration[4]==0 && duration[5]==0 + && duration[6] getIterator(int[] start, int[] stop) { - String startTimeString; - String stopTimeString; - int minimumDurationNs=200000000; - int[] duration = TimeUtil.subtract(stop, start); - if ( duration[0]==0 && duration[1]==0 && duration[2]==0 - && duration[3]==0 && duration[4]==0 && duration[5]==0 - && duration[6] iter= new TAPDataSource(tapServerURL, id).getIterator(start, stop); - while ( iter.hasNext() ) { - HapiRecord r= iter.next(); - System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); - } - } - - public static void mainCase2( String[] args ) { - String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; - String id= "D1_CG_STA-DWP_COMBI_PNG"; - int[] start= new int[] { 2012, 12, 25, 0, 0, 0, 0 }; - int[] stop= new int[] { 2012, 12, 26, 0, 0, 0, 0 }; - Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(start, stop); - while ( iter.hasNext() ) { - HapiRecord r= iter.next(); - System.err.println( String.format( "%s %d fields", 
r.getIsoTime(0), r.length() ) ); - } - } - - public static void mainCase3( String[] args ) throws ParseException { - String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; - String id="CM_CG_WBD_OVERVIEW_500_19_PNG"; - String tr= "2023-01-18T17:00/18:00"; - int[] timerange = TimeUtil.parseISO8601TimeRange(tr); - int[] start= Arrays.copyOfRange( timerange, 0, 7 ); - int[] stop= Arrays.copyOfRange( timerange, 7, 14 ); - Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(start, stop); - while ( iter.hasNext() ) { - HapiRecord r= iter.next(); - System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); - } - } - - public static void mainCase4( String[] args ) throws ParseException { - String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; - String id="C4_CP_CIS-CODIF_HS_O1_PEF"; - String tr= "2021-12-01T00:00/00:02"; - int[] timerange = TimeUtil.parseISO8601TimeRange(tr); - int[] start= Arrays.copyOfRange( timerange, 0, 7 ); - int[] stop= Arrays.copyOfRange( timerange, 7, 14 ); - Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(start, stop); - while ( iter.hasNext() ) { - HapiRecord r= iter.next(); - System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); - } - } - - public static void mainCase5( String[] args ) throws ParseException { - String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; - String id="C1_CP_PEA_3DRH_PSD"; - String tr= "2019-08-01T00:00/0:10"; - int[] timerange = TimeUtil.parseISO8601TimeRange(tr); - int[] start= Arrays.copyOfRange( timerange, 0, 7 ); - int[] stop= Arrays.copyOfRange( timerange, 7, 14 ); - Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(start, stop); - while ( iter.hasNext() ) { - HapiRecord r= iter.next(); - System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); - } - } - - /** - * This returns 769 fields while the info thinks it should be 897 (128 more). 
- * @see https://github.com/hapi-server/server-java/issues/21 - * @param args - * @throws ParseException - */ - public static void mainCase6( String[] args ) throws ParseException { - String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; - String id="C4_CP_STA_CS_NBR"; - String tr= "2022-07-31T11:00Z/2022-08-01T00:00Z"; - int[] timerange = TimeUtil.parseISO8601TimeRange(tr); - int[] start= Arrays.copyOfRange( timerange, 0, 7 ); - int[] stop= Arrays.copyOfRange( timerange, 7, 14 ); - Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(start, stop); - while ( iter.hasNext() ) { - HapiRecord r= iter.next(); - System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); - } - } - -/** - * This returns 769 fields while the info thinks it should be 897 (128 more). - * @see https://github.com/hapi-server/server-java/issues/21 - * @param args - * @throws ParseException - */ - public static void mainCase7( String[] args ) throws ParseException { - String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; - String id="C1_PP_EDI"; - String tr= "2018-10-24T18:59:56Z/2018-10-25T00:00:04Z"; - int[] timerange = TimeUtil.parseISO8601TimeRange(tr); - int[] start= Arrays.copyOfRange( timerange, 0, 7 ); - int[] stop= Arrays.copyOfRange( timerange, 7, 14 ); - Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(start, stop); - while ( iter.hasNext() ) { - HapiRecord r= iter.next(); - System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); - } - } - -/** - * This returns 769 fields while the info thinks it should be 897 (128 more). 
- * @see https://github.com/hapi-server/server-java/issues/21 - * @param args - * @throws ParseException - */ - public static void mainCase8( String[] args ) throws ParseException { - String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; - String id="C1_PP_WHI"; - String tr= "2012-12-15T20:00Z/2012-12-15T20:07Z"; - int[] timerange = TimeUtil.parseISO8601TimeRange(tr); - int[] start= Arrays.copyOfRange( timerange, 0, 7 ); - int[] stop= Arrays.copyOfRange( timerange, 7, 14 ); - Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(start, stop); - while ( iter.hasNext() ) { - HapiRecord r= iter.next(); - System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); - } - } - - public static void main(String[] args ) throws Exception { - //mainCase1(args); - //mainCase2(args); - //mainCase3(args); - //mainCase4(args); - //mainCase5(args); - //mainCase6(args); - //mainCase7(args); - mainCase8(args); - } -} +/* + * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license + * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template + */ +package org.hapiserver.source.tap; + +import java.io.IOException; +import java.net.URL; +import java.text.ParseException; +import java.util.Arrays; +import java.util.Iterator; +import java.util.logging.Level; +import org.hapiserver.HapiRecord; +import org.hapiserver.TimeString; +import org.hapiserver.TimeUtil; +import org.hapiserver.source.SourceUtil; + +/** + * + * @author jbf + */ +public class Tests { + + public static void mainCase1( String[] args ) { + String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; + String id= "CL_SP_WHI"; + TimeString start= new TimeString( new int[] { 2012, 12, 25, 0, 0, 0, 0 } ); + TimeString stop= new TimeString( new int[] { 2012, 12, 26, 0, 0, 0, 0 } ); + Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(start, stop); + while ( iter.hasNext() ) { + HapiRecord r= iter.next(); + 
System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); + } + } + + public static void mainCase2( String[] args ) { + String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; + String id= "D1_CG_STA-DWP_COMBI_PNG"; + TimeString start= new TimeString( new int[] { 2012, 12, 25, 0, 0, 0, 0 } ); + TimeString stop= new TimeString( new int[] { 2012, 12, 26, 0, 0, 0, 0 } ); + Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(start, stop); + while ( iter.hasNext() ) { + HapiRecord r= iter.next(); + System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); + } + } + + public static void mainCase3( String[] args ) throws ParseException { + String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; + String id="CM_CG_WBD_OVERVIEW_500_19_PNG"; + String tr= "2023-01-18T17:00/18:00"; + int[] timerange = TimeUtil.parseISO8601TimeRange(tr); + TimeString start= new TimeString( Arrays.copyOfRange( timerange, 0, 7 ) ); + TimeString stop= new TimeString( Arrays.copyOfRange( timerange, 7, 14 ) ); + Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(start, stop); + while ( iter.hasNext() ) { + HapiRecord r= iter.next(); + System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); + } + } + + public static void mainCase4( String[] args ) throws ParseException { + String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; + String id="C4_CP_CIS-CODIF_HS_O1_PEF"; + String tr= "2021-12-01T00:00/00:02"; + int[] timerange = TimeUtil.parseISO8601TimeRange(tr); + TimeString start= new TimeString( Arrays.copyOfRange( timerange, 0, 7 ) ); + TimeString stop= new TimeString( Arrays.copyOfRange( timerange, 7, 14 ) ); + Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(start, stop); + while ( iter.hasNext() ) { + HapiRecord r= iter.next(); + System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); + } + } + + public static void mainCase5( String[] args ) 
throws ParseException { + String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; + String id="C1_CP_PEA_3DRH_PSD"; + String tr= "2019-08-01T00:00/0:10"; + int[] timerange = TimeUtil.parseISO8601TimeRange(tr); + int[] start= Arrays.copyOfRange( timerange, 0, 7 ); + int[] stop= Arrays.copyOfRange( timerange, 7, 14 ); + Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(new TimeString( start), new TimeString( stop)); + while ( iter.hasNext() ) { + HapiRecord r= iter.next(); + System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); + } + } + + /** + * This returns 769 fields while the info thinks it should be 897 (128 more). + * @see https://github.com/hapi-server/server-java/issues/21 + * @param args + * @throws ParseException + */ + public static void mainCase6( String[] args ) throws ParseException { + String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; + String id="C4_CP_STA_CS_NBR"; + String tr= "2022-07-31T11:00Z/2022-08-01T00:00Z"; + int[] timerange = TimeUtil.parseISO8601TimeRange(tr); + int[] start= Arrays.copyOfRange( timerange, 0, 7 ); + int[] stop= Arrays.copyOfRange( timerange, 7, 14 ); + Iterator iter= new TAPDataSource(tapServerURL, id).getIterator( new TimeString(start), new TimeString( stop )); + while ( iter.hasNext() ) { + HapiRecord r= iter.next(); + System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); + } + } + +/** + * This returns 769 fields while the info thinks it should be 897 (128 more). 
+ * @see https://github.com/hapi-server/server-java/issues/21 + * @param args + * @throws ParseException + */ + public static void mainCase7( String[] args ) throws ParseException { + String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; + String id="C1_PP_EDI"; + String tr= "2018-10-24T18:59:56Z/2018-10-25T00:00:04Z"; + int[] timerange = TimeUtil.parseISO8601TimeRange(tr); + int[] start= Arrays.copyOfRange( timerange, 0, 7 ); + int[] stop= Arrays.copyOfRange( timerange, 7, 14 ); + Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(new TimeString( start), new TimeString( stop)); + while ( iter.hasNext() ) { + HapiRecord r= iter.next(); + System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); + } + } + +/** + * This returns 769 fields while the info thinks it should be 897 (128 more). + * @see https://github.com/hapi-server/server-java/issues/21 + * @param args + * @throws ParseException + */ + public static void mainCase8( String[] args ) throws ParseException { + String tapServerURL="https://csa.esac.esa.int/csa-sl-tap/"; + String id="C1_PP_WHI"; + String tr= "2012-12-15T20:00Z/2012-12-15T20:07Z"; + int[] timerange = TimeUtil.parseISO8601TimeRange(tr); + int[] start= Arrays.copyOfRange( timerange, 0, 7 ); + int[] stop= Arrays.copyOfRange( timerange, 7, 14 ); + Iterator iter= new TAPDataSource(tapServerURL, id).getIterator(new TimeString( start), new TimeString( stop)); + while ( iter.hasNext() ) { + HapiRecord r= iter.next(); + System.err.println( String.format( "%s %d fields", r.getIsoTime(0), r.length() ) ); + } + } + + public static void main(String[] args ) throws Exception { + //mainCase1(args); + //mainCase2(args); + //mainCase3(args); + //mainCase4(args); + //mainCase5(args); + //mainCase6(args); + //mainCase7(args); + mainCase8(args); + } +} diff --git a/HapiServerBase/src/org/hapiserver/source/tap/URIsToTest.txt b/HapiServerBase/src/main/java/org/hapiserver/source/tap/URIsToTest.txt similarity index 90% 
rename from HapiServerBase/src/org/hapiserver/source/tap/URIsToTest.txt rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/URIsToTest.txt index 6be0039e..e0c5841c 100644 --- a/HapiServerBase/src/org/hapiserver/source/tap/URIsToTest.txt +++ b/HapiServerBase/src/main/java/org/hapiserver/source/tap/URIsToTest.txt @@ -74,4 +74,10 @@ https://cottagesystems.com/server/debug/hapi/data?id=C4_CT_WHI_NATURAL_EVENT&par # Timeout. https://cottagesystems.com/server/debug/hapi/data?id=C3_CP_WHI_NATURAL¶meters=time_tags&time.min=2022-12-31T18:00Z&time.max=2023-01-01T00:00Z -https://cottagesystems.com/server/debug/hapi/data?id=C3_CP_WHI_WAVE_FORM_ENERGY&time.min=2022-12-31T18:00Z&time.max=2023-01-01T00:00Z&include=header \ No newline at end of file +https://cottagesystems.com/server/debug/hapi/data?id=C3_CP_WHI_WAVE_FORM_ENERGY&time.min=2022-12-31T18:00Z&time.max=2023-01-01T00:00Z&include=header + +# comment characters were not handled for some reason. Testing with C3_CQ_EDI_ANOMALY_AE and C1_CP_FGM_SPIN shows the code works. +https://csa.esac.esa.int/csa-sl-tap/data?RETRIEVAL_TYPE=product&RETRIEVAL_ACCESS=streamed&DATASET_ID=C3_CQ_EDI_ANOMALY_AE&START_DATE=2020-12-31T00:00:00Z&END_DATE=2021-01-01T00:00:00Z + +# the stream is terminated with "END_DATA" and no one was checking for this. 
+vap+hapi:http://localhost:8080/HapiServer/hapi?id=C3_CQ_EDI_ANOMALY_AE&timerange=2020 \ No newline at end of file diff --git a/HapiServerBase/src/main/java/org/hapiserver/source/tap/config.json b/HapiServerBase/src/main/java/org/hapiserver/source/tap/config.json new file mode 100644 index 00000000..5a6a0645 --- /dev/null +++ b/HapiServerBase/src/main/java/org/hapiserver/source/tap/config.json @@ -0,0 +1,51 @@ + { + "HAPI": "3.3", + "about": { + "id": "CSA", + "title": "Cluster Science Archive", + "contact": "https://www.cosmos.esa.int/web/csa/cluster-helpdesk", + "description": "This HAPI server is based on responses provided by the TAP server at https://csa.esac.esa.int/csa-sl-tap/", + "x_hapi_home": "/tmp/hapi-server/" + }, + "catalog": [ + { + "x_group_id": "csa", + "x_source": "classpath", + "x_class": "org.hapiserver.source.tap.CsaInfoCatalogSource", + "x_method": "getCatalog", + "x_config": { + "info": { + "x_source":"classpath", + "x_class":"org.hapiserver.source.tap.CsaInfoCatalogSource", + "x_method": "getInfo", + "x_args": [ "${id}" ] + }, + "data": { + "source": "classpath", + "class":"org.hapiserver.source.tap.TAPDataSource", + "args":["https://csa.esac.esa.int/csa-sl-tap/","${id}","${info}"] + } + } + }, + { + "x_group_id": "csa-availability", + "x_source": "classpath", + "x_class": "org.hapiserver.source.tap.TAPAvailabilityDataSource", + "x_method": "getCatalog", + "x_config": { + "info": { + "x_source":"classpath", + "x_class":"org.hapiserver.source.tap.TAPAvailabilityDataSource", + "x_method": "getInfo", + "x_args": [ "${id}" ] + }, + "data": { + "source": "classpath", + "class":"org.hapiserver.source.tap.TAPAvailabilityDataSource", + "args":["https://csa.esac.esa.int/csa-sl-tap/","${id}","${info}"] + } + } + } + ], + "x_version":"2026-03-28T07:54:00" + } \ No newline at end of file diff --git a/HapiServerBase/src/org/hapiserver/source/tap/esac.avail.C1.txt b/HapiServerBase/src/main/java/org/hapiserver/source/tap/esac.avail.C1.txt similarity index 
100% rename from HapiServerBase/src/org/hapiserver/source/tap/esac.avail.C1.txt rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/esac.avail.C1.txt diff --git a/HapiServerBase/src/org/hapiserver/source/tap/esac.avail.C2.txt b/HapiServerBase/src/main/java/org/hapiserver/source/tap/esac.avail.C2.txt similarity index 100% rename from HapiServerBase/src/org/hapiserver/source/tap/esac.avail.C2.txt rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/esac.avail.C2.txt diff --git a/HapiServerBase/src/org/hapiserver/source/tap/esac.avail.C3.txt b/HapiServerBase/src/main/java/org/hapiserver/source/tap/esac.avail.C3.txt similarity index 100% rename from HapiServerBase/src/org/hapiserver/source/tap/esac.avail.C3.txt rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/esac.avail.C3.txt diff --git a/HapiServerBase/src/org/hapiserver/source/tap/esac.avail.C4.txt b/HapiServerBase/src/main/java/org/hapiserver/source/tap/esac.avail.C4.txt similarity index 100% rename from HapiServerBase/src/org/hapiserver/source/tap/esac.avail.C4.txt rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/esac.avail.C4.txt diff --git a/HapiServerBase/src/org/hapiserver/source/tap/esac.avail.D1.txt b/HapiServerBase/src/main/java/org/hapiserver/source/tap/esac.avail.D1.txt similarity index 100% rename from HapiServerBase/src/org/hapiserver/source/tap/esac.avail.D1.txt rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/esac.avail.D1.txt diff --git a/HapiServerBase/src/org/hapiserver/source/tap/esac.avail.D2.txt b/HapiServerBase/src/main/java/org/hapiserver/source/tap/esac.avail.D2.txt similarity index 100% rename from HapiServerBase/src/org/hapiserver/source/tap/esac.avail.D2.txt rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/esac.avail.D2.txt diff --git a/HapiServerBase/src/org/hapiserver/source/tap/findSampleTimes.jy b/HapiServerBase/src/main/java/org/hapiserver/source/tap/findSampleTimes.jy similarity index 100% rename 
from HapiServerBase/src/org/hapiserver/source/tap/findSampleTimes.jy rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/findSampleTimes.jy diff --git a/HapiServerBase/src/org/hapiserver/source/tap/sampleTimes.txt b/HapiServerBase/src/main/java/org/hapiserver/source/tap/sampleTimes.txt similarity index 100% rename from HapiServerBase/src/org/hapiserver/source/tap/sampleTimes.txt rename to HapiServerBase/src/main/java/org/hapiserver/source/tap/sampleTimes.txt diff --git a/HapiServerBase/src/org/hapiserver/source/AggregatingIterator.java b/HapiServerBase/src/org/hapiserver/source/AggregatingIterator.java deleted file mode 100644 index 546bd225..00000000 --- a/HapiServerBase/src/org/hapiserver/source/AggregatingIterator.java +++ /dev/null @@ -1,96 +0,0 @@ - -package org.hapiserver.source; - -import java.util.Iterator; -import org.hapiserver.HapiRecord; -import org.hapiserver.HapiRecordSource; -import org.hapiserver.TimeUtil; - -/** - * Often we have granules of data which when "aggregated" together form the - * entire data set. For example, data might be stored in daily files, and to - * implement the HAPI server we must read each one. This class creates a - * HapiRecord iterator for any time range by combining each of the granules, - * so the reader can be simple. - * @author jbf - */ -public class AggregatingIterator implements Iterator { - - int[] granule; - Iterator granuleIterator; - Iterator hapiRecordIterator; - String[] parameters; - HapiRecordSource source; - - /** - * construct an iterator which will use the source create and go through a set of iterators, one for each granule. - * @param source the source of data - * @param start the start time - * @param stop the stop time - */ - public AggregatingIterator( HapiRecordSource source, int[] start, int[] stop ) { - this( source, start, stop, null ); - } - - /** - * construct an iterator which will use the source create and go through a set of iterators, one for each granule. 
- * @param source the source of data - * @param start the start time - * @param stop the stop time - * @param parameters null or the parameters to subset. - */ - public AggregatingIterator( HapiRecordSource source, int[] start, int[] stop, String[] parameters ) { - this.source= source; - this.granuleIterator= source.getGranuleIterator(start, stop); - this.parameters= parameters; - if ( granuleIterator.hasNext() ) { - this.granule= granuleIterator.next(); - if ( this.granule.length!=TimeUtil.TIME_RANGE_DIGITS ) { - throw new IllegalArgumentException("implementation error, granule iterator did not return 14 time range digits"); - } - if ( this.parameters==null ) { - this.hapiRecordIterator= source.getIterator( granule, TimeUtil.getStopTime(granule) ); - } else { - this.hapiRecordIterator= source.getIterator(granule, TimeUtil.getStopTime(granule), this.parameters ); - } - findNextRecord(); - } else { - this.granule= null; - } - } - - private void findNextRecord() { - while ( !this.hapiRecordIterator.hasNext() ) { - if ( !granuleIterator.hasNext() ) { - granule=null; // we're done - break; - } else { - granule= granuleIterator.next(); - if ( granule.length!=14 ) { - throw new IllegalArgumentException("granule length should be 14"); - } - } - TimeUtil.isValidTimeRange(granule); - if ( this.parameters==null ) { - hapiRecordIterator= source.getIterator( granule, TimeUtil.getStopTime(granule) ); - } else { - hapiRecordIterator= source.getIterator( granule, TimeUtil.getStopTime(granule), parameters ); - } - } - } - - @Override - public boolean hasNext() { - return granule!=null && hapiRecordIterator.hasNext(); - } - - @Override - public HapiRecord next() { - HapiRecord next= hapiRecordIterator.next(); - if ( !hapiRecordIterator.hasNext() ) { - findNextRecord(); - } - return next; - } - -} diff --git a/HapiServerBase/src/org/hapiserver/source/AggregationGranuleIterator.java b/HapiServerBase/src/org/hapiserver/source/AggregationGranuleIterator.java deleted file mode 100644 
index 1e473f1a..00000000 --- a/HapiServerBase/src/org/hapiserver/source/AggregationGranuleIterator.java +++ /dev/null @@ -1,48 +0,0 @@ - -package org.hapiserver.source; - -import java.text.ParseException; -import java.util.Iterator; -import org.hapiserver.TimeUtil; -import org.hapiserver.URITemplate; - -/** - * - * @author jbf - */ -public class AggregationGranuleIterator implements Iterator { - - String[] result; - int next=0; - URITemplate uriTemplate; - - public AggregationGranuleIterator( String fileFormat, int[] start, int[] stop ) { - this.uriTemplate= new URITemplate(fileFormat); - - try { - result= URITemplate.formatRange( fileFormat, - TimeUtil.isoTimeFromArray( start ), - TimeUtil.isoTimeFromArray( stop ) ); - } catch (ParseException ex) { - throw new RuntimeException(ex); - } - - - } - @Override - public boolean hasNext() { - return result.length>next; - } - - @Override - public int[] next() { - try { - int i= next; - next++; - return uriTemplate.parse(result[i]); - } catch (ParseException ex) { - throw new RuntimeException(ex); - } - } - -} diff --git a/HapiServerBase/src/org/hapiserver/source/cdaweb/CdawebAvailabilitySource.java b/HapiServerBase/src/org/hapiserver/source/cdaweb/CdawebAvailabilitySource.java deleted file mode 100644 index 375d9b23..00000000 --- a/HapiServerBase/src/org/hapiserver/source/cdaweb/CdawebAvailabilitySource.java +++ /dev/null @@ -1,398 +0,0 @@ - -package org.hapiserver.source.cdaweb; - -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URL; -import java.text.ParseException; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.logging.Level; -import java.util.logging.Logger; -import javax.xml.parsers.ParserConfigurationException; -import javax.xml.xpath.XPath; -import javax.xml.xpath.XPathConstants; -import javax.xml.xpath.XPathExpressionException; -import javax.xml.xpath.XPathFactory; -import org.codehaus.jettison.json.JSONArray; -import 
org.codehaus.jettison.json.JSONException; -import org.codehaus.jettison.json.JSONObject; -import org.hapiserver.AbstractHapiRecord; -import org.hapiserver.AbstractHapiRecordSource; -import org.hapiserver.CsvDataFormatter; -import org.hapiserver.HapiRecord; -import org.hapiserver.TimeUtil; -import org.hapiserver.source.AggregationGranuleIterator; -import org.hapiserver.source.SourceUtil; -import org.w3c.dom.Document; -import org.w3c.dom.NodeList; -import org.xml.sax.SAXException; - -/** - * return availability, showing when file granules are found. - * @author jbf - */ -public class CdawebAvailabilitySource extends AbstractHapiRecordSource { - - private static final Logger logger= Logger.getLogger("hapi.cdaweb"); - - /** - * the field containing the partial filename. - */ - public static int FIELD_FILENAME= 2; - - String spid; - int rootlen; - String root; - - public CdawebAvailabilitySource( String hapiHome, String idavail, JSONObject info, JSONObject data ) { - int i= idavail.indexOf("/"); - spid= idavail.substring(i+1); - try { - JSONArray array= info.getJSONArray("parameters"); - JSONObject p= array.getJSONObject(2); // the filename parameter - JSONObject stringType= p.getJSONObject("x_stringType"); - JSONObject urin= stringType.getJSONObject("uri"); - rootlen= urin.getString("base").length(); - if ( !urin.getString("base").contains("sp_phys/") ) { - rootlen= rootlen + 4; //TODO: Bernie's server says "sp_phys" while all.xml says "pub". 
- } - root= urin.getString("base"); - } catch (JSONException ex) { - throw new RuntimeException(ex); - } - - } - - /** - * return the root for references in availability response - * @return - */ - public String getRoot() { - return this.root; - } - - /** - * get the catalog - * @return - * @throws IOException - */ - public static String getCatalog() throws IOException { - try { - String catalogString= CdawebInfoCatalogSource.getCatalog(); - JSONObject catalogContainer= new JSONObject(catalogString); - JSONArray catalog= catalogContainer.getJSONArray("catalog"); - int n= catalog.length(); - for ( int i=0; i getGranuleIterator(int[] start, int[] stop) { - return new AggregationGranuleIterator( "$Y-$m", start, stop ); - } - - @Override - public boolean hasParamSubsetIterator() { - return false; - } - - @Override - public Iterator getIterator(int[] start, int[] stop) { - - try { - - String sstart= String.format( "%04d%02d%02dT%02d%02d%02dZ", start[0], start[1], start[2], start[3], start[4], start[5] ); - String sstop= String.format( "%04d%02d%02dT%02d%02d%02dZ", stop[0], stop[1], stop[2], stop[3], stop[4], stop[5] ); - - URL url = new URL(String.format( CdawebInfoCatalogSource.CDAWeb + "WS/cdasr/1/dataviews/sp_phys/datasets/%s/orig_data/%s,%s", spid, sstart, sstop) ); - - logger.log(Level.INFO, "readData URL: {0}", url); - - System.out.println("url: "+url ); - - try { - Document doc= SourceUtil.readDocument( url ); - XPathFactory factory = XPathFactory.newInstance(); - XPath xpath = (XPath) factory.newXPath(); - NodeList starts = (NodeList) xpath.evaluate( "//DataResult/FileDescription/StartTime", doc, XPathConstants.NODESET ); - NodeList stops = (NodeList) xpath.evaluate( "//DataResult/FileDescription/EndTime", doc, XPathConstants.NODESET ); - NodeList files = (NodeList) xpath.evaluate( "//DataResult/FileDescription/Name", doc, XPathConstants.NODESET ); - //NodeList lengths = (NodeList) xpath.evaluate( "//DataResult/FileDescription/Length", doc, 
XPathConstants.NODESET ); - return fromNodes( starts, stops, rootlen, files ); - } catch (IOException | SAXException | ParserConfigurationException | XPathExpressionException ex) { - throw new RuntimeException(ex); - } - } catch (MalformedURLException ex) { - throw new RuntimeException(ex); - } - } - - - private static Iterator fromNodes( final NodeList starts, final NodeList stops, int rootlen, final NodeList files ) { - final int len= starts.getLength(); - - return new Iterator() { - int i=0; - - @Override - public boolean hasNext() { - return i iter = - new CdawebAvailabilitySource("",args[0],info,null).getIterator( - TimeUtil.parseISO8601Time(args[1]), - TimeUtil.parseISO8601Time(args[2]) ); - if ( iter.hasNext() ) { - CsvDataFormatter format= new CsvDataFormatter(); - try { - format.initialize( new JSONObject( getInfo(args[0]) ),System.out,iter.next() ); - } catch (JSONException ex) { - throw new RuntimeException(ex); - } - do { - HapiRecord r= iter.next(); - format.sendRecord( System.out, r ); - } while ( iter.hasNext() ); - } - break; - default: - printHelp(); - } - - } - -} diff --git a/HapiServerBase/src/org/hapiserver/source/cdaweb/CdawebInfoCatalogSource.java b/HapiServerBase/src/org/hapiserver/source/cdaweb/CdawebInfoCatalogSource.java deleted file mode 100644 index 7a6ab799..00000000 --- a/HapiServerBase/src/org/hapiserver/source/cdaweb/CdawebInfoCatalogSource.java +++ /dev/null @@ -1,239 +0,0 @@ - -package org.hapiserver.source.cdaweb; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.net.MalformedURLException; -import java.net.URL; -import java.text.ParseException; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.logging.Level; -import java.util.logging.Logger; -import java.util.regex.Pattern; -import javax.xml.parsers.ParserConfigurationException; -import javax.xml.xpath.XPath; -import javax.xml.xpath.XPathConstants; -import 
javax.xml.xpath.XPathExpressionException; -import javax.xml.xpath.XPathFactory; -import org.codehaus.jettison.json.JSONArray; -import org.codehaus.jettison.json.JSONException; -import org.codehaus.jettison.json.JSONObject; -import org.hapiserver.TimeUtil; -import org.hapiserver.source.SourceUtil; -import org.w3c.dom.Document; -import org.w3c.dom.NamedNodeMap; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; -import org.xml.sax.SAXException; - -/** - * Returns catalog response based on all.xml, and info responses from - * either Bob's process, Nand's existing server, or a future implementation (and this - * documentation needs to be updated). - * @author jbf - * @see https://cdaweb.gsfc.nasa.gov/pub/catalogs/all.xml - */ -public class CdawebInfoCatalogSource { - - private static final Logger logger= Logger.getLogger("hapi.cdaweb"); - - public static final String CDAWeb = "https://cdaweb.gsfc.nasa.gov/"; - - protected static Map coverage= new HashMap<>(); - protected static Map filenaming= new HashMap<>(); - - private static String getURL( String id, Node dataset ) { - NodeList kids= dataset.getChildNodes(); - String lookfor= "ftp://cdaweb.gsfc.nasa.gov/pub/istp/"; - String lookfor2= "ftp://cdaweb.gsfc.nasa.gov/pub/cdaweb_data"; - for ( int j=0; j skips; - private static HashSet skipsPatterns; - - private static void readSkips() throws IOException { - logger.info("reading skips"); - skips= new HashSet<>(); - skipsPatterns= new HashSet<>(); - URL skipsFile= CdawebInfoCatalogSource.class.getResource("skips.txt"); - try (BufferedReader r = new BufferedReader(new InputStreamReader( skipsFile.openStream() ))) { - String s = r.readLine(); - while ( s!=null ) { - String[] ss= s.split(",",-2); - if ( ss.length==2 ) { - if ( ss[0].contains(".") ) { - skipsPatterns.add( Pattern.compile(ss[0]) ); - } else { - skips.add(ss[0].trim()); - } - } - s = r.readLine(); - } - } - } - - /** - * return the catalog response by parsing all.xml. 
- * @return - * @throws IOException - */ - public static String getCatalog() throws IOException { - readSkips(); - try { - URL url= new URL("https://cdaweb.gsfc.nasa.gov/pub/catalogs/all.xml"); - Document doc= SourceUtil.readDocument(url); - XPathFactory factory = XPathFactory.newInstance(); - XPath xpath = (XPath) factory.newXPath(); - NodeList nodes = (NodeList) xpath.evaluate( "//sites/datasite/dataset", doc, XPathConstants.NODESET ); - - int ic= 0; - JSONArray catalog= new JSONArray(); - for ( int i=0; i1 && Character.isDigit(st.charAt(0)) - && en.length()>1 && Character.isDigit(en.charAt(0)) - && nssdc_ID.contains("None") ) { - String name= attrs.getNamedItem("serviceprovider_ID").getTextContent(); - - if ( skips.contains(name) ) { - logger.log(Level.FINE, "skipping {0}", name); - continue; - } - boolean doSkip= false; - for ( Pattern p: skipsPatterns ) { - if ( p.matcher(name).matches() ) { - doSkip= true; - logger.log(Level.FINE, "skipping {0} because of match", name); - } - } - if ( doSkip ) continue; - - String sourceurl= getURL(name,node); - if ( sourceurl!=null && - ( sourceurl.startsWith( CDAWeb ) || - sourceurl.startsWith("ftp://cdaweb.gsfc.nasa.gov" ) ) && !sourceurl.startsWith("/tower3/private" ) ) { - JSONObject jo= new JSONObject(); - jo.put( "id", name ); - try { - st = TimeUtil.formatIso8601TimeBrief( TimeUtil.parseISO8601Time(st) ); - en = TimeUtil.formatIso8601TimeBrief( TimeUtil.parseISO8601Time(en) ); - String range= st+"/"+en; - jo.put( "x_range", range ); - CdawebInfoCatalogSource.coverage.put( name, range ); - } catch (ParseException ex) { - logger.log(Level.SEVERE, null, ex); - } - - catalog.put( ic++, jo ); - } - } - - } - - JSONObject result= new JSONObject(); - result.put( "catalog", catalog ); - return result.toString(4); - } catch (MalformedURLException | SAXException | ParserConfigurationException | XPathExpressionException | JSONException ex) { - throw new RuntimeException(ex); - } - - } - - /** - * return the info response 
generated by combining several sources. These info - * responses are stored (presently) at http://mag.gmu.edu/git-data/cdaweb-hapi-metadata/hapi/bw/CDAWeb/info/. - * @param id the dataset id. - * @param srcid nl (to call the old server) or bw (to use Bob's info calculations) or "" to use new code. - * @return the info response. - * @throws MalformedURLException - * @throws IOException - */ - public static String getInfo( String id, String srcid ) throws MalformedURLException, IOException { - int i= id.indexOf('_'); - String g; - if ( i>-1 ) { - g= id.substring(0,i); - } else { - throw new IllegalArgumentException("bad id: "+id); - } - if ( srcid.equals("bw") ) { - URL url = new URL( "http://mag.gmu.edu/git-data/cdaweb-hapi-metadata/hapi/bw/CDAWeb/info/"+id+".json" ); - String src= SourceUtil.getAllFileLines( url ); - try { - JSONObject jo= new JSONObject(src); - jo.put("x_info_author", "bw"); - return jo.toString(4); - } catch ( JSONException ex ) { - throw new IllegalArgumentException("bad thing that will never happen"); - } - } else if ( srcid.equals("nl") ) { - try { - URL url = new URL( "https://cdaweb.gsfc.nasa.gov/hapi/info?id="+id ); - String src= SourceUtil.getAllFileLines( url ); - JSONObject jo= new JSONObject(src); - jo.put("x_info_author", "nl"); - return jo.toString(4); - } catch (JSONException ex) { - throw new IllegalArgumentException("bad thing that will never happen"); - } - } else { - throw new IllegalArgumentException("info method not supported"); - } - - } - - public static void main( String[] args ) throws IOException { - //args= new String[] { "AC_AT_DEF" }; - args= new String[0]; - - if ( args.length==0 ) { - System.out.println( CdawebInfoCatalogSource.getCatalog() ); - } else if ( args.length==1 ) { - System.out.println( CdawebInfoCatalogSource.getInfo( args[0], "bw" ) ); - } - } - -} diff --git a/HapiServerBase/src/org/hapiserver/source/cdaweb/CdawebServicesHapiRecordIterator.java 
b/HapiServerBase/src/org/hapiserver/source/cdaweb/CdawebServicesHapiRecordIterator.java deleted file mode 100644 index fc8a1cfd..00000000 --- a/HapiServerBase/src/org/hapiserver/source/cdaweb/CdawebServicesHapiRecordIterator.java +++ /dev/null @@ -1,960 +0,0 @@ - -package org.hapiserver.source.cdaweb; - -import gov.nasa.gsfc.spdf.cdfj.CDFException; -import gov.nasa.gsfc.spdf.cdfj.CDFReader; -import java.io.BufferedReader; -import java.io.File; -import java.io.IOException; -import java.io.InputStreamReader; -import java.lang.management.ManagementFactory; -import java.lang.reflect.Array; -import java.net.MalformedURLException; -import java.net.URL; -import java.text.MessageFormat; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Iterator; -import java.util.logging.ConsoleHandler; -import java.util.logging.Formatter; -import java.util.logging.Level; -import java.util.logging.LogRecord; -import java.util.logging.Logger; -import javax.xml.parsers.ParserConfigurationException; -import javax.xml.xpath.XPath; -import javax.xml.xpath.XPathConstants; -import javax.xml.xpath.XPathExpressionException; -import javax.xml.xpath.XPathFactory; -import org.codehaus.jettison.json.JSONArray; -import org.codehaus.jettison.json.JSONException; -import org.codehaus.jettison.json.JSONObject; -import org.hapiserver.HapiRecord; -import org.hapiserver.TimeUtil; -import org.hapiserver.source.SourceUtil; -import org.w3c.dom.Document; -import org.xml.sax.SAXException; - -/** - * This uses CDAWeb Web Services described at https://cdaweb.gsfc.nasa.gov/WebServices/REST/. 
- * - * @author jbf - */ -public class CdawebServicesHapiRecordIterator implements Iterator { - - private static final Logger logger= Logger.getLogger("hapi.cdaweb"); - - static class TimerFormatter extends Formatter { - long t0= System.currentTimeMillis(); - String resetMessage= "ENTRY"; - - @Override - public String format(LogRecord record) { - if ( record.getMessage().equals(resetMessage) ) { - t0= record.getMillis(); - } - String message= MessageFormat.format( record.getMessage(), record.getParameters() ); - if ( message.equals("ENTRY") || message.equals("RETURN")) { - message= message + " " + record.getSourceClassName() + " " + record.getSourceMethodName(); - } - return String.format( "%06d: %s\n", record.getMillis()-t0, message ); - } - - } - static { - logger.setLevel(Level.FINER); - ConsoleHandler h= new ConsoleHandler(); - h.setFormatter( new TimerFormatter() ); - h.setLevel(Level.ALL); - logger.addHandler(h); - } - - HapiRecord nextRecord; - Adapter[] adapters; - - int index; - int nindex; - - /** - * one of these methods will be implemented by the adapter. - */ - private static abstract class Adapter { - public String adaptString( int index ) { - return null; - } - public double adaptDouble( int index ) { - return Double.NEGATIVE_INFINITY; - } - public int adaptInteger( int index ) { - return Integer.MIN_VALUE; - } - public double[] adaptDoubleArray( int index ) { - return null; - } - public int[] adaptIntegerArray( int index ) { - return null; - } - public String[] adaptStringArray( int index ) { - return null; - } - } - - private static class IsotimeEpochAdapter extends Adapter { - int julianDay; - long cdfTT2000= Long.MAX_VALUE; - - /** - * the time in milliseconds since year 1 for cdfEpoch. - */ - double baseTime; - - /** - * 1000000 for epoch, which is a milliseconds offset. 
- */ - double baseUnitsFactor; - String baseYYYYmmddTHH; - - double[] array; - - String format= ":%02d:%02d.%09d"; - int formatFactor= 1; // number by which to round - - private IsotimeEpochAdapter( double[] array, int length ) { - this.array= array; - double d= array[0]; - double us2000= ( d - 6.3113904E13 ) * 1000; // ms -> microseconds - double day2000= Math.floor( us2000 / 86400000000. ); // days since 2000-01-01. - double usDay= us2000 - day2000 * 86400000000.; // microseconds within this day. - double ms1970= day2000 * 86400000. + 946684800000.; - String baseDay= TimeUtil.fromMillisecondsSince1970((long)ms1970); - baseYYYYmmddTHH= baseDay.substring(0,10)+"T00"; - baseTime= (long)(d-usDay/1000); - switch ( length ) { // YYYY4hh7mm0HH3MM6SS9NNNNNNNNNZ - case 24: - format=":%02d:%02d.%03dZ"; - formatFactor= 1000000; - break; - case 27: - format=":%02d:%02d.%06dZ"; - formatFactor= 1000000; - break; - case 30: - format=":%02d:%02d.%09dZ"; - break; - default: - throw new IllegalArgumentException("not supported"); - } - } - - private String formatTime( double t ) { - double offset= t-baseTime; // milliseconds - while ( offset>=3600000. ) { - double hours= offset / 3600000.; - baseTime = baseTime + hours * 3600000.; - int hour= Integer.parseInt(baseYYYYmmddTHH.substring(11,13)); - baseYYYYmmddTHH= baseYYYYmmddTHH.substring(0,11)+String.format("%02d",(int)(hour+hours)); - baseYYYYmmddTHH= TimeUtil.normalizeTimeString(baseYYYYmmddTHH).substring(0,13); - offset= t-baseTime; - } - int nanos= (int)( (offset*1000000) % 1000000000. ); - offset= (int)( offset / 1000 ); // now it's in seconds. Note offset must be positive for this to work. 
- int seconds= (int)(offset % 60); - int minutes= (int)(offset / 60); // now it's in minutes - return baseYYYYmmddTHH + String.format( format, minutes, seconds, nanos/formatFactor ); - } - - @Override - public String adaptString( int index) { - return formatTime( array[index] ); - } - - } - - private static class DoubleDoubleAdapter extends Adapter { - double[] array; - - private DoubleDoubleAdapter( double[] array ) { - this.array= array; - } - - @Override - public double adaptDouble(int index) { - if ( index>=this.array.length ) { - throw new ArrayIndexOutOfBoundsException("can't find the double at position "+index); - } - return this.array[index]; - } - } - - private static class DoubleArrayDoubleAdapter extends Adapter { - double[][] array; - int n; // there's a weird bit of code where the Java library is giving me double arrays containing ints. - - private DoubleArrayDoubleAdapter( double[][] array ) { - this.array= array; - if ( array.length>0 ) { - this.n= array[0].length; - } - } - - @Override - public double[] adaptDoubleArray(int index) { - return this.array[index]; - } - - @Override - public int[] adaptIntegerArray(int index) { - int[] adapt= new int[n]; - double[] rec= this.array[index]; - for ( int i=0; i=3600000000000L ) { - double hours= offset / 3600000000000L; - baseTime = baseTime + hours * 3600000000000L; - int hour= Integer.parseInt(baseYYYYmmddTHH.substring(11,13)); - baseYYYYmmddTHH= baseYYYYmmddTHH.substring(0,11)+String.format("%02d",(int)(hour+hours)); - baseYYYYmmddTHH= TimeUtil.normalizeTimeString(baseYYYYmmddTHH).substring(0,13); - offset= (long)((t-baseTime)); - } - int nanos= (int)( (offset) % 1000000000. 
); - offset= offset / 1000000000; // now it's in seconds - int seconds= (int)(offset % 60); - int minutes= (int)(offset / 60); // now it's in minutes - return baseYYYYmmddTHH + String.format( ":%02d:%02d.%09dZ", minutes, seconds, nanos ); - } - - @Override - public String adaptString(int index) { - return formatTime( array[index] ); - } - } - - /** - * Returns the name of the integer data type, for example, 8 is type - * 8-byte integer (a.k.a. Java long), and 33 is CDF_TT2000. - * @param type the code for data type - * @return a name identifying the type. - * @see https://spdf.gsfc.nasa.gov/pub/software/cdf/doc/cdf380/cdf38ifd.pdf page 33. - */ - public static String nameForType(int type) { - switch (type) { - case 1: - return "CDF_INT1"; - case 41: - return "CDF_BYTE"; // 1-byte signed integer - case 2: - return "CDF_INT2"; - case 4: - return "CDF_INT4"; - case 8: - return "CDF_INT8"; - case 11: - return "CDF_UINT1"; - case 12: - return "CDF_UINT2"; - case 14: - return "CDF_UINT4"; - case 21: - return "CDF_REAL4"; - case 44: - return "CDF_FLOAT"; - case 22: - return "CDF_REAL8"; - case 45: - return "CDF_DOUBLE"; - case 31: - return "CDF_EPOCH"; - case 32: - return "CDF_EPOCH16"; // make of two CDF_REAL8, - case 33: - return "CDF_TT2000"; - case 51: - return "CDF_CHAR"; - case 52: - return "CDF_UCHAR"; - default: - return "???"; - } - } - - /** - * List of datasets which are known to be readable from the files, containing no virtual variables. Eventually - * there will be metadata in the infos which contains this information. 
- */ - private static final HashSet readDirect= new HashSet(); - static { - //readDirect.add("RBSP-A_DENSITY_EMFISIS-L4"); - //readDirect.add("RBSP-B_DENSITY_EMFISIS-L4"); - //readDirect.add("RBSP-A_MAGNETOMETER_4SEC-GEI_EMFISIS-L3"); - //readDirect.add("RBSP-B_MAGNETOMETER_4SEC-GEI_EMFISIS-L3"); - //readDirect.add("RBSPA_REL04_ECT-HOPE-PA-L3"); - //readDirect.add("RBSPB_REL04_ECT-HOPE-PA-L3"); - URL virt= CdawebServicesHapiRecordIterator.class.getResource("virtualVariables.txt"); - try ( BufferedReader reader = new BufferedReader( new InputStreamReader( virt.openStream() ) ) ) { - String line= reader.readLine(); - while ( line!=null ) { - if ( line.length()>0 && line.charAt(0)=='#' ) { - // skip comment line - } else { - String[] ss= line.split("\t"); - if ( ss[1].equals("0") ) { - readDirect.add(ss[0]); - } - } - line= reader.readLine(); - } - } catch ( IOException ex ) { - logger.log( Level.WARNING, ex.getMessage(), ex ); - } - logger.log(Level.INFO, "readDirect has {0} entries", readDirect.size()); - } - - /** - * return true if the data contain virtual variables which must be calculated by CDAWeb web services. This is - * slower than reading the files directly. Some virtual variables may be implemented within this server in the future. - * @param id the id, for example RBSP-A_DENSITY_EMFISIS-L4 - * @return true if web services must be used. - */ - private boolean mustUseWebServices( String id ) { - return !readDirect.contains(id); - } - - /** - * return either the URL of the CDF generated by the web services, or the URL of the CDF file in the https area. Maby - * CDFs contain virtual variables which are only computed within the IDL web services. When a file does not contain virtual - * variables (or in the future the virtual variable is trivial to compute), then a reference to the direct file is returned. 
- * @param id the dataset id, such as AC_OR_SSC or RBSP-A_DENSITY_EMFISIS-L4 - * @param info the info object - * @param start the seven-component start time - * @param stop the seven-component stop time - * @param params the list of parameters to read - * @param file null or the file which contains the data - * @return the URL of the file containing the data. - */ - private URL getCdfDownloadURL( String id, JSONObject info, int[] start, int[] stop, String[] params, String file ) throws MalformedURLException { - logger.entering("CdawebServicesHapiRecordIterator", "getCdfDownloadURL"); - String sstart= String.format( "%04d%02d%02dT%02d%02d%02dZ", start[0], start[1], start[2], start[3], start[4], start[5] ); - String sstop= String.format( "%04d%02d%02dT%02d%02d%02dZ", stop[0], stop[1], stop[2], stop[3], stop[4], stop[5] ); - - int iat= id.indexOf("@"); // multiple timetags cdf files will have @\d for each set of timetags. - if ( iat>0 ) { - id= id.substring(0,iat); - } - - if ( file==null || mustUseWebServices(id) ) { - - String ss; - if ( params.length==1 ) { - try { - // special case where we have to request some DATA variable, cannot just request time. - JSONArray parameters = info.getJSONArray("parameters"); - String dependent= parameters.getJSONObject(parameters.length()-1).getString("name"); - ss= dependent; - } catch (JSONException ex) { - throw new RuntimeException(ex); - } - } else { - ss= String.join(",", Arrays.copyOfRange( params, 1, params.length ) ); // CDAWeb WS will send time. 
- } - if ( params.length>2 || ( params.length==2 && !params[0].equals("Time") ) ) { - ss= "ALL-VARIABLES"; - } - - String surl= - String.format( "https://cdaweb.gsfc.nasa.gov/WS/cdasr/1/dataviews/sp_phys/datasets/%s/data/%s,%s/%s?format=cdf", - id, sstart, sstop, ss ); - - logger.log(Level.FINER, "request {0}", surl); - - try { - Document doc= SourceUtil.readDocument(new URL(surl)); - XPathFactory factory = XPathFactory.newInstance(); - XPath xpath = (XPath) factory.newXPath(); - String sval = (String) xpath.evaluate("/DataResult/FileDescription/Name/text()", doc, XPathConstants.STRING); - logger.exiting("CdawebServicesHapiRecordIterator", "getCdfDownloadURL"); - return new URL(sval); - } catch (XPathExpressionException | SAXException | IOException | ParserConfigurationException ex) { - throw new RuntimeException("unable to handle XML response", ex ); - } - - } else { - logger.exiting("CdawebServicesHapiRecordIterator", "getCdfDownloadURL"); - return new URL( file ); - } - - } - - /** - * return the processID (pid), or the fallback if the pid cannot be found. - * @param fallback the string (null is okay) to return when the pid cannot be found. - * @return the process id or the fallback provided by the caller. - * //TODO: Java9 has method for accessing process ID. - */ - public static String getProcessId(final String fallback) { - // Note: may fail in some JVM implementations - // therefore fallback has to be provided - - // something like '@', at least in SUN / Oracle JVMs - final String jvmName = ManagementFactory.getRuntimeMXBean().getName(); - final int index = jvmName.indexOf('@'); - - if (index < 1) { - // part before '@' empty (index = 0) / '@' not found (index = -1) - return fallback; - } - - try { - return Long.toString(Long.parseLong(jvmName.substring(0, index))); - } catch (NumberFormatException e) { - // ignore - } - return fallback; - } - - /** - * flatten 3-D array into 2-D. Thanks, Bard! 
- * @param array - * @return - */ - public static double[][] flatten(double[][][] array) { - int len1= array[0].length * array[0][0].length; - double[][] flattenedArray = new double[array.length][len1]; - int index; - for (int i = 0; i < array.length; i++) { - index=0; - for (int j = 0; j < array[i].length; j++) { - System.arraycopy(array[i][j], 0, flattenedArray[i], index, array[i][j].length); - index+=array[i][j].length; - } - } - - return flattenedArray; - } - - private double[][] flattenDoubleArray( Object array ) { - int numDimensions = 1; - Class componentType = array.getClass().getComponentType(); - while (componentType != double.class) { - numDimensions++; - componentType = componentType.getComponentType(); - } - switch (numDimensions) { - case 2: - return (double[][])array; - case 3: - return flatten((double[][][])array); - default: - throw new IllegalArgumentException("Not supported: rank 4"); - } - } - - /** - * return the record iterator for the dataset. This presumes that start and stop are based on the intervals - * calculated by CdawebServicesHapiRecordSource, and an incomplete set of records will be returned if this is not - * the case. The file, possibly calculated when figuring out intervals, can be provided as well, so that the - * web service identifying the file is only called once. - * @param id the dataset id, such as AC_OR_SSC or RBSP-A_DENSITY_EMFISIS-L4 - * @param info the info for this id - * @param start the start time - * @param stop the stop time - * @param params the parameters to read - * @param file the file, (or null if not known), of the data. - */ - public CdawebServicesHapiRecordIterator(String id, JSONObject info, int[] start, int[] stop, String[] params, String file ) { - try { - - logger.entering( CdawebServicesHapiRecordIterator.class.getCanonicalName(), "constructor" ); - - String ss= String.join(",", Arrays.copyOfRange( params, 1, params.length ) ); // CDAWeb WS will send time. 
- if ( params.length>2 || ( params.length==2 && !params[0].equals("Time") ) ) { - ss= "ALL-VARIABLES"; - } - - String sstart= String.format( "%04d%02d%02dT%02d%02d%02dZ", start[0], start[1], start[2], start[3], start[4], start[5] ); - String sstop= String.format( "%04d%02d%02dT%02d%02d%02dZ", stop[0], stop[1], stop[2], stop[3], stop[4], stop[5] ); - - String name= String.format( "%s_%s_%s_%s", id, sstart, sstop, ss ); - - String u= System.getProperty("user.name"); // getProcessId("000"); - File p= new File( "/home/tomcat/tmp/"+u+"/" ); - - if ( !p.exists() ) { - if ( !p.mkdirs() ) { - logger.warning("fail to make download area"); - } - } - - File tmpFile= new File( p, name + ".cdf" ); // madness... apparently tomcat can't write to /tmp - - if ( tmpFile.exists() && ( System.currentTimeMillis()-tmpFile.lastModified() )<(5*86400000) ) { - logger.fine( "no need to download file I already have loaded!"); - } else { - URL cdfUrl= getCdfDownloadURL(id, info, start, stop, params, file ); - logger.log(Level.FINER, "request {0}", cdfUrl); - tmpFile= SourceUtil.downloadFile( cdfUrl, tmpFile ); - logger.log(Level.FINER, "downloaded {0}", cdfUrl); - } - - adapters= new Adapter[params.length]; - - int nrec=-1; - - CDFReader reader= new CDFReader(tmpFile.toString()); - for ( int i=0; i0 ) { - switch (type) { - case 31: - adapters[i]= new IsotimeEpochAdapter( (double[])o, length ); - break; - case 33: - adapters[i]= new IsotimeTT2000Adapter( (long[])o, length ); - break; - default: - //TODO: epoch16. - throw new IllegalArgumentException("type not supported for column 0 time (cdf_epoch16"); - } - nindex= Array.getLength(o); - } else { - nindex=0; - } - - } else { - String param= params[i]; - int type= reader.getType(param); - Object o= reader.get(param); - if ( Array.getLength(o)!=nrec ) { - if ( Array.getLength(o)==1 ) { - // let's assume they meant for this to non-time varying. 
- Object newO= Array.newInstance( o.getClass().getComponentType(), nrec ); - Object v1= Array.get( o, 0 ); - for ( int irec=0; irec getGranuleIterator(int[] start, int[] stop) { - logger.entering("CdawebServicesHapiRecordSource","getGranuleIterator"); - String availInfo= CdawebAvailabilitySource.getInfo( "availability/"+id ); - JSONObject jsonObject; - try { - jsonObject = new JSONObject(availInfo); - } catch (JSONException ex) { - throw new RuntimeException(ex); - } - CdawebAvailabilitySource source= new CdawebAvailabilitySource( "notUsed", "availability/"+id, jsonObject, new JSONObject() ); - Iterator it = source.getIterator(start, stop); - this.root= source.getRoot(); - - availabilityIterator= new AvailabilityIterator(it); - logger.exiting("CdawebServicesHapiRecordSource","getGranuleIterator"); - return availabilityIterator; - } - - @Override - public boolean hasParamSubsetIterator() { - return true; - } - - @Override - public Iterator getIterator(int[] start, int[] stop, String[] params) { - logger.entering("CdawebServicesHapiRecordSource","getIterator"); - String f= this.root + availabilityIterator.getFile(); - - CdawebServicesHapiRecordIterator result=new CdawebServicesHapiRecordIterator(id, info, start, stop, params, f ); - - logger.exiting("CdawebServicesHapiRecordSource","getIterator"); - return result; - } - -} diff --git a/HapiServerBase/src/org/hapiserver/source/cdaweb/catalog.json b/HapiServerBase/src/org/hapiserver/source/cdaweb/catalog.json deleted file mode 100644 index 60f78491..00000000 --- a/HapiServerBase/src/org/hapiserver/source/cdaweb/catalog.json +++ /dev/null @@ -1,52 +0,0 @@ - { - "HAPI": "3.0", - "catalog": [ - { - "x_group_id": "cdaweb", - "_x_source": "spawn", - "_x_command": "wget -O - https://cdaweb.gsfc.nasa.gov/hapi/catalog", - "x_source": "classpath", - "x_class": "org.hapiserver.source.cdaweb.CdawebInfoCatalogSource", - "x_method": "getCatalog", - "x_config": { - "info": { - "x_source":"classpath", - 
"x_class":"org.hapiserver.source.cdaweb.CdawebInfoCatalogSource", - "x_method": "getInfo", - "x_args": [ "${id}" ] - }, - "_info": { - "source":"spawn", - "command":"wget -O - https://cdaweb.gsfc.nasa.gov/hapi/info?id=${id}" - }, - "data": { - "source": "classpath", - "class":"org.hapiserver.source.cdaweb.CdawebServicesHapiRecordSource" - } - } - }, - { - "x_group_id": "cdaweb_availability", - "x_source": "classpath", - "x_class": "org.hapiserver.source.cdaweb.CdawebAvailabilitySource", - "x_method": "getCatalog", - "x_config": { - "info": { - "x_source":"classpath", - "x_class":"org.hapiserver.source.cdaweb.CdawebAvailabilitySource", - "x_method": "getInfo", - "x_args": [ "${id}" ] - }, - "data": { - "x_source": "classpath", - "x_class":"org.hapiserver.source.cdaweb.CdawebAvailabilitySource" - } - } - } - - ], - "status": { - "code": 1200, - "message": "OK request successful" - } - } \ No newline at end of file diff --git a/HapiServerBase/src/org/hapiserver/source/cdaweb/sampleTimes.txt b/HapiServerBase/src/org/hapiserver/source/cdaweb/sampleTimes.txt deleted file mode 100644 index 1fbc97bf..00000000 --- a/HapiServerBase/src/org/hapiserver/source/cdaweb/sampleTimes.txt +++ /dev/null @@ -1,5 +0,0 @@ -# see https://github.com/autoplot/dev/blob/master/demos/2023/20230516/unplotCdawebAvail.jy -WI_SW_ION_DIST_SWE_FARADAY 1994-12-31T01:52:02.767Z 2023-05-03T00:18:40.461Z -WI_STRAHL0_SWE 1995-02-05T09:26:27.320Z 2001-07-23T21:43:03.285Z -WI_SOSP_3DP 1995-02-05T09:26:27.320Z 2023-06-08T07:53:05.014Z -WI_SOSD_3DP 1995-02-05T09:26:27.320Z 2023-06-08T07:53:05.014Z diff --git a/HapiServerBase/src/org/hapiserver/source/cdaweb/skips.txt b/HapiServerBase/src/org/hapiserver/source/cdaweb/skips.txt deleted file mode 100644 index 3489a195..00000000 --- a/HapiServerBase/src/org/hapiserver/source/cdaweb/skips.txt +++ /dev/null @@ -1,7 +0,0 @@ -AIM_CIPS_SCI_3A, Tami NetCDF file. -AMPTECCE_H0_MEPA, Nand's server fails to give response. 
-APOLLO12_.*, Doesn't show up on Nand's server but Autoplot can plot from CDAWeb. -APOLLO15_.*, Presumably same problem as APOLLO12. -BAR_2K_L2_FSPC, data contains repeat timetags. -BAR_2L_L2_MAGN, data contains repeat timetags. -AEROCUBE-6-B_DOSIMETER_L2, is.FileStructureOK(): The verifier should have enough information to make a request that returns data. diff --git a/HapiServerBase/src/org/hapiserver/source/tap/catalog.json b/HapiServerBase/src/org/hapiserver/source/tap/catalog.json deleted file mode 100644 index fe282c92..00000000 --- a/HapiServerBase/src/org/hapiserver/source/tap/catalog.json +++ /dev/null @@ -1,28 +0,0 @@ - { - "HAPI": "3.0", - "catalog": [ - { - "x_group_id": "csa", - "x_source": "classpath", - "x_class": "org.hapiserver.source.tap.CsaInfoCatalogSource", - "x_method": "getCatalog", - "x_config": { - "info": { - "x_source":"classpath", - "x_class":"org.hapiserver.source.tap.CsaInfoCatalogSource", - "x_method": "getInfo", - "x_args": [ "${id}" ] - }, - "data": { - "source": "classpath", - "class":"org.hapiserver.source.tap.TAPDataSource", - "args":["https://csa.esac.esa.int/csa-sl-tap/","${id}"] - } - } - } - ], - "status": { - "code": 1200, - "message": "OK request successful" - } - } \ No newline at end of file diff --git a/HapiServerBase/src/test/java/org/hapiserver/TimeStringTest.java b/HapiServerBase/src/test/java/org/hapiserver/TimeStringTest.java new file mode 100644 index 00000000..2d0ef64f --- /dev/null +++ b/HapiServerBase/src/test/java/org/hapiserver/TimeStringTest.java @@ -0,0 +1,31 @@ + +package org.hapiserver; + +import org.junit.Test; +import static org.junit.Assert.*; + +/** + * + * @author jbf + */ +public class TimeStringTest { + + public TimeStringTest() { + } + + + /** + * Test of toComponents method, of class TimeString. 
+ */ + @Test + public void testToComponents() { + System.out.println("toComponents"); + TimeString r= new TimeString("2043-04-05T23:13:02.123456789"); + int[] expResult = new int[] { 2043, 4, 5, 23, 13, 2, 123456789 }; + int[] result = r.toComponents(); + assertArrayEquals(expResult, result); + + } + + +} diff --git a/HapiServerBase/src/test/java/org/hapiserver/source/SourceUtilTest.java b/HapiServerBase/src/test/java/org/hapiserver/source/SourceUtilTest.java new file mode 100644 index 00000000..32bd52c8 --- /dev/null +++ b/HapiServerBase/src/test/java/org/hapiserver/source/SourceUtilTest.java @@ -0,0 +1,224 @@ +/* + * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license + * Click nbfs://nbhost/SystemFileSystem/Templates/UnitTests/JUnit4TestClass.java to edit this template + */ +package org.hapiserver.source; + +import java.io.File; +import java.io.InputStream; +import java.net.URL; +import java.util.Iterator; +import java.util.stream.IntStream; +import org.codehaus.jettison.json.JSONObject; +import org.hapiserver.HapiRecord; +import org.junit.Test; +import static org.junit.Assert.*; +import org.w3c.dom.Document; + +/** + * + * @author jbf + */ +public class SourceUtilTest { + + public SourceUtilTest() { + } + + + /** + * Test of getFileLines method, of class SourceUtil. + */ + @Test + public void testGetFileLines_File() throws Exception { + System.out.println("getFileLines"); + + File f = new File( SourceUtilTest.class.getResource( "/org/hapiserver/source/testjson.json").getFile() ); + Iterator result = SourceUtil.getFileLines(f); + int count=0; + while ( result.hasNext() ) { + count++; + result.next(); + } + assertEquals( 7, count ); + + } + + /** + * Test of getFileLines method, of class SourceUtil. 
+ */ + @Test + public void testGetFileLines_URL() throws Exception { + System.out.println("getFileLines"); + URL url = SourceUtilTest.class.getResource( "/org/hapiserver/source/testjson.json"); + Iterator result = SourceUtil.getFileLines(url); + int count=0; + while ( result.hasNext() ) { + count++; + result.next(); + } + assertEquals( 7, count ); + // TODO review the generated test code and remove the default call to fail. + } + + /** + * Test of getAllFileLines method, of class SourceUtil. + */ + @Test + public void testGetAllFileLines() throws Exception { + System.out.println("getAllFileLines"); + URL url = SourceUtilTest.class.getResource( "/org/hapiserver/source/testjson.json"); + String expResult = "{\n" + +" \"name\": \"Zach\",\n" + +" \"age\": 150,\n" + +" \"address\": {\n" + +" \"city\":\"Iowa City\",\n" + +" \"state\":\"Iowa\",\n" + +" \"country\":\"USA\"\n" + +" }\n" + +"}\n" + +""; + String result = SourceUtil.getAllFileLines(url); + assertEquals(expResult, result); + // TODO review the generated test code and remove the default call to fail. + } + + /** + * Test of getEmptyHapiRecordIterator method, of class SourceUtil. + */ + @Test + public void testGetEmptyHapiRecordIterator() { + System.out.println("getEmptyHapiRecordIterator"); + Iterator result = SourceUtil.getEmptyHapiRecordIterator(); + assertEquals( false, result.hasNext() ); + + } + + /** + * Test of getGranuleIterator method, of class SourceUtil. + */ + @Test + public void testGetGranuleIterator() { + System.out.println("getGranuleIterator"); + int[] start = new int[] { 2024,2,1,0,0,0,0 }; + int[] stop = new int[] { 2024,2,10,0,0,0,0 }; + int digit = 2; // count by days + int count = 0; + Iterator result = SourceUtil.getGranuleIterator(start, stop, digit); + while ( result.hasNext() ) { + count++; + result.next(); + } + assertEquals( 10, count ); + + } + + /** + * Test of guardedSplit method, of class SourceUtil. 
+ */ + @Test + public void testGuardedSplit() { + System.out.println("guardedSplit"); + String s = "2022-02-02T02:02:02,\"thruster,mode2,on\",2"; + char delim = ','; + char exclude1 = '\"'; + String[] expResult = new String[] { "2022-02-02T02:02:02", "\"thruster,mode2,on\"", "2" }; + String[] result = SourceUtil.guardedSplit(s, delim, exclude1); + assertArrayEquals(expResult, result); + } + + /** + * Test of stringSplit method, of class SourceUtil. + */ + @Test + public void testStringSplit() { + System.out.println("stringSplit"); + String s = "C3_PP_CIS,\"Proton and ion densities, bulk velocities and temperatures, spin resolution\""; + String[] expResult = new String[] { "C3_PP_CIS","Proton and ion densities, bulk velocities and temperatures, spin resolution" }; + String[] result = SourceUtil.stringSplit(s); + assertArrayEquals(expResult, result); + + } + + /** + * Test of readDocument method, of class SourceUtil. + */ + @Test + public void testReadDocument_URL() throws Exception { + System.out.println("readDocument"); + URL url = new URL( "https://raw.githubusercontent.com/hapi-server/server-java/refs/heads/main/HapiServerBase/nbproject/project.xml" ); + Document result = SourceUtil.readDocument(url); + assertEquals( 1, result.getChildNodes().getLength() ); + } + + /** + * Test of getInputStream method, of class SourceUtil. + */ + @Test + public void testGetInputStream() throws Exception { + System.out.println("getInputStream"); + URL url = new URL( "https://cottagesystems.com/server/ct/hapi/about"); + int ageSeconds = 0; + InputStream result = SourceUtil.getInputStream(url, ageSeconds); + assertNotNull(result); + result.close(); + + } + + /** + * Test of readDocument method, of class SourceUtil. 
+ */ + @Test + public void testReadDocument_URL_int() throws Exception { + System.out.println("readDocument"); + URL url = new URL( "https://raw.githubusercontent.com/hapi-server/server-java/refs/heads/main/HapiServerBase/nbproject/project.xml" ); + int ageSeconds = 0; + Document result = SourceUtil.readDocument(url, ageSeconds); + assertNotNull(result); + + } + + /** + * Test of readDocument method, of class SourceUtil. + */ + @Test + public void testReadDocument_String() throws Exception { + System.out.println("readDocument"); + String src = "\n" + +"Tove\n" + +"Jani\n" + +"Reminder\n" + +"Don't forget me this weekend!\n" + +""; + Document result = SourceUtil.readDocument(src); + assertNotNull(result); + + } + + /** + * Test of readJSONObject method, of class SourceUtil. + */ + @Test + public void testReadJSONObject() throws Exception { + System.out.println("readJSONObject"); + URL url = SourceUtilTest.class.getResource( "/org/hapiserver/source/testjson.json"); + //new URL( "https://cottagesystems.com/server/ct/hapi/about"); + JSONObject result = SourceUtil.readJSONObject(url); + assertEquals(3, result.length()); + + } + + /** + * Test of downloadFile method, of class SourceUtil. 
+ */
+ @Test
+ public void testDownloadFile() throws Exception {
+ System.out.println("downloadFile");
+ URL url = new URL( "https://cottagesystems.com/server/ct/hapi/about");
+ File file = File.createTempFile( "SourceUtilTest", ".json");
+ File result = SourceUtil.downloadFile(url, file);
+ assertEquals( 436, result.length() );
+ assertEquals( result, file );
+
+ }
+
+}
diff --git a/HapiServerBase/src/test/java/org/hapiserver/source/testjson.json b/HapiServerBase/src/test/java/org/hapiserver/source/testjson.json
new file mode 100644
index 00000000..300359b1
--- /dev/null
+++ b/HapiServerBase/src/test/java/org/hapiserver/source/testjson.json
@@ -0,0 +1,9 @@
+{
+ "name": "Zach",
+ "age": 150,
+ "address": {
+ "city":"Iowa City",
+ "state":"Iowa",
+ "country":"USA"
+ }
+}
diff --git a/README.md b/README.md
index 9b429294..a99d375e 100644
--- a/README.md
+++ b/README.md
@@ -55,7 +55,5 @@ https://cottagesystems.com/autoplot/git/server-java/HapiServer/dist/HapiServer.w
# Manual
For installation instructions, see the wiki: https://github.com/hapi-server/server-java/wiki.
----
-last_modified_at: 2022-07-26T16:26:24
----
+
diff --git a/SimpleClasspathExtension/src/com/cottagesystems/hapiext/SimpleRecordSource.java b/SimpleClasspathExtension/src/com/cottagesystems/hapiext/SimpleRecordSource.java
index 95999831..fc6e9d7d 100644
--- a/SimpleClasspathExtension/src/com/cottagesystems/hapiext/SimpleRecordSource.java
+++ b/SimpleClasspathExtension/src/com/cottagesystems/hapiext/SimpleRecordSource.java
@@ -86,4 +86,9 @@ public String getTimeStamp(int[] start, int[] stop) {
 return null;
 }

+ @Override
+ public void doFinalize() {
+ // no actions needed
+ }
+
 }
diff --git a/assumptions.md b/assumptions.md
index 81d67b34..c7412153 100644
--- a/assumptions.md
+++ b/assumptions.md
@@ -2,7 +2,7 @@
The following assumptions are made about the workgroup using this server:
* They are comfortable running web applications on a Java-based server such as Tomcat.
* They know how to configure a web application using its web.xml.
* They are familiar with the HAPI protocol.
-* They are willing to read the documentation on how new data set are introduced to the server.
+* They are willing to read the documentation on how new data sets are introduced to the server.
* They are running a server which can run Java 8 code.
diff --git a/tilde-geonet/src/main/java/org/hapiserver/source/tilde/geonet/DartCatalog.java b/tilde-geonet/src/main/java/org/hapiserver/source/tilde/geonet/DartCatalog.java
new file mode 100644
index 00000000..a367a386
--- /dev/null
+++ b/tilde-geonet/src/main/java/org/hapiserver/source/tilde/geonet/DartCatalog.java
@@ -0,0 +1,129 @@
+
+package org.hapiserver.source.tilde.geonet;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Static table of GeoNet DART bottom-pressure-recorder deployments, used to
+ * generate the HAPI catalog response (getCatalog) and the per-station info
+ * response (getInfo).
+ *
+ * @author jbf
+ */
+public class DartCatalog {
+
+ // One row per deployment: station code, deployment number, a multi-word
+ // type description, longitude, latitude, start time, and optionally a stop
+ // time (absent for deployments that are still operating).
+ static String data="Station code Location Type Longitude Latitude Start End\n"
+ +"NZA 40 DART bottom pressure recorder 176.9109 -42.3707 2019-12-22T00:00:00Z 2021-12-17T23:59:59Z\n"
+ +"NZA 41 DART bottom pressure recorder 176.9093 -42.3717 2021-12-18T00:00:00Z 2023-12-15T00:00:00Z\n"
+ +"NZA 42 DART bottom pressure recorder 176.9120 -42.3690 2023-12-16T00:15:00Z \n"
+ +"NZB 40 DART bottom pressure recorder 179.0996 -40.6003 2020-09-18T21:12:30Z 2022-08-14T22:59:45Z\n"
+ +"NZB 41 DART bottom pressure recorder 179.0962 -40.5992 2022-08-15T20:15:00Z 2023-12-06T00:00:00Z\n"
+ +"NZB 42 DART bottom pressure recorder 179.1005 -40.5979 2023-12-07T14:15:00Z \n"
+ +"NZC 40 DART bottom pressure recorder -179.7978 -38.2001 2019-12-13T00:00:00Z 2021-12-10T23:59:59Z\n"
+ +"NZC 41 DART bottom pressure recorder -179.7978 -38.2004 2021-12-11T00:00:00Z 2023-12-08T00:00:00Z\n"
+ +"NZC 42 DART bottom pressure recorder -179.7968 -38.1969 2023-12-09T01:15:00Z \n"
+ +"NZD 40 DART bottom pressure recorder 178.6037 -36.0998 2021-07-23T00:00:00Z 2023-06-11T18:00:00Z\n"
+ +"NZD 41 DART bottom pressure recorder 178.6009 -36.1000
2023-06-13T21:15:00Z \n" + +"NZE 40 DART bottom pressure recorder -177.7080 -36.0493 2019-12-19T00:00:00Z 2021-12-11T23:59:59Z\n" + +"NZE 41 DART bottom pressure recorder -177.6970 -36.0499 2021-12-14T00:00:00Z 2023-12-09T23:00:00Z\n" + +"NZE 42 DART bottom pressure recorder -177.6986 -36.0500 2023-12-12T20:15:00Z \n" + +"NZF 41 DART bottom pressure recorder -175.0124 -29.6823 2020-08-31T23:10:15Z 2022-08-10T00:59:45Z\n" + +"NZF 42 DART bottom pressure recorder -175.0125 -29.6826 2022-08-11T00:15:00Z 2024-07-02T22:00:00Z\n" + +"NZF 43 DART bottom pressure recorder -175.0129 -29.6827 2024-07-04T00:00:00Z \n" + +"NZG 40 DART bottom pressure recorder -173.4012 -23.3516 2020-09-10T22:57:30Z 2022-07-29T18:02:45Z\n" + +"NZG 41 DART bottom pressure recorder -173.4018 -23.3517 2022-07-30T20:15:00Z 2024-06-22T00:00:00Z\n" + +"NZG 42 DART bottom pressure recorder -173.4034 -23.3509 2024-06-23T05:15:00Z \n" + +"NZH 40 DART bottom pressure recorder -171.8599 -20.0896 2020-09-03T22:24:30Z 2022-08-01T00:12:30Z\n" + +"NZH 41 DART bottom pressure recorder -171.8630 -20.0885 2022-08-01T20:15:00Z 2024-06-24T22:00:00Z\n" + +"NZH 42 DART bottom pressure recorder -171.8605 -20.0900 2024-06-25T21:37:00Z \n" + +"NZI 40 DART bottom pressure recorder -171.1904 -16.8921 2020-09-08T03:15:45Z 2022-08-02T21:59:30Z\n" + +"NZI 41 DART bottom pressure recorder -171.1905 -16.8890 2022-08-03T21:15:00Z 2024-06-27T22:00:00Z\n" + +"NZI 42 DART bottom pressure recorder -171.1893 -16.8913 2024-06-28T21:29:00Z \n" + +"NZJ 40 DART bottom pressure recorder 163.9549 -26.6672 2021-07-09T00:00:00Z 2023-05-28T18:00:00Z\n" + +"NZJ 41 DART bottom pressure recorder 163.9536 -26.6685 2023-05-29T22:15:00Z \n" + +"NZK 40 DART bottom pressure recorder 169.4988 -24.3093 2021-07-15T00:00:00Z 2023-06-06T00:00:00Z\n" + +"NZK 41 DART bottom pressure recorder 169.5001 -24.3086 2023-06-06T21:00:00Z \n" + +"NZL 40 DART bottom pressure recorder 166.7820 -19.3096 2021-07-12T00:00:00Z 2023-06-03T00:00:00Z\n" + +"NZL 41 DART 
bottom pressure recorder 166.8110 -19.2876 2023-06-03T20:15:00Z ";
+
+ // One deployment of a recorder: its position and operational time range.
+ private static class Station {
+ String id;
+ String start;
+ String stop;
+ String lat;
+ String lon;
+ }
+
+ // deployment id (e.g. "NZA_40") -> Station, in the order rows appear in data.
+ private static final Map stations;
+
+ static {
+ stations= new LinkedHashMap<>();
+ String[] lines= data.split("\n");
+ for (String line : lines) {
+ if ( line.startsWith("NZ") ) { // skips the header row
+ String[] fields= line.split("\\s+");
+ Station e= new Station();
+ e.id= fields[0] + "_" + fields[1];
+ int nstart= fields.length;
+ // Rows with 10 fields carry both start and stop times; rows with
+ // fewer are still-operating deployments, so stop becomes the
+ // sentinel "lasthour" (presumably resolved downstream to the
+ // current hour -- TODO confirm).
+ if ( nstart==10 ) {
+ e.start= fields[nstart-2];
+ e.stop= fields[nstart-1];
+ e.lat= fields[nstart-3];
+ e.lon= fields[nstart-4];
+ } else {
+ e.start= fields[nstart-1];
+ e.stop= "lasthour";
+ e.lat= fields[nstart-2];
+ e.lon= fields[nstart-3];
+ }
+
+ stations.put( e.id, e );
+ }
+ }
+ }
+
+ // String.format template for a station's HAPI info response; filled with
+ // start, stop, latitude, longitude (in that order) by getInfo.
+ static String infoTemplate= "{\n"
+ +" \"startDate\": \"%s\",\n"
+ +" \"stopDate\": \"%s\",\n"
+ +" \"cadence\": \"PT15S\",\n"
+ +" \"x_latitude\": %s,\n"
+ +" \"x_longitude\": %s,\n"
+ +" \"parameters\": [\n"
+ +" {\n"
+ +" \"name\": \"Time\",\n"
+ +" \"type\": \"isotime\",\n"
+ +" \"units\": \"UTC\",\n"
+ +" \"length\": 20,\n"
+ +" \"fill\": null\n"
+ +" },\n"
+ +" {\n"
+ +" \"name\": \"water-height\",\n"
+ +" \"type\": \"double\",\n"
+ +" \"fill\": null\n"
+ +" }\n"
+ +" ],\n"
+ +" \"x_info_caching\": false\n"
+ +"}\n"
+ +"";
+
+ /**
+ * Format the HAPI catalog response, one entry per deployment.
+ * @return the catalog response as a JSON string
+ */
+ public static String getCatalog() {
+ StringBuilder b= new StringBuilder();
+ b.append("{\n");
+ b.append(" \"catalog\": [\n");
+ for ( Station e : stations.values() ) {
+ b.append(" {\n");
+ b.append(" \"id\":"+"\"").append(e.id).append("\"\n");
+ b.append(" },\n");
+ }
+ // remove the trailing "},\n" left by the last loop pass; "}" is re-added below.
+ // NOTE(review): assumes at least one station; for an empty table this
+ // delete would eat into the header text.
+ b.delete( b.length()-3, b.length() );
+ b.append("}\n");
+
+ b.append(" ]\n");
+ b.append("\n}");
+ return b.toString();
+ }
+
+ /**
+ * Format the HAPI info response for one deployment.
+ * @param id the deployment id, like "NZA_40"
+ * @return the info response as a JSON string
+ * @throws IllegalArgumentException if the id is not in the catalog
+ */
+ public static String getInfo(String id) {
+ Station e= stations.get(id);
+ if ( e==null ) throw new IllegalArgumentException("no such station:" +id);
+ return String.format( infoTemplate, e.start, e.stop, e.lat, e.lon );
+ }
+}
diff --git a/tilde-geonet/src/main/java/org/hapiserver/source/tilde/geonet/DartDataHapiRecordSource.java b/tilde-geonet/src/main/java/org/hapiserver/source/tilde/geonet/DartDataHapiRecordSource.java
new file mode 100644
index 00000000..9586ea62
--- /dev/null
+++ b/tilde-geonet/src/main/java/org/hapiserver/source/tilde/geonet/DartDataHapiRecordSource.java
@@ -0,0 +1,109 @@
+package org.hapiserver.source.tilde.geonet;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Iterator;
+import org.hapiserver.AbstractHapiRecord;
+import org.hapiserver.AbstractHapiRecordSource;
+import org.hapiserver.HapiRecord;
+import org.hapiserver.source.AggregationGranuleIterator;
+
+/**
+ * HAPI record source which reads DART water-height records from the GeoNet
+ * "tilde" web service, requesting one day of CSV at a time.
+ *
+ * @author jbf
+ */
+public class DartDataHapiRecordSource extends AbstractHapiRecordSource {
+
+ String id; // station code, e.g. "NZA"
+ String location; // deployment number, e.g. "40"
+
+ /**
+ * @param id station and deployment joined by an underscore, like "NZA_40"
+ */
+ public DartDataHapiRecordSource( String id ) {
+ if ( id.charAt(3)!='_') throw new IllegalArgumentException("expecting underscore in string, as in NZA_40");
+ this.id= id.substring(0,3);
+ this.location= id.substring(id.length()-2);
+ }
+
+ @Override
+ public boolean hasGranuleIterator() {
+ return true;
+ }
+
+ @Override
+ public Iterator getGranuleIterator(int[] start, int[] stop) {
+ // one granule per day, matching the per-day request URL in getIterator
+ return new AggregationGranuleIterator("$Y-$m-$d", start, stop );
+ }
+
+ @Override
+ public boolean hasParamSubsetIterator() {
+ return false;
+ }
+
+ @Override
+ public Iterator getIterator(int[] start, int[] stop) {
+ try {
+ // the same day is used for both the start and end of the request
+ String requestTemplate=
"https://tilde.geonet.org.nz/v4/data/dart/%s/water-height/%s/15s/nil/%s/%s";
+ String starts= String.format( "%04d-%02d-%02d",start[0],start[1],start[2]);
+ String request= String.format( requestTemplate, id, location, starts, starts );
+ URL url= new URL(request);
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+ conn.setRequestProperty("Accept", "text/csv");
+
+ // NOTE(review): the reader is handed to the record iterator and never
+ // closed; consider closing it when the iterator is exhausted.
+ BufferedReader read= new BufferedReader( new InputStreamReader( conn.getInputStream() ) );
+
+ return getRecordIterator( read );
+
+ } catch (MalformedURLException ex) {
+ throw new RuntimeException(ex);
+ } catch (IOException ex) {
+ throw new RuntimeException(ex);
+ }
+ }
+
+ // Adapt the CSV response to HapiRecords. The first line (the column
+ // header) is discarded; NOTE(review): next() here throws
+ // NoSuchElementException if the response body is empty.
+ private Iterator getRecordIterator(final BufferedReader read) {
+ final Iterator linesStream = read.lines().iterator();
+ String header= linesStream.next();
+
+ return new Iterator() {
+ @Override
+ public boolean hasNext() {
+ return linesStream.hasNext();
+ }
+
+ @Override
+ public HapiRecord next() {
+ String[] s= linesStream.next().split(",");
+ return new AbstractHapiRecord() {
+ @Override
+ public String getIsoTime(int i) {
+ // assumes the timestamp is CSV column 6 regardless of i --
+ // TODO confirm against the tilde v4 CSV column layout.
+ return s[6];
+ }
+
+ @Override
+ public String getString(int i) {
+ return s[6+i];
+ }
+
+ @Override
+ public double getDouble(int i) {
+ assert i==1; // only parameter 1 (water-height) is a double
+ return Double.parseDouble(s[7]);
+ }
+
+ @Override
+ public int length() {
+ return 2; // Time, water-height
+ }
+ };
+ }
+
+ };
+ }
+
+
+
+
+}
diff --git a/tilde-geonet/src/main/java/org/hapiserver/source/tilde/geonet/config.json b/tilde-geonet/src/main/java/org/hapiserver/source/tilde/geonet/config.json
new file mode 100644
index 00000000..5acca6b4
--- /dev/null
+++ b/tilde-geonet/src/main/java/org/hapiserver/source/tilde/geonet/config.json
@@ -0,0 +1,27 @@
+ {
+ "HAPI": "3.2",
+ "catalog": [
+ {
+ "x_group_id": "dart",
+ "x_source": "classpath",
+ "x_classpath": "tilde-geonet.jar",
+ "x_class": "org.hapiserver.source.tilde.geonet.DartCatalog",
+ "x_method": "getCatalog",
+ "x_config": {
+ "info": {
+ "x_source":"classpath",
+ "x_classpath":
"tilde-geonet.jar",
+ "x_class":"org.hapiserver.source.tilde.geonet.DartCatalog",
+ "x_method": "getInfo"
+ },
+ "data": {
+ "x_source": "classpath",
+ "x_classpath": "tilde-geonet.jar",
+ "x_class":"org.hapiserver.source.tilde.geonet.DartDataHapiRecordSource",
+ "x_args": [ "${id}" ],
+ "x_doc_arg1": "the id, like \"NZB\""
+ }
+ }
+ }
+ ]
+}